@harness-engineering/core 0.13.1 → 0.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -41,7 +41,7 @@ import {
41
41
  runAll,
42
42
  validateDependencies,
43
43
  violationId
44
- } from "./chunk-D6VFA6AS.mjs";
44
+ } from "./chunk-BQUWXBGR.mjs";
45
45
 
46
46
  // src/index.ts
47
47
  export * from "@harness-engineering/types";
@@ -84,15 +84,15 @@ function validateConfig(data, schema) {
84
84
  let message = "Configuration validation failed";
85
85
  const suggestions = [];
86
86
  if (firstError) {
87
- const path20 = firstError.path.join(".");
88
- const pathDisplay = path20 ? ` at "${path20}"` : "";
87
+ const path23 = firstError.path.join(".");
88
+ const pathDisplay = path23 ? ` at "${path23}"` : "";
89
89
  if (firstError.code === "invalid_type") {
90
90
  const received = firstError.received;
91
91
  const expected = firstError.expected;
92
92
  if (received === "undefined") {
93
93
  code = "MISSING_FIELD";
94
94
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
95
- suggestions.push(`Field "${path20}" is required and must be of type "${expected}"`);
95
+ suggestions.push(`Field "${path23}" is required and must be of type "${expected}"`);
96
96
  } else {
97
97
  code = "INVALID_TYPE";
98
98
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -246,6 +246,43 @@ function extractMarkdownLinks(content) {
246
246
  }
247
247
  return links;
248
248
  }
249
+ function isDescriptionTerminator(trimmed) {
250
+ return trimmed.startsWith("#") || trimmed.startsWith("-") || trimmed.startsWith("*") || trimmed.startsWith("```");
251
+ }
252
+ function extractDescription(sectionLines) {
253
+ const descriptionLines = [];
254
+ for (const line of sectionLines) {
255
+ const trimmed = line.trim();
256
+ if (trimmed === "") {
257
+ if (descriptionLines.length > 0) break;
258
+ continue;
259
+ }
260
+ if (isDescriptionTerminator(trimmed)) break;
261
+ descriptionLines.push(trimmed);
262
+ }
263
+ return descriptionLines.length > 0 ? descriptionLines.join(" ") : void 0;
264
+ }
265
+ function buildAgentMapSection(section, lines) {
266
+ const endIndex = section.endIndex ?? lines.length;
267
+ const sectionLines = lines.slice(section.startIndex + 1, endIndex);
268
+ const sectionContent = sectionLines.join("\n");
269
+ const links = extractMarkdownLinks(sectionContent).map((link) => ({
270
+ ...link,
271
+ line: link.line + section.startIndex + 1,
272
+ exists: false
273
+ }));
274
+ const result = {
275
+ title: section.title,
276
+ level: section.level,
277
+ line: section.line,
278
+ links
279
+ };
280
+ const description = extractDescription(sectionLines);
281
+ if (description) {
282
+ result.description = description;
283
+ }
284
+ return result;
285
+ }
249
286
  function extractSections(content) {
250
287
  const lines = content.split("\n");
251
288
  const sections = [];
@@ -258,7 +295,6 @@ function extractSections(content) {
258
295
  title: match[2].trim(),
259
296
  level: match[1].length,
260
297
  line: i + 1,
261
- // 1-indexed
262
298
  startIndex: i
263
299
  });
264
300
  }
@@ -270,62 +306,29 @@ function extractSections(content) {
270
306
  currentSection.endIndex = nextSection ? nextSection.startIndex : lines.length;
271
307
  }
272
308
  }
273
- return sections.map((section) => {
274
- const endIndex = section.endIndex ?? lines.length;
275
- const sectionLines = lines.slice(section.startIndex + 1, endIndex);
276
- const sectionContent = sectionLines.join("\n");
277
- const links = extractMarkdownLinks(sectionContent).map((link) => ({
278
- ...link,
279
- line: link.line + section.startIndex + 1,
280
- // Adjust line number
281
- exists: false
282
- // Will be set later by validateAgentsMap
283
- }));
284
- const descriptionLines = [];
285
- for (const line of sectionLines) {
286
- const trimmed = line.trim();
287
- if (trimmed === "") {
288
- if (descriptionLines.length > 0) break;
289
- continue;
290
- }
291
- if (trimmed.startsWith("#")) break;
292
- if (trimmed.startsWith("-") || trimmed.startsWith("*")) break;
293
- if (trimmed.startsWith("```")) break;
294
- descriptionLines.push(trimmed);
295
- }
296
- const result = {
297
- title: section.title,
298
- level: section.level,
299
- line: section.line,
300
- links
301
- };
302
- if (descriptionLines.length > 0) {
303
- result.description = descriptionLines.join(" ");
304
- }
305
- return result;
306
- });
309
+ return sections.map((section) => buildAgentMapSection(section, lines));
307
310
  }
308
- function isExternalLink(path20) {
309
- return path20.startsWith("http://") || path20.startsWith("https://") || path20.startsWith("#") || path20.startsWith("mailto:");
311
+ function isExternalLink(path23) {
312
+ return path23.startsWith("http://") || path23.startsWith("https://") || path23.startsWith("#") || path23.startsWith("mailto:");
310
313
  }
311
314
  function resolveLinkPath(linkPath, baseDir) {
312
315
  return linkPath.startsWith(".") ? join(baseDir, linkPath) : linkPath;
313
316
  }
314
- async function validateAgentsMap(path20 = "./AGENTS.md") {
315
- const contentResult = await readFileContent(path20);
317
+ async function validateAgentsMap(path23 = "./AGENTS.md") {
318
+ const contentResult = await readFileContent(path23);
316
319
  if (!contentResult.ok) {
317
320
  return Err(
318
321
  createError(
319
322
  "PARSE_ERROR",
320
323
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
321
- { path: path20 },
324
+ { path: path23 },
322
325
  ["Ensure the file exists", "Check file permissions"]
323
326
  )
324
327
  );
325
328
  }
326
329
  const content = contentResult.value;
327
330
  const sections = extractSections(content);
328
- const baseDir = dirname(path20);
331
+ const baseDir = dirname(path23);
329
332
  const sectionTitles = sections.map((s) => s.title);
330
333
  const missingSections = REQUIRED_SECTIONS.filter(
331
334
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -466,8 +469,8 @@ async function checkDocCoverage(domain, options = {}) {
466
469
 
467
470
  // src/context/knowledge-map.ts
468
471
  import { join as join2, basename as basename2 } from "path";
469
- function suggestFix(path20, existingFiles) {
470
- const targetName = basename2(path20).toLowerCase();
472
+ function suggestFix(path23, existingFiles) {
473
+ const targetName = basename2(path23).toLowerCase();
471
474
  const similar = existingFiles.find((file) => {
472
475
  const fileName = basename2(file).toLowerCase();
473
476
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -475,7 +478,7 @@ function suggestFix(path20, existingFiles) {
475
478
  if (similar) {
476
479
  return `Did you mean "${similar}"?`;
477
480
  }
478
- return `Create the file "${path20}" or remove the link`;
481
+ return `Create the file "${path23}" or remove the link`;
479
482
  }
480
483
  async function validateKnowledgeMap(rootDir = process.cwd()) {
481
484
  const agentsPath = join2(rootDir, "AGENTS.md");
@@ -827,8 +830,8 @@ function createBoundaryValidator(schema, name) {
827
830
  return Ok(result.data);
828
831
  }
829
832
  const suggestions = result.error.issues.map((issue) => {
830
- const path20 = issue.path.join(".");
831
- return path20 ? `${path20}: ${issue.message}` : issue.message;
833
+ const path23 = issue.path.join(".");
834
+ return path23 ? `${path23}: ${issue.message}` : issue.message;
832
835
  });
833
836
  return Err(
834
837
  createError(
@@ -1050,175 +1053,183 @@ function stringArraysEqual(a, b) {
1050
1053
  const sortedB = [...b].sort();
1051
1054
  return sortedA.every((val, i) => val === sortedB[i]);
1052
1055
  }
1053
- function deepMergeConstraints(localConfig, bundleConstraints, _existingContributions) {
1054
- const config = { ...localConfig };
1055
- const contributions = {};
1056
- const conflicts = [];
1057
- if (bundleConstraints.layers && bundleConstraints.layers.length > 0) {
1058
- const localLayers = Array.isArray(localConfig.layers) ? localConfig.layers : [];
1059
- const mergedLayers = [...localLayers];
1060
- const contributedLayerNames = [];
1061
- for (const bundleLayer of bundleConstraints.layers) {
1062
- const existing = localLayers.find((l) => l.name === bundleLayer.name);
1063
- if (!existing) {
1064
- mergedLayers.push(bundleLayer);
1065
- contributedLayerNames.push(bundleLayer.name);
1066
- } else {
1067
- const same = existing.pattern === bundleLayer.pattern && stringArraysEqual(existing.allowedDependencies, bundleLayer.allowedDependencies);
1068
- if (!same) {
1069
- conflicts.push({
1070
- section: "layers",
1071
- key: bundleLayer.name,
1072
- localValue: existing,
1073
- packageValue: bundleLayer,
1074
- description: `Layer '${bundleLayer.name}' already exists locally with different configuration`
1075
- });
1076
- }
1056
+ function mergeLayers(localConfig, bundleLayers, config, contributions, conflicts) {
1057
+ const localLayers = Array.isArray(localConfig.layers) ? localConfig.layers : [];
1058
+ const mergedLayers = [...localLayers];
1059
+ const contributedLayerNames = [];
1060
+ for (const bundleLayer of bundleLayers) {
1061
+ const existing = localLayers.find((l) => l.name === bundleLayer.name);
1062
+ if (!existing) {
1063
+ mergedLayers.push(bundleLayer);
1064
+ contributedLayerNames.push(bundleLayer.name);
1065
+ } else {
1066
+ const same = existing.pattern === bundleLayer.pattern && stringArraysEqual(existing.allowedDependencies, bundleLayer.allowedDependencies);
1067
+ if (!same) {
1068
+ conflicts.push({
1069
+ section: "layers",
1070
+ key: bundleLayer.name,
1071
+ localValue: existing,
1072
+ packageValue: bundleLayer,
1073
+ description: `Layer '${bundleLayer.name}' already exists locally with different configuration`
1074
+ });
1075
+ }
1076
+ }
1077
+ }
1078
+ config.layers = mergedLayers;
1079
+ if (contributedLayerNames.length > 0) contributions.layers = contributedLayerNames;
1080
+ }
1081
+ function mergeForbiddenImports(localConfig, bundleRules, config, contributions, conflicts) {
1082
+ const localFI = Array.isArray(localConfig.forbiddenImports) ? localConfig.forbiddenImports : [];
1083
+ const mergedFI = [...localFI];
1084
+ const contributedFromKeys = [];
1085
+ for (const bundleRule of bundleRules) {
1086
+ const existing = localFI.find((r) => r.from === bundleRule.from);
1087
+ if (!existing) {
1088
+ const entry = { from: bundleRule.from, disallow: bundleRule.disallow };
1089
+ if (bundleRule.message !== void 0) entry.message = bundleRule.message;
1090
+ mergedFI.push(entry);
1091
+ contributedFromKeys.push(bundleRule.from);
1092
+ } else {
1093
+ if (!stringArraysEqual(existing.disallow, bundleRule.disallow)) {
1094
+ conflicts.push({
1095
+ section: "forbiddenImports",
1096
+ key: bundleRule.from,
1097
+ localValue: existing,
1098
+ packageValue: bundleRule,
1099
+ description: `Forbidden import rule for '${bundleRule.from}' already exists locally with different disallow list`
1100
+ });
1077
1101
  }
1078
1102
  }
1079
- config.layers = mergedLayers;
1080
- if (contributedLayerNames.length > 0) {
1081
- contributions.layers = contributedLayerNames;
1103
+ }
1104
+ config.forbiddenImports = mergedFI;
1105
+ if (contributedFromKeys.length > 0) contributions.forbiddenImports = contributedFromKeys;
1106
+ }
1107
+ function mergeBoundaries(localConfig, bundleBoundaries, config, contributions) {
1108
+ const localBoundaries = localConfig.boundaries ?? { requireSchema: [] };
1109
+ const localSchemas = new Set(localBoundaries.requireSchema ?? []);
1110
+ const newSchemas = [];
1111
+ for (const schema of bundleBoundaries.requireSchema ?? []) {
1112
+ if (!localSchemas.has(schema)) {
1113
+ newSchemas.push(schema);
1114
+ localSchemas.add(schema);
1115
+ }
1116
+ }
1117
+ config.boundaries = { requireSchema: [...localBoundaries.requireSchema ?? [], ...newSchemas] };
1118
+ if (newSchemas.length > 0) contributions.boundaries = newSchemas;
1119
+ }
1120
+ function mergeArchitecture(localConfig, bundleArch, config, contributions, conflicts) {
1121
+ const localArch = localConfig.architecture ?? { thresholds: {}, modules: {} };
1122
+ const mergedThresholds = { ...localArch.thresholds };
1123
+ const contributedThresholdKeys = [];
1124
+ for (const [category, value] of Object.entries(bundleArch.thresholds ?? {})) {
1125
+ if (!(category in mergedThresholds)) {
1126
+ mergedThresholds[category] = value;
1127
+ contributedThresholdKeys.push(category);
1128
+ } else if (!deepEqual(mergedThresholds[category], value)) {
1129
+ conflicts.push({
1130
+ section: "architecture.thresholds",
1131
+ key: category,
1132
+ localValue: mergedThresholds[category],
1133
+ packageValue: value,
1134
+ description: `Architecture threshold '${category}' already exists locally with a different value`
1135
+ });
1082
1136
  }
1083
1137
  }
1084
- if (bundleConstraints.forbiddenImports && bundleConstraints.forbiddenImports.length > 0) {
1085
- const localFI = Array.isArray(localConfig.forbiddenImports) ? localConfig.forbiddenImports : [];
1086
- const mergedFI = [...localFI];
1087
- const contributedFromKeys = [];
1088
- for (const bundleRule of bundleConstraints.forbiddenImports) {
1089
- const existing = localFI.find((r) => r.from === bundleRule.from);
1090
- if (!existing) {
1091
- const entry = {
1092
- from: bundleRule.from,
1093
- disallow: bundleRule.disallow
1094
- };
1095
- if (bundleRule.message !== void 0) {
1096
- entry.message = bundleRule.message;
1097
- }
1098
- mergedFI.push(entry);
1099
- contributedFromKeys.push(bundleRule.from);
1100
- } else {
1101
- const same = stringArraysEqual(existing.disallow, bundleRule.disallow);
1102
- if (!same) {
1138
+ const mergedModules = { ...localArch.modules };
1139
+ const contributedModuleKeys = [];
1140
+ for (const [modulePath, bundleCategoryMap] of Object.entries(bundleArch.modules ?? {})) {
1141
+ if (!(modulePath in mergedModules)) {
1142
+ mergedModules[modulePath] = bundleCategoryMap;
1143
+ for (const cat of Object.keys(bundleCategoryMap))
1144
+ contributedModuleKeys.push(`${modulePath}:${cat}`);
1145
+ } else {
1146
+ const mergedCategoryMap = { ...mergedModules[modulePath] };
1147
+ for (const [category, value] of Object.entries(bundleCategoryMap)) {
1148
+ if (!(category in mergedCategoryMap)) {
1149
+ mergedCategoryMap[category] = value;
1150
+ contributedModuleKeys.push(`${modulePath}:${category}`);
1151
+ } else if (!deepEqual(mergedCategoryMap[category], value)) {
1103
1152
  conflicts.push({
1104
- section: "forbiddenImports",
1105
- key: bundleRule.from,
1106
- localValue: existing,
1107
- packageValue: bundleRule,
1108
- description: `Forbidden import rule for '${bundleRule.from}' already exists locally with different disallow list`
1153
+ section: "architecture.modules",
1154
+ key: `${modulePath}:${category}`,
1155
+ localValue: mergedCategoryMap[category],
1156
+ packageValue: value,
1157
+ description: `Architecture module override '${modulePath}' category '${category}' already exists locally with a different value`
1109
1158
  });
1110
1159
  }
1111
1160
  }
1161
+ mergedModules[modulePath] = mergedCategoryMap;
1162
+ }
1163
+ }
1164
+ config.architecture = { ...localArch, thresholds: mergedThresholds, modules: mergedModules };
1165
+ if (contributedThresholdKeys.length > 0)
1166
+ contributions["architecture.thresholds"] = contributedThresholdKeys;
1167
+ if (contributedModuleKeys.length > 0)
1168
+ contributions["architecture.modules"] = contributedModuleKeys;
1169
+ }
1170
+ function mergeSecurityRules(localConfig, bundleRules, config, contributions, conflicts) {
1171
+ const localSecurity = localConfig.security ?? { rules: {} };
1172
+ const localRules = localSecurity.rules ?? {};
1173
+ const mergedRules = { ...localRules };
1174
+ const contributedRuleIds = [];
1175
+ for (const [ruleId, severity] of Object.entries(bundleRules)) {
1176
+ if (!(ruleId in mergedRules)) {
1177
+ mergedRules[ruleId] = severity;
1178
+ contributedRuleIds.push(ruleId);
1179
+ } else if (mergedRules[ruleId] !== severity) {
1180
+ conflicts.push({
1181
+ section: "security.rules",
1182
+ key: ruleId,
1183
+ localValue: mergedRules[ruleId],
1184
+ packageValue: severity,
1185
+ description: `Security rule '${ruleId}' already exists locally with severity '${mergedRules[ruleId]}', bundle has '${severity}'`
1186
+ });
1112
1187
  }
1113
- config.forbiddenImports = mergedFI;
1114
- if (contributedFromKeys.length > 0) {
1115
- contributions.forbiddenImports = contributedFromKeys;
1116
- }
1188
+ }
1189
+ config.security = { ...localSecurity, rules: mergedRules };
1190
+ if (contributedRuleIds.length > 0) contributions["security.rules"] = contributedRuleIds;
1191
+ }
1192
+ function deepMergeConstraints(localConfig, bundleConstraints, _existingContributions) {
1193
+ const config = { ...localConfig };
1194
+ const contributions = {};
1195
+ const conflicts = [];
1196
+ if (bundleConstraints.layers && bundleConstraints.layers.length > 0) {
1197
+ mergeLayers(localConfig, bundleConstraints.layers, config, contributions, conflicts);
1198
+ }
1199
+ if (bundleConstraints.forbiddenImports && bundleConstraints.forbiddenImports.length > 0) {
1200
+ mergeForbiddenImports(
1201
+ localConfig,
1202
+ bundleConstraints.forbiddenImports,
1203
+ config,
1204
+ contributions,
1205
+ conflicts
1206
+ );
1117
1207
  }
1118
1208
  if (bundleConstraints.boundaries) {
1119
- const localBoundaries = localConfig.boundaries ?? { requireSchema: [] };
1120
- const localSchemas = new Set(localBoundaries.requireSchema ?? []);
1121
- const bundleSchemas = bundleConstraints.boundaries.requireSchema ?? [];
1122
- const newSchemas = [];
1123
- for (const schema of bundleSchemas) {
1124
- if (!localSchemas.has(schema)) {
1125
- newSchemas.push(schema);
1126
- localSchemas.add(schema);
1127
- }
1128
- }
1129
- config.boundaries = {
1130
- requireSchema: [...localBoundaries.requireSchema ?? [], ...newSchemas]
1131
- };
1132
- if (newSchemas.length > 0) {
1133
- contributions.boundaries = newSchemas;
1134
- }
1209
+ mergeBoundaries(
1210
+ localConfig,
1211
+ bundleConstraints.boundaries,
1212
+ config,
1213
+ contributions
1214
+ );
1135
1215
  }
1136
1216
  if (bundleConstraints.architecture) {
1137
- const localArch = localConfig.architecture ?? {
1138
- thresholds: {},
1139
- modules: {}
1140
- };
1141
- const mergedThresholds = { ...localArch.thresholds };
1142
- const contributedThresholdKeys = [];
1143
- const bundleThresholds = bundleConstraints.architecture.thresholds ?? {};
1144
- for (const [category, value] of Object.entries(bundleThresholds)) {
1145
- if (!(category in mergedThresholds)) {
1146
- mergedThresholds[category] = value;
1147
- contributedThresholdKeys.push(category);
1148
- } else if (!deepEqual(mergedThresholds[category], value)) {
1149
- conflicts.push({
1150
- section: "architecture.thresholds",
1151
- key: category,
1152
- localValue: mergedThresholds[category],
1153
- packageValue: value,
1154
- description: `Architecture threshold '${category}' already exists locally with a different value`
1155
- });
1156
- }
1157
- }
1158
- const mergedModules = { ...localArch.modules };
1159
- const contributedModuleKeys = [];
1160
- const bundleModules = bundleConstraints.architecture.modules ?? {};
1161
- for (const [modulePath, bundleCategoryMap] of Object.entries(bundleModules)) {
1162
- if (!(modulePath in mergedModules)) {
1163
- mergedModules[modulePath] = bundleCategoryMap;
1164
- for (const cat of Object.keys(bundleCategoryMap)) {
1165
- contributedModuleKeys.push(`${modulePath}:${cat}`);
1166
- }
1167
- } else {
1168
- const localCategoryMap = mergedModules[modulePath];
1169
- const mergedCategoryMap = { ...localCategoryMap };
1170
- for (const [category, value] of Object.entries(bundleCategoryMap)) {
1171
- if (!(category in mergedCategoryMap)) {
1172
- mergedCategoryMap[category] = value;
1173
- contributedModuleKeys.push(`${modulePath}:${category}`);
1174
- } else if (!deepEqual(mergedCategoryMap[category], value)) {
1175
- conflicts.push({
1176
- section: "architecture.modules",
1177
- key: `${modulePath}:${category}`,
1178
- localValue: mergedCategoryMap[category],
1179
- packageValue: value,
1180
- description: `Architecture module override '${modulePath}' category '${category}' already exists locally with a different value`
1181
- });
1182
- }
1183
- }
1184
- mergedModules[modulePath] = mergedCategoryMap;
1185
- }
1186
- }
1187
- config.architecture = {
1188
- ...localArch,
1189
- thresholds: mergedThresholds,
1190
- modules: mergedModules
1191
- };
1192
- if (contributedThresholdKeys.length > 0) {
1193
- contributions["architecture.thresholds"] = contributedThresholdKeys;
1194
- }
1195
- if (contributedModuleKeys.length > 0) {
1196
- contributions["architecture.modules"] = contributedModuleKeys;
1197
- }
1217
+ mergeArchitecture(
1218
+ localConfig,
1219
+ bundleConstraints.architecture,
1220
+ config,
1221
+ contributions,
1222
+ conflicts
1223
+ );
1198
1224
  }
1199
1225
  if (bundleConstraints.security?.rules) {
1200
- const localSecurity = localConfig.security ?? { rules: {} };
1201
- const localRules = localSecurity.rules ?? {};
1202
- const mergedRules = { ...localRules };
1203
- const contributedRuleIds = [];
1204
- for (const [ruleId, severity] of Object.entries(bundleConstraints.security.rules)) {
1205
- if (!(ruleId in mergedRules)) {
1206
- mergedRules[ruleId] = severity;
1207
- contributedRuleIds.push(ruleId);
1208
- } else if (mergedRules[ruleId] !== severity) {
1209
- conflicts.push({
1210
- section: "security.rules",
1211
- key: ruleId,
1212
- localValue: mergedRules[ruleId],
1213
- packageValue: severity,
1214
- description: `Security rule '${ruleId}' already exists locally with severity '${mergedRules[ruleId]}', bundle has '${severity}'`
1215
- });
1216
- }
1217
- }
1218
- config.security = { ...localSecurity, rules: mergedRules };
1219
- if (contributedRuleIds.length > 0) {
1220
- contributions["security.rules"] = contributedRuleIds;
1221
- }
1226
+ mergeSecurityRules(
1227
+ localConfig,
1228
+ bundleConstraints.security.rules,
1229
+ config,
1230
+ contributions,
1231
+ conflicts
1232
+ );
1222
1233
  }
1223
1234
  return { config, contributions, conflicts };
1224
1235
  }
@@ -1379,14 +1390,84 @@ function walk(node, visitor) {
1379
1390
  }
1380
1391
  }
1381
1392
  }
1393
+ function makeLocation(node) {
1394
+ return {
1395
+ file: "",
1396
+ line: node.loc?.start.line ?? 0,
1397
+ column: node.loc?.start.column ?? 0
1398
+ };
1399
+ }
1400
+ function processImportSpecifiers(importDecl, imp) {
1401
+ for (const spec of importDecl.specifiers) {
1402
+ if (spec.type === "ImportDefaultSpecifier") {
1403
+ imp.default = spec.local.name;
1404
+ } else if (spec.type === "ImportNamespaceSpecifier") {
1405
+ imp.namespace = spec.local.name;
1406
+ } else if (spec.type === "ImportSpecifier") {
1407
+ imp.specifiers.push(spec.local.name);
1408
+ if (spec.importKind === "type") {
1409
+ imp.kind = "type";
1410
+ }
1411
+ }
1412
+ }
1413
+ }
1414
+ function getExportedName(exported) {
1415
+ return exported.type === "Identifier" ? exported.name : String(exported.value);
1416
+ }
1417
+ function processReExportSpecifiers(exportDecl, exports) {
1418
+ for (const spec of exportDecl.specifiers) {
1419
+ if (spec.type !== "ExportSpecifier") continue;
1420
+ exports.push({
1421
+ name: getExportedName(spec.exported),
1422
+ type: "named",
1423
+ location: makeLocation(exportDecl),
1424
+ isReExport: true,
1425
+ source: exportDecl.source.value
1426
+ });
1427
+ }
1428
+ }
1429
+ function processExportDeclaration(exportDecl, exports) {
1430
+ const decl = exportDecl.declaration;
1431
+ if (!decl) return;
1432
+ if (decl.type === "VariableDeclaration") {
1433
+ for (const declarator of decl.declarations) {
1434
+ if (declarator.id.type === "Identifier") {
1435
+ exports.push({
1436
+ name: declarator.id.name,
1437
+ type: "named",
1438
+ location: makeLocation(decl),
1439
+ isReExport: false
1440
+ });
1441
+ }
1442
+ }
1443
+ } else if ((decl.type === "FunctionDeclaration" || decl.type === "ClassDeclaration") && decl.id) {
1444
+ exports.push({
1445
+ name: decl.id.name,
1446
+ type: "named",
1447
+ location: makeLocation(decl),
1448
+ isReExport: false
1449
+ });
1450
+ }
1451
+ }
1452
+ function processExportListSpecifiers(exportDecl, exports) {
1453
+ for (const spec of exportDecl.specifiers) {
1454
+ if (spec.type !== "ExportSpecifier") continue;
1455
+ exports.push({
1456
+ name: getExportedName(spec.exported),
1457
+ type: "named",
1458
+ location: makeLocation(exportDecl),
1459
+ isReExport: false
1460
+ });
1461
+ }
1462
+ }
1382
1463
  var TypeScriptParser = class {
1383
1464
  name = "typescript";
1384
1465
  extensions = [".ts", ".tsx", ".mts", ".cts"];
1385
- async parseFile(path20) {
1386
- const contentResult = await readFileContent(path20);
1466
+ async parseFile(path23) {
1467
+ const contentResult = await readFileContent(path23);
1387
1468
  if (!contentResult.ok) {
1388
1469
  return Err(
1389
- createParseError("NOT_FOUND", `File not found: ${path20}`, { path: path20 }, [
1470
+ createParseError("NOT_FOUND", `File not found: ${path23}`, { path: path23 }, [
1390
1471
  "Check that the file exists",
1391
1472
  "Verify the path is correct"
1392
1473
  ])
@@ -1396,7 +1477,7 @@ var TypeScriptParser = class {
1396
1477
  const ast = parse(contentResult.value, {
1397
1478
  loc: true,
1398
1479
  range: true,
1399
- jsx: path20.endsWith(".tsx"),
1480
+ jsx: path23.endsWith(".tsx"),
1400
1481
  errorOnUnknownASTType: false
1401
1482
  });
1402
1483
  return Ok({
@@ -1407,7 +1488,7 @@ var TypeScriptParser = class {
1407
1488
  } catch (e) {
1408
1489
  const error = e;
1409
1490
  return Err(
1410
- createParseError("SYNTAX_ERROR", `Failed to parse ${path20}: ${error.message}`, { path: path20 }, [
1491
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path23}: ${error.message}`, { path: path23 }, [
1411
1492
  "Check for syntax errors in the file",
1412
1493
  "Ensure valid TypeScript syntax"
1413
1494
  ])
@@ -1423,26 +1504,12 @@ var TypeScriptParser = class {
1423
1504
  const imp = {
1424
1505
  source: importDecl.source.value,
1425
1506
  specifiers: [],
1426
- location: {
1427
- file: "",
1428
- line: importDecl.loc?.start.line ?? 0,
1429
- column: importDecl.loc?.start.column ?? 0
1430
- },
1507
+ location: makeLocation(importDecl),
1431
1508
  kind: importDecl.importKind === "type" ? "type" : "value"
1432
1509
  };
1433
- for (const spec of importDecl.specifiers) {
1434
- if (spec.type === "ImportDefaultSpecifier") {
1435
- imp.default = spec.local.name;
1436
- } else if (spec.type === "ImportNamespaceSpecifier") {
1437
- imp.namespace = spec.local.name;
1438
- } else if (spec.type === "ImportSpecifier") {
1439
- imp.specifiers.push(spec.local.name);
1440
- if (spec.importKind === "type") {
1441
- imp.kind = "type";
1442
- }
1443
- }
1444
- }
1510
+ processImportSpecifiers(importDecl, imp);
1445
1511
  imports.push(imp);
1512
+ return;
1446
1513
  }
1447
1514
  if (node.type === "ImportExpression") {
1448
1515
  const importExpr = node;
@@ -1450,11 +1517,7 @@ var TypeScriptParser = class {
1450
1517
  imports.push({
1451
1518
  source: importExpr.source.value,
1452
1519
  specifiers: [],
1453
- location: {
1454
- file: "",
1455
- line: importExpr.loc?.start.line ?? 0,
1456
- column: importExpr.loc?.start.column ?? 0
1457
- },
1520
+ location: makeLocation(importExpr),
1458
1521
  kind: "value"
1459
1522
  });
1460
1523
  }
@@ -1469,97 +1532,29 @@ var TypeScriptParser = class {
1469
1532
  if (node.type === "ExportNamedDeclaration") {
1470
1533
  const exportDecl = node;
1471
1534
  if (exportDecl.source) {
1472
- for (const spec of exportDecl.specifiers) {
1473
- if (spec.type === "ExportSpecifier") {
1474
- const exported = spec.exported;
1475
- const name = exported.type === "Identifier" ? exported.name : String(exported.value);
1476
- exports.push({
1477
- name,
1478
- type: "named",
1479
- location: {
1480
- file: "",
1481
- line: exportDecl.loc?.start.line ?? 0,
1482
- column: exportDecl.loc?.start.column ?? 0
1483
- },
1484
- isReExport: true,
1485
- source: exportDecl.source.value
1486
- });
1487
- }
1488
- }
1535
+ processReExportSpecifiers(exportDecl, exports);
1489
1536
  return;
1490
1537
  }
1491
- if (exportDecl.declaration) {
1492
- const decl = exportDecl.declaration;
1493
- if (decl.type === "VariableDeclaration") {
1494
- for (const declarator of decl.declarations) {
1495
- if (declarator.id.type === "Identifier") {
1496
- exports.push({
1497
- name: declarator.id.name,
1498
- type: "named",
1499
- location: {
1500
- file: "",
1501
- line: decl.loc?.start.line ?? 0,
1502
- column: decl.loc?.start.column ?? 0
1503
- },
1504
- isReExport: false
1505
- });
1506
- }
1507
- }
1508
- } else if (decl.type === "FunctionDeclaration" || decl.type === "ClassDeclaration") {
1509
- if (decl.id) {
1510
- exports.push({
1511
- name: decl.id.name,
1512
- type: "named",
1513
- location: {
1514
- file: "",
1515
- line: decl.loc?.start.line ?? 0,
1516
- column: decl.loc?.start.column ?? 0
1517
- },
1518
- isReExport: false
1519
- });
1520
- }
1521
- }
1522
- }
1523
- for (const spec of exportDecl.specifiers) {
1524
- if (spec.type === "ExportSpecifier") {
1525
- const exported = spec.exported;
1526
- const name = exported.type === "Identifier" ? exported.name : String(exported.value);
1527
- exports.push({
1528
- name,
1529
- type: "named",
1530
- location: {
1531
- file: "",
1532
- line: exportDecl.loc?.start.line ?? 0,
1533
- column: exportDecl.loc?.start.column ?? 0
1534
- },
1535
- isReExport: false
1536
- });
1537
- }
1538
- }
1538
+ processExportDeclaration(exportDecl, exports);
1539
+ processExportListSpecifiers(exportDecl, exports);
1540
+ return;
1539
1541
  }
1540
1542
  if (node.type === "ExportDefaultDeclaration") {
1541
1543
  const exportDecl = node;
1542
1544
  exports.push({
1543
1545
  name: "default",
1544
1546
  type: "default",
1545
- location: {
1546
- file: "",
1547
- line: exportDecl.loc?.start.line ?? 0,
1548
- column: exportDecl.loc?.start.column ?? 0
1549
- },
1547
+ location: makeLocation(exportDecl),
1550
1548
  isReExport: false
1551
1549
  });
1550
+ return;
1552
1551
  }
1553
1552
  if (node.type === "ExportAllDeclaration") {
1554
1553
  const exportDecl = node;
1555
1554
  exports.push({
1556
1555
  name: exportDecl.exported?.name ?? "*",
1557
1556
  type: "namespace",
1558
- location: {
1559
- file: "",
1560
- line: exportDecl.loc?.start.line ?? 0,
1561
- column: exportDecl.loc?.start.column ?? 0
1562
- },
1557
+ location: makeLocation(exportDecl),
1563
1558
  isReExport: true,
1564
1559
  source: exportDecl.source.value
1565
1560
  });
@@ -1575,10 +1570,27 @@ var TypeScriptParser = class {
1575
1570
  // src/entropy/snapshot.ts
1576
1571
  import { join as join3, resolve } from "path";
1577
1572
  import { minimatch as minimatch2 } from "minimatch";
1573
+ function collectFieldEntries(rootDir, field) {
1574
+ if (typeof field === "string") return [resolve(rootDir, field)];
1575
+ if (typeof field === "object" && field !== null) {
1576
+ return Object.values(field).filter((v) => typeof v === "string").map((v) => resolve(rootDir, v));
1577
+ }
1578
+ return [];
1579
+ }
1580
+ function extractPackageEntries(rootDir, pkg) {
1581
+ const entries = [];
1582
+ entries.push(...collectFieldEntries(rootDir, pkg["exports"]));
1583
+ if (entries.length === 0 && typeof pkg["main"] === "string") {
1584
+ entries.push(resolve(rootDir, pkg["main"]));
1585
+ }
1586
+ if (pkg["bin"]) {
1587
+ entries.push(...collectFieldEntries(rootDir, pkg["bin"]));
1588
+ }
1589
+ return entries;
1590
+ }
1578
1591
  async function resolveEntryPoints(rootDir, explicitEntries) {
1579
1592
  if (explicitEntries && explicitEntries.length > 0) {
1580
- const resolved = explicitEntries.map((e) => resolve(rootDir, e));
1581
- return Ok(resolved);
1593
+ return Ok(explicitEntries.map((e) => resolve(rootDir, e)));
1582
1594
  }
1583
1595
  const pkgPath = join3(rootDir, "package.json");
1584
1596
  if (await fileExists(pkgPath)) {
@@ -1586,38 +1598,8 @@ async function resolveEntryPoints(rootDir, explicitEntries) {
1586
1598
  if (pkgContent.ok) {
1587
1599
  try {
1588
1600
  const pkg = JSON.parse(pkgContent.value);
1589
- const entries = [];
1590
- if (pkg["exports"]) {
1591
- const exports = pkg["exports"];
1592
- if (typeof exports === "string") {
1593
- entries.push(resolve(rootDir, exports));
1594
- } else if (typeof exports === "object" && exports !== null) {
1595
- for (const value of Object.values(exports)) {
1596
- if (typeof value === "string") {
1597
- entries.push(resolve(rootDir, value));
1598
- }
1599
- }
1600
- }
1601
- }
1602
- const main = pkg["main"];
1603
- if (typeof main === "string" && entries.length === 0) {
1604
- entries.push(resolve(rootDir, main));
1605
- }
1606
- const bin = pkg["bin"];
1607
- if (bin) {
1608
- if (typeof bin === "string") {
1609
- entries.push(resolve(rootDir, bin));
1610
- } else if (typeof bin === "object") {
1611
- for (const value of Object.values(bin)) {
1612
- if (typeof value === "string") {
1613
- entries.push(resolve(rootDir, value));
1614
- }
1615
- }
1616
- }
1617
- }
1618
- if (entries.length > 0) {
1619
- return Ok(entries);
1620
- }
1601
+ const entries = extractPackageEntries(rootDir, pkg);
1602
+ if (entries.length > 0) return Ok(entries);
1621
1603
  } catch {
1622
1604
  }
1623
1605
  }
@@ -1691,66 +1673,49 @@ function extractInlineRefs(content) {
1691
1673
  }
1692
1674
  return refs;
1693
1675
  }
1694
- async function parseDocumentationFile(path20) {
1695
- const contentResult = await readFileContent(path20);
1676
+ async function parseDocumentationFile(path23) {
1677
+ const contentResult = await readFileContent(path23);
1696
1678
  if (!contentResult.ok) {
1697
1679
  return Err(
1698
1680
  createEntropyError(
1699
1681
  "PARSE_ERROR",
1700
- `Failed to read documentation file: ${path20}`,
1701
- { file: path20 },
1682
+ `Failed to read documentation file: ${path23}`,
1683
+ { file: path23 },
1702
1684
  ["Check that the file exists"]
1703
1685
  )
1704
1686
  );
1705
1687
  }
1706
1688
  const content = contentResult.value;
1707
- const type = path20.endsWith(".md") ? "markdown" : "text";
1689
+ const type = path23.endsWith(".md") ? "markdown" : "text";
1708
1690
  return Ok({
1709
- path: path20,
1691
+ path: path23,
1710
1692
  type,
1711
1693
  content,
1712
1694
  codeBlocks: extractCodeBlocks(content),
1713
1695
  inlineRefs: extractInlineRefs(content)
1714
1696
  });
1715
1697
  }
1698
+ function makeInternalSymbol(name, type, line) {
1699
+ return { name, type, line, references: 0, calledBy: [] };
1700
+ }
1701
+ function extractSymbolsFromNode(node) {
1702
+ const line = node.loc?.start?.line || 0;
1703
+ if (node.type === "FunctionDeclaration" && node.id?.name) {
1704
+ return [makeInternalSymbol(node.id.name, "function", line)];
1705
+ }
1706
+ if (node.type === "VariableDeclaration") {
1707
+ return (node.declarations || []).filter((decl) => decl.id?.name).map((decl) => makeInternalSymbol(decl.id.name, "variable", line));
1708
+ }
1709
+ if (node.type === "ClassDeclaration" && node.id?.name) {
1710
+ return [makeInternalSymbol(node.id.name, "class", line)];
1711
+ }
1712
+ return [];
1713
+ }
1716
1714
  function extractInternalSymbols(ast) {
1717
- const symbols = [];
1718
1715
  const body = ast.body;
1719
- if (!body?.body) return symbols;
1720
- for (const node of body.body) {
1721
- if (node.type === "FunctionDeclaration" && node.id?.name) {
1722
- symbols.push({
1723
- name: node.id.name,
1724
- type: "function",
1725
- line: node.loc?.start?.line || 0,
1726
- references: 0,
1727
- calledBy: []
1728
- });
1729
- }
1730
- if (node.type === "VariableDeclaration") {
1731
- for (const decl of node.declarations || []) {
1732
- if (decl.id?.name) {
1733
- symbols.push({
1734
- name: decl.id.name,
1735
- type: "variable",
1736
- line: node.loc?.start?.line || 0,
1737
- references: 0,
1738
- calledBy: []
1739
- });
1740
- }
1741
- }
1742
- }
1743
- if (node.type === "ClassDeclaration" && node.id?.name) {
1744
- symbols.push({
1745
- name: node.id.name,
1746
- type: "class",
1747
- line: node.loc?.start?.line || 0,
1748
- references: 0,
1749
- calledBy: []
1750
- });
1751
- }
1752
- }
1753
- return symbols;
1716
+ if (!body?.body) return [];
1717
+ const nodes = body.body;
1718
+ return nodes.flatMap(extractSymbolsFromNode);
1754
1719
  }
1755
1720
  function extractJSDocComments(ast) {
1756
1721
  const comments = [];
@@ -1891,27 +1856,34 @@ async function buildSnapshot(config) {
1891
1856
 
1892
1857
  // src/entropy/detectors/drift.ts
1893
1858
  import { dirname as dirname3, resolve as resolve2 } from "path";
1894
- function levenshteinDistance(a, b) {
1859
+ function initLevenshteinMatrix(aLen, bLen) {
1895
1860
  const matrix = [];
1896
- for (let i = 0; i <= b.length; i++) {
1861
+ for (let i = 0; i <= bLen; i++) {
1897
1862
  matrix[i] = [i];
1898
1863
  }
1899
- for (let j = 0; j <= a.length; j++) {
1900
- const row = matrix[0];
1901
- if (row) {
1902
- row[j] = j;
1864
+ const firstRow = matrix[0];
1865
+ if (firstRow) {
1866
+ for (let j = 0; j <= aLen; j++) {
1867
+ firstRow[j] = j;
1903
1868
  }
1904
1869
  }
1870
+ return matrix;
1871
+ }
1872
+ function computeLevenshteinCell(row, prevRow, j, charsMatch) {
1873
+ if (charsMatch) {
1874
+ row[j] = prevRow[j - 1] ?? 0;
1875
+ } else {
1876
+ row[j] = Math.min((prevRow[j - 1] ?? 0) + 1, (row[j - 1] ?? 0) + 1, (prevRow[j] ?? 0) + 1);
1877
+ }
1878
+ }
1879
+ function levenshteinDistance(a, b) {
1880
+ const matrix = initLevenshteinMatrix(a.length, b.length);
1905
1881
  for (let i = 1; i <= b.length; i++) {
1906
1882
  for (let j = 1; j <= a.length; j++) {
1907
1883
  const row = matrix[i];
1908
1884
  const prevRow = matrix[i - 1];
1909
1885
  if (!row || !prevRow) continue;
1910
- if (b.charAt(i - 1) === a.charAt(j - 1)) {
1911
- row[j] = prevRow[j - 1] ?? 0;
1912
- } else {
1913
- row[j] = Math.min((prevRow[j - 1] ?? 0) + 1, (row[j - 1] ?? 0) + 1, (prevRow[j] ?? 0) + 1);
1914
- }
1886
+ computeLevenshteinCell(row, prevRow, j, b.charAt(i - 1) === a.charAt(j - 1));
1915
1887
  }
1916
1888
  }
1917
1889
  const lastRow = matrix[b.length];
@@ -2197,32 +2169,27 @@ function findDeadExports(snapshot, usageMap, reachability) {
2197
2169
  }
2198
2170
  return deadExports;
2199
2171
  }
2200
- function countLinesFromAST(ast) {
2201
- if (ast.body && Array.isArray(ast.body)) {
2202
- let maxLine = 0;
2203
- const traverse = (node) => {
2204
- if (node && typeof node === "object") {
2205
- const n = node;
2206
- if (n.loc?.end?.line && n.loc.end.line > maxLine) {
2207
- maxLine = n.loc.end.line;
2208
- }
2209
- for (const key of Object.keys(node)) {
2210
- const value = node[key];
2211
- if (Array.isArray(value)) {
2212
- for (const item of value) {
2213
- traverse(item);
2214
- }
2215
- } else if (value && typeof value === "object") {
2216
- traverse(value);
2217
- }
2218
- }
2172
+ function findMaxLineInNode(node) {
2173
+ if (!node || typeof node !== "object") return 0;
2174
+ const n = node;
2175
+ let maxLine = n.loc?.end?.line ?? 0;
2176
+ for (const key of Object.keys(node)) {
2177
+ const value = node[key];
2178
+ if (Array.isArray(value)) {
2179
+ for (const item of value) {
2180
+ maxLine = Math.max(maxLine, findMaxLineInNode(item));
2219
2181
  }
2220
- };
2221
- traverse(ast);
2222
- if (maxLine > 0) return maxLine;
2223
- return Math.max(ast.body.length * 3, 1);
2182
+ } else if (value && typeof value === "object") {
2183
+ maxLine = Math.max(maxLine, findMaxLineInNode(value));
2184
+ }
2224
2185
  }
2225
- return 1;
2186
+ return maxLine;
2187
+ }
2188
+ function countLinesFromAST(ast) {
2189
+ if (!ast.body || !Array.isArray(ast.body)) return 1;
2190
+ const maxLine = findMaxLineInNode(ast);
2191
+ if (maxLine > 0) return maxLine;
2192
+ return Math.max(ast.body.length * 3, 1);
2226
2193
  }
2227
2194
  function findDeadFiles(snapshot, reachability) {
2228
2195
  const deadFiles = [];
@@ -2373,130 +2340,146 @@ function fileMatchesPattern(filePath, pattern, rootDir) {
2373
2340
  const relativePath = relativePosix(rootDir, filePath);
2374
2341
  return minimatch3(relativePath, pattern);
2375
2342
  }
2376
- function checkConfigPattern(pattern, file, rootDir) {
2343
+ var CONVENTION_DESCRIPTIONS = {
2344
+ camelCase: "camelCase (e.g., myFunction)",
2345
+ PascalCase: "PascalCase (e.g., MyClass)",
2346
+ UPPER_SNAKE: "UPPER_SNAKE_CASE (e.g., MY_CONSTANT)",
2347
+ "kebab-case": "kebab-case (e.g., my-component)"
2348
+ };
2349
+ function checkMustExport(rule, file, message) {
2350
+ if (rule.type !== "must-export") return [];
2377
2351
  const matches = [];
2378
- const fileMatches = pattern.files.some((glob) => fileMatchesPattern(file.path, glob, rootDir));
2379
- if (!fileMatches) {
2380
- return matches;
2381
- }
2382
- const rule = pattern.rule;
2383
- switch (rule.type) {
2384
- case "must-export": {
2385
- for (const name of rule.names) {
2386
- const hasExport = file.exports.some((e) => e.name === name);
2387
- if (!hasExport) {
2388
- matches.push({
2389
- line: 1,
2390
- message: pattern.message || `Missing required export: "${name}"`,
2391
- suggestion: `Add export for "${name}"`
2392
- });
2393
- }
2394
- }
2395
- break;
2396
- }
2397
- case "must-export-default": {
2398
- const hasDefault = file.exports.some((e) => e.type === "default");
2399
- if (!hasDefault) {
2400
- matches.push({
2401
- line: 1,
2402
- message: pattern.message || "File must have a default export",
2403
- suggestion: "Add a default export"
2404
- });
2405
- }
2406
- break;
2407
- }
2408
- case "no-export": {
2409
- for (const name of rule.names) {
2410
- const exp = file.exports.find((e) => e.name === name);
2411
- if (exp) {
2412
- matches.push({
2413
- line: exp.location.line,
2414
- message: pattern.message || `Forbidden export: "${name}"`,
2415
- suggestion: `Remove export "${name}"`
2416
- });
2417
- }
2418
- }
2419
- break;
2352
+ for (const name of rule.names) {
2353
+ if (!file.exports.some((e) => e.name === name)) {
2354
+ matches.push({
2355
+ line: 1,
2356
+ message: message || `Missing required export: "${name}"`,
2357
+ suggestion: `Add export for "${name}"`
2358
+ });
2420
2359
  }
2421
- case "must-import": {
2422
- const hasImport = file.imports.some(
2423
- (i) => i.source === rule.from || i.source.endsWith(rule.from)
2424
- );
2425
- if (!hasImport) {
2426
- matches.push({
2427
- line: 1,
2428
- message: pattern.message || `Missing required import from "${rule.from}"`,
2429
- suggestion: `Add import from "${rule.from}"`
2430
- });
2360
+ }
2361
+ return matches;
2362
+ }
2363
+ function checkMustExportDefault(_rule, file, message) {
2364
+ if (!file.exports.some((e) => e.type === "default")) {
2365
+ return [
2366
+ {
2367
+ line: 1,
2368
+ message: message || "File must have a default export",
2369
+ suggestion: "Add a default export"
2431
2370
  }
2432
- break;
2371
+ ];
2372
+ }
2373
+ return [];
2374
+ }
2375
+ function checkNoExport(rule, file, message) {
2376
+ if (rule.type !== "no-export") return [];
2377
+ const matches = [];
2378
+ for (const name of rule.names) {
2379
+ const exp = file.exports.find((e) => e.name === name);
2380
+ if (exp) {
2381
+ matches.push({
2382
+ line: exp.location.line,
2383
+ message: message || `Forbidden export: "${name}"`,
2384
+ suggestion: `Remove export "${name}"`
2385
+ });
2433
2386
  }
2434
- case "no-import": {
2435
- const forbiddenImport = file.imports.find(
2436
- (i) => i.source === rule.from || i.source.endsWith(rule.from)
2437
- );
2438
- if (forbiddenImport) {
2439
- matches.push({
2440
- line: forbiddenImport.location.line,
2441
- message: pattern.message || `Forbidden import from "${rule.from}"`,
2442
- suggestion: `Remove import from "${rule.from}"`
2443
- });
2387
+ }
2388
+ return matches;
2389
+ }
2390
+ function checkMustImport(rule, file, message) {
2391
+ if (rule.type !== "must-import") return [];
2392
+ const hasImport = file.imports.some(
2393
+ (i) => i.source === rule.from || i.source.endsWith(rule.from)
2394
+ );
2395
+ if (!hasImport) {
2396
+ return [
2397
+ {
2398
+ line: 1,
2399
+ message: message || `Missing required import from "${rule.from}"`,
2400
+ suggestion: `Add import from "${rule.from}"`
2444
2401
  }
2445
- break;
2446
- }
2447
- case "naming": {
2448
- const regex = new RegExp(rule.match);
2449
- for (const exp of file.exports) {
2450
- if (!regex.test(exp.name)) {
2451
- let expected = "";
2452
- switch (rule.convention) {
2453
- case "camelCase":
2454
- expected = "camelCase (e.g., myFunction)";
2455
- break;
2456
- case "PascalCase":
2457
- expected = "PascalCase (e.g., MyClass)";
2458
- break;
2459
- case "UPPER_SNAKE":
2460
- expected = "UPPER_SNAKE_CASE (e.g., MY_CONSTANT)";
2461
- break;
2462
- case "kebab-case":
2463
- expected = "kebab-case (e.g., my-component)";
2464
- break;
2465
- }
2466
- matches.push({
2467
- line: exp.location.line,
2468
- message: pattern.message || `"${exp.name}" does not follow ${rule.convention} convention`,
2469
- suggestion: `Rename to follow ${expected}`
2470
- });
2471
- }
2402
+ ];
2403
+ }
2404
+ return [];
2405
+ }
2406
+ function checkNoImport(rule, file, message) {
2407
+ if (rule.type !== "no-import") return [];
2408
+ const forbiddenImport = file.imports.find(
2409
+ (i) => i.source === rule.from || i.source.endsWith(rule.from)
2410
+ );
2411
+ if (forbiddenImport) {
2412
+ return [
2413
+ {
2414
+ line: forbiddenImport.location.line,
2415
+ message: message || `Forbidden import from "${rule.from}"`,
2416
+ suggestion: `Remove import from "${rule.from}"`
2472
2417
  }
2473
- break;
2418
+ ];
2419
+ }
2420
+ return [];
2421
+ }
2422
+ function checkNaming(rule, file, message) {
2423
+ if (rule.type !== "naming") return [];
2424
+ const regex = new RegExp(rule.match);
2425
+ const matches = [];
2426
+ for (const exp of file.exports) {
2427
+ if (!regex.test(exp.name)) {
2428
+ const expected = CONVENTION_DESCRIPTIONS[rule.convention] ?? rule.convention;
2429
+ matches.push({
2430
+ line: exp.location.line,
2431
+ message: message || `"${exp.name}" does not follow ${rule.convention} convention`,
2432
+ suggestion: `Rename to follow ${expected}`
2433
+ });
2474
2434
  }
2475
- case "max-exports": {
2476
- if (file.exports.length > rule.count) {
2477
- matches.push({
2478
- line: 1,
2479
- message: pattern.message || `File has ${file.exports.length} exports, max is ${rule.count}`,
2480
- suggestion: `Split into multiple files or reduce exports to ${rule.count}`
2481
- });
2435
+ }
2436
+ return matches;
2437
+ }
2438
+ function checkMaxExports(rule, file, message) {
2439
+ if (rule.type !== "max-exports") return [];
2440
+ if (file.exports.length > rule.count) {
2441
+ return [
2442
+ {
2443
+ line: 1,
2444
+ message: message || `File has ${file.exports.length} exports, max is ${rule.count}`,
2445
+ suggestion: `Split into multiple files or reduce exports to ${rule.count}`
2482
2446
  }
2483
- break;
2484
- }
2485
- case "max-lines": {
2486
- break;
2487
- }
2488
- case "require-jsdoc": {
2489
- if (file.jsDocComments.length === 0 && file.exports.length > 0) {
2490
- matches.push({
2491
- line: 1,
2492
- message: pattern.message || "Exported symbols require JSDoc documentation",
2493
- suggestion: "Add JSDoc comments to exports"
2494
- });
2447
+ ];
2448
+ }
2449
+ return [];
2450
+ }
2451
+ function checkMaxLines(_rule, _file, _message) {
2452
+ return [];
2453
+ }
2454
+ function checkRequireJsdoc(_rule, file, message) {
2455
+ if (file.jsDocComments.length === 0 && file.exports.length > 0) {
2456
+ return [
2457
+ {
2458
+ line: 1,
2459
+ message: message || "Exported symbols require JSDoc documentation",
2460
+ suggestion: "Add JSDoc comments to exports"
2495
2461
  }
2496
- break;
2497
- }
2462
+ ];
2498
2463
  }
2499
- return matches;
2464
+ return [];
2465
+ }
2466
+ var RULE_CHECKERS = {
2467
+ "must-export": checkMustExport,
2468
+ "must-export-default": checkMustExportDefault,
2469
+ "no-export": checkNoExport,
2470
+ "must-import": checkMustImport,
2471
+ "no-import": checkNoImport,
2472
+ naming: checkNaming,
2473
+ "max-exports": checkMaxExports,
2474
+ "max-lines": checkMaxLines,
2475
+ "require-jsdoc": checkRequireJsdoc
2476
+ };
2477
+ function checkConfigPattern(pattern, file, rootDir) {
2478
+ const fileMatches = pattern.files.some((glob) => fileMatchesPattern(file.path, glob, rootDir));
2479
+ if (!fileMatches) return [];
2480
+ const checker = RULE_CHECKERS[pattern.rule.type];
2481
+ if (!checker) return [];
2482
+ return checker(pattern.rule, file, pattern.message);
2500
2483
  }
2501
2484
  async function detectPatternViolations(snapshot, config) {
2502
2485
  const violations = [];
@@ -3015,17 +2998,35 @@ function createUnusedImportFixes(deadCodeReport) {
3015
2998
  reversible: true
3016
2999
  }));
3017
3000
  }
3001
+ var EXPORT_TYPE_KEYWORD = {
3002
+ class: "class",
3003
+ function: "function",
3004
+ variable: "const",
3005
+ type: "type",
3006
+ interface: "interface",
3007
+ enum: "enum"
3008
+ };
3009
+ function getExportKeyword(exportType) {
3010
+ return EXPORT_TYPE_KEYWORD[exportType] ?? "enum";
3011
+ }
3012
+ function getDefaultExportKeyword(exportType) {
3013
+ if (exportType === "class" || exportType === "function") return exportType;
3014
+ return "";
3015
+ }
3018
3016
  function createDeadExportFixes(deadCodeReport) {
3019
- return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => ({
3020
- type: "dead-exports",
3021
- file: exp.file,
3022
- description: `Remove export keyword from ${exp.name} (${exp.reason})`,
3023
- action: "replace",
3024
- oldContent: exp.isDefault ? `export default ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `export ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3025
- newContent: exp.isDefault ? `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3026
- safe: true,
3027
- reversible: true
3028
- }));
3017
+ return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => {
3018
+ const keyword = exp.isDefault ? getDefaultExportKeyword(exp.type) : getExportKeyword(exp.type);
3019
+ return {
3020
+ type: "dead-exports",
3021
+ file: exp.file,
3022
+ description: `Remove export keyword from ${exp.name} (${exp.reason})`,
3023
+ action: "replace",
3024
+ oldContent: exp.isDefault ? `export default ${keyword} ${exp.name}` : `export ${keyword} ${exp.name}`,
3025
+ newContent: `${keyword} ${exp.name}`,
3026
+ safe: true,
3027
+ reversible: true
3028
+ };
3029
+ });
3029
3030
  }
3030
3031
  function createCommentedCodeFixes(blocks) {
3031
3032
  return blocks.map((block) => ({
@@ -3204,53 +3205,80 @@ var ALWAYS_UNSAFE_TYPES = /* @__PURE__ */ new Set([
3204
3205
  "dead-internal"
3205
3206
  ]);
3206
3207
  var idCounter = 0;
3208
+ var DEAD_CODE_FIX_ACTIONS = {
3209
+ "dead-export": "Remove export keyword",
3210
+ "dead-file": "Delete file",
3211
+ "commented-code": "Delete commented block",
3212
+ "unused-import": "Remove import"
3213
+ };
3214
+ function classifyDeadCode(input) {
3215
+ if (input.isPublicApi) {
3216
+ return {
3217
+ safety: "unsafe",
3218
+ safetyReason: "Public API export may have external consumers",
3219
+ suggestion: "Deprecate before removing"
3220
+ };
3221
+ }
3222
+ const fixAction = DEAD_CODE_FIX_ACTIONS[input.type];
3223
+ if (fixAction) {
3224
+ return {
3225
+ safety: "safe",
3226
+ safetyReason: "zero importers, non-public",
3227
+ fixAction,
3228
+ suggestion: fixAction
3229
+ };
3230
+ }
3231
+ if (input.type === "orphaned-dep") {
3232
+ return {
3233
+ safety: "probably-safe",
3234
+ safetyReason: "No imports found, but needs install+test verification",
3235
+ fixAction: "Remove from package.json",
3236
+ suggestion: "Remove from package.json"
3237
+ };
3238
+ }
3239
+ return {
3240
+ safety: "unsafe",
3241
+ safetyReason: "Unknown dead code type",
3242
+ suggestion: "Manual review required"
3243
+ };
3244
+ }
3245
+ function classifyArchitecture(input) {
3246
+ if (input.type === "import-ordering") {
3247
+ return {
3248
+ safety: "safe",
3249
+ safetyReason: "Mechanical reorder, no semantic change",
3250
+ fixAction: "Reorder imports",
3251
+ suggestion: "Reorder imports"
3252
+ };
3253
+ }
3254
+ if (input.type === "forbidden-import" && input.hasAlternative) {
3255
+ return {
3256
+ safety: "probably-safe",
3257
+ safetyReason: "Alternative configured, needs typecheck+test",
3258
+ fixAction: "Replace with configured alternative",
3259
+ suggestion: "Replace with configured alternative"
3260
+ };
3261
+ }
3262
+ return {
3263
+ safety: "unsafe",
3264
+ safetyReason: `${input.type} requires structural changes`,
3265
+ suggestion: "Restructure code to fix violation"
3266
+ };
3267
+ }
3207
3268
  function classifyFinding(input) {
3208
3269
  idCounter++;
3209
3270
  const id = `${input.concern === "dead-code" ? "dc" : "arch"}-${idCounter}`;
3210
- let safety;
3211
- let safetyReason;
3212
- let fixAction;
3213
- let suggestion;
3271
+ let classification;
3214
3272
  if (ALWAYS_UNSAFE_TYPES.has(input.type)) {
3215
- safety = "unsafe";
3216
- safetyReason = `${input.type} requires human judgment`;
3217
- suggestion = "Review and refactor manually";
3273
+ classification = {
3274
+ safety: "unsafe",
3275
+ safetyReason: `${input.type} requires human judgment`,
3276
+ suggestion: "Review and refactor manually"
3277
+ };
3218
3278
  } else if (input.concern === "dead-code") {
3219
- if (input.isPublicApi) {
3220
- safety = "unsafe";
3221
- safetyReason = "Public API export may have external consumers";
3222
- suggestion = "Deprecate before removing";
3223
- } else if (input.type === "dead-export" || input.type === "unused-import" || input.type === "commented-code" || input.type === "dead-file") {
3224
- safety = "safe";
3225
- safetyReason = "zero importers, non-public";
3226
- fixAction = input.type === "dead-export" ? "Remove export keyword" : input.type === "dead-file" ? "Delete file" : input.type === "commented-code" ? "Delete commented block" : "Remove import";
3227
- suggestion = fixAction;
3228
- } else if (input.type === "orphaned-dep") {
3229
- safety = "probably-safe";
3230
- safetyReason = "No imports found, but needs install+test verification";
3231
- fixAction = "Remove from package.json";
3232
- suggestion = fixAction;
3233
- } else {
3234
- safety = "unsafe";
3235
- safetyReason = "Unknown dead code type";
3236
- suggestion = "Manual review required";
3237
- }
3279
+ classification = classifyDeadCode(input);
3238
3280
  } else {
3239
- if (input.type === "import-ordering") {
3240
- safety = "safe";
3241
- safetyReason = "Mechanical reorder, no semantic change";
3242
- fixAction = "Reorder imports";
3243
- suggestion = fixAction;
3244
- } else if (input.type === "forbidden-import" && input.hasAlternative) {
3245
- safety = "probably-safe";
3246
- safetyReason = "Alternative configured, needs typecheck+test";
3247
- fixAction = "Replace with configured alternative";
3248
- suggestion = fixAction;
3249
- } else {
3250
- safety = "unsafe";
3251
- safetyReason = `${input.type} requires structural changes`;
3252
- suggestion = "Restructure code to fix violation";
3253
- }
3281
+ classification = classifyArchitecture(input);
3254
3282
  }
3255
3283
  return {
3256
3284
  id,
@@ -3259,11 +3287,11 @@ function classifyFinding(input) {
3259
3287
  ...input.line !== void 0 ? { line: input.line } : {},
3260
3288
  type: input.type,
3261
3289
  description: input.description,
3262
- safety,
3263
- safetyReason,
3290
+ safety: classification.safety,
3291
+ safetyReason: classification.safetyReason,
3264
3292
  hotspotDowngraded: false,
3265
- ...fixAction !== void 0 ? { fixAction } : {},
3266
- suggestion
3293
+ ...classification.fixAction !== void 0 ? { fixAction: classification.fixAction } : {},
3294
+ suggestion: classification.suggestion
3267
3295
  };
3268
3296
  }
3269
3297
  function applyHotspotDowngrade(finding, hotspot) {
@@ -3557,43 +3585,57 @@ var BenchmarkRunner = class {
3557
3585
  };
3558
3586
  }
3559
3587
  }
3588
+ /**
3589
+ * Extract a BenchmarkResult from a single assertion with benchmark data.
3590
+ */
3591
+ parseBenchAssertion(assertion, file) {
3592
+ if (!assertion.benchmark) return null;
3593
+ const bench = assertion.benchmark;
3594
+ return {
3595
+ name: assertion.fullName || assertion.title || "unknown",
3596
+ file: file.replace(process.cwd() + "/", ""),
3597
+ opsPerSec: Math.round(bench.hz || 0),
3598
+ meanMs: bench.mean ? bench.mean * 1e3 : 0,
3599
+ p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
3600
+ marginOfError: bench.rme ? bench.rme / 100 : 0.05
3601
+ };
3602
+ }
3603
+ /**
3604
+ * Extract JSON from output that may contain non-JSON preamble.
3605
+ */
3606
+ extractJson(output) {
3607
+ const jsonStart = output.indexOf("{");
3608
+ const jsonEnd = output.lastIndexOf("}");
3609
+ if (jsonStart === -1 || jsonEnd === -1) return null;
3610
+ return JSON.parse(output.slice(jsonStart, jsonEnd + 1));
3611
+ }
3560
3612
  /**
3561
3613
  * Parse vitest bench JSON reporter output into BenchmarkResult[].
3562
3614
  * Vitest bench JSON output contains testResults with benchmark data.
3563
3615
  */
3564
- parseVitestBenchOutput(output) {
3616
+ collectAssertionResults(testResults) {
3565
3617
  const results = [];
3566
- try {
3567
- const jsonStart = output.indexOf("{");
3568
- const jsonEnd = output.lastIndexOf("}");
3569
- if (jsonStart === -1 || jsonEnd === -1) return results;
3570
- const jsonStr = output.slice(jsonStart, jsonEnd + 1);
3571
- const parsed = JSON.parse(jsonStr);
3572
- if (parsed.testResults) {
3573
- for (const testResult of parsed.testResults) {
3574
- const file = testResult.name || testResult.filepath || "";
3575
- if (testResult.assertionResults) {
3576
- for (const assertion of testResult.assertionResults) {
3577
- if (assertion.benchmark) {
3578
- const bench = assertion.benchmark;
3579
- results.push({
3580
- name: assertion.fullName || assertion.title || "unknown",
3581
- file: file.replace(process.cwd() + "/", ""),
3582
- opsPerSec: Math.round(bench.hz || 0),
3583
- meanMs: bench.mean ? bench.mean * 1e3 : 0,
3584
- // p99: use actual p99 if available, otherwise estimate as 1.5× mean
3585
- p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
3586
- marginOfError: bench.rme ? bench.rme / 100 : 0.05
3587
- });
3588
- }
3589
- }
3590
- }
3591
- }
3618
+ for (const testResult of testResults) {
3619
+ const file = testResult.name || testResult.filepath || "";
3620
+ const assertions = testResult.assertionResults ?? [];
3621
+ for (const assertion of assertions) {
3622
+ const result = this.parseBenchAssertion(assertion, file);
3623
+ if (result) results.push(result);
3592
3624
  }
3593
- } catch {
3594
3625
  }
3595
3626
  return results;
3596
3627
  }
3628
+ parseVitestBenchOutput(output) {
3629
+ try {
3630
+ const parsed = this.extractJson(output);
3631
+ if (!parsed) return [];
3632
+ const testResults = parsed.testResults;
3633
+ if (!testResults) return [];
3634
+ return this.collectAssertionResults(testResults);
3635
+ } catch {
3636
+ return [];
3637
+ }
3638
+ }
3597
3639
  };
3598
3640
 
3599
3641
  // src/performance/regression-detector.ts
@@ -3903,39 +3945,31 @@ function resetFeedbackConfig() {
3903
3945
  }
3904
3946
 
3905
3947
  // src/feedback/review/diff-analyzer.ts
3948
+ function detectFileStatus(part) {
3949
+ if (/new file mode/.test(part)) return "added";
3950
+ if (/deleted file mode/.test(part)) return "deleted";
3951
+ if (part.includes("rename from")) return "renamed";
3952
+ return "modified";
3953
+ }
3954
+ function parseDiffPart(part) {
3955
+ if (!part.trim()) return null;
3956
+ const headerMatch = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
3957
+ if (!headerMatch || !headerMatch[2]) return null;
3958
+ const additionRegex = /^\+(?!\+\+)/gm;
3959
+ const deletionRegex = /^-(?!--)/gm;
3960
+ return {
3961
+ path: headerMatch[2],
3962
+ status: detectFileStatus(part),
3963
+ additions: (part.match(additionRegex) || []).length,
3964
+ deletions: (part.match(deletionRegex) || []).length
3965
+ };
3966
+ }
3906
3967
  function parseDiff(diff2) {
3907
3968
  try {
3908
3969
  if (!diff2.trim()) {
3909
3970
  return Ok({ diff: diff2, files: [] });
3910
3971
  }
3911
- const files = [];
3912
- const newFileRegex = /new file mode/;
3913
- const deletedFileRegex = /deleted file mode/;
3914
- const additionRegex = /^\+(?!\+\+)/gm;
3915
- const deletionRegex = /^-(?!--)/gm;
3916
- const diffParts = diff2.split(/(?=diff --git)/);
3917
- for (const part of diffParts) {
3918
- if (!part.trim()) continue;
3919
- const headerMatch = /diff --git a\/(.+?) b\/(.+?)(?:\n|$)/.exec(part);
3920
- if (!headerMatch || !headerMatch[2]) continue;
3921
- const filePath = headerMatch[2];
3922
- let status = "modified";
3923
- if (newFileRegex.test(part)) {
3924
- status = "added";
3925
- } else if (deletedFileRegex.test(part)) {
3926
- status = "deleted";
3927
- } else if (part.includes("rename from")) {
3928
- status = "renamed";
3929
- }
3930
- const additions = (part.match(additionRegex) || []).length;
3931
- const deletions = (part.match(deletionRegex) || []).length;
3932
- files.push({
3933
- path: filePath,
3934
- status,
3935
- additions,
3936
- deletions
3937
- });
3938
- }
3972
+ const files = diff2.split(/(?=diff --git)/).map(parseDiffPart).filter((f) => f !== null);
3939
3973
  return Ok({ diff: diff2, files });
3940
3974
  } catch (error) {
3941
3975
  return Err({
@@ -4101,107 +4135,123 @@ var ChecklistBuilder = class {
4101
4135
  this.graphImpactData = graphImpactData;
4102
4136
  return this;
4103
4137
  }
4104
- async run(changes) {
4105
- const startTime = Date.now();
4138
+ /**
4139
+ * Build a single harness check item with or without graph data.
4140
+ */
4141
+ buildHarnessCheckItem(id, check, fallbackDetails, graphItemBuilder) {
4142
+ if (this.graphHarnessData && graphItemBuilder) {
4143
+ return graphItemBuilder();
4144
+ }
4145
+ return {
4146
+ id,
4147
+ category: "harness",
4148
+ check,
4149
+ passed: true,
4150
+ severity: "info",
4151
+ details: fallbackDetails
4152
+ };
4153
+ }
4154
+ /**
4155
+ * Build all harness check items based on harnessOptions and graph data.
4156
+ */
4157
+ buildHarnessItems() {
4158
+ if (!this.harnessOptions) return [];
4106
4159
  const items = [];
4107
- if (this.harnessOptions) {
4108
- if (this.harnessOptions.context !== false) {
4109
- if (this.graphHarnessData) {
4110
- items.push({
4111
- id: "harness-context",
4112
- category: "harness",
4113
- check: "Context validation",
4114
- passed: this.graphHarnessData.graphExists && this.graphHarnessData.nodeCount > 0,
4115
- severity: "info",
4116
- details: this.graphHarnessData.graphExists ? `Graph loaded: ${this.graphHarnessData.nodeCount} nodes, ${this.graphHarnessData.edgeCount} edges` : "No graph available \u2014 run harness scan to build the knowledge graph"
4117
- });
4118
- } else {
4119
- items.push({
4160
+ const graphData = this.graphHarnessData;
4161
+ if (this.harnessOptions.context !== false) {
4162
+ items.push(
4163
+ this.buildHarnessCheckItem(
4164
+ "harness-context",
4165
+ "Context validation",
4166
+ "Harness context validation not yet integrated (run with graph for real checks)",
4167
+ graphData ? () => ({
4120
4168
  id: "harness-context",
4121
4169
  category: "harness",
4122
4170
  check: "Context validation",
4123
- passed: true,
4124
- severity: "info",
4125
- details: "Harness context validation not yet integrated (run with graph for real checks)"
4126
- });
4127
- }
4128
- }
4129
- if (this.harnessOptions.constraints !== false) {
4130
- if (this.graphHarnessData) {
4131
- const violations = this.graphHarnessData.constraintViolations;
4132
- items.push({
4133
- id: "harness-constraints",
4134
- category: "harness",
4135
- check: "Constraint validation",
4136
- passed: violations === 0,
4137
- severity: violations > 0 ? "error" : "info",
4138
- details: violations === 0 ? "No constraint violations detected" : `${violations} constraint violation(s) detected`
4139
- });
4140
- } else {
4141
- items.push({
4142
- id: "harness-constraints",
4143
- category: "harness",
4144
- check: "Constraint validation",
4145
- passed: true,
4146
- severity: "info",
4147
- details: "Harness constraint validation not yet integrated (run with graph for real checks)"
4148
- });
4149
- }
4150
- }
4151
- if (this.harnessOptions.entropy !== false) {
4152
- if (this.graphHarnessData) {
4153
- const issues = this.graphHarnessData.unreachableNodes + this.graphHarnessData.undocumentedFiles;
4154
- items.push({
4155
- id: "harness-entropy",
4156
- category: "harness",
4157
- check: "Entropy detection",
4158
- passed: issues === 0,
4159
- severity: issues > 0 ? "warning" : "info",
4160
- details: issues === 0 ? "No entropy issues detected" : `${this.graphHarnessData.unreachableNodes} unreachable node(s), ${this.graphHarnessData.undocumentedFiles} undocumented file(s)`
4161
- });
4162
- } else {
4163
- items.push({
4164
- id: "harness-entropy",
4165
- category: "harness",
4166
- check: "Entropy detection",
4167
- passed: true,
4171
+ passed: graphData.graphExists && graphData.nodeCount > 0,
4168
4172
  severity: "info",
4169
- details: "Harness entropy detection not yet integrated (run with graph for real checks)"
4170
- });
4171
- }
4172
- }
4173
+ details: graphData.graphExists ? `Graph loaded: ${graphData.nodeCount} nodes, ${graphData.edgeCount} edges` : "No graph available \u2014 run harness scan to build the knowledge graph"
4174
+ }) : void 0
4175
+ )
4176
+ );
4177
+ }
4178
+ if (this.harnessOptions.constraints !== false) {
4179
+ items.push(
4180
+ this.buildHarnessCheckItem(
4181
+ "harness-constraints",
4182
+ "Constraint validation",
4183
+ "Harness constraint validation not yet integrated (run with graph for real checks)",
4184
+ graphData ? () => {
4185
+ const violations = graphData.constraintViolations;
4186
+ return {
4187
+ id: "harness-constraints",
4188
+ category: "harness",
4189
+ check: "Constraint validation",
4190
+ passed: violations === 0,
4191
+ severity: violations > 0 ? "error" : "info",
4192
+ details: violations === 0 ? "No constraint violations detected" : `${violations} constraint violation(s) detected`
4193
+ };
4194
+ } : void 0
4195
+ )
4196
+ );
4197
+ }
4198
+ if (this.harnessOptions.entropy !== false) {
4199
+ items.push(
4200
+ this.buildHarnessCheckItem(
4201
+ "harness-entropy",
4202
+ "Entropy detection",
4203
+ "Harness entropy detection not yet integrated (run with graph for real checks)",
4204
+ graphData ? () => {
4205
+ const issues = graphData.unreachableNodes + graphData.undocumentedFiles;
4206
+ return {
4207
+ id: "harness-entropy",
4208
+ category: "harness",
4209
+ check: "Entropy detection",
4210
+ passed: issues === 0,
4211
+ severity: issues > 0 ? "warning" : "info",
4212
+ details: issues === 0 ? "No entropy issues detected" : `${graphData.unreachableNodes} unreachable node(s), ${graphData.undocumentedFiles} undocumented file(s)`
4213
+ };
4214
+ } : void 0
4215
+ )
4216
+ );
4217
+ }
4218
+ return items;
4219
+ }
4220
+ /**
4221
+ * Execute a single custom rule and return a ReviewItem.
4222
+ */
4223
+ async executeCustomRule(rule, changes) {
4224
+ try {
4225
+ const result = await rule.check(changes, this.rootDir);
4226
+ const item = {
4227
+ id: rule.id,
4228
+ category: "custom",
4229
+ check: rule.name,
4230
+ passed: result.passed,
4231
+ severity: rule.severity,
4232
+ details: result.details
4233
+ };
4234
+ if (result.suggestion !== void 0) item.suggestion = result.suggestion;
4235
+ if (result.file !== void 0) item.file = result.file;
4236
+ if (result.line !== void 0) item.line = result.line;
4237
+ return item;
4238
+ } catch (error) {
4239
+ return {
4240
+ id: rule.id,
4241
+ category: "custom",
4242
+ check: rule.name,
4243
+ passed: false,
4244
+ severity: "error",
4245
+ details: `Rule execution failed: ${String(error)}`
4246
+ };
4173
4247
  }
4248
+ }
4249
+ async run(changes) {
4250
+ const startTime = Date.now();
4251
+ const items = [];
4252
+ items.push(...this.buildHarnessItems());
4174
4253
  for (const rule of this.customRules) {
4175
- try {
4176
- const result = await rule.check(changes, this.rootDir);
4177
- const item = {
4178
- id: rule.id,
4179
- category: "custom",
4180
- check: rule.name,
4181
- passed: result.passed,
4182
- severity: rule.severity,
4183
- details: result.details
4184
- };
4185
- if (result.suggestion !== void 0) {
4186
- item.suggestion = result.suggestion;
4187
- }
4188
- if (result.file !== void 0) {
4189
- item.file = result.file;
4190
- }
4191
- if (result.line !== void 0) {
4192
- item.line = result.line;
4193
- }
4194
- items.push(item);
4195
- } catch (error) {
4196
- items.push({
4197
- id: rule.id,
4198
- category: "custom",
4199
- check: rule.name,
4200
- passed: false,
4201
- severity: "error",
4202
- details: `Rule execution failed: ${String(error)}`
4203
- });
4204
- }
4254
+ items.push(await this.executeCustomRule(rule, changes));
4205
4255
  }
4206
4256
  if (this.diffOptions) {
4207
4257
  const diffResult = await analyzeDiff(changes, this.diffOptions, this.graphImpactData);
@@ -4216,7 +4266,6 @@ var ChecklistBuilder = class {
4216
4266
  const checklist = {
4217
4267
  items,
4218
4268
  passed: failed === 0,
4219
- // Pass if no failed items
4220
4269
  summary: {
4221
4270
  total: items.length,
4222
4271
  passed,
@@ -4769,6 +4818,10 @@ var INDEX_FILE = "index.json";
4769
4818
  var SESSIONS_DIR = "sessions";
4770
4819
  var SESSION_INDEX_FILE = "index.md";
4771
4820
  var SUMMARY_FILE = "summary.md";
4821
+ var SESSION_STATE_FILE = "session-state.json";
4822
+ var ARCHIVE_DIR = "archive";
4823
+ var CONTENT_HASHES_FILE = "content-hashes.json";
4824
+ var EVENTS_FILE = "events.jsonl";
4772
4825
 
4773
4826
  // src/state/stream-resolver.ts
4774
4827
  var STREAMS_DIR = "streams";
@@ -5111,6 +5164,85 @@ async function saveState(projectPath, state, stream, session) {
5111
5164
  // src/state/learnings.ts
5112
5165
  import * as fs9 from "fs";
5113
5166
  import * as path6 from "path";
5167
+ import * as crypto from "crypto";
5168
+ function parseFrontmatter(line) {
5169
+ const match = line.match(/^<!--\s+hash:([a-f0-9]+)(?:\s+tags:([^\s]+))?\s+-->/);
5170
+ if (!match) return null;
5171
+ const hash = match[1];
5172
+ const tags = match[2] ? match[2].split(",").filter(Boolean) : [];
5173
+ return { hash, tags };
5174
+ }
5175
+ function computeEntryHash(text) {
5176
+ return crypto.createHash("sha256").update(text).digest("hex").slice(0, 8);
5177
+ }
5178
+ function normalizeLearningContent(text) {
5179
+ let normalized = text;
5180
+ normalized = normalized.replace(/\d{4}-\d{2}-\d{2}/g, "");
5181
+ normalized = normalized.replace(/\[skill:[^\]]*\]/g, "");
5182
+ normalized = normalized.replace(/\[outcome:[^\]]*\]/g, "");
5183
+ normalized = normalized.replace(/^[\s]*[-*]\s+/gm, "");
5184
+ normalized = normalized.replace(/\*\*/g, "");
5185
+ normalized = normalized.replace(/:\s*/g, " ");
5186
+ normalized = normalized.toLowerCase();
5187
+ normalized = normalized.replace(/\s+/g, " ").trim();
5188
+ return normalized;
5189
+ }
5190
+ function computeContentHash(text) {
5191
+ return crypto.createHash("sha256").update(text).digest("hex").slice(0, 16);
5192
+ }
5193
+ function loadContentHashes(stateDir) {
5194
+ const hashesPath = path6.join(stateDir, CONTENT_HASHES_FILE);
5195
+ if (!fs9.existsSync(hashesPath)) return {};
5196
+ try {
5197
+ const raw = fs9.readFileSync(hashesPath, "utf-8");
5198
+ const parsed = JSON.parse(raw);
5199
+ if (typeof parsed !== "object" || parsed === null || Array.isArray(parsed)) return {};
5200
+ return parsed;
5201
+ } catch {
5202
+ return {};
5203
+ }
5204
+ }
5205
+ function saveContentHashes(stateDir, index) {
5206
+ const hashesPath = path6.join(stateDir, CONTENT_HASHES_FILE);
5207
+ fs9.writeFileSync(hashesPath, JSON.stringify(index, null, 2) + "\n");
5208
+ }
5209
+ function rebuildContentHashes(stateDir) {
5210
+ const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
5211
+ if (!fs9.existsSync(learningsPath)) return {};
5212
+ const content = fs9.readFileSync(learningsPath, "utf-8");
5213
+ const lines = content.split("\n");
5214
+ const index = {};
5215
+ for (let i = 0; i < lines.length; i++) {
5216
+ const line = lines[i];
5217
+ const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
5218
+ if (isDatedBullet) {
5219
+ const learningMatch = line.match(/:\*\*\s*(.+)$/);
5220
+ if (learningMatch?.[1]) {
5221
+ const normalized = normalizeLearningContent(learningMatch[1]);
5222
+ const hash = computeContentHash(normalized);
5223
+ const dateMatch = line.match(/(\d{4}-\d{2}-\d{2})/);
5224
+ index[hash] = { date: dateMatch?.[1] ?? "", line: i + 1 };
5225
+ }
5226
+ }
5227
+ }
5228
+ saveContentHashes(stateDir, index);
5229
+ return index;
5230
+ }
5231
+ function extractIndexEntry(entry) {
5232
+ const lines = entry.split("\n");
5233
+ const summary = lines[0] ?? entry;
5234
+ const tags = [];
5235
+ const skillMatch = entry.match(/\[skill:([^\]]+)\]/);
5236
+ if (skillMatch?.[1]) tags.push(skillMatch[1]);
5237
+ const outcomeMatch = entry.match(/\[outcome:([^\]]+)\]/);
5238
+ if (outcomeMatch?.[1]) tags.push(outcomeMatch[1]);
5239
+ return {
5240
+ hash: computeEntryHash(entry),
5241
+ tags,
5242
+ summary,
5243
+ fullText: entry
5244
+ };
5245
+ }
5114
5246
  var learningsCacheMap = /* @__PURE__ */ new Map();
5115
5247
  function clearLearningsCache() {
5116
5248
  learningsCacheMap.clear();
@@ -5122,27 +5254,55 @@ async function appendLearning(projectPath, learning, skillName, outcome, stream,
5122
5254
  const stateDir = dirResult.value;
5123
5255
  const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
5124
5256
  fs9.mkdirSync(stateDir, { recursive: true });
5257
+ const normalizedContent = normalizeLearningContent(learning);
5258
+ const contentHash = computeContentHash(normalizedContent);
5259
+ const hashesPath = path6.join(stateDir, CONTENT_HASHES_FILE);
5260
+ let contentHashes;
5261
+ if (fs9.existsSync(hashesPath)) {
5262
+ contentHashes = loadContentHashes(stateDir);
5263
+ if (Object.keys(contentHashes).length === 0 && fs9.existsSync(learningsPath)) {
5264
+ contentHashes = rebuildContentHashes(stateDir);
5265
+ }
5266
+ } else if (fs9.existsSync(learningsPath)) {
5267
+ contentHashes = rebuildContentHashes(stateDir);
5268
+ } else {
5269
+ contentHashes = {};
5270
+ }
5271
+ if (contentHashes[contentHash]) {
5272
+ return Ok(void 0);
5273
+ }
5125
5274
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
5126
- let entry;
5275
+ const fmTags = [];
5276
+ if (skillName) fmTags.push(skillName);
5277
+ if (outcome) fmTags.push(outcome);
5278
+ let bulletLine;
5127
5279
  if (skillName && outcome) {
5128
- entry = `
5129
- - **${timestamp} [skill:${skillName}] [outcome:${outcome}]:** ${learning}
5130
- `;
5280
+ bulletLine = `- **${timestamp} [skill:${skillName}] [outcome:${outcome}]:** ${learning}`;
5131
5281
  } else if (skillName) {
5132
- entry = `
5133
- - **${timestamp} [skill:${skillName}]:** ${learning}
5134
- `;
5282
+ bulletLine = `- **${timestamp} [skill:${skillName}]:** ${learning}`;
5135
5283
  } else {
5136
- entry = `
5137
- - **${timestamp}:** ${learning}
5138
- `;
5284
+ bulletLine = `- **${timestamp}:** ${learning}`;
5139
5285
  }
5286
+ const hash = crypto.createHash("sha256").update(bulletLine).digest("hex").slice(0, 8);
5287
+ const tagsStr = fmTags.length > 0 ? ` tags:${fmTags.join(",")}` : "";
5288
+ const frontmatter = `<!-- hash:${hash}${tagsStr} -->`;
5289
+ const entry = `
5290
+ ${frontmatter}
5291
+ ${bulletLine}
5292
+ `;
5293
+ let existingLineCount;
5140
5294
  if (!fs9.existsSync(learningsPath)) {
5141
5295
  fs9.writeFileSync(learningsPath, `# Learnings
5142
5296
  ${entry}`);
5297
+ existingLineCount = 1;
5143
5298
  } else {
5299
+ const existingContent = fs9.readFileSync(learningsPath, "utf-8");
5300
+ existingLineCount = existingContent.split("\n").length;
5144
5301
  fs9.appendFileSync(learningsPath, entry);
5145
5302
  }
5303
+ const bulletLine_lineNum = existingLineCount + 2;
5304
+ contentHashes[contentHash] = { date: timestamp ?? "", line: bulletLine_lineNum };
5305
+ saveContentHashes(stateDir, contentHashes);
5146
5306
  learningsCacheMap.delete(learningsPath);
5147
5307
  return Ok(void 0);
5148
5308
  } catch (error) {
@@ -5190,7 +5350,30 @@ function analyzeLearningPatterns(entries) {
5190
5350
  return patterns.sort((a, b) => b.count - a.count);
5191
5351
  }
5192
5352
  async function loadBudgetedLearnings(projectPath, options) {
5193
- const { intent, tokenBudget = 1e3, skill, session, stream } = options;
5353
+ const { intent, tokenBudget = 1e3, skill, session, stream, depth = "summary" } = options;
5354
+ if (depth === "index") {
5355
+ const indexEntries = [];
5356
+ if (session) {
5357
+ const sessionResult = await loadIndexEntries(projectPath, skill, stream, session);
5358
+ if (sessionResult.ok) indexEntries.push(...sessionResult.value);
5359
+ }
5360
+ const globalResult2 = await loadIndexEntries(projectPath, skill, stream);
5361
+ if (globalResult2.ok) {
5362
+ const sessionHashes = new Set(indexEntries.map((e) => e.hash));
5363
+ const uniqueGlobal = globalResult2.value.filter((e) => !sessionHashes.has(e.hash));
5364
+ indexEntries.push(...uniqueGlobal);
5365
+ }
5366
+ const budgeted2 = [];
5367
+ let totalTokens2 = 0;
5368
+ for (const entry of indexEntries) {
5369
+ const separator = budgeted2.length > 0 ? "\n" : "";
5370
+ const entryCost = estimateTokens(entry.summary + separator);
5371
+ if (totalTokens2 + entryCost > tokenBudget) break;
5372
+ budgeted2.push(entry.summary);
5373
+ totalTokens2 += entryCost;
5374
+ }
5375
+ return Ok(budgeted2);
5376
+ }
5194
5377
  const sortByRecencyAndRelevance = (entries) => {
5195
5378
  return [...entries].sort((a, b) => {
5196
5379
  const dateA = parseDateFromEntry(a) ?? "0000-00-00";
@@ -5209,7 +5392,9 @@ async function loadBudgetedLearnings(projectPath, options) {
5209
5392
  }
5210
5393
  const globalResult = await loadRelevantLearnings(projectPath, skill, stream);
5211
5394
  if (globalResult.ok) {
5212
- allEntries.push(...sortByRecencyAndRelevance(globalResult.value));
5395
+ const sessionSet = new Set(allEntries.map((e) => e.trim()));
5396
+ const uniqueGlobal = globalResult.value.filter((e) => !sessionSet.has(e.trim()));
5397
+ allEntries.push(...sortByRecencyAndRelevance(uniqueGlobal));
5213
5398
  }
5214
5399
  const budgeted = [];
5215
5400
  let totalTokens = 0;
@@ -5222,6 +5407,68 @@ async function loadBudgetedLearnings(projectPath, options) {
5222
5407
  }
5223
5408
  return Ok(budgeted);
5224
5409
  }
5410
+ async function loadIndexEntries(projectPath, skillName, stream, session) {
5411
+ try {
5412
+ const dirResult = await getStateDir(projectPath, stream, session);
5413
+ if (!dirResult.ok) return dirResult;
5414
+ const stateDir = dirResult.value;
5415
+ const learningsPath = path6.join(stateDir, LEARNINGS_FILE);
5416
+ if (!fs9.existsSync(learningsPath)) {
5417
+ return Ok([]);
5418
+ }
5419
+ const content = fs9.readFileSync(learningsPath, "utf-8");
5420
+ const lines = content.split("\n");
5421
+ const indexEntries = [];
5422
+ let pendingFrontmatter = null;
5423
+ let currentBlock = [];
5424
+ for (const line of lines) {
5425
+ if (line.startsWith("# ")) continue;
5426
+ const fm = parseFrontmatter(line);
5427
+ if (fm) {
5428
+ pendingFrontmatter = fm;
5429
+ continue;
5430
+ }
5431
+ const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
5432
+ const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
5433
+ if (isDatedBullet || isHeading) {
5434
+ if (pendingFrontmatter) {
5435
+ indexEntries.push({
5436
+ hash: pendingFrontmatter.hash,
5437
+ tags: pendingFrontmatter.tags,
5438
+ summary: line,
5439
+ fullText: ""
5440
+ // Placeholder — full text not loaded in index mode
5441
+ });
5442
+ pendingFrontmatter = null;
5443
+ } else {
5444
+ const idx = extractIndexEntry(line);
5445
+ indexEntries.push({
5446
+ hash: idx.hash,
5447
+ tags: idx.tags,
5448
+ summary: line,
5449
+ fullText: ""
5450
+ });
5451
+ }
5452
+ currentBlock = [line];
5453
+ } else if (line.trim() !== "" && currentBlock.length > 0) {
5454
+ currentBlock.push(line);
5455
+ }
5456
+ }
5457
+ if (skillName) {
5458
+ const filtered = indexEntries.filter(
5459
+ (e) => e.tags.includes(skillName) || e.summary.includes(`[skill:${skillName}]`)
5460
+ );
5461
+ return Ok(filtered);
5462
+ }
5463
+ return Ok(indexEntries);
5464
+ } catch (error) {
5465
+ return Err(
5466
+ new Error(
5467
+ `Failed to load index entries: ${error instanceof Error ? error.message : String(error)}`
5468
+ )
5469
+ );
5470
+ }
5471
+ }
5225
5472
  async function loadRelevantLearnings(projectPath, skillName, stream, session) {
5226
5473
  try {
5227
5474
  const dirResult = await getStateDir(projectPath, stream, session);
@@ -5244,6 +5491,7 @@ async function loadRelevantLearnings(projectPath, skillName, stream, session) {
5244
5491
  let currentBlock = [];
5245
5492
  for (const line of lines) {
5246
5493
  if (line.startsWith("# ")) continue;
5494
+ if (/^<!--\s+hash:[a-f0-9]+/.test(line)) continue;
5247
5495
  const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
5248
5496
  const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
5249
5497
  if (isDatedBullet || isHeading) {
@@ -5353,6 +5601,68 @@ async function pruneLearnings(projectPath, stream) {
5353
5601
  );
5354
5602
  }
5355
5603
  }
5604
+ var PROMOTABLE_OUTCOMES = ["gotcha", "decision", "observation"];
5605
+ function isGeneralizable(entry) {
5606
+ for (const outcome of PROMOTABLE_OUTCOMES) {
5607
+ if (entry.includes(`[outcome:${outcome}]`)) return true;
5608
+ }
5609
+ return false;
5610
+ }
5611
+ async function promoteSessionLearnings(projectPath, sessionSlug, stream) {
5612
+ try {
5613
+ const sessionResult = await loadRelevantLearnings(projectPath, void 0, stream, sessionSlug);
5614
+ if (!sessionResult.ok) return sessionResult;
5615
+ const sessionEntries = sessionResult.value;
5616
+ if (sessionEntries.length === 0) {
5617
+ return Ok({ promoted: 0, skipped: 0 });
5618
+ }
5619
+ const toPromote = [];
5620
+ let skipped = 0;
5621
+ for (const entry of sessionEntries) {
5622
+ if (isGeneralizable(entry)) {
5623
+ toPromote.push(entry);
5624
+ } else {
5625
+ skipped++;
5626
+ }
5627
+ }
5628
+ if (toPromote.length === 0) {
5629
+ return Ok({ promoted: 0, skipped });
5630
+ }
5631
+ const dirResult = await getStateDir(projectPath, stream);
5632
+ if (!dirResult.ok) return dirResult;
5633
+ const stateDir = dirResult.value;
5634
+ const globalPath = path6.join(stateDir, LEARNINGS_FILE);
5635
+ const existingGlobal = fs9.existsSync(globalPath) ? fs9.readFileSync(globalPath, "utf-8") : "";
5636
+ const newEntries = toPromote.filter((entry) => !existingGlobal.includes(entry.trim()));
5637
+ if (newEntries.length === 0) {
5638
+ return Ok({ promoted: 0, skipped: skipped + toPromote.length });
5639
+ }
5640
+ const promotedContent = newEntries.join("\n\n") + "\n";
5641
+ if (!existingGlobal) {
5642
+ fs9.writeFileSync(globalPath, `# Learnings
5643
+
5644
+ ${promotedContent}`);
5645
+ } else {
5646
+ fs9.appendFileSync(globalPath, "\n\n" + promotedContent);
5647
+ }
5648
+ learningsCacheMap.delete(globalPath);
5649
+ return Ok({
5650
+ promoted: newEntries.length,
5651
+ skipped: skipped + (toPromote.length - newEntries.length)
5652
+ });
5653
+ } catch (error) {
5654
+ return Err(
5655
+ new Error(
5656
+ `Failed to promote session learnings: ${error instanceof Error ? error.message : String(error)}`
5657
+ )
5658
+ );
5659
+ }
5660
+ }
5661
+ async function countLearningEntries(projectPath, stream) {
5662
+ const loadResult = await loadRelevantLearnings(projectPath, void 0, stream);
5663
+ if (!loadResult.ok) return 0;
5664
+ return loadResult.value.length;
5665
+ }
5356
5666
 
5357
5667
  // src/state/failures.ts
5358
5668
  import * as fs10 from "fs";
@@ -5677,37 +5987,319 @@ function listActiveSessions(projectPath) {
5677
5987
  }
5678
5988
  }
5679
5989
 
5680
- // src/workflow/runner.ts
5681
- async function executeWorkflow(workflow, executor) {
5682
- const stepResults = [];
5683
- const startTime = Date.now();
5684
- let previousArtifact;
5685
- let stopped = false;
5686
- for (const step of workflow.steps) {
5687
- if (stopped) {
5688
- stepResults.push({
5689
- step,
5690
- outcome: "skipped",
5691
- durationMs: 0
5692
- });
5693
- continue;
5694
- }
5695
- const stepResult = await executor(step, previousArtifact);
5696
- stepResults.push(stepResult);
5697
- if (stepResult.outcome === "pass") {
5698
- previousArtifact = stepResult.artifact;
5699
- } else {
5700
- const gate = step.gate ?? "pass-required";
5701
- if (gate === "pass-required") {
5702
- stopped = true;
5990
+ // src/state/session-sections.ts
5991
+ import * as fs14 from "fs";
5992
+ import * as path11 from "path";
5993
+ import { SESSION_SECTION_NAMES } from "@harness-engineering/types";
5994
+ function emptySections() {
5995
+ const sections = {};
5996
+ for (const name of SESSION_SECTION_NAMES) {
5997
+ sections[name] = [];
5998
+ }
5999
+ return sections;
6000
+ }
6001
+ async function loadSessionState(projectPath, sessionSlug) {
6002
+ const dirResult = resolveSessionDir(projectPath, sessionSlug);
6003
+ if (!dirResult.ok) return dirResult;
6004
+ const sessionDir = dirResult.value;
6005
+ const filePath = path11.join(sessionDir, SESSION_STATE_FILE);
6006
+ if (!fs14.existsSync(filePath)) {
6007
+ return Ok(emptySections());
6008
+ }
6009
+ try {
6010
+ const raw = fs14.readFileSync(filePath, "utf-8");
6011
+ const parsed = JSON.parse(raw);
6012
+ const sections = emptySections();
6013
+ for (const name of SESSION_SECTION_NAMES) {
6014
+ if (Array.isArray(parsed[name])) {
6015
+ sections[name] = parsed[name];
5703
6016
  }
5704
6017
  }
6018
+ return Ok(sections);
6019
+ } catch (error) {
6020
+ return Err(
6021
+ new Error(
6022
+ `Failed to load session state: ${error instanceof Error ? error.message : String(error)}`
6023
+ )
6024
+ );
5705
6025
  }
5706
- const hasFailure = stepResults.some((r) => r.outcome === "fail");
5707
- return {
5708
- workflow,
5709
- stepResults,
5710
- pass: !hasFailure,
6026
+ }
6027
+ async function saveSessionState(projectPath, sessionSlug, sections) {
6028
+ const dirResult = resolveSessionDir(projectPath, sessionSlug, { create: true });
6029
+ if (!dirResult.ok) return dirResult;
6030
+ const sessionDir = dirResult.value;
6031
+ const filePath = path11.join(sessionDir, SESSION_STATE_FILE);
6032
+ try {
6033
+ fs14.writeFileSync(filePath, JSON.stringify(sections, null, 2));
6034
+ return Ok(void 0);
6035
+ } catch (error) {
6036
+ return Err(
6037
+ new Error(
6038
+ `Failed to save session state: ${error instanceof Error ? error.message : String(error)}`
6039
+ )
6040
+ );
6041
+ }
6042
+ }
6043
+ async function readSessionSections(projectPath, sessionSlug) {
6044
+ return loadSessionState(projectPath, sessionSlug);
6045
+ }
6046
+ async function readSessionSection(projectPath, sessionSlug, section) {
6047
+ const result = await loadSessionState(projectPath, sessionSlug);
6048
+ if (!result.ok) return result;
6049
+ return Ok(result.value[section]);
6050
+ }
6051
+ async function appendSessionEntry(projectPath, sessionSlug, section, authorSkill, content) {
6052
+ const loadResult = await loadSessionState(projectPath, sessionSlug);
6053
+ if (!loadResult.ok) return loadResult;
6054
+ const sections = loadResult.value;
6055
+ const entry = {
6056
+ id: generateEntryId(),
6057
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
6058
+ authorSkill,
6059
+ content,
6060
+ status: "active"
6061
+ };
6062
+ sections[section].push(entry);
6063
+ const saveResult = await saveSessionState(projectPath, sessionSlug, sections);
6064
+ if (!saveResult.ok) return saveResult;
6065
+ return Ok(entry);
6066
+ }
6067
+ async function updateSessionEntryStatus(projectPath, sessionSlug, section, entryId, newStatus) {
6068
+ const loadResult = await loadSessionState(projectPath, sessionSlug);
6069
+ if (!loadResult.ok) return loadResult;
6070
+ const sections = loadResult.value;
6071
+ const entry = sections[section].find((e) => e.id === entryId);
6072
+ if (!entry) {
6073
+ return Err(new Error(`Entry '${entryId}' not found in section '${section}'`));
6074
+ }
6075
+ entry.status = newStatus;
6076
+ const saveResult = await saveSessionState(projectPath, sessionSlug, sections);
6077
+ if (!saveResult.ok) return saveResult;
6078
+ return Ok(entry);
6079
+ }
6080
+ function generateEntryId() {
6081
+ const timestamp = Date.now().toString(36);
6082
+ const random = Math.random().toString(36).substring(2, 8);
6083
+ return `${timestamp}-${random}`;
6084
+ }
6085
+
6086
+ // src/state/session-archive.ts
6087
+ import * as fs15 from "fs";
6088
+ import * as path12 from "path";
6089
+ async function archiveSession(projectPath, sessionSlug) {
6090
+ const dirResult = resolveSessionDir(projectPath, sessionSlug);
6091
+ if (!dirResult.ok) return dirResult;
6092
+ const sessionDir = dirResult.value;
6093
+ if (!fs15.existsSync(sessionDir)) {
6094
+ return Err(new Error(`Session '${sessionSlug}' not found at ${sessionDir}`));
6095
+ }
6096
+ const archiveBase = path12.join(projectPath, HARNESS_DIR, ARCHIVE_DIR, "sessions");
6097
+ try {
6098
+ fs15.mkdirSync(archiveBase, { recursive: true });
6099
+ const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
6100
+ let archiveName = `${sessionSlug}-${date}`;
6101
+ let counter = 1;
6102
+ while (fs15.existsSync(path12.join(archiveBase, archiveName))) {
6103
+ archiveName = `${sessionSlug}-${date}-${counter}`;
6104
+ counter++;
6105
+ }
6106
+ const dest = path12.join(archiveBase, archiveName);
6107
+ try {
6108
+ fs15.renameSync(sessionDir, dest);
6109
+ } catch (renameErr) {
6110
+ if (renameErr instanceof Error && "code" in renameErr && renameErr.code === "EXDEV") {
6111
+ fs15.cpSync(sessionDir, dest, { recursive: true });
6112
+ fs15.rmSync(sessionDir, { recursive: true });
6113
+ } else {
6114
+ throw renameErr;
6115
+ }
6116
+ }
6117
+ return Ok(void 0);
6118
+ } catch (error) {
6119
+ return Err(
6120
+ new Error(
6121
+ `Failed to archive session: ${error instanceof Error ? error.message : String(error)}`
6122
+ )
6123
+ );
6124
+ }
6125
+ }
6126
+
6127
+ // src/state/events.ts
6128
+ import * as fs16 from "fs";
6129
+ import * as path13 from "path";
6130
+ import { z as z5 } from "zod";
6131
+ var SkillEventSchema = z5.object({
6132
+ timestamp: z5.string(),
6133
+ skill: z5.string(),
6134
+ session: z5.string().optional(),
6135
+ type: z5.enum(["phase_transition", "decision", "gate_result", "handoff", "error", "checkpoint"]),
6136
+ summary: z5.string(),
6137
+ data: z5.record(z5.unknown()).optional(),
6138
+ refs: z5.array(z5.string()).optional(),
6139
+ contentHash: z5.string().optional()
6140
+ });
6141
+ function computeEventHash(event, session) {
6142
+ const identity = `${event.skill}|${event.type}|${event.summary}|${session ?? ""}`;
6143
+ return computeContentHash(identity);
6144
+ }
6145
+ var knownHashesCache = /* @__PURE__ */ new Map();
6146
+ function loadKnownHashes(eventsPath) {
6147
+ const cached = knownHashesCache.get(eventsPath);
6148
+ if (cached) return cached;
6149
+ const hashes = /* @__PURE__ */ new Set();
6150
+ if (fs16.existsSync(eventsPath)) {
6151
+ const content = fs16.readFileSync(eventsPath, "utf-8");
6152
+ const lines = content.split("\n").filter((line) => line.trim() !== "");
6153
+ for (const line of lines) {
6154
+ try {
6155
+ const existing = JSON.parse(line);
6156
+ if (existing.contentHash) {
6157
+ hashes.add(existing.contentHash);
6158
+ }
6159
+ } catch {
6160
+ }
6161
+ }
6162
+ }
6163
+ knownHashesCache.set(eventsPath, hashes);
6164
+ return hashes;
6165
+ }
6166
+ function clearEventHashCache() {
6167
+ knownHashesCache.clear();
6168
+ }
6169
+ async function emitEvent(projectPath, event, options) {
6170
+ try {
6171
+ const dirResult = await getStateDir(projectPath, options?.stream, options?.session);
6172
+ if (!dirResult.ok) return dirResult;
6173
+ const stateDir = dirResult.value;
6174
+ const eventsPath = path13.join(stateDir, EVENTS_FILE);
6175
+ fs16.mkdirSync(stateDir, { recursive: true });
6176
+ const contentHash = computeEventHash(event, options?.session);
6177
+ const knownHashes = loadKnownHashes(eventsPath);
6178
+ if (knownHashes.has(contentHash)) {
6179
+ return Ok({ written: false, reason: "duplicate" });
6180
+ }
6181
+ const fullEvent = {
6182
+ ...event,
6183
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
6184
+ contentHash
6185
+ };
6186
+ if (options?.session) {
6187
+ fullEvent.session = options.session;
6188
+ }
6189
+ fs16.appendFileSync(eventsPath, JSON.stringify(fullEvent) + "\n");
6190
+ knownHashes.add(contentHash);
6191
+ return Ok({ written: true });
6192
+ } catch (error) {
6193
+ return Err(
6194
+ new Error(`Failed to emit event: ${error instanceof Error ? error.message : String(error)}`)
6195
+ );
6196
+ }
6197
+ }
6198
+ async function loadEvents(projectPath, options) {
6199
+ try {
6200
+ const dirResult = await getStateDir(projectPath, options?.stream, options?.session);
6201
+ if (!dirResult.ok) return dirResult;
6202
+ const stateDir = dirResult.value;
6203
+ const eventsPath = path13.join(stateDir, EVENTS_FILE);
6204
+ if (!fs16.existsSync(eventsPath)) {
6205
+ return Ok([]);
6206
+ }
6207
+ const content = fs16.readFileSync(eventsPath, "utf-8");
6208
+ const lines = content.split("\n").filter((line) => line.trim() !== "");
6209
+ const events = [];
6210
+ for (const line of lines) {
6211
+ try {
6212
+ const parsed = JSON.parse(line);
6213
+ const result = SkillEventSchema.safeParse(parsed);
6214
+ if (result.success) {
6215
+ events.push(result.data);
6216
+ }
6217
+ } catch {
6218
+ }
6219
+ }
6220
+ return Ok(events);
6221
+ } catch (error) {
6222
+ return Err(
6223
+ new Error(`Failed to load events: ${error instanceof Error ? error.message : String(error)}`)
6224
+ );
6225
+ }
6226
+ }
6227
+ function formatPhaseTransition(event) {
6228
+ const data = event.data;
6229
+ const suffix = data?.taskCount ? ` (${data.taskCount} tasks)` : "";
6230
+ return `phase: ${data?.from ?? "?"} -> ${data?.to ?? "?"}${suffix}`;
6231
+ }
6232
+ function formatGateResult(event) {
6233
+ const data = event.data;
6234
+ const status = data?.passed ? "passed" : "failed";
6235
+ const checks = data?.checks?.map((c) => `${c.name} ${c.passed ? "Y" : "N"}`).join(", ");
6236
+ return checks ? `gate: ${status} (${checks})` : `gate: ${status}`;
6237
+ }
6238
+ function formatHandoffDetail(event) {
6239
+ const data = event.data;
6240
+ const direction = data?.toSkill ? ` -> ${data.toSkill}` : "";
6241
+ return `handoff: ${event.summary}${direction}`;
6242
+ }
6243
+ var EVENT_FORMATTERS = {
6244
+ phase_transition: formatPhaseTransition,
6245
+ gate_result: formatGateResult,
6246
+ decision: (event) => `decision: ${event.summary}`,
6247
+ handoff: formatHandoffDetail,
6248
+ error: (event) => `error: ${event.summary}`,
6249
+ checkpoint: (event) => `checkpoint: ${event.summary}`
6250
+ };
6251
+ function formatEventTimeline(events, limit = 20) {
6252
+ if (events.length === 0) return "";
6253
+ const recent = events.slice(-limit);
6254
+ return recent.map((event) => {
6255
+ const time = formatTime(event.timestamp);
6256
+ const formatter = EVENT_FORMATTERS[event.type];
6257
+ const detail = formatter ? formatter(event) : event.summary;
6258
+ return `- ${time} [${event.skill}] ${detail}`;
6259
+ }).join("\n");
6260
+ }
6261
+ function formatTime(timestamp) {
6262
+ try {
6263
+ const date = new Date(timestamp);
6264
+ const hours = String(date.getHours()).padStart(2, "0");
6265
+ const minutes = String(date.getMinutes()).padStart(2, "0");
6266
+ return `${hours}:${minutes}`;
6267
+ } catch {
6268
+ return "??:??";
6269
+ }
6270
+ }
6271
+
6272
+ // src/workflow/runner.ts
6273
+ async function executeWorkflow(workflow, executor) {
6274
+ const stepResults = [];
6275
+ const startTime = Date.now();
6276
+ let previousArtifact;
6277
+ let stopped = false;
6278
+ for (const step of workflow.steps) {
6279
+ if (stopped) {
6280
+ stepResults.push({
6281
+ step,
6282
+ outcome: "skipped",
6283
+ durationMs: 0
6284
+ });
6285
+ continue;
6286
+ }
6287
+ const stepResult = await executor(step, previousArtifact);
6288
+ stepResults.push(stepResult);
6289
+ if (stepResult.outcome === "pass") {
6290
+ previousArtifact = stepResult.artifact;
6291
+ } else {
6292
+ const gate = step.gate ?? "pass-required";
6293
+ if (gate === "pass-required") {
6294
+ stopped = true;
6295
+ }
6296
+ }
6297
+ }
6298
+ const hasFailure = stepResults.some((r) => r.outcome === "fail");
6299
+ return {
6300
+ workflow,
6301
+ stepResults,
6302
+ pass: !hasFailure,
5711
6303
  totalDurationMs: Date.now() - startTime
5712
6304
  };
5713
6305
  }
@@ -5826,7 +6418,8 @@ async function runMultiTurnPipeline(initialContext, turnExecutor, options) {
5826
6418
  }
5827
6419
 
5828
6420
  // src/security/scanner.ts
5829
- import * as fs15 from "fs/promises";
6421
+ import * as fs18 from "fs/promises";
6422
+ import { minimatch as minimatch4 } from "minimatch";
5830
6423
 
5831
6424
  // src/security/rules/registry.ts
5832
6425
  var RuleRegistry = class {
@@ -5857,7 +6450,7 @@ var RuleRegistry = class {
5857
6450
  };
5858
6451
 
5859
6452
  // src/security/config.ts
5860
- import { z as z5 } from "zod";
6453
+ import { z as z6 } from "zod";
5861
6454
 
5862
6455
  // src/security/types.ts
5863
6456
  var DEFAULT_SECURITY_CONFIG = {
@@ -5868,19 +6461,19 @@ var DEFAULT_SECURITY_CONFIG = {
5868
6461
  };
5869
6462
 
5870
6463
  // src/security/config.ts
5871
- var RuleOverrideSchema = z5.enum(["off", "error", "warning", "info"]);
5872
- var SecurityConfigSchema = z5.object({
5873
- enabled: z5.boolean().default(true),
5874
- strict: z5.boolean().default(false),
5875
- rules: z5.record(z5.string(), RuleOverrideSchema).optional().default({}),
5876
- exclude: z5.array(z5.string()).optional().default(["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]),
5877
- external: z5.object({
5878
- semgrep: z5.object({
5879
- enabled: z5.union([z5.literal("auto"), z5.boolean()]).default("auto"),
5880
- rulesets: z5.array(z5.string()).optional()
6464
+ var RuleOverrideSchema = z6.enum(["off", "error", "warning", "info"]);
6465
+ var SecurityConfigSchema = z6.object({
6466
+ enabled: z6.boolean().default(true),
6467
+ strict: z6.boolean().default(false),
6468
+ rules: z6.record(z6.string(), RuleOverrideSchema).optional().default({}),
6469
+ exclude: z6.array(z6.string()).optional().default(["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]),
6470
+ external: z6.object({
6471
+ semgrep: z6.object({
6472
+ enabled: z6.union([z6.literal("auto"), z6.boolean()]).default("auto"),
6473
+ rulesets: z6.array(z6.string()).optional()
5881
6474
  }).optional(),
5882
- gitleaks: z5.object({
5883
- enabled: z5.union([z5.literal("auto"), z5.boolean()]).default("auto")
6475
+ gitleaks: z6.object({
6476
+ enabled: z6.union([z6.literal("auto"), z6.boolean()]).default("auto")
5884
6477
  }).optional()
5885
6478
  }).optional()
5886
6479
  });
@@ -5913,15 +6506,15 @@ function resolveRuleSeverity(ruleId, defaultSeverity, overrides, strict) {
5913
6506
  }
5914
6507
 
5915
6508
  // src/security/stack-detector.ts
5916
- import * as fs14 from "fs";
5917
- import * as path11 from "path";
6509
+ import * as fs17 from "fs";
6510
+ import * as path14 from "path";
5918
6511
  function detectStack(projectRoot) {
5919
6512
  const stacks = [];
5920
- const pkgJsonPath = path11.join(projectRoot, "package.json");
5921
- if (fs14.existsSync(pkgJsonPath)) {
6513
+ const pkgJsonPath = path14.join(projectRoot, "package.json");
6514
+ if (fs17.existsSync(pkgJsonPath)) {
5922
6515
  stacks.push("node");
5923
6516
  try {
5924
- const pkgJson = JSON.parse(fs14.readFileSync(pkgJsonPath, "utf-8"));
6517
+ const pkgJson = JSON.parse(fs17.readFileSync(pkgJsonPath, "utf-8"));
5925
6518
  const allDeps = {
5926
6519
  ...pkgJson.dependencies,
5927
6520
  ...pkgJson.devDependencies
@@ -5936,13 +6529,13 @@ function detectStack(projectRoot) {
5936
6529
  } catch {
5937
6530
  }
5938
6531
  }
5939
- const goModPath = path11.join(projectRoot, "go.mod");
5940
- if (fs14.existsSync(goModPath)) {
6532
+ const goModPath = path14.join(projectRoot, "go.mod");
6533
+ if (fs17.existsSync(goModPath)) {
5941
6534
  stacks.push("go");
5942
6535
  }
5943
- const requirementsPath = path11.join(projectRoot, "requirements.txt");
5944
- const pyprojectPath = path11.join(projectRoot, "pyproject.toml");
5945
- if (fs14.existsSync(requirementsPath) || fs14.existsSync(pyprojectPath)) {
6536
+ const requirementsPath = path14.join(projectRoot, "requirements.txt");
6537
+ const pyprojectPath = path14.join(projectRoot, "pyproject.toml");
6538
+ if (fs17.existsSync(requirementsPath) || fs17.existsSync(pyprojectPath)) {
5946
6539
  stacks.push("python");
5947
6540
  }
5948
6541
  return stacks;
@@ -6006,6 +6599,72 @@ var secretRules = [
6006
6599
  message: "Hardcoded JWT token detected",
6007
6600
  remediation: "Tokens should be fetched at runtime, not embedded in source",
6008
6601
  references: ["CWE-798"]
6602
+ },
6603
+ {
6604
+ id: "SEC-SEC-006",
6605
+ name: "Anthropic API Key",
6606
+ category: "secrets",
6607
+ severity: "error",
6608
+ confidence: "high",
6609
+ patterns: [/sk-ant-api\d{2}-[A-Za-z0-9_-]{20,}/],
6610
+ message: "Hardcoded Anthropic API key detected",
6611
+ remediation: "Use environment variables: process.env.ANTHROPIC_API_KEY",
6612
+ references: ["CWE-798"]
6613
+ },
6614
+ {
6615
+ id: "SEC-SEC-007",
6616
+ name: "OpenAI API Key",
6617
+ category: "secrets",
6618
+ severity: "error",
6619
+ confidence: "high",
6620
+ patterns: [/sk-proj-[A-Za-z0-9_-]{20,}/],
6621
+ message: "Hardcoded OpenAI API key detected",
6622
+ remediation: "Use environment variables: process.env.OPENAI_API_KEY",
6623
+ references: ["CWE-798"]
6624
+ },
6625
+ {
6626
+ id: "SEC-SEC-008",
6627
+ name: "Google API Key",
6628
+ category: "secrets",
6629
+ severity: "error",
6630
+ confidence: "high",
6631
+ patterns: [/AIza[A-Za-z0-9_-]{35}/],
6632
+ message: "Hardcoded Google API key detected",
6633
+ remediation: "Use environment variables or a secrets manager for Google API keys",
6634
+ references: ["CWE-798"]
6635
+ },
6636
+ {
6637
+ id: "SEC-SEC-009",
6638
+ name: "GitHub Personal Access Token",
6639
+ category: "secrets",
6640
+ severity: "error",
6641
+ confidence: "high",
6642
+ patterns: [/gh[pous]_[A-Za-z0-9_]{36,}/],
6643
+ message: "Hardcoded GitHub personal access token detected",
6644
+ remediation: "Use environment variables: process.env.GITHUB_TOKEN",
6645
+ references: ["CWE-798"]
6646
+ },
6647
+ {
6648
+ id: "SEC-SEC-010",
6649
+ name: "Stripe Live Key",
6650
+ category: "secrets",
6651
+ severity: "error",
6652
+ confidence: "high",
6653
+ patterns: [/\b[spr]k_live_[A-Za-z0-9]{24,}/],
6654
+ message: "Hardcoded Stripe live key detected",
6655
+ remediation: "Use environment variables for Stripe keys; never commit live keys",
6656
+ references: ["CWE-798"]
6657
+ },
6658
+ {
6659
+ id: "SEC-SEC-011",
6660
+ name: "Database Connection String with Credentials",
6661
+ category: "secrets",
6662
+ severity: "error",
6663
+ confidence: "high",
6664
+ patterns: [/(?:postgres|mysql|mongodb|redis|amqp|mssql)(?:\+\w+)?:\/\/[^/\s:]+:[^@/\s]+@/i],
6665
+ message: "Database connection string with embedded credentials detected",
6666
+ remediation: "Use environment variables for connection strings; separate credentials from URIs",
6667
+ references: ["CWE-798"]
6009
6668
  }
6010
6669
  ];
6011
6670
 
@@ -6192,6 +6851,158 @@ var deserializationRules = [
6192
6851
  }
6193
6852
  ];
6194
6853
 
6854
+ // src/security/rules/agent-config.ts
6855
// Static scan rules targeting AI-agent configuration files (CLAUDE.md,
// AGENTS.md, .claude settings, skill.yaml, hooks.json). Each rule pairs
// regex `patterns` with a comma-separated minimatch `fileGlob` that limits
// which files it applies to (glob filtering happens in scanContentForFile).
var agentConfigRules = [
  {
    // Zero-width / BOM code points can smuggle invisible instructions.
    id: "SEC-AGT-001",
    name: "Hidden Unicode Characters",
    category: "agent-config",
    severity: "error",
    confidence: "high",
    patterns: [/\u200B|\u200C|\u200D|\uFEFF|\u2060/],
    fileGlob: "**/CLAUDE.md,**/AGENTS.md,**/*.yaml",
    message: "Hidden zero-width Unicode characters detected in agent configuration",
    remediation: "Remove invisible Unicode characters; they may hide malicious instructions",
    references: ["CWE-116"]
  },
  {
    // curl/wget/fetch directives in instruction files can pull remote code.
    id: "SEC-AGT-002",
    name: "URL Execution Directives",
    category: "agent-config",
    severity: "warning",
    confidence: "medium",
    patterns: [/\b(?:curl|wget)\s+\S+/i, /\bfetch\s*\(/i],
    fileGlob: "**/CLAUDE.md,**/AGENTS.md",
    message: "URL execution directive found in agent configuration",
    remediation: "Avoid instructing agents to download and execute remote content",
    references: ["CWE-94"]
  },
  {
    // e.g. Bash(*) grants the agent unrestricted tool use.
    id: "SEC-AGT-003",
    name: "Wildcard Tool Permissions",
    category: "agent-config",
    severity: "warning",
    confidence: "high",
    patterns: [/(?:Bash|Write|Edit)\s*\(\s*\*\s*\)/],
    fileGlob: "**/.claude/**,**/settings*.json",
    message: "Wildcard tool permissions grant unrestricted access",
    remediation: "Scope tool permissions to specific patterns instead of wildcards",
    references: ["CWE-250"]
  },
  {
    // Matches both camelCase and snake_case spellings.
    id: "SEC-AGT-004",
    name: "Auto-approve Patterns",
    category: "agent-config",
    severity: "warning",
    confidence: "high",
    patterns: [/\bautoApprove\b/i, /\bauto_approve\b/i],
    fileGlob: "**/.claude/**,**/.mcp.json",
    message: "Auto-approve configuration bypasses human review of tool calls",
    remediation: "Review auto-approved tools carefully; prefer explicit approval for destructive operations",
    references: ["CWE-862"]
  },
  {
    // ${...} and {{...}} interpolation in skill descriptions.
    id: "SEC-AGT-005",
    name: "Prompt Injection Surface",
    category: "agent-config",
    severity: "warning",
    confidence: "medium",
    patterns: [/\$\{[^}]*\}/, /\{\{[^}]*\}\}/],
    fileGlob: "**/skill.yaml",
    message: "Template interpolation syntax in skill YAML may enable prompt injection",
    remediation: "Avoid dynamic interpolation in skill descriptions; use static text",
    references: ["CWE-94"]
  },
  {
    // Flags that disable permission prompts or git hooks.
    id: "SEC-AGT-006",
    name: "Permission Bypass Flags",
    category: "agent-config",
    severity: "error",
    confidence: "high",
    patterns: [/--dangerously-skip-permissions/, /--no-verify/],
    fileGlob: "**/CLAUDE.md,**/AGENTS.md,**/.claude/**",
    message: "Permission bypass flag detected in agent configuration",
    remediation: "Remove flags that bypass safety checks; they undermine enforcement",
    references: ["CWE-863"]
  },
  {
    // Shell substitution/chaining operators inside hook commands.
    // confidence "low": these characters also appear in benign JSON strings.
    id: "SEC-AGT-007",
    name: "Hook Injection Surface",
    category: "agent-config",
    severity: "error",
    confidence: "low",
    patterns: [/\$\(/, /`[^`]+`/, /\s&&\s/, /\s\|\|\s/],
    fileGlob: "**/settings*.json,**/hooks.json",
    message: "Shell metacharacters in hook commands may enable command injection",
    remediation: "Use simple, single-command hooks without shell operators; chain logic inside the script",
    references: ["CWE-78"]
  }
];
6941
+
6942
+ // src/security/rules/mcp.ts
6943
// Static scan rules for MCP (Model Context Protocol) server configuration.
// All rules are scoped to .mcp.json via fileGlob; glob filtering is applied
// by scanContentForFile, not by scanContent.
var mcpRules = [
  {
    // Inline KEY: "value" pairs whose key name suggests a credential.
    id: "SEC-MCP-001",
    name: "Hardcoded MCP Secrets",
    category: "mcp",
    severity: "error",
    confidence: "medium",
    patterns: [/(?:API_KEY|SECRET|TOKEN|PASSWORD|CREDENTIAL)\s*["']?\s*:\s*["'][^"']{8,}["']/i],
    fileGlob: "**/.mcp.json",
    message: "Hardcoded secret detected in MCP server configuration",
    remediation: "Use environment variable references instead of inline secrets in .mcp.json",
    references: ["CWE-798"]
  },
  {
    // $() and backtick command substitution in server args.
    id: "SEC-MCP-002",
    name: "Shell Injection in MCP Args",
    category: "mcp",
    severity: "error",
    confidence: "medium",
    patterns: [/\$\(/, /`[^`]+`/],
    fileGlob: "**/.mcp.json",
    message: "Shell metacharacters detected in MCP server arguments",
    remediation: "Use literal argument values; avoid shell interpolation in MCP args",
    references: ["CWE-78"]
  },
  {
    // Binding to 0.0.0.0 or a wildcard host exposes the server beyond localhost.
    id: "SEC-MCP-003",
    name: "Network Exposure",
    category: "mcp",
    severity: "warning",
    confidence: "high",
    patterns: [/0\.0\.0\.0/, /["']\*["']\s*:\s*\d/, /host["']?\s*:\s*["']\*["']/i],
    fileGlob: "**/.mcp.json",
    message: "MCP server binding to all network interfaces (0.0.0.0 or wildcard *)",
    remediation: "Bind to 127.0.0.1 or localhost to restrict access to local machine",
    references: ["CWE-668"]
  },
  {
    // `npx -y` / `npx --yes` installs whatever name resolves, unprompted.
    id: "SEC-MCP-004",
    name: "Typosquatting Vector",
    category: "mcp",
    severity: "warning",
    confidence: "medium",
    patterns: [/\bnpx\s+(?:-y|--yes)\b/],
    fileGlob: "**/.mcp.json",
    message: "npx -y auto-installs packages without confirmation, enabling typosquatting",
    remediation: "Pin exact package versions or install packages explicitly before use",
    references: ["CWE-427"]
  },
  {
    // Plain http:// is allowed only for localhost / 127.0.0.1 (negative lookahead).
    id: "SEC-MCP-005",
    name: "Unencrypted Transport",
    category: "mcp",
    severity: "warning",
    confidence: "medium",
    patterns: [/http:\/\/(?!localhost\b|127\.0\.0\.1\b)/],
    fileGlob: "**/.mcp.json",
    message: "Unencrypted HTTP transport detected for MCP server connection",
    remediation: "Use https:// for all non-localhost MCP server connections",
    references: ["CWE-319"]
  }
];
7005
+
6195
7006
  // src/security/rules/stack/node.ts
6196
7007
  var nodeRules = [
6197
7008
  {
@@ -6319,7 +7130,9 @@ var SecurityScanner = class {
6319
7130
  ...cryptoRules,
6320
7131
  ...pathTraversalRules,
6321
7132
  ...networkRules,
6322
- ...deserializationRules
7133
+ ...deserializationRules,
7134
+ ...agentConfigRules,
7135
+ ...mcpRules
6323
7136
  ]);
6324
7137
  this.registry.registerAll([...nodeRules, ...expressRules, ...reactRules, ...goRules]);
6325
7138
  this.activeRules = this.registry.getAll();
@@ -6328,6 +7141,12 @@ var SecurityScanner = class {
6328
7141
  const stacks = detectStack(projectRoot);
6329
7142
  this.activeRules = this.registry.getForStacks(stacks.length > 0 ? stacks : []);
6330
7143
  }
7144
+ /**
7145
+ * Scan raw content against all active rules. Note: this method does NOT apply
7146
+ * fileGlob filtering — every active rule is evaluated regardless of filePath.
7147
+ * If you are scanning a specific file and want fileGlob-based rule filtering,
7148
+ * use {@link scanFile} instead.
7149
+ */
6331
7150
  scanContent(content, filePath, startLine = 1) {
6332
7151
  if (!this.config.enabled) return [];
6333
7152
  const findings = [];
@@ -6369,8 +7188,52 @@ var SecurityScanner = class {
6369
7188
  }
6370
7189
  async scanFile(filePath) {
6371
7190
  if (!this.config.enabled) return [];
6372
- const content = await fs15.readFile(filePath, "utf-8");
6373
- return this.scanContent(content, filePath, 1);
7191
+ const content = await fs18.readFile(filePath, "utf-8");
7192
+ return this.scanContentForFile(content, filePath, 1);
7193
+ }
7194
+ scanContentForFile(content, filePath, startLine = 1) {
7195
+ if (!this.config.enabled) return [];
7196
+ const findings = [];
7197
+ const lines = content.split("\n");
7198
+ const applicableRules = this.activeRules.filter((rule) => {
7199
+ if (!rule.fileGlob) return true;
7200
+ const globs = rule.fileGlob.split(",").map((g) => g.trim());
7201
+ return globs.some((glob) => minimatch4(filePath, glob, { dot: true }));
7202
+ });
7203
+ for (const rule of applicableRules) {
7204
+ const resolved = resolveRuleSeverity(
7205
+ rule.id,
7206
+ rule.severity,
7207
+ this.config.rules ?? {},
7208
+ this.config.strict
7209
+ );
7210
+ if (resolved === "off") continue;
7211
+ for (let i = 0; i < lines.length; i++) {
7212
+ const line = lines[i] ?? "";
7213
+ if (line.includes("harness-ignore") && line.includes(rule.id)) continue;
7214
+ for (const pattern of rule.patterns) {
7215
+ pattern.lastIndex = 0;
7216
+ if (pattern.test(line)) {
7217
+ findings.push({
7218
+ ruleId: rule.id,
7219
+ ruleName: rule.name,
7220
+ category: rule.category,
7221
+ severity: resolved,
7222
+ confidence: rule.confidence,
7223
+ file: filePath,
7224
+ line: startLine + i,
7225
+ match: line.trim(),
7226
+ context: line,
7227
+ message: rule.message,
7228
+ remediation: rule.remediation,
7229
+ ...rule.references ? { references: rule.references } : {}
7230
+ });
7231
+ break;
7232
+ }
7233
+ }
7234
+ }
7235
+ }
7236
+ return findings;
6374
7237
  }
6375
7238
  async scanFiles(filePaths) {
6376
7239
  const allFindings = [];
@@ -6394,7 +7257,7 @@ var SecurityScanner = class {
6394
7257
  };
6395
7258
 
6396
7259
  // src/ci/check-orchestrator.ts
6397
- import * as path12 from "path";
7260
+ import * as path15 from "path";
6398
7261
  var ALL_CHECKS = [
6399
7262
  "validate",
6400
7263
  "deps",
@@ -6407,7 +7270,7 @@ var ALL_CHECKS = [
6407
7270
  ];
6408
7271
  async function runValidateCheck(projectRoot, config) {
6409
7272
  const issues = [];
6410
- const agentsPath = path12.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
7273
+ const agentsPath = path15.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
6411
7274
  const result = await validateAgentsMap(agentsPath);
6412
7275
  if (!result.ok) {
6413
7276
  issues.push({ severity: "error", message: result.error.message });
@@ -6464,7 +7327,7 @@ async function runDepsCheck(projectRoot, config) {
6464
7327
  }
6465
7328
  async function runDocsCheck(projectRoot, config) {
6466
7329
  const issues = [];
6467
- const docsDir = path12.join(projectRoot, config.docsDir ?? "docs");
7330
+ const docsDir = path15.join(projectRoot, config.docsDir ?? "docs");
6468
7331
  const entropyConfig = config.entropy || {};
6469
7332
  const result = await checkDocCoverage("project", {
6470
7333
  docsDir,
@@ -6489,10 +7352,14 @@ async function runDocsCheck(projectRoot, config) {
6489
7352
  }
6490
7353
  return issues;
6491
7354
  }
6492
- async function runEntropyCheck(projectRoot, _config) {
7355
+ async function runEntropyCheck(projectRoot, config) {
6493
7356
  const issues = [];
7357
+ const entropyConfig = config.entropy || {};
7358
+ const perfConfig = config.performance || {};
7359
+ const entryPoints = entropyConfig.entryPoints ?? perfConfig.entryPoints;
6494
7360
  const analyzer = new EntropyAnalyzer({
6495
7361
  rootDir: projectRoot,
7362
+ ...entryPoints ? { entryPoints } : {},
6496
7363
  analyze: { drift: true, deadCode: true, patterns: false }
6497
7364
  });
6498
7365
  const result = await analyzer.analyze();
@@ -6554,8 +7421,10 @@ async function runSecurityCheck(projectRoot, config) {
6554
7421
  async function runPerfCheck(projectRoot, config) {
6555
7422
  const issues = [];
6556
7423
  const perfConfig = config.performance || {};
7424
+ const entryPoints = perfConfig.entryPoints;
6557
7425
  const perfAnalyzer = new EntropyAnalyzer({
6558
7426
  rootDir: projectRoot,
7427
+ ...entryPoints ? { entryPoints } : {},
6559
7428
  analyze: {
6560
7429
  complexity: perfConfig.complexity || true,
6561
7430
  coupling: perfConfig.coupling || true,
@@ -6736,7 +7605,7 @@ async function runCIChecks(input) {
6736
7605
  }
6737
7606
 
6738
7607
  // src/review/mechanical-checks.ts
6739
- import * as path13 from "path";
7608
+ import * as path16 from "path";
6740
7609
  async function runMechanicalChecks(options) {
6741
7610
  const { projectRoot, config, skip = [], changedFiles } = options;
6742
7611
  const findings = [];
@@ -6748,7 +7617,7 @@ async function runMechanicalChecks(options) {
6748
7617
  };
6749
7618
  if (!skip.includes("validate")) {
6750
7619
  try {
6751
- const agentsPath = path13.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
7620
+ const agentsPath = path16.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
6752
7621
  const result = await validateAgentsMap(agentsPath);
6753
7622
  if (!result.ok) {
6754
7623
  statuses.validate = "fail";
@@ -6785,7 +7654,7 @@ async function runMechanicalChecks(options) {
6785
7654
  statuses.validate = "fail";
6786
7655
  findings.push({
6787
7656
  tool: "validate",
6788
- file: path13.join(projectRoot, "AGENTS.md"),
7657
+ file: path16.join(projectRoot, "AGENTS.md"),
6789
7658
  message: err instanceof Error ? err.message : String(err),
6790
7659
  severity: "error"
6791
7660
  });
@@ -6849,7 +7718,7 @@ async function runMechanicalChecks(options) {
6849
7718
  (async () => {
6850
7719
  const localFindings = [];
6851
7720
  try {
6852
- const docsDir = path13.join(projectRoot, config.docsDir ?? "docs");
7721
+ const docsDir = path16.join(projectRoot, config.docsDir ?? "docs");
6853
7722
  const result = await checkDocCoverage("project", { docsDir });
6854
7723
  if (!result.ok) {
6855
7724
  statuses["check-docs"] = "warn";
@@ -6876,7 +7745,7 @@ async function runMechanicalChecks(options) {
6876
7745
  statuses["check-docs"] = "warn";
6877
7746
  localFindings.push({
6878
7747
  tool: "check-docs",
6879
- file: path13.join(projectRoot, "docs"),
7748
+ file: path16.join(projectRoot, "docs"),
6880
7749
  message: err instanceof Error ? err.message : String(err),
6881
7750
  severity: "warning"
6882
7751
  });
@@ -7024,7 +7893,7 @@ function detectChangeType(commitMessage, diff2) {
7024
7893
  }
7025
7894
 
7026
7895
  // src/review/context-scoper.ts
7027
- import * as path14 from "path";
7896
+ import * as path17 from "path";
7028
7897
  var ALL_DOMAINS = ["compliance", "bug", "security", "architecture"];
7029
7898
  var SECURITY_PATTERNS = /auth|crypto|password|secret|token|session|cookie|hash|encrypt|decrypt|sql|shell|exec|eval/i;
7030
7899
  function computeContextBudget(diffLines) {
@@ -7032,18 +7901,18 @@ function computeContextBudget(diffLines) {
7032
7901
  return diffLines;
7033
7902
  }
7034
7903
  function isWithinProject(absPath, projectRoot) {
7035
- const resolvedRoot = path14.resolve(projectRoot) + path14.sep;
7036
- const resolvedPath = path14.resolve(absPath);
7037
- return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path14.resolve(projectRoot);
7904
+ const resolvedRoot = path17.resolve(projectRoot) + path17.sep;
7905
+ const resolvedPath = path17.resolve(absPath);
7906
+ return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path17.resolve(projectRoot);
7038
7907
  }
7039
7908
  async function readContextFile(projectRoot, filePath, reason) {
7040
- const absPath = path14.isAbsolute(filePath) ? filePath : path14.join(projectRoot, filePath);
7909
+ const absPath = path17.isAbsolute(filePath) ? filePath : path17.join(projectRoot, filePath);
7041
7910
  if (!isWithinProject(absPath, projectRoot)) return null;
7042
7911
  const result = await readFileContent(absPath);
7043
7912
  if (!result.ok) return null;
7044
7913
  const content = result.value;
7045
7914
  const lines = content.split("\n").length;
7046
- const relPath = path14.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
7915
+ const relPath = path17.isAbsolute(filePath) ? relativePosix(projectRoot, filePath) : filePath;
7047
7916
  return { path: relPath, content, reason, lines };
7048
7917
  }
7049
7918
  function extractImportSources(content) {
@@ -7058,18 +7927,18 @@ function extractImportSources(content) {
7058
7927
  }
7059
7928
  async function resolveImportPath(projectRoot, fromFile, importSource) {
7060
7929
  if (!importSource.startsWith(".")) return null;
7061
- const fromDir = path14.dirname(path14.join(projectRoot, fromFile));
7062
- const basePath = path14.resolve(fromDir, importSource);
7930
+ const fromDir = path17.dirname(path17.join(projectRoot, fromFile));
7931
+ const basePath = path17.resolve(fromDir, importSource);
7063
7932
  if (!isWithinProject(basePath, projectRoot)) return null;
7064
7933
  const relBase = relativePosix(projectRoot, basePath);
7065
7934
  const candidates = [
7066
7935
  relBase + ".ts",
7067
7936
  relBase + ".tsx",
7068
7937
  relBase + ".mts",
7069
- path14.join(relBase, "index.ts")
7938
+ path17.join(relBase, "index.ts")
7070
7939
  ];
7071
7940
  for (const candidate of candidates) {
7072
- const absCandidate = path14.join(projectRoot, candidate);
7941
+ const absCandidate = path17.join(projectRoot, candidate);
7073
7942
  if (await fileExists(absCandidate)) {
7074
7943
  return candidate;
7075
7944
  }
@@ -7077,7 +7946,7 @@ async function resolveImportPath(projectRoot, fromFile, importSource) {
7077
7946
  return null;
7078
7947
  }
7079
7948
  async function findTestFiles(projectRoot, sourceFile) {
7080
- const baseName = path14.basename(sourceFile, path14.extname(sourceFile));
7949
+ const baseName = path17.basename(sourceFile, path17.extname(sourceFile));
7081
7950
  const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
7082
7951
  const results = await findFiles(pattern, projectRoot);
7083
7952
  return results.map((f) => relativePosix(projectRoot, f));
@@ -7366,101 +8235,102 @@ function findMissingJsDoc(bundle) {
7366
8235
  }
7367
8236
  return missing;
7368
8237
  }
7369
- function runComplianceAgent(bundle) {
7370
- const findings = [];
7371
- const rules = extractConventionRules(bundle);
7372
- const jsDocRuleExists = rules.some((r) => r.text.toLowerCase().includes("jsdoc"));
7373
- if (jsDocRuleExists) {
7374
- const missingDocs = findMissingJsDoc(bundle);
7375
- for (const m of missingDocs) {
7376
- findings.push({
7377
- id: makeFindingId("compliance", m.file, m.line, `Missing JSDoc ${m.exportName}`),
7378
- file: m.file,
7379
- lineRange: [m.line, m.line],
7380
- domain: "compliance",
7381
- severity: "important",
7382
- title: `Missing JSDoc on exported \`${m.exportName}\``,
7383
- rationale: `Convention requires all exports to have JSDoc comments (from ${rules.find((r) => r.text.toLowerCase().includes("jsdoc"))?.source ?? "conventions"}).`,
7384
- suggestion: `Add a JSDoc comment above the export of \`${m.exportName}\`.`,
7385
- evidence: [
7386
- `changeType: ${bundle.changeType}`,
7387
- `Convention rule: "${rules.find((r) => r.text.toLowerCase().includes("jsdoc"))?.text ?? ""}"`
7388
- ],
7389
- validatedBy: "heuristic"
7390
- });
7391
- }
7392
- }
7393
- switch (bundle.changeType) {
7394
- case "feature": {
7395
- const hasSpecContext = bundle.contextFiles.some(
7396
- (f) => f.reason === "spec" || f.reason === "convention"
7397
- );
7398
- if (!hasSpecContext && bundle.changedFiles.length > 0) {
7399
- const firstFile = bundle.changedFiles[0];
7400
- findings.push({
7401
- id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
7402
- file: firstFile.path,
7403
- lineRange: [1, 1],
7404
- domain: "compliance",
7405
- severity: "suggestion",
7406
- title: "No spec/design doc found for feature change",
7407
- rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
7408
- evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
7409
- validatedBy: "heuristic"
7410
- });
7411
- }
7412
- break;
7413
- }
7414
- case "bugfix": {
7415
- if (bundle.commitHistory.length === 0 && bundle.changedFiles.length > 0) {
7416
- const firstFile = bundle.changedFiles[0];
7417
- findings.push({
7418
- id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
7419
- file: firstFile.path,
7420
- lineRange: [1, 1],
7421
- domain: "compliance",
7422
- severity: "suggestion",
7423
- title: "Bugfix without commit history context",
7424
- rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
7425
- evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
7426
- validatedBy: "heuristic"
7427
- });
7428
- }
7429
- break;
7430
- }
7431
- case "refactor": {
7432
- break;
8238
// Emit an "important" compliance finding for each exported symbol lacking
// JSDoc, but only when the project's convention rules mention "jsdoc".
function checkMissingJsDoc(bundle, rules) {
  const jsDocRule = rules.find((rule) => rule.text.toLowerCase().includes("jsdoc"));
  if (jsDocRule === undefined) return [];
  const findings = [];
  for (const m of findMissingJsDoc(bundle)) {
    findings.push({
      id: makeFindingId("compliance", m.file, m.line, `Missing JSDoc ${m.exportName}`),
      file: m.file,
      lineRange: [m.line, m.line],
      domain: "compliance",
      severity: "important",
      title: `Missing JSDoc on exported \`${m.exportName}\``,
      rationale: `Convention requires all exports to have JSDoc comments (from ${jsDocRule.source}).`,
      suggestion: `Add a JSDoc comment above the export of \`${m.exportName}\`.`,
      evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${jsDocRule.text}"`],
      validatedBy: "heuristic"
    });
  }
  return findings;
}
8255
// Suggest linking a spec/design doc when a feature change ships without any
// spec or convention context in the review bundle.
function checkFeatureSpec(bundle) {
  const hasSpecContext = bundle.contextFiles.some(
    (file) => file.reason === "spec" || file.reason === "convention"
  );
  if (hasSpecContext) return [];
  if (bundle.changedFiles.length === 0) return [];
  const [firstFile] = bundle.changedFiles;
  const finding = {
    id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
    file: firstFile.path,
    lineRange: [1, 1],
    domain: "compliance",
    severity: "suggestion",
    title: "No spec/design doc found for feature change",
    rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
    evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
    validatedBy: "heuristic"
  };
  return [finding];
}
8275
// Suggest including commit history context when reviewing a bugfix, since
// history helps verify the root cause (not just the symptom) was addressed.
function checkBugfixHistory(bundle) {
  if (bundle.commitHistory.length > 0) return [];
  if (bundle.changedFiles.length === 0) return [];
  const [firstFile] = bundle.changedFiles;
  return [{
    id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
    file: firstFile.path,
    lineRange: [1, 1],
    domain: "compliance",
    severity: "suggestion",
    title: "Bugfix without commit history context",
    rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
    evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
    validatedBy: "heuristic"
  }];
}
8292
// Dispatch to the change-type-specific compliance check; other change types
// (refactor, docs, ...) have no dedicated checks and yield no findings.
function checkChangeTypeSpecific(bundle) {
  if (bundle.changeType === "feature") return checkFeatureSpec(bundle);
  if (bundle.changeType === "bugfix") return checkBugfixHistory(bundle);
  return [];
}
8302
// Flag changed files that use try/catch while a "result type" convention is
// in force and the file never references the Result type.
function checkResultTypeConvention(bundle, rules) {
  const resultTypeRule = rules.find((rule) => rule.text.toLowerCase().includes("result type"));
  if (resultTypeRule === undefined) return [];
  // Heuristic text probes — no parsing, just substring checks.
  const usesTryCatch = (text) => text.includes("try {") || text.includes("try{");
  const referencesResult = (text) =>
    text.includes("Result<") || text.includes("Result >") || text.includes(": Result");
  const findings = [];
  for (const changed of bundle.changedFiles) {
    if (!usesTryCatch(changed.content)) continue;
    if (referencesResult(changed.content)) continue;
    findings.push({
      id: makeFindingId("compliance", changed.path, 1, "try-catch not Result"),
      file: changed.path,
      lineRange: [1, changed.lines],
      domain: "compliance",
      severity: "suggestion",
      title: "Fallible operation uses try/catch instead of Result type",
      rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
      suggestion: "Refactor error handling to use the Result type pattern.",
      evidence: [`changeType: ${bundle.changeType}`, `Convention rule: "${resultTypeRule.text}"`],
      validatedBy: "heuristic"
    });
  }
  return findings;
}
8326
// Aggregate all heuristic compliance findings for a review bundle.
function runComplianceAgent(bundle) {
  const conventionRules = extractConventionRules(bundle);
  const findings = [];
  findings.push(...checkMissingJsDoc(bundle, conventionRules));
  findings.push(...checkChangeTypeSpecific(bundle));
  findings.push(...checkResultTypeConvention(bundle, conventionRules));
  return findings;
}
7464
8334
 
7465
8335
  // src/review/agents/bug-agent.ts
7466
8336
  var BUG_DETECTION_DESCRIPTOR = {
@@ -7737,31 +8607,32 @@ var ARCHITECTURE_DESCRIPTOR = {
7737
8607
  ]
7738
8608
  };
7739
8609
  var LARGE_FILE_THRESHOLD = 300;
8610
+ function isViolationLine(line) {
8611
+ const lower = line.toLowerCase();
8612
+ return lower.includes("violation") || lower.includes("layer");
8613
+ }
8614
+ function createLayerViolationFinding(line, fallbackPath) {
8615
+ const fileMatch = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
8616
+ const file = fileMatch?.[1] ?? fallbackPath;
8617
+ const lineNum = fileMatch?.[2] ? parseInt(fileMatch[2], 10) : 1;
8618
+ return {
8619
+ id: makeFindingId("arch", file, lineNum, "layer violation"),
8620
+ file,
8621
+ lineRange: [lineNum, lineNum],
8622
+ domain: "architecture",
8623
+ severity: "critical",
8624
+ title: "Layer boundary violation detected by check-deps",
8625
+ rationale: `Architectural layer violation: ${line.trim()}. Imports must flow in the correct direction per the project's layer definitions.`,
8626
+ suggestion: "Route the dependency through the correct intermediate layer (e.g., routes -> services -> db, not routes -> db).",
8627
+ evidence: [line.trim()],
8628
+ validatedBy: "heuristic"
8629
+ };
8630
+ }
7740
8631
  function detectLayerViolations(bundle) {
7741
- const findings = [];
7742
8632
  const checkDepsFile = bundle.contextFiles.find((f) => f.path === "harness-check-deps-output");
7743
- if (!checkDepsFile) return findings;
7744
- const lines = checkDepsFile.content.split("\n");
7745
- for (const line of lines) {
7746
- if (line.toLowerCase().includes("violation") || line.toLowerCase().includes("layer")) {
7747
- const fileMatch = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
7748
- const file = fileMatch?.[1] ?? bundle.changedFiles[0]?.path ?? "unknown";
7749
- const lineNum = fileMatch?.[2] ? parseInt(fileMatch[2], 10) : 1;
7750
- findings.push({
7751
- id: makeFindingId("arch", file, lineNum, "layer violation"),
7752
- file,
7753
- lineRange: [lineNum, lineNum],
7754
- domain: "architecture",
7755
- severity: "critical",
7756
- title: "Layer boundary violation detected by check-deps",
7757
- rationale: `Architectural layer violation: ${line.trim()}. Imports must flow in the correct direction per the project's layer definitions.`,
7758
- suggestion: "Route the dependency through the correct intermediate layer (e.g., routes -> services -> db, not routes -> db).",
7759
- evidence: [line.trim()],
7760
- validatedBy: "heuristic"
7761
- });
7762
- }
7763
- }
7764
- return findings;
8633
+ if (!checkDepsFile) return [];
8634
+ const fallbackPath = bundle.changedFiles[0]?.path ?? "unknown";
8635
+ return checkDepsFile.content.split("\n").filter(isViolationLine).map((line) => createLayerViolationFinding(line, fallbackPath));
7765
8636
  }
7766
8637
  function detectLargeFiles(bundle) {
7767
8638
  const findings = [];
@@ -7783,45 +8654,61 @@ function detectLargeFiles(bundle) {
7783
8654
  }
7784
8655
  return findings;
7785
8656
  }
8657
+ function extractRelativeImports(content) {
8658
+ const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
8659
+ let match;
8660
+ const imports = /* @__PURE__ */ new Set();
8661
+ while ((match = importRegex.exec(content)) !== null) {
8662
+ const source = match[1];
8663
+ if (source.startsWith(".")) {
8664
+ imports.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
8665
+ }
8666
+ }
8667
+ return imports;
8668
+ }
8669
+ function fileBaseName(filePath) {
8670
+ return filePath.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
8671
+ }
8672
+ function findCircularImportInCtxFile(ctxFile, changedFilePath, changedPaths, fileImports) {
8673
+ const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
8674
+ let ctxMatch;
8675
+ while ((ctxMatch = ctxImportRegex.exec(ctxFile.content)) !== null) {
8676
+ const ctxSource = ctxMatch[1];
8677
+ if (!ctxSource.startsWith(".")) continue;
8678
+ for (const changedPath of changedPaths) {
8679
+ const baseName = fileBaseName(changedPath);
8680
+ const ctxBaseName = fileBaseName(ctxFile.path);
8681
+ if (ctxSource.includes(baseName) && fileImports.has(ctxBaseName)) {
8682
+ return {
8683
+ id: makeFindingId("arch", changedFilePath, 1, `circular ${ctxFile.path}`),
8684
+ file: changedFilePath,
8685
+ lineRange: [1, 1],
8686
+ domain: "architecture",
8687
+ severity: "important",
8688
+ title: `Potential circular import between ${changedFilePath} and ${ctxFile.path}`,
8689
+ rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
8690
+ suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
8691
+ evidence: [
8692
+ `${changedFilePath} imports from a module that also imports from ${changedFilePath}`
8693
+ ],
8694
+ validatedBy: "heuristic"
8695
+ };
8696
+ }
8697
+ }
8698
+ }
8699
+ return null;
8700
+ }
7786
8701
  function detectCircularImports(bundle) {
7787
8702
  const findings = [];
7788
8703
  const changedPaths = new Set(bundle.changedFiles.map((f) => f.path));
8704
+ const relevantCtxFiles = bundle.contextFiles.filter(
8705
+ (f) => f.reason === "import" || f.reason === "graph-dependency"
8706
+ );
7789
8707
  for (const cf of bundle.changedFiles) {
7790
- const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
7791
- let match;
7792
- const imports = /* @__PURE__ */ new Set();
7793
- while ((match = importRegex.exec(cf.content)) !== null) {
7794
- const source = match[1];
7795
- if (source.startsWith(".")) {
7796
- imports.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
7797
- }
7798
- }
7799
- for (const ctxFile of bundle.contextFiles) {
7800
- if (ctxFile.reason !== "import" && ctxFile.reason !== "graph-dependency") continue;
7801
- const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
7802
- let ctxMatch;
7803
- while ((ctxMatch = ctxImportRegex.exec(ctxFile.content)) !== null) {
7804
- const ctxSource = ctxMatch[1];
7805
- if (ctxSource.startsWith(".")) {
7806
- for (const changedPath of changedPaths) {
7807
- const baseName = changedPath.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
7808
- if (ctxSource.includes(baseName) && imports.has(ctxFile.path.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, ""))) {
7809
- findings.push({
7810
- id: makeFindingId("arch", cf.path, 1, `circular ${ctxFile.path}`),
7811
- file: cf.path,
7812
- lineRange: [1, 1],
7813
- domain: "architecture",
7814
- severity: "important",
7815
- title: `Potential circular import between ${cf.path} and ${ctxFile.path}`,
7816
- rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
7817
- suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
7818
- evidence: [`${cf.path} imports from a module that also imports from ${cf.path}`],
7819
- validatedBy: "heuristic"
7820
- });
7821
- }
7822
- }
7823
- }
7824
- }
8708
+ const imports = extractRelativeImports(cf.content);
8709
+ for (const ctxFile of relevantCtxFiles) {
8710
+ const finding = findCircularImportInCtxFile(ctxFile, cf.path, changedPaths, imports);
8711
+ if (finding) findings.push(finding);
7825
8712
  }
7826
8713
  }
7827
8714
  return findings;
@@ -7868,7 +8755,7 @@ async function fanOutReview(options) {
7868
8755
  }
7869
8756
 
7870
8757
  // src/review/validate-findings.ts
7871
- import * as path15 from "path";
8758
+ import * as path18 from "path";
7872
8759
  var DOWNGRADE_MAP = {
7873
8760
  critical: "important",
7874
8761
  important: "suggestion",
@@ -7889,7 +8776,7 @@ function normalizePath(filePath, projectRoot) {
7889
8776
  let normalized = filePath;
7890
8777
  normalized = normalized.replace(/\\/g, "/");
7891
8778
  const normalizedRoot = projectRoot.replace(/\\/g, "/");
7892
- if (path15.isAbsolute(normalized)) {
8779
+ if (path18.isAbsolute(normalized)) {
7893
8780
  const root = normalizedRoot.endsWith("/") ? normalizedRoot : normalizedRoot + "/";
7894
8781
  if (normalized.startsWith(root)) {
7895
8782
  normalized = normalized.slice(root.length);
@@ -7914,12 +8801,12 @@ function followImportChain(fromFile, fileContents, maxDepth = 2) {
7914
8801
  while ((match = importRegex.exec(content)) !== null) {
7915
8802
  const importPath = match[1];
7916
8803
  if (!importPath.startsWith(".")) continue;
7917
- const dir = path15.dirname(current.file);
7918
- let resolved = path15.join(dir, importPath).replace(/\\/g, "/");
8804
+ const dir = path18.dirname(current.file);
8805
+ let resolved = path18.join(dir, importPath).replace(/\\/g, "/");
7919
8806
  if (!resolved.match(/\.(ts|tsx|js|jsx)$/)) {
7920
8807
  resolved += ".ts";
7921
8808
  }
7922
- resolved = path15.normalize(resolved).replace(/\\/g, "/");
8809
+ resolved = path18.normalize(resolved).replace(/\\/g, "/");
7923
8810
  if (!visited.has(resolved) && current.depth + 1 <= maxDepth) {
7924
8811
  queue.push({ file: resolved, depth: current.depth + 1 });
7925
8812
  }
@@ -7936,7 +8823,7 @@ async function validateFindings(options) {
7936
8823
  if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
7937
8824
  continue;
7938
8825
  }
7939
- const absoluteFile = path15.isAbsolute(finding.file) ? finding.file : path15.join(projectRoot, finding.file).replace(/\\/g, "/");
8826
+ const absoluteFile = path18.isAbsolute(finding.file) ? finding.file : path18.join(projectRoot, finding.file).replace(/\\/g, "/");
7940
8827
  if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
7941
8828
  continue;
7942
8829
  }
@@ -7993,6 +8880,28 @@ async function validateFindings(options) {
7993
8880
  function rangesOverlap(a, b, gap) {
7994
8881
  return a[0] <= b[1] + gap && b[0] <= a[1] + gap;
7995
8882
  }
8883
+ function pickLongest(a, b) {
8884
+ if (a && b) return a.length >= b.length ? a : b;
8885
+ return a ?? b;
8886
+ }
8887
+ function buildMergedTitle(a, b, domains) {
8888
+ const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
8889
+ const domainList = [...domains].sort().join(", ");
8890
+ const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
8891
+ return { title: `[${domainList}] ${cleanTitle}`, primaryFinding };
8892
+ }
8893
+ function mergeSecurityFields(merged, primary, a, b) {
8894
+ const cweId = primary.cweId ?? a.cweId ?? b.cweId;
8895
+ const owaspCategory = primary.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
8896
+ const confidence = primary.confidence ?? a.confidence ?? b.confidence;
8897
+ const remediation = pickLongest(a.remediation, b.remediation);
8898
+ const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
8899
+ if (cweId !== void 0) merged.cweId = cweId;
8900
+ if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
8901
+ if (confidence !== void 0) merged.confidence = confidence;
8902
+ if (remediation !== void 0) merged.remediation = remediation;
8903
+ if (mergedRefs.length > 0) merged.references = mergedRefs;
8904
+ }
7996
8905
  function mergeFindings(a, b) {
7997
8906
  const highestSeverity = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a.severity : b.severity;
7998
8907
  const highestValidatedBy = (VALIDATED_BY_RANK[a.validatedBy] ?? 0) >= (VALIDATED_BY_RANK[b.validatedBy] ?? 0) ? a.validatedBy : b.validatedBy;
@@ -8002,18 +8911,12 @@ function mergeFindings(a, b) {
8002
8911
  Math.min(a.lineRange[0], b.lineRange[0]),
8003
8912
  Math.max(a.lineRange[1], b.lineRange[1])
8004
8913
  ];
8005
- const domains = /* @__PURE__ */ new Set();
8006
- domains.add(a.domain);
8007
- domains.add(b.domain);
8008
- const suggestion = a.suggestion && b.suggestion ? a.suggestion.length >= b.suggestion.length ? a.suggestion : b.suggestion : a.suggestion ?? b.suggestion;
8009
- const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
8010
- const domainList = [...domains].sort().join(", ");
8011
- const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
8012
- const title = `[${domainList}] ${cleanTitle}`;
8914
+ const domains = /* @__PURE__ */ new Set([a.domain, b.domain]);
8915
+ const suggestion = pickLongest(a.suggestion, b.suggestion);
8916
+ const { title, primaryFinding } = buildMergedTitle(a, b, domains);
8013
8917
  const merged = {
8014
8918
  id: primaryFinding.id,
8015
8919
  file: a.file,
8016
- // same file for all merged findings
8017
8920
  lineRange,
8018
8921
  domain: primaryFinding.domain,
8019
8922
  severity: highestSeverity,
@@ -8025,16 +8928,7 @@ function mergeFindings(a, b) {
8025
8928
  if (suggestion !== void 0) {
8026
8929
  merged.suggestion = suggestion;
8027
8930
  }
8028
- const cweId = primaryFinding.cweId ?? a.cweId ?? b.cweId;
8029
- const owaspCategory = primaryFinding.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
8030
- const confidence = primaryFinding.confidence ?? a.confidence ?? b.confidence;
8031
- const remediation = a.remediation && b.remediation ? a.remediation.length >= b.remediation.length ? a.remediation : b.remediation : a.remediation ?? b.remediation;
8032
- const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
8033
- if (cweId !== void 0) merged.cweId = cweId;
8034
- if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
8035
- if (confidence !== void 0) merged.confidence = confidence;
8036
- if (remediation !== void 0) merged.remediation = remediation;
8037
- if (mergedRefs.length > 0) merged.references = mergedRefs;
8931
+ mergeSecurityFields(merged, primaryFinding, a, b);
8038
8932
  return merged;
8039
8933
  }
8040
8934
  function deduplicateFindings(options) {
@@ -8206,6 +9100,17 @@ function formatTerminalOutput(options) {
8206
9100
  if (suggestionCount > 0) parts.push(`${suggestionCount} suggestion(s)`);
8207
9101
  sections.push(` Found ${issueCount} issue(s): ${parts.join(", ")}.`);
8208
9102
  }
9103
+ if (options.evidenceCoverage) {
9104
+ const ec = options.evidenceCoverage;
9105
+ sections.push("");
9106
+ sections.push("## Evidence Coverage\n");
9107
+ sections.push(` Evidence entries: ${ec.totalEntries}`);
9108
+ sections.push(
9109
+ ` Findings with evidence: ${ec.findingsWithEvidence}/${ec.findingsWithEvidence + ec.uncitedCount}`
9110
+ );
9111
+ sections.push(` Uncited findings: ${ec.uncitedCount} (flagged as [UNVERIFIED])`);
9112
+ sections.push(` Coverage: ${ec.coveragePercentage}%`);
9113
+ }
8209
9114
  return sections.join("\n");
8210
9115
  }
8211
9116
 
@@ -8282,9 +9187,108 @@ function formatGitHubSummary(options) {
8282
9187
  const assessment = determineAssessment(findings);
8283
9188
  const assessmentLabel = assessment === "approve" ? "Approve" : assessment === "comment" ? "Comment" : "Request Changes";
8284
9189
  sections.push(`## Assessment: ${assessmentLabel}`);
9190
+ if (options.evidenceCoverage) {
9191
+ const ec = options.evidenceCoverage;
9192
+ sections.push("");
9193
+ sections.push("## Evidence Coverage\n");
9194
+ sections.push(`- Evidence entries: ${ec.totalEntries}`);
9195
+ sections.push(
9196
+ `- Findings with evidence: ${ec.findingsWithEvidence}/${ec.findingsWithEvidence + ec.uncitedCount}`
9197
+ );
9198
+ sections.push(`- Uncited findings: ${ec.uncitedCount} (flagged as \\[UNVERIFIED\\])`);
9199
+ sections.push(`- Coverage: ${ec.coveragePercentage}%`);
9200
+ }
8285
9201
  return sections.join("\n");
8286
9202
  }
8287
9203
 
9204
+ // src/review/evidence-gate.ts
9205
+ var FILE_LINE_RANGE_PATTERN = /^([\w./@-]+\.\w+):(\d+)-(\d+)/;
9206
+ var FILE_LINE_PATTERN = /^([\w./@-]+\.\w+):(\d+)/;
9207
+ var FILE_ONLY_PATTERN = /^([\w./@-]+\.\w+)\s/;
9208
+ function parseEvidenceRef(content) {
9209
+ const trimmed = content.trim();
9210
+ const rangeMatch = trimmed.match(FILE_LINE_RANGE_PATTERN);
9211
+ if (rangeMatch) {
9212
+ return {
9213
+ file: rangeMatch[1],
9214
+ lineStart: parseInt(rangeMatch[2], 10),
9215
+ lineEnd: parseInt(rangeMatch[3], 10)
9216
+ };
9217
+ }
9218
+ const lineMatch = trimmed.match(FILE_LINE_PATTERN);
9219
+ if (lineMatch) {
9220
+ return {
9221
+ file: lineMatch[1],
9222
+ lineStart: parseInt(lineMatch[2], 10)
9223
+ };
9224
+ }
9225
+ const fileMatch = trimmed.match(FILE_ONLY_PATTERN);
9226
+ if (fileMatch) {
9227
+ return { file: fileMatch[1] };
9228
+ }
9229
+ return null;
9230
+ }
9231
+ function evidenceMatchesFinding(ref, finding) {
9232
+ if (ref.file !== finding.file) return false;
9233
+ if (ref.lineStart === void 0) return true;
9234
+ const [findStart, findEnd] = finding.lineRange;
9235
+ if (ref.lineEnd !== void 0) {
9236
+ return ref.lineStart <= findEnd && ref.lineEnd >= findStart;
9237
+ }
9238
+ return ref.lineStart >= findStart && ref.lineStart <= findEnd;
9239
+ }
9240
+ function checkEvidenceCoverage(findings, evidenceEntries) {
9241
+ if (findings.length === 0) {
9242
+ return {
9243
+ totalEntries: evidenceEntries.filter((e) => e.status === "active").length,
9244
+ findingsWithEvidence: 0,
9245
+ uncitedCount: 0,
9246
+ uncitedFindings: [],
9247
+ coveragePercentage: 100
9248
+ };
9249
+ }
9250
+ const activeEvidence = evidenceEntries.filter((e) => e.status === "active");
9251
+ const evidenceRefs = [];
9252
+ for (const entry of activeEvidence) {
9253
+ const ref = parseEvidenceRef(entry.content);
9254
+ if (ref) evidenceRefs.push(ref);
9255
+ }
9256
+ let findingsWithEvidence = 0;
9257
+ const uncitedFindings = [];
9258
+ for (const finding of findings) {
9259
+ const hasEvidence = evidenceRefs.some((ref) => evidenceMatchesFinding(ref, finding));
9260
+ if (hasEvidence) {
9261
+ findingsWithEvidence++;
9262
+ } else {
9263
+ uncitedFindings.push(finding.title);
9264
+ }
9265
+ }
9266
+ const uncitedCount = findings.length - findingsWithEvidence;
9267
+ const coveragePercentage = Math.round(findingsWithEvidence / findings.length * 100);
9268
+ return {
9269
+ totalEntries: activeEvidence.length,
9270
+ findingsWithEvidence,
9271
+ uncitedCount,
9272
+ uncitedFindings,
9273
+ coveragePercentage
9274
+ };
9275
+ }
9276
+ function tagUncitedFindings(findings, evidenceEntries) {
9277
+ const activeEvidence = evidenceEntries.filter((e) => e.status === "active");
9278
+ const evidenceRefs = [];
9279
+ for (const entry of activeEvidence) {
9280
+ const ref = parseEvidenceRef(entry.content);
9281
+ if (ref) evidenceRefs.push(ref);
9282
+ }
9283
+ for (const finding of findings) {
9284
+ const hasEvidence = evidenceRefs.some((ref) => evidenceMatchesFinding(ref, finding));
9285
+ if (!hasEvidence && !finding.title.startsWith("[UNVERIFIED]")) {
9286
+ finding.title = `[UNVERIFIED] ${finding.title}`;
9287
+ }
9288
+ }
9289
+ return findings;
9290
+ }
9291
+
8288
9292
  // src/review/pipeline-orchestrator.ts
8289
9293
  async function runReviewPipeline(options) {
8290
9294
  const {
@@ -8297,7 +9301,8 @@ async function runReviewPipeline(options) {
8297
9301
  conventionFiles,
8298
9302
  checkDepsOutput,
8299
9303
  config = {},
8300
- commitHistory
9304
+ commitHistory,
9305
+ sessionSlug
8301
9306
  } = options;
8302
9307
  if (flags.ci && prMetadata) {
8303
9308
  const eligibility = checkEligibility(prMetadata, true);
@@ -8393,13 +9398,25 @@ async function runReviewPipeline(options) {
8393
9398
  projectRoot,
8394
9399
  fileContents
8395
9400
  });
9401
+ let evidenceCoverage;
9402
+ if (sessionSlug) {
9403
+ try {
9404
+ const evidenceResult = await readSessionSection(projectRoot, sessionSlug, "evidence");
9405
+ if (evidenceResult.ok) {
9406
+ evidenceCoverage = checkEvidenceCoverage(validatedFindings, evidenceResult.value);
9407
+ tagUncitedFindings(validatedFindings, evidenceResult.value);
9408
+ }
9409
+ } catch {
9410
+ }
9411
+ }
8396
9412
  const dedupedFindings = deduplicateFindings({ findings: validatedFindings });
8397
9413
  const strengths = [];
8398
9414
  const assessment = determineAssessment(dedupedFindings);
8399
9415
  const exitCode = getExitCode(assessment);
8400
9416
  const terminalOutput = formatTerminalOutput({
8401
9417
  findings: dedupedFindings,
8402
- strengths
9418
+ strengths,
9419
+ ...evidenceCoverage != null ? { evidenceCoverage } : {}
8403
9420
  });
8404
9421
  let githubComments = [];
8405
9422
  if (flags.comment) {
@@ -8414,7 +9431,8 @@ async function runReviewPipeline(options) {
8414
9431
  terminalOutput,
8415
9432
  githubComments,
8416
9433
  exitCode,
8417
- ...mechanicalResult !== void 0 ? { mechanicalResult } : {}
9434
+ ...mechanicalResult != null ? { mechanicalResult } : {},
9435
+ ...evidenceCoverage != null ? { evidenceCoverage } : {}
8418
9436
  };
8419
9437
  }
8420
9438
 
@@ -8433,7 +9451,7 @@ function parseRoadmap(markdown) {
8433
9451
  if (!fmMatch) {
8434
9452
  return Err2(new Error("Missing or malformed YAML frontmatter"));
8435
9453
  }
8436
- const fmResult = parseFrontmatter(fmMatch[1]);
9454
+ const fmResult = parseFrontmatter2(fmMatch[1]);
8437
9455
  if (!fmResult.ok) return fmResult;
8438
9456
  const body = markdown.slice(fmMatch[0].length);
8439
9457
  const milestonesResult = parseMilestones(body);
@@ -8443,7 +9461,7 @@ function parseRoadmap(markdown) {
8443
9461
  milestones: milestonesResult.value
8444
9462
  });
8445
9463
  }
8446
- function parseFrontmatter(raw) {
9464
+ function parseFrontmatter2(raw) {
8447
9465
  const lines = raw.split("\n");
8448
9466
  const map = /* @__PURE__ */ new Map();
8449
9467
  for (const line of lines) {
@@ -8517,13 +9535,29 @@ function parseFeatures(sectionBody) {
8517
9535
  }
8518
9536
  return Ok2(features);
8519
9537
  }
8520
- function parseFeatureFields(name, body) {
9538
+ function extractFieldMap(body) {
8521
9539
  const fieldMap = /* @__PURE__ */ new Map();
8522
9540
  const fieldPattern = /^- \*\*(.+?):\*\* (.+)$/gm;
8523
9541
  let match;
8524
9542
  while ((match = fieldPattern.exec(body)) !== null) {
8525
9543
  fieldMap.set(match[1], match[2]);
8526
9544
  }
9545
+ return fieldMap;
9546
+ }
9547
+ function parseListField(fieldMap, ...keys) {
9548
+ let raw = EM_DASH;
9549
+ for (const key of keys) {
9550
+ const val = fieldMap.get(key);
9551
+ if (val !== void 0) {
9552
+ raw = val;
9553
+ break;
9554
+ }
9555
+ }
9556
+ if (raw === EM_DASH || raw === "none") return [];
9557
+ return raw.split(",").map((s) => s.trim());
9558
+ }
9559
+ function parseFeatureFields(name, body) {
9560
+ const fieldMap = extractFieldMap(body);
8527
9561
  const statusRaw = fieldMap.get("Status");
8528
9562
  if (!statusRaw || !VALID_STATUSES.has(statusRaw)) {
8529
9563
  return Err2(
@@ -8532,15 +9566,17 @@ function parseFeatureFields(name, body) {
8532
9566
  )
8533
9567
  );
8534
9568
  }
8535
- const status = statusRaw;
8536
9569
  const specRaw = fieldMap.get("Spec") ?? EM_DASH;
8537
- const spec = specRaw === EM_DASH ? null : specRaw;
8538
- const plansRaw = fieldMap.get("Plans") ?? fieldMap.get("Plan") ?? EM_DASH;
8539
- const plans = plansRaw === EM_DASH || plansRaw === "none" ? [] : plansRaw.split(",").map((p) => p.trim());
8540
- const blockedByRaw = fieldMap.get("Blocked by") ?? fieldMap.get("Blockers") ?? EM_DASH;
8541
- const blockedBy = blockedByRaw === EM_DASH || blockedByRaw === "none" ? [] : blockedByRaw.split(",").map((b) => b.trim());
8542
- const summary = fieldMap.get("Summary") ?? "";
8543
- return Ok2({ name, status, spec, plans, blockedBy, summary });
9570
+ const plans = parseListField(fieldMap, "Plans", "Plan");
9571
+ const blockedBy = parseListField(fieldMap, "Blocked by", "Blockers");
9572
+ return Ok2({
9573
+ name,
9574
+ status: statusRaw,
9575
+ spec: specRaw === EM_DASH ? null : specRaw,
9576
+ plans,
9577
+ blockedBy,
9578
+ summary: fieldMap.get("Summary") ?? ""
9579
+ });
8544
9580
  }
8545
9581
 
8546
9582
  // src/roadmap/serialize.ts
@@ -8591,8 +9627,8 @@ function serializeFeature(feature) {
8591
9627
  }
8592
9628
 
8593
9629
  // src/roadmap/sync.ts
8594
- import * as fs16 from "fs";
8595
- import * as path16 from "path";
9630
+ import * as fs19 from "fs";
9631
+ import * as path19 from "path";
8596
9632
  import { Ok as Ok3 } from "@harness-engineering/types";
8597
9633
  function inferStatus(feature, projectPath, allFeatures) {
8598
9634
  if (feature.blockedBy.length > 0) {
@@ -8607,10 +9643,10 @@ function inferStatus(feature, projectPath, allFeatures) {
8607
9643
  const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
8608
9644
  const useRootState = featuresWithPlans.length <= 1;
8609
9645
  if (useRootState) {
8610
- const rootStatePath = path16.join(projectPath, ".harness", "state.json");
8611
- if (fs16.existsSync(rootStatePath)) {
9646
+ const rootStatePath = path19.join(projectPath, ".harness", "state.json");
9647
+ if (fs19.existsSync(rootStatePath)) {
8612
9648
  try {
8613
- const raw = fs16.readFileSync(rootStatePath, "utf-8");
9649
+ const raw = fs19.readFileSync(rootStatePath, "utf-8");
8614
9650
  const state = JSON.parse(raw);
8615
9651
  if (state.progress) {
8616
9652
  for (const status of Object.values(state.progress)) {
@@ -8621,16 +9657,16 @@ function inferStatus(feature, projectPath, allFeatures) {
8621
9657
  }
8622
9658
  }
8623
9659
  }
8624
- const sessionsDir = path16.join(projectPath, ".harness", "sessions");
8625
- if (fs16.existsSync(sessionsDir)) {
9660
+ const sessionsDir = path19.join(projectPath, ".harness", "sessions");
9661
+ if (fs19.existsSync(sessionsDir)) {
8626
9662
  try {
8627
- const sessionDirs = fs16.readdirSync(sessionsDir, { withFileTypes: true });
9663
+ const sessionDirs = fs19.readdirSync(sessionsDir, { withFileTypes: true });
8628
9664
  for (const entry of sessionDirs) {
8629
9665
  if (!entry.isDirectory()) continue;
8630
- const autopilotPath = path16.join(sessionsDir, entry.name, "autopilot-state.json");
8631
- if (!fs16.existsSync(autopilotPath)) continue;
9666
+ const autopilotPath = path19.join(sessionsDir, entry.name, "autopilot-state.json");
9667
+ if (!fs19.existsSync(autopilotPath)) continue;
8632
9668
  try {
8633
- const raw = fs16.readFileSync(autopilotPath, "utf-8");
9669
+ const raw = fs19.readFileSync(autopilotPath, "utf-8");
8634
9670
  const autopilot = JSON.parse(raw);
8635
9671
  if (!autopilot.phases) continue;
8636
9672
  const linkedPhases = autopilot.phases.filter(
@@ -8660,17 +9696,26 @@ function inferStatus(feature, projectPath, allFeatures) {
8660
9696
  if (anyStarted) return "in-progress";
8661
9697
  return null;
8662
9698
  }
9699
+ var STATUS_RANK = {
9700
+ backlog: 0,
9701
+ planned: 1,
9702
+ blocked: 1,
9703
+ // lateral to planned — sync can move to/from blocked freely
9704
+ "in-progress": 2,
9705
+ done: 3
9706
+ };
9707
+ function isRegression(from, to) {
9708
+ return STATUS_RANK[to] < STATUS_RANK[from];
9709
+ }
8663
9710
  function syncRoadmap(options) {
8664
9711
  const { projectPath, roadmap, forceSync } = options;
8665
- const isManuallyEdited = new Date(roadmap.frontmatter.lastManualEdit) > new Date(roadmap.frontmatter.lastSynced);
8666
- const skipOverride = isManuallyEdited && !forceSync;
8667
9712
  const allFeatures = roadmap.milestones.flatMap((m) => m.features);
8668
9713
  const changes = [];
8669
9714
  for (const feature of allFeatures) {
8670
- if (skipOverride) continue;
8671
9715
  const inferred = inferStatus(feature, projectPath, allFeatures);
8672
9716
  if (inferred === null) continue;
8673
9717
  if (inferred === feature.status) continue;
9718
+ if (!forceSync && isRegression(feature.status, inferred)) continue;
8674
9719
  changes.push({
8675
9720
  feature: feature.name,
8676
9721
  from: feature.status,
@@ -8679,48 +9724,60 @@ function syncRoadmap(options) {
8679
9724
  }
8680
9725
  return Ok3(changes);
8681
9726
  }
9727
+ function applySyncChanges(roadmap, changes) {
9728
+ for (const change of changes) {
9729
+ for (const m of roadmap.milestones) {
9730
+ const feature = m.features.find((f) => f.name.toLowerCase() === change.feature.toLowerCase());
9731
+ if (feature) {
9732
+ feature.status = change.to;
9733
+ break;
9734
+ }
9735
+ }
9736
+ }
9737
+ roadmap.frontmatter.lastSynced = (/* @__PURE__ */ new Date()).toISOString();
9738
+ }
8682
9739
 
8683
9740
  // src/interaction/types.ts
8684
- import { z as z6 } from "zod";
8685
- var InteractionTypeSchema = z6.enum(["question", "confirmation", "transition"]);
8686
- var QuestionSchema = z6.object({
8687
- text: z6.string(),
8688
- options: z6.array(z6.string()).optional(),
8689
- default: z6.string().optional()
9741
+ import { z as z7 } from "zod";
9742
+ var InteractionTypeSchema = z7.enum(["question", "confirmation", "transition"]);
9743
+ var QuestionSchema = z7.object({
9744
+ text: z7.string(),
9745
+ options: z7.array(z7.string()).optional(),
9746
+ default: z7.string().optional()
8690
9747
  });
8691
- var ConfirmationSchema = z6.object({
8692
- text: z6.string(),
8693
- context: z6.string()
9748
+ var ConfirmationSchema = z7.object({
9749
+ text: z7.string(),
9750
+ context: z7.string()
8694
9751
  });
8695
- var TransitionSchema = z6.object({
8696
- completedPhase: z6.string(),
8697
- suggestedNext: z6.string(),
8698
- reason: z6.string(),
8699
- artifacts: z6.array(z6.string()),
8700
- requiresConfirmation: z6.boolean(),
8701
- summary: z6.string()
9752
+ var TransitionSchema = z7.object({
9753
+ completedPhase: z7.string(),
9754
+ suggestedNext: z7.string(),
9755
+ reason: z7.string(),
9756
+ artifacts: z7.array(z7.string()),
9757
+ requiresConfirmation: z7.boolean(),
9758
+ summary: z7.string()
8702
9759
  });
8703
- var EmitInteractionInputSchema = z6.object({
8704
- path: z6.string(),
9760
+ var EmitInteractionInputSchema = z7.object({
9761
+ path: z7.string(),
8705
9762
  type: InteractionTypeSchema,
8706
- stream: z6.string().optional(),
9763
+ stream: z7.string().optional(),
8707
9764
  question: QuestionSchema.optional(),
8708
9765
  confirmation: ConfirmationSchema.optional(),
8709
9766
  transition: TransitionSchema.optional()
8710
9767
  });
8711
9768
 
8712
9769
  // src/blueprint/scanner.ts
8713
- import * as fs17 from "fs/promises";
8714
- import * as path17 from "path";
9770
+ import * as fs20 from "fs/promises";
9771
+ import * as path20 from "path";
8715
9772
  var ProjectScanner = class {
8716
9773
  constructor(rootDir) {
8717
9774
  this.rootDir = rootDir;
8718
9775
  }
8719
9776
  async scan() {
8720
- let projectName = path17.basename(this.rootDir);
9777
+ let projectName = path20.basename(this.rootDir);
8721
9778
  try {
8722
- const pkgPath = path17.join(this.rootDir, "package.json");
8723
- const pkgRaw = await fs17.readFile(pkgPath, "utf-8");
9779
+ const pkgPath = path20.join(this.rootDir, "package.json");
9780
+ const pkgRaw = await fs20.readFile(pkgPath, "utf-8");
8724
9781
  const pkg = JSON.parse(pkgRaw);
8725
9782
  if (pkg.name) projectName = pkg.name;
8726
9783
  } catch {
@@ -8761,8 +9818,8 @@ var ProjectScanner = class {
8761
9818
  };
8762
9819
 
8763
9820
  // src/blueprint/generator.ts
8764
- import * as fs18 from "fs/promises";
8765
- import * as path18 from "path";
9821
+ import * as fs21 from "fs/promises";
9822
+ import * as path21 from "path";
8766
9823
  import * as ejs from "ejs";
8767
9824
 
8768
9825
  // src/blueprint/templates.ts
@@ -8846,19 +9903,19 @@ var BlueprintGenerator = class {
8846
9903
  styles: STYLES,
8847
9904
  scripts: SCRIPTS
8848
9905
  });
8849
- await fs18.mkdir(options.outputDir, { recursive: true });
8850
- await fs18.writeFile(path18.join(options.outputDir, "index.html"), html);
9906
+ await fs21.mkdir(options.outputDir, { recursive: true });
9907
+ await fs21.writeFile(path21.join(options.outputDir, "index.html"), html);
8851
9908
  }
8852
9909
  };
8853
9910
 
8854
9911
  // src/update-checker.ts
8855
- import * as fs19 from "fs";
8856
- import * as path19 from "path";
9912
+ import * as fs22 from "fs";
9913
+ import * as path22 from "path";
8857
9914
  import * as os from "os";
8858
9915
  import { spawn } from "child_process";
8859
9916
  function getStatePath() {
8860
9917
  const home = process.env["HOME"] || os.homedir();
8861
- return path19.join(home, ".harness", "update-check.json");
9918
+ return path22.join(home, ".harness", "update-check.json");
8862
9919
  }
8863
9920
  function isUpdateCheckEnabled(configInterval) {
8864
9921
  if (process.env["HARNESS_NO_UPDATE_CHECK"] === "1") return false;
@@ -8871,7 +9928,7 @@ function shouldRunCheck(state, intervalMs) {
8871
9928
  }
8872
9929
  function readCheckState() {
8873
9930
  try {
8874
- const raw = fs19.readFileSync(getStatePath(), "utf-8");
9931
+ const raw = fs22.readFileSync(getStatePath(), "utf-8");
8875
9932
  const parsed = JSON.parse(raw);
8876
9933
  if (typeof parsed === "object" && parsed !== null && "lastCheckTime" in parsed && typeof parsed.lastCheckTime === "number" && "currentVersion" in parsed && typeof parsed.currentVersion === "string") {
8877
9934
  const state = parsed;
@@ -8888,7 +9945,7 @@ function readCheckState() {
8888
9945
  }
8889
9946
  function spawnBackgroundCheck(currentVersion) {
8890
9947
  const statePath = getStatePath();
8891
- const stateDir = path19.dirname(statePath);
9948
+ const stateDir = path22.dirname(statePath);
8892
9949
  const script = `
8893
9950
  const { execSync } = require('child_process');
8894
9951
  const fs = require('fs');
@@ -8941,8 +9998,410 @@ function getUpdateNotification(currentVersion) {
8941
9998
  Run "harness update" to upgrade.`;
8942
9999
  }
8943
10000
 
10001
+ // src/code-nav/types.ts
10002
+ var EXTENSION_MAP = {
10003
+ ".ts": "typescript",
10004
+ ".tsx": "typescript",
10005
+ ".mts": "typescript",
10006
+ ".cts": "typescript",
10007
+ ".js": "javascript",
10008
+ ".jsx": "javascript",
10009
+ ".mjs": "javascript",
10010
+ ".cjs": "javascript",
10011
+ ".py": "python"
10012
+ };
10013
+ function detectLanguage(filePath) {
10014
+ const ext = filePath.slice(filePath.lastIndexOf("."));
10015
+ return EXTENSION_MAP[ext] ?? null;
10016
+ }
10017
+
10018
+ // src/code-nav/parser.ts
10019
+ import Parser from "web-tree-sitter";
10020
+ var parserCache = /* @__PURE__ */ new Map();
10021
+ var initialized = false;
10022
+ var GRAMMAR_MAP = {
10023
+ typescript: "tree-sitter-typescript",
10024
+ javascript: "tree-sitter-javascript",
10025
+ python: "tree-sitter-python"
10026
+ };
10027
+ async function ensureInit() {
10028
+ if (!initialized) {
10029
+ await Parser.init();
10030
+ initialized = true;
10031
+ }
10032
+ }
10033
+ async function resolveWasmPath(grammarName) {
10034
+ const { createRequire } = await import("module");
10035
+ const require2 = createRequire(import.meta.url ?? __filename);
10036
+ const pkgPath = require2.resolve("tree-sitter-wasms/package.json");
10037
+ const path23 = await import("path");
10038
+ const pkgDir = path23.dirname(pkgPath);
10039
+ return path23.join(pkgDir, "out", `${grammarName}.wasm`);
10040
+ }
10041
+ async function loadLanguage(lang) {
10042
+ const grammarName = GRAMMAR_MAP[lang];
10043
+ const wasmPath = await resolveWasmPath(grammarName);
10044
+ return Parser.Language.load(wasmPath);
10045
+ }
10046
+ async function getParser(lang) {
10047
+ const cached = parserCache.get(lang);
10048
+ if (cached) return cached;
10049
+ await ensureInit();
10050
+ const parser = new Parser();
10051
+ const language = await loadLanguage(lang);
10052
+ parser.setLanguage(language);
10053
+ parserCache.set(lang, parser);
10054
+ return parser;
10055
+ }
10056
+ async function parseFile(filePath) {
10057
+ const lang = detectLanguage(filePath);
10058
+ if (!lang) {
10059
+ return Err({
10060
+ code: "UNSUPPORTED_LANGUAGE",
10061
+ message: `Unsupported file extension: ${filePath}`
10062
+ });
10063
+ }
10064
+ const contentResult = await readFileContent(filePath);
10065
+ if (!contentResult.ok) {
10066
+ return Err({
10067
+ code: "FILE_NOT_FOUND",
10068
+ message: `Cannot read file: ${filePath}`
10069
+ });
10070
+ }
10071
+ try {
10072
+ const parser = await getParser(lang);
10073
+ const tree = parser.parse(contentResult.value);
10074
+ return Ok({ tree, language: lang, source: contentResult.value, filePath });
10075
+ } catch (e) {
10076
+ return Err({
10077
+ code: "PARSE_FAILED",
10078
+ message: `Tree-sitter parse failed for ${filePath}: ${e.message}`
10079
+ });
10080
+ }
10081
+ }
10082
+ function resetParserCache() {
10083
+ parserCache.clear();
10084
+ initialized = false;
10085
+ }
10086
+
10087
+ // src/code-nav/outline.ts
10088
+ var TOP_LEVEL_TYPES = {
10089
+ typescript: {
10090
+ function_declaration: "function",
10091
+ class_declaration: "class",
10092
+ interface_declaration: "interface",
10093
+ type_alias_declaration: "type",
10094
+ lexical_declaration: "variable",
10095
+ variable_declaration: "variable",
10096
+ export_statement: "export",
10097
+ import_statement: "import",
10098
+ enum_declaration: "type"
10099
+ },
10100
+ javascript: {
10101
+ function_declaration: "function",
10102
+ class_declaration: "class",
10103
+ lexical_declaration: "variable",
10104
+ variable_declaration: "variable",
10105
+ export_statement: "export",
10106
+ import_statement: "import"
10107
+ },
10108
+ python: {
10109
+ function_definition: "function",
10110
+ class_definition: "class",
10111
+ assignment: "variable",
10112
+ import_statement: "import",
10113
+ import_from_statement: "import"
10114
+ }
10115
+ };
10116
+ var METHOD_TYPES = {
10117
+ typescript: ["method_definition", "public_field_definition"],
10118
+ javascript: ["method_definition"],
10119
+ python: ["function_definition"]
10120
+ };
10121
+ var IDENTIFIER_TYPES = /* @__PURE__ */ new Set(["identifier", "property_identifier", "type_identifier"]);
10122
+ function findIdentifier(node) {
10123
+ return node.childForFieldName("name") ?? node.children.find((c) => IDENTIFIER_TYPES.has(c.type)) ?? null;
10124
+ }
10125
+ function getVariableDeclarationName(node) {
10126
+ const declarator = node.children.find((c) => c.type === "variable_declarator");
10127
+ if (!declarator) return null;
10128
+ const id = findIdentifier(declarator);
10129
+ return id?.text ?? null;
10130
+ }
10131
+ function getExportName(node, source) {
10132
+ const decl = node.children.find(
10133
+ (c) => c.type !== "export" && c.type !== "default" && c.type !== "comment"
10134
+ );
10135
+ return decl ? getNodeName(decl, source) : "<anonymous>";
10136
+ }
10137
+ function getAssignmentName(node) {
10138
+ const left = node.childForFieldName("left") ?? node.children[0];
10139
+ return left?.text ?? "<anonymous>";
10140
+ }
10141
+ function getNodeName(node, source) {
10142
+ const id = findIdentifier(node);
10143
+ if (id) return id.text;
10144
+ const isVarDecl = node.type === "lexical_declaration" || node.type === "variable_declaration";
10145
+ if (isVarDecl) return getVariableDeclarationName(node) ?? "<anonymous>";
10146
+ if (node.type === "export_statement") return getExportName(node, source);
10147
+ if (node.type === "assignment") return getAssignmentName(node);
10148
+ return "<anonymous>";
10149
+ }
10150
+ function getSignature(node, source) {
10151
+ const startLine = node.startPosition.row;
10152
+ const lines = source.split("\n");
10153
+ return (lines[startLine] ?? "").trim();
10154
+ }
10155
+ function extractMethods(classNode, language, source, filePath) {
10156
+ const methodTypes = METHOD_TYPES[language] ?? [];
10157
+ const body = classNode.childForFieldName("body") ?? classNode.children.find((c) => c.type === "class_body" || c.type === "block");
10158
+ if (!body) return [];
10159
+ return body.children.filter((child) => methodTypes.includes(child.type)).map((child) => ({
10160
+ name: getNodeName(child, source),
10161
+ kind: "method",
10162
+ file: filePath,
10163
+ line: child.startPosition.row + 1,
10164
+ endLine: child.endPosition.row + 1,
10165
+ signature: getSignature(child, source)
10166
+ }));
10167
+ }
10168
+ function nodeToSymbol(node, kind, source, filePath) {
10169
+ return {
10170
+ name: getNodeName(node, source),
10171
+ kind,
10172
+ file: filePath,
10173
+ line: node.startPosition.row + 1,
10174
+ endLine: node.endPosition.row + 1,
10175
+ signature: getSignature(node, source)
10176
+ };
10177
+ }
10178
+ function processExportStatement(child, topLevelTypes, lang, source, filePath) {
10179
+ const declaration = child.children.find(
10180
+ (c) => c.type !== "export" && c.type !== "default" && c.type !== ";" && c.type !== "comment"
10181
+ );
10182
+ const kind = declaration ? topLevelTypes[declaration.type] : void 0;
10183
+ if (declaration && kind) {
10184
+ const sym = nodeToSymbol(child, kind, source, filePath);
10185
+ sym.name = getNodeName(declaration, source);
10186
+ if (kind === "class") {
10187
+ sym.children = extractMethods(declaration, lang, source, filePath);
10188
+ }
10189
+ return sym;
10190
+ }
10191
+ return nodeToSymbol(child, "export", source, filePath);
10192
+ }
10193
+ function extractSymbols(rootNode, lang, source, filePath) {
10194
+ const symbols = [];
10195
+ const topLevelTypes = TOP_LEVEL_TYPES[lang] ?? {};
10196
+ for (const child of rootNode.children) {
10197
+ if (child.type === "export_statement") {
10198
+ symbols.push(processExportStatement(child, topLevelTypes, lang, source, filePath));
10199
+ continue;
10200
+ }
10201
+ const kind = topLevelTypes[child.type];
10202
+ if (!kind || kind === "import") continue;
10203
+ const sym = nodeToSymbol(child, kind, source, filePath);
10204
+ if (kind === "class") {
10205
+ sym.children = extractMethods(child, lang, source, filePath);
10206
+ }
10207
+ symbols.push(sym);
10208
+ }
10209
+ return symbols;
10210
+ }
10211
+ function buildFailedResult(filePath, lang) {
10212
+ return { file: filePath, language: lang, totalLines: 0, symbols: [], error: "[parse-failed]" };
10213
+ }
10214
+ async function getOutline(filePath) {
10215
+ const lang = detectLanguage(filePath);
10216
+ if (!lang) return buildFailedResult(filePath, "unknown");
10217
+ const result = await parseFile(filePath);
10218
+ if (!result.ok) return buildFailedResult(filePath, lang);
10219
+ const { tree, source } = result.value;
10220
+ const totalLines = source.split("\n").length;
10221
+ const symbols = extractSymbols(tree.rootNode, lang, source, filePath);
10222
+ return { file: filePath, language: lang, totalLines, symbols };
10223
+ }
10224
+ function formatOutline(outline) {
10225
+ if (outline.error) {
10226
+ return `${outline.file} ${outline.error}`;
10227
+ }
10228
+ const lines = [`${outline.file} (${outline.totalLines} lines)`];
10229
+ const last = outline.symbols.length - 1;
10230
+ outline.symbols.forEach((sym, i) => {
10231
+ const prefix = i === last ? "\u2514\u2500\u2500" : "\u251C\u2500\u2500";
10232
+ lines.push(`${prefix} ${sym.signature} :${sym.line}`);
10233
+ if (sym.children) {
10234
+ const childLast = sym.children.length - 1;
10235
+ sym.children.forEach((child, j) => {
10236
+ const childConnector = i === last ? " " : "\u2502 ";
10237
+ const childPrefix = j === childLast ? "\u2514\u2500\u2500" : "\u251C\u2500\u2500";
10238
+ lines.push(`${childConnector}${childPrefix} ${child.signature} :${child.line}`);
10239
+ });
10240
+ }
10241
+ });
10242
+ return lines.join("\n");
10243
+ }
10244
+
10245
+ // src/code-nav/search.ts
10246
+ function buildGlob(directory, fileGlob) {
10247
+ const dir = directory.replaceAll("\\", "/");
10248
+ if (fileGlob) {
10249
+ return `${dir}/**/${fileGlob}`;
10250
+ }
10251
+ const exts = Object.keys(EXTENSION_MAP).map((e) => e.slice(1));
10252
+ return `${dir}/**/*.{${exts.join(",")}}`;
10253
+ }
10254
+ function matchesQuery(name, query) {
10255
+ return name.toLowerCase().includes(query.toLowerCase());
10256
+ }
10257
+ function flattenSymbols(symbols) {
10258
+ const flat = [];
10259
+ for (const sym of symbols) {
10260
+ flat.push(sym);
10261
+ if (sym.children) {
10262
+ flat.push(...sym.children);
10263
+ }
10264
+ }
10265
+ return flat;
10266
+ }
10267
+ async function searchSymbols(query, directory, fileGlob) {
10268
+ const pattern = buildGlob(directory, fileGlob);
10269
+ let files;
10270
+ try {
10271
+ files = await findFiles(pattern, directory);
10272
+ } catch {
10273
+ files = [];
10274
+ }
10275
+ const matches = [];
10276
+ const skipped = [];
10277
+ for (const file of files) {
10278
+ const lang = detectLanguage(file);
10279
+ if (!lang) {
10280
+ skipped.push(file);
10281
+ continue;
10282
+ }
10283
+ const outline = await getOutline(file);
10284
+ if (outline.error) {
10285
+ skipped.push(file);
10286
+ continue;
10287
+ }
10288
+ const allSymbols = flattenSymbols(outline.symbols);
10289
+ for (const sym of allSymbols) {
10290
+ if (matchesQuery(sym.name, query)) {
10291
+ matches.push({
10292
+ symbol: sym,
10293
+ context: sym.signature
10294
+ });
10295
+ }
10296
+ }
10297
+ }
10298
+ return { query, matches, skipped };
10299
+ }
10300
+
10301
+ // src/code-nav/unfold.ts
10302
+ function findSymbolInList(symbols, name) {
10303
+ for (const sym of symbols) {
10304
+ if (sym.name === name) return sym;
10305
+ if (sym.children) {
10306
+ const found = findSymbolInList(sym.children, name);
10307
+ if (found) return found;
10308
+ }
10309
+ }
10310
+ return null;
10311
+ }
10312
+ function extractLines(source, startLine, endLine) {
10313
+ const lines = source.split("\n");
10314
+ const start = Math.max(0, startLine - 1);
10315
+ const end = Math.min(lines.length, endLine);
10316
+ return lines.slice(start, end).join("\n");
10317
+ }
10318
+ function buildFallbackResult(filePath, symbolName, content, language) {
10319
+ const totalLines = content ? content.split("\n").length : 0;
10320
+ return {
10321
+ file: filePath,
10322
+ symbolName,
10323
+ startLine: content ? 1 : 0,
10324
+ endLine: totalLines,
10325
+ content,
10326
+ language,
10327
+ fallback: true,
10328
+ warning: "[fallback: raw content]"
10329
+ };
10330
+ }
10331
+ async function readContentSafe(filePath) {
10332
+ const result = await readFileContent(filePath);
10333
+ return result.ok ? result.value : "";
10334
+ }
10335
+ async function unfoldSymbol(filePath, symbolName) {
10336
+ const lang = detectLanguage(filePath);
10337
+ if (!lang) {
10338
+ const content2 = await readContentSafe(filePath);
10339
+ return buildFallbackResult(filePath, symbolName, content2, "unknown");
10340
+ }
10341
+ const outline = await getOutline(filePath);
10342
+ if (outline.error) {
10343
+ const content2 = await readContentSafe(filePath);
10344
+ return buildFallbackResult(filePath, symbolName, content2, lang);
10345
+ }
10346
+ const symbol = findSymbolInList(outline.symbols, symbolName);
10347
+ if (!symbol) {
10348
+ const content2 = await readContentSafe(filePath);
10349
+ return buildFallbackResult(filePath, symbolName, content2, lang);
10350
+ }
10351
+ const parseResult = await parseFile(filePath);
10352
+ if (!parseResult.ok) {
10353
+ const content2 = await readContentSafe(filePath);
10354
+ return {
10355
+ ...buildFallbackResult(
10356
+ filePath,
10357
+ symbolName,
10358
+ extractLines(content2, symbol.line, symbol.endLine),
10359
+ lang
10360
+ ),
10361
+ startLine: symbol.line,
10362
+ endLine: symbol.endLine
10363
+ };
10364
+ }
10365
+ const content = extractLines(parseResult.value.source, symbol.line, symbol.endLine);
10366
+ return {
10367
+ file: filePath,
10368
+ symbolName,
10369
+ startLine: symbol.line,
10370
+ endLine: symbol.endLine,
10371
+ content,
10372
+ language: lang,
10373
+ fallback: false
10374
+ };
10375
+ }
10376
+ async function unfoldRange(filePath, startLine, endLine) {
10377
+ const lang = detectLanguage(filePath) ?? "unknown";
10378
+ const contentResult = await readFileContent(filePath);
10379
+ if (!contentResult.ok) {
10380
+ return {
10381
+ file: filePath,
10382
+ startLine: 0,
10383
+ endLine: 0,
10384
+ content: "",
10385
+ language: lang,
10386
+ fallback: true,
10387
+ warning: "[fallback: raw content]"
10388
+ };
10389
+ }
10390
+ const totalLines = contentResult.value.split("\n").length;
10391
+ const clampedEnd = Math.min(endLine, totalLines);
10392
+ const content = extractLines(contentResult.value, startLine, clampedEnd);
10393
+ return {
10394
+ file: filePath,
10395
+ startLine,
10396
+ endLine: clampedEnd,
10397
+ content,
10398
+ language: lang,
10399
+ fallback: false
10400
+ };
10401
+ }
10402
+
8944
10403
  // src/index.ts
8945
- var VERSION = "0.13.0";
10404
+ var VERSION = "0.15.0";
8946
10405
  export {
8947
10406
  AGENT_DESCRIPTORS,
8948
10407
  ARCHITECTURE_DESCRIPTOR,
@@ -8976,6 +10435,7 @@ export {
8976
10435
  DEFAULT_STATE,
8977
10436
  DEFAULT_STREAM_INDEX,
8978
10437
  DepDepthCollector,
10438
+ EXTENSION_MAP,
8979
10439
  EmitInteractionInputSchema,
8980
10440
  EntropyAnalyzer,
8981
10441
  EntropyConfigSchema,
@@ -9010,6 +10470,7 @@ export {
9010
10470
  SharableForbiddenImportSchema,
9011
10471
  SharableLayerSchema,
9012
10472
  SharableSecurityRulesSchema,
10473
+ SkillEventSchema,
9013
10474
  StreamIndexSchema,
9014
10475
  StreamInfoSchema,
9015
10476
  ThresholdConfigSchema,
@@ -9018,30 +10479,37 @@ export {
9018
10479
  VERSION,
9019
10480
  ViolationSchema,
9020
10481
  addProvenance,
10482
+ agentConfigRules,
9021
10483
  analyzeDiff,
9022
10484
  analyzeLearningPatterns,
9023
10485
  appendFailure,
9024
10486
  appendLearning,
10487
+ appendSessionEntry,
9025
10488
  applyFixes,
9026
10489
  applyHotspotDowngrade,
10490
+ applySyncChanges,
9027
10491
  archMatchers,
9028
10492
  archModule,
9029
10493
  architecture,
9030
10494
  archiveFailures,
9031
10495
  archiveLearnings,
10496
+ archiveSession,
9032
10497
  archiveStream,
9033
10498
  buildDependencyGraph,
9034
10499
  buildExclusionSet,
9035
10500
  buildSnapshot,
9036
10501
  checkDocCoverage,
9037
10502
  checkEligibility,
10503
+ checkEvidenceCoverage,
9038
10504
  classifyFinding,
10505
+ clearEventHashCache,
9039
10506
  clearFailuresCache,
9040
10507
  clearLearningsCache,
9041
10508
  configureFeedback,
9042
10509
  constraintRuleId,
9043
10510
  contextBudget,
9044
10511
  contextFilter,
10512
+ countLearningEntries,
9045
10513
  createBoundaryValidator,
9046
10514
  createCommentedCodeFixes,
9047
10515
  createError,
@@ -9065,27 +10533,34 @@ export {
9065
10533
  detectCouplingViolations,
9066
10534
  detectDeadCode,
9067
10535
  detectDocDrift,
10536
+ detectLanguage,
9068
10537
  detectPatternViolations,
9069
10538
  detectSizeBudgetViolations,
9070
10539
  detectStack,
9071
10540
  detectStaleConstraints,
9072
10541
  determineAssessment,
9073
10542
  diff,
10543
+ emitEvent,
9074
10544
  executeWorkflow,
9075
10545
  expressRules,
9076
10546
  extractBundle,
10547
+ extractIndexEntry,
9077
10548
  extractMarkdownLinks,
9078
10549
  extractSections,
9079
10550
  fanOutReview,
10551
+ formatEventTimeline,
9080
10552
  formatFindingBlock,
9081
10553
  formatGitHubComment,
9082
10554
  formatGitHubSummary,
10555
+ formatOutline,
9083
10556
  formatTerminalOutput,
9084
10557
  generateAgentsMap,
9085
10558
  generateSuggestions,
9086
10559
  getActionEmitter,
9087
10560
  getExitCode,
9088
10561
  getFeedbackConfig,
10562
+ getOutline,
10563
+ getParser,
9089
10564
  getPhaseCategories,
9090
10565
  getStreamForBranch,
9091
10566
  getUpdateNotification,
@@ -9096,33 +10571,42 @@ export {
9096
10571
  listActiveSessions,
9097
10572
  listStreams,
9098
10573
  loadBudgetedLearnings,
10574
+ loadEvents,
9099
10575
  loadFailures,
9100
10576
  loadHandoff,
10577
+ loadIndexEntries,
9101
10578
  loadRelevantLearnings,
9102
10579
  loadSessionSummary,
9103
10580
  loadState,
9104
10581
  loadStreamIndex,
9105
10582
  logAgentAction,
10583
+ mcpRules,
9106
10584
  migrateToStreams,
9107
10585
  networkRules,
9108
10586
  nodeRules,
9109
10587
  parseDateFromEntry,
9110
10588
  parseDiff,
10589
+ parseFile,
10590
+ parseFrontmatter,
9111
10591
  parseManifest,
9112
10592
  parseRoadmap,
9113
10593
  parseSecurityConfig,
9114
10594
  parseSize,
9115
10595
  pathTraversalRules,
9116
10596
  previewFix,
10597
+ promoteSessionLearnings,
9117
10598
  pruneLearnings,
9118
10599
  reactRules,
9119
10600
  readCheckState,
9120
10601
  readLockfile,
10602
+ readSessionSection,
10603
+ readSessionSections,
9121
10604
  removeContributions,
9122
10605
  removeProvenance,
9123
10606
  requestMultiplePeerReviews,
9124
10607
  requestPeerReview,
9125
10608
  resetFeedbackConfig,
10609
+ resetParserCache,
9126
10610
  resolveFileToLayer,
9127
10611
  resolveModelTier,
9128
10612
  resolveRuleSeverity,
@@ -9144,6 +10628,7 @@ export {
9144
10628
  saveState,
9145
10629
  saveStreamIndex,
9146
10630
  scopeContext,
10631
+ searchSymbols,
9147
10632
  secretRules,
9148
10633
  serializeRoadmap,
9149
10634
  setActiveStream,
@@ -9151,8 +10636,12 @@ export {
9151
10636
  spawnBackgroundCheck,
9152
10637
  syncConstraintNodes,
9153
10638
  syncRoadmap,
10639
+ tagUncitedFindings,
9154
10640
  touchStream,
9155
10641
  trackAction,
10642
+ unfoldRange,
10643
+ unfoldSymbol,
10644
+ updateSessionEntryStatus,
9156
10645
  updateSessionIndex,
9157
10646
  validateAgentsMap,
9158
10647
  validateBoundaries,