@harness-engineering/core 0.8.0 → 0.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -18,17 +18,17 @@ import { promisify } from "util";
18
18
  import { glob } from "glob";
19
19
  var accessAsync = promisify(access);
20
20
  var readFileAsync = promisify(readFile);
21
- async function fileExists(path3) {
21
+ async function fileExists(path11) {
22
22
  try {
23
- await accessAsync(path3, constants.F_OK);
23
+ await accessAsync(path11, constants.F_OK);
24
24
  return true;
25
25
  } catch {
26
26
  return false;
27
27
  }
28
28
  }
29
- async function readFileContent(path3) {
29
+ async function readFileContent(path11) {
30
30
  try {
31
- const content = await readFileAsync(path3, "utf-8");
31
+ const content = await readFileAsync(path11, "utf-8");
32
32
  return Ok(content);
33
33
  } catch (error) {
34
34
  return Err(error);
@@ -76,15 +76,15 @@ function validateConfig(data, schema) {
76
76
  let message = "Configuration validation failed";
77
77
  const suggestions = [];
78
78
  if (firstError) {
79
- const path3 = firstError.path.join(".");
80
- const pathDisplay = path3 ? ` at "${path3}"` : "";
79
+ const path11 = firstError.path.join(".");
80
+ const pathDisplay = path11 ? ` at "${path11}"` : "";
81
81
  if (firstError.code === "invalid_type") {
82
82
  const received = firstError.received;
83
83
  const expected = firstError.expected;
84
84
  if (received === "undefined") {
85
85
  code = "MISSING_FIELD";
86
86
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
87
- suggestions.push(`Field "${path3}" is required and must be of type "${expected}"`);
87
+ suggestions.push(`Field "${path11}" is required and must be of type "${expected}"`);
88
88
  } else {
89
89
  code = "INVALID_TYPE";
90
90
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -297,30 +297,30 @@ function extractSections(content) {
297
297
  return result;
298
298
  });
299
299
  }
300
- function isExternalLink(path3) {
301
- return path3.startsWith("http://") || path3.startsWith("https://") || path3.startsWith("#") || path3.startsWith("mailto:");
300
+ function isExternalLink(path11) {
301
+ return path11.startsWith("http://") || path11.startsWith("https://") || path11.startsWith("#") || path11.startsWith("mailto:");
302
302
  }
303
303
  function resolveLinkPath(linkPath, baseDir) {
304
304
  return linkPath.startsWith(".") ? join(baseDir, linkPath) : linkPath;
305
305
  }
306
- async function validateAgentsMap(path3 = "./AGENTS.md") {
306
+ async function validateAgentsMap(path11 = "./AGENTS.md") {
307
307
  console.warn(
308
308
  "[harness] validateAgentsMap() is deprecated. Use graph-based validation via Assembler.checkCoverage() from @harness-engineering/graph"
309
309
  );
310
- const contentResult = await readFileContent(path3);
310
+ const contentResult = await readFileContent(path11);
311
311
  if (!contentResult.ok) {
312
312
  return Err(
313
313
  createError(
314
314
  "PARSE_ERROR",
315
315
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
316
- { path: path3 },
316
+ { path: path11 },
317
317
  ["Ensure the file exists", "Check file permissions"]
318
318
  )
319
319
  );
320
320
  }
321
321
  const content = contentResult.value;
322
322
  const sections = extractSections(content);
323
- const baseDir = dirname(path3);
323
+ const baseDir = dirname(path11);
324
324
  const sectionTitles = sections.map((s) => s.title);
325
325
  const missingSections = REQUIRED_SECTIONS.filter(
326
326
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -462,8 +462,8 @@ async function checkDocCoverage(domain, options = {}) {
462
462
 
463
463
  // src/context/knowledge-map.ts
464
464
  import { join as join2, basename as basename2, relative as relative2 } from "path";
465
- function suggestFix(path3, existingFiles) {
466
- const targetName = basename2(path3).toLowerCase();
465
+ function suggestFix(path11, existingFiles) {
466
+ const targetName = basename2(path11).toLowerCase();
467
467
  const similar = existingFiles.find((file) => {
468
468
  const fileName = basename2(file).toLowerCase();
469
469
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -471,7 +471,7 @@ function suggestFix(path3, existingFiles) {
471
471
  if (similar) {
472
472
  return `Did you mean "${similar}"?`;
473
473
  }
474
- return `Create the file "${path3}" or remove the link`;
474
+ return `Create the file "${path11}" or remove the link`;
475
475
  }
476
476
  async function validateKnowledgeMap(rootDir = process.cwd()) {
477
477
  console.warn(
@@ -765,13 +765,17 @@ var PHASE_PRIORITIES = {
765
765
  priority: 2
766
766
  },
767
767
  { category: "tests", patterns: ["tests/**/*.test.ts", "**/*.spec.ts"], priority: 3 },
768
- { category: "specs", patterns: ["docs/specs/**/*.md"], priority: 4 },
768
+ { category: "specs", patterns: ["docs/changes/*/proposal.md"], priority: 4 },
769
769
  { category: "config", patterns: ["package.json", "tsconfig.json"], priority: 5 }
770
770
  ],
771
771
  review: [
772
772
  { category: "diff", patterns: [], priority: 1 },
773
773
  // Diff is provided, not globbed
774
- { category: "specs", patterns: ["docs/specs/**/*.md", "docs/plans/**/*.md"], priority: 2 },
774
+ {
775
+ category: "specs",
776
+ patterns: ["docs/changes/*/proposal.md", "docs/plans/**/*.md"],
777
+ priority: 2
778
+ },
775
779
  { category: "learnings", patterns: [".harness/review-learnings.md"], priority: 3 },
776
780
  { category: "types", patterns: ["src/**/types.ts", "src/**/interfaces.ts"], priority: 4 },
777
781
  { category: "tests", patterns: ["tests/**/*.test.ts"], priority: 5 }
@@ -784,7 +788,11 @@ var PHASE_PRIORITIES = {
784
788
  { category: "types", patterns: ["src/**/types.ts"], priority: 5 }
785
789
  ],
786
790
  plan: [
787
- { category: "specs", patterns: ["docs/specs/**/*.md", "docs/plans/**/*.md"], priority: 1 },
791
+ {
792
+ category: "specs",
793
+ patterns: ["docs/changes/*/proposal.md", "docs/plans/**/*.md"],
794
+ priority: 1
795
+ },
788
796
  { category: "architecture", patterns: ["AGENTS.md", "docs/standard/**/*.md"], priority: 2 },
789
797
  { category: "handoffs", patterns: [".harness/handoff.md"], priority: 3 },
790
798
  { category: "types", patterns: ["src/**/types.ts", "src/**/interfaces.ts"], priority: 4 },
@@ -838,7 +846,7 @@ function resolveImportPath(importSource, fromFile, _rootDir) {
838
846
  if (!resolved.endsWith(".ts") && !resolved.endsWith(".tsx")) {
839
847
  resolved = resolved + ".ts";
840
848
  }
841
- return resolved;
849
+ return resolved.replace(/\\/g, "/");
842
850
  }
843
851
  function getImportType(imp) {
844
852
  if (imp.kind === "type") return "type-only";
@@ -851,9 +859,10 @@ async function buildDependencyGraph(files, parser, graphDependencyData) {
851
859
  edges: graphDependencyData.edges
852
860
  });
853
861
  }
854
- const nodes = [...files];
862
+ const nodes = files.map((f) => f.replace(/\\/g, "/"));
855
863
  const edges = [];
856
864
  for (const file of files) {
865
+ const normalizedFile = file.replace(/\\/g, "/");
857
866
  const parseResult = await parser.parseFile(file);
858
867
  if (!parseResult.ok) {
859
868
  continue;
@@ -866,7 +875,7 @@ async function buildDependencyGraph(files, parser, graphDependencyData) {
866
875
  const resolvedPath = resolveImportPath(imp.source, file, "");
867
876
  if (resolvedPath) {
868
877
  edges.push({
869
- from: file,
878
+ from: normalizedFile,
870
879
  to: resolvedPath,
871
880
  importType: getImportType(imp),
872
881
  line: imp.location.line
@@ -1065,8 +1074,8 @@ function createBoundaryValidator(schema, name) {
1065
1074
  return Ok(result.data);
1066
1075
  }
1067
1076
  const suggestions = result.error.issues.map((issue) => {
1068
- const path3 = issue.path.join(".");
1069
- return path3 ? `${path3}: ${issue.message}` : issue.message;
1077
+ const path11 = issue.path.join(".");
1078
+ return path11 ? `${path11}: ${issue.message}` : issue.message;
1070
1079
  });
1071
1080
  return Err(
1072
1081
  createError(
@@ -1135,11 +1144,11 @@ function walk(node, visitor) {
1135
1144
  var TypeScriptParser = class {
1136
1145
  name = "typescript";
1137
1146
  extensions = [".ts", ".tsx", ".mts", ".cts"];
1138
- async parseFile(path3) {
1139
- const contentResult = await readFileContent(path3);
1147
+ async parseFile(path11) {
1148
+ const contentResult = await readFileContent(path11);
1140
1149
  if (!contentResult.ok) {
1141
1150
  return Err(
1142
- createParseError("NOT_FOUND", `File not found: ${path3}`, { path: path3 }, [
1151
+ createParseError("NOT_FOUND", `File not found: ${path11}`, { path: path11 }, [
1143
1152
  "Check that the file exists",
1144
1153
  "Verify the path is correct"
1145
1154
  ])
@@ -1149,7 +1158,7 @@ var TypeScriptParser = class {
1149
1158
  const ast = parse(contentResult.value, {
1150
1159
  loc: true,
1151
1160
  range: true,
1152
- jsx: path3.endsWith(".tsx"),
1161
+ jsx: path11.endsWith(".tsx"),
1153
1162
  errorOnUnknownASTType: false
1154
1163
  });
1155
1164
  return Ok({
@@ -1160,7 +1169,7 @@ var TypeScriptParser = class {
1160
1169
  } catch (e) {
1161
1170
  const error = e;
1162
1171
  return Err(
1163
- createParseError("SYNTAX_ERROR", `Failed to parse ${path3}: ${error.message}`, { path: path3 }, [
1172
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path11}: ${error.message}`, { path: path11 }, [
1164
1173
  "Check for syntax errors in the file",
1165
1174
  "Ensure valid TypeScript syntax"
1166
1175
  ])
@@ -1444,22 +1453,22 @@ function extractInlineRefs(content) {
1444
1453
  }
1445
1454
  return refs;
1446
1455
  }
1447
- async function parseDocumentationFile(path3) {
1448
- const contentResult = await readFileContent(path3);
1456
+ async function parseDocumentationFile(path11) {
1457
+ const contentResult = await readFileContent(path11);
1449
1458
  if (!contentResult.ok) {
1450
1459
  return Err(
1451
1460
  createEntropyError(
1452
1461
  "PARSE_ERROR",
1453
- `Failed to read documentation file: ${path3}`,
1454
- { file: path3 },
1462
+ `Failed to read documentation file: ${path11}`,
1463
+ { file: path11 },
1455
1464
  ["Check that the file exists"]
1456
1465
  )
1457
1466
  );
1458
1467
  }
1459
1468
  const content = contentResult.value;
1460
- const type = path3.endsWith(".md") ? "markdown" : "text";
1469
+ const type = path11.endsWith(".md") ? "markdown" : "text";
1461
1470
  return Ok({
1462
- path: path3,
1471
+ path: path11,
1463
1472
  type,
1464
1473
  content,
1465
1474
  codeBlocks: extractCodeBlocks(content),
@@ -2287,6 +2296,496 @@ async function detectPatternViolations(snapshot, config) {
2287
2296
  });
2288
2297
  }
2289
2298
 
2299
+ // src/entropy/detectors/complexity.ts
2300
+ import { readFile as readFile2 } from "fs/promises";
2301
+ var DEFAULT_THRESHOLDS = {
2302
+ cyclomaticComplexity: { error: 15, warn: 10 },
2303
+ nestingDepth: { warn: 4 },
2304
+ functionLength: { warn: 50 },
2305
+ parameterCount: { warn: 5 },
2306
+ fileLength: { info: 300 },
2307
+ hotspotPercentile: { error: 95 }
2308
+ };
2309
+ function extractFunctions(content) {
2310
+ const functions = [];
2311
+ const lines = content.split("\n");
2312
+ const patterns = [
2313
+ // function declarations: function name(params) {
2314
+ /^\s*(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)/,
2315
+ // method declarations: name(params) {
2316
+ /^\s*(?:async\s+)?(\w+)\s*\(([^)]*)\)\s*(?::\s*[^{]+)?\s*\{/,
2317
+ // arrow functions assigned to const/let/var: const name = (params) =>
2318
+ /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?\(([^)]*)\)\s*(?::\s*[^=]+)?\s*=>/,
2319
+ // arrow functions assigned to const/let/var with single param: const name = param =>
2320
+ /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(\w+)\s*=>/
2321
+ ];
2322
+ for (let i = 0; i < lines.length; i++) {
2323
+ const line = lines[i];
2324
+ for (const pattern of patterns) {
2325
+ const match = line.match(pattern);
2326
+ if (match) {
2327
+ const name = match[1] ?? "anonymous";
2328
+ const paramsStr = match[2] || "";
2329
+ const params = paramsStr.trim() === "" ? 0 : paramsStr.split(",").length;
2330
+ const endLine = findFunctionEnd(lines, i);
2331
+ const body = lines.slice(i, endLine + 1).join("\n");
2332
+ functions.push({
2333
+ name,
2334
+ line: i + 1,
2335
+ params,
2336
+ startLine: i + 1,
2337
+ endLine: endLine + 1,
2338
+ body
2339
+ });
2340
+ break;
2341
+ }
2342
+ }
2343
+ }
2344
+ return functions;
2345
+ }
2346
+ function findFunctionEnd(lines, startIdx) {
2347
+ let depth = 0;
2348
+ let foundOpen = false;
2349
+ for (let i = startIdx; i < lines.length; i++) {
2350
+ const line = lines[i];
2351
+ for (const ch of line) {
2352
+ if (ch === "{") {
2353
+ depth++;
2354
+ foundOpen = true;
2355
+ } else if (ch === "}") {
2356
+ depth--;
2357
+ if (foundOpen && depth === 0) {
2358
+ return i;
2359
+ }
2360
+ }
2361
+ }
2362
+ }
2363
+ return lines.length - 1;
2364
+ }
2365
+ function computeCyclomaticComplexity(body) {
2366
+ let complexity = 1;
2367
+ const decisionPatterns = [
2368
+ /\bif\s*\(/g,
2369
+ /\belse\s+if\s*\(/g,
2370
+ /\bwhile\s*\(/g,
2371
+ /\bfor\s*\(/g,
2372
+ /\bcase\s+/g,
2373
+ /&&/g,
2374
+ /\|\|/g,
2375
+ /\?(?!=)/g,
2376
+ // Ternary ? but not ?. or ??
2377
+ /\bcatch\s*\(/g
2378
+ ];
2379
+ for (const pattern of decisionPatterns) {
2380
+ const matches = body.match(pattern);
2381
+ if (matches) {
2382
+ complexity += matches.length;
2383
+ }
2384
+ }
2385
+ const elseIfMatches = body.match(/\belse\s+if\s*\(/g);
2386
+ if (elseIfMatches) {
2387
+ complexity -= elseIfMatches.length;
2388
+ }
2389
+ return complexity;
2390
+ }
2391
+ function computeNestingDepth(body) {
2392
+ let maxDepth = 0;
2393
+ let currentDepth = 0;
2394
+ let functionBodyStarted = false;
2395
+ for (const ch of body) {
2396
+ if (ch === "{") {
2397
+ if (!functionBodyStarted) {
2398
+ functionBodyStarted = true;
2399
+ continue;
2400
+ }
2401
+ currentDepth++;
2402
+ if (currentDepth > maxDepth) {
2403
+ maxDepth = currentDepth;
2404
+ }
2405
+ } else if (ch === "}") {
2406
+ if (currentDepth > 0) {
2407
+ currentDepth--;
2408
+ }
2409
+ }
2410
+ }
2411
+ return maxDepth;
2412
+ }
2413
+ async function detectComplexityViolations(snapshot, config, graphData) {
2414
+ const violations = [];
2415
+ const thresholds = {
2416
+ cyclomaticComplexity: {
2417
+ error: config?.thresholds?.cyclomaticComplexity?.error ?? DEFAULT_THRESHOLDS.cyclomaticComplexity.error,
2418
+ warn: config?.thresholds?.cyclomaticComplexity?.warn ?? DEFAULT_THRESHOLDS.cyclomaticComplexity.warn
2419
+ },
2420
+ nestingDepth: {
2421
+ warn: config?.thresholds?.nestingDepth?.warn ?? DEFAULT_THRESHOLDS.nestingDepth.warn
2422
+ },
2423
+ functionLength: {
2424
+ warn: config?.thresholds?.functionLength?.warn ?? DEFAULT_THRESHOLDS.functionLength.warn
2425
+ },
2426
+ parameterCount: {
2427
+ warn: config?.thresholds?.parameterCount?.warn ?? DEFAULT_THRESHOLDS.parameterCount.warn
2428
+ },
2429
+ fileLength: {
2430
+ info: config?.thresholds?.fileLength?.info ?? DEFAULT_THRESHOLDS.fileLength.info
2431
+ }
2432
+ };
2433
+ let totalFunctions = 0;
2434
+ for (const file of snapshot.files) {
2435
+ let content;
2436
+ try {
2437
+ content = await readFile2(file.path, "utf-8");
2438
+ } catch {
2439
+ continue;
2440
+ }
2441
+ const lines = content.split("\n");
2442
+ if (lines.length > thresholds.fileLength.info) {
2443
+ violations.push({
2444
+ file: file.path,
2445
+ function: "<file>",
2446
+ line: 1,
2447
+ metric: "fileLength",
2448
+ value: lines.length,
2449
+ threshold: thresholds.fileLength.info,
2450
+ tier: 3,
2451
+ severity: "info",
2452
+ message: `File has ${lines.length} lines (threshold: ${thresholds.fileLength.info})`
2453
+ });
2454
+ }
2455
+ const functions = extractFunctions(content);
2456
+ totalFunctions += functions.length;
2457
+ for (const fn of functions) {
2458
+ const complexity = computeCyclomaticComplexity(fn.body);
2459
+ if (complexity > thresholds.cyclomaticComplexity.error) {
2460
+ violations.push({
2461
+ file: file.path,
2462
+ function: fn.name,
2463
+ line: fn.line,
2464
+ metric: "cyclomaticComplexity",
2465
+ value: complexity,
2466
+ threshold: thresholds.cyclomaticComplexity.error,
2467
+ tier: 1,
2468
+ severity: "error",
2469
+ message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (error threshold: ${thresholds.cyclomaticComplexity.error})`
2470
+ });
2471
+ } else if (complexity > thresholds.cyclomaticComplexity.warn) {
2472
+ violations.push({
2473
+ file: file.path,
2474
+ function: fn.name,
2475
+ line: fn.line,
2476
+ metric: "cyclomaticComplexity",
2477
+ value: complexity,
2478
+ threshold: thresholds.cyclomaticComplexity.warn,
2479
+ tier: 2,
2480
+ severity: "warning",
2481
+ message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (warning threshold: ${thresholds.cyclomaticComplexity.warn})`
2482
+ });
2483
+ }
2484
+ const nestingDepth = computeNestingDepth(fn.body);
2485
+ if (nestingDepth > thresholds.nestingDepth.warn) {
2486
+ violations.push({
2487
+ file: file.path,
2488
+ function: fn.name,
2489
+ line: fn.line,
2490
+ metric: "nestingDepth",
2491
+ value: nestingDepth,
2492
+ threshold: thresholds.nestingDepth.warn,
2493
+ tier: 2,
2494
+ severity: "warning",
2495
+ message: `Function "${fn.name}" has nesting depth of ${nestingDepth} (threshold: ${thresholds.nestingDepth.warn})`
2496
+ });
2497
+ }
2498
+ const fnLength = fn.endLine - fn.startLine + 1;
2499
+ if (fnLength > thresholds.functionLength.warn) {
2500
+ violations.push({
2501
+ file: file.path,
2502
+ function: fn.name,
2503
+ line: fn.line,
2504
+ metric: "functionLength",
2505
+ value: fnLength,
2506
+ threshold: thresholds.functionLength.warn,
2507
+ tier: 2,
2508
+ severity: "warning",
2509
+ message: `Function "${fn.name}" is ${fnLength} lines long (threshold: ${thresholds.functionLength.warn})`
2510
+ });
2511
+ }
2512
+ if (fn.params > thresholds.parameterCount.warn) {
2513
+ violations.push({
2514
+ file: file.path,
2515
+ function: fn.name,
2516
+ line: fn.line,
2517
+ metric: "parameterCount",
2518
+ value: fn.params,
2519
+ threshold: thresholds.parameterCount.warn,
2520
+ tier: 2,
2521
+ severity: "warning",
2522
+ message: `Function "${fn.name}" has ${fn.params} parameters (threshold: ${thresholds.parameterCount.warn})`
2523
+ });
2524
+ }
2525
+ if (graphData) {
2526
+ const hotspot = graphData.hotspots.find(
2527
+ (h) => h.file === file.path && h.function === fn.name
2528
+ );
2529
+ if (hotspot && hotspot.hotspotScore > graphData.percentile95Score) {
2530
+ violations.push({
2531
+ file: file.path,
2532
+ function: fn.name,
2533
+ line: fn.line,
2534
+ metric: "hotspotScore",
2535
+ value: hotspot.hotspotScore,
2536
+ threshold: graphData.percentile95Score,
2537
+ tier: 1,
2538
+ severity: "error",
2539
+ message: `Function "${fn.name}" is a complexity hotspot (score: ${hotspot.hotspotScore}, p95: ${graphData.percentile95Score})`
2540
+ });
2541
+ }
2542
+ }
2543
+ }
2544
+ }
2545
+ const errorCount = violations.filter((v) => v.severity === "error").length;
2546
+ const warningCount = violations.filter((v) => v.severity === "warning").length;
2547
+ const infoCount = violations.filter((v) => v.severity === "info").length;
2548
+ return Ok({
2549
+ violations,
2550
+ stats: {
2551
+ filesAnalyzed: snapshot.files.length,
2552
+ functionsAnalyzed: totalFunctions,
2553
+ violationCount: violations.length,
2554
+ errorCount,
2555
+ warningCount,
2556
+ infoCount
2557
+ }
2558
+ });
2559
+ }
2560
+
2561
+ // src/entropy/detectors/coupling.ts
2562
+ var DEFAULT_THRESHOLDS2 = {
2563
+ fanOut: { warn: 15 },
2564
+ fanIn: { info: 20 },
2565
+ couplingRatio: { warn: 0.7 },
2566
+ transitiveDependencyDepth: { info: 30 }
2567
+ };
2568
+ function computeMetricsFromSnapshot(snapshot) {
2569
+ const fanInMap = /* @__PURE__ */ new Map();
2570
+ for (const file of snapshot.files) {
2571
+ for (const imp of file.imports) {
2572
+ const resolved = resolveImportSource(imp.source, file.path, snapshot);
2573
+ if (resolved) {
2574
+ fanInMap.set(resolved, (fanInMap.get(resolved) || 0) + 1);
2575
+ }
2576
+ }
2577
+ }
2578
+ return snapshot.files.map((file) => {
2579
+ const fanOut = file.imports.length;
2580
+ const fanIn = fanInMap.get(file.path) || 0;
2581
+ const total = fanIn + fanOut;
2582
+ const couplingRatio = total > 0 ? fanOut / total : 0;
2583
+ return {
2584
+ file: file.path,
2585
+ fanIn,
2586
+ fanOut,
2587
+ couplingRatio,
2588
+ transitiveDepth: 0
2589
+ };
2590
+ });
2591
+ }
2592
+ function resolveRelativePath(from, source) {
2593
+ const dir = from.includes("/") ? from.substring(0, from.lastIndexOf("/")) : ".";
2594
+ const parts = dir.split("/");
2595
+ for (const segment of source.split("/")) {
2596
+ if (segment === ".") continue;
2597
+ if (segment === "..") {
2598
+ parts.pop();
2599
+ } else {
2600
+ parts.push(segment);
2601
+ }
2602
+ }
2603
+ return parts.join("/");
2604
+ }
2605
+ function resolveImportSource(source, fromFile, snapshot) {
2606
+ if (!source.startsWith(".") && !source.startsWith("/")) {
2607
+ return void 0;
2608
+ }
2609
+ const resolved = resolveRelativePath(fromFile, source);
2610
+ const filePaths = snapshot.files.map((f) => f.path);
2611
+ const candidates = [
2612
+ resolved,
2613
+ `${resolved}.ts`,
2614
+ `${resolved}.tsx`,
2615
+ `${resolved}/index.ts`,
2616
+ `${resolved}/index.tsx`
2617
+ ];
2618
+ for (const candidate of candidates) {
2619
+ const match = filePaths.find((fp) => fp === candidate);
2620
+ if (match) return match;
2621
+ }
2622
+ return void 0;
2623
+ }
2624
+ function checkViolations(metrics, config) {
2625
+ const thresholds = {
2626
+ fanOut: { ...DEFAULT_THRESHOLDS2.fanOut, ...config?.thresholds?.fanOut },
2627
+ fanIn: { ...DEFAULT_THRESHOLDS2.fanIn, ...config?.thresholds?.fanIn },
2628
+ couplingRatio: { ...DEFAULT_THRESHOLDS2.couplingRatio, ...config?.thresholds?.couplingRatio },
2629
+ transitiveDependencyDepth: {
2630
+ ...DEFAULT_THRESHOLDS2.transitiveDependencyDepth,
2631
+ ...config?.thresholds?.transitiveDependencyDepth
2632
+ }
2633
+ };
2634
+ const violations = [];
2635
+ for (const m of metrics) {
2636
+ if (thresholds.fanOut.warn !== void 0 && m.fanOut > thresholds.fanOut.warn) {
2637
+ violations.push({
2638
+ file: m.file,
2639
+ metric: "fanOut",
2640
+ value: m.fanOut,
2641
+ threshold: thresholds.fanOut.warn,
2642
+ tier: 2,
2643
+ severity: "warning",
2644
+ message: `File has ${m.fanOut} imports (threshold: ${thresholds.fanOut.warn})`
2645
+ });
2646
+ }
2647
+ if (thresholds.fanIn.info !== void 0 && m.fanIn > thresholds.fanIn.info) {
2648
+ violations.push({
2649
+ file: m.file,
2650
+ metric: "fanIn",
2651
+ value: m.fanIn,
2652
+ threshold: thresholds.fanIn.info,
2653
+ tier: 3,
2654
+ severity: "info",
2655
+ message: `File is imported by ${m.fanIn} files (threshold: ${thresholds.fanIn.info})`
2656
+ });
2657
+ }
2658
+ const totalConnections = m.fanIn + m.fanOut;
2659
+ if (totalConnections > 5 && thresholds.couplingRatio.warn !== void 0 && m.couplingRatio > thresholds.couplingRatio.warn) {
2660
+ violations.push({
2661
+ file: m.file,
2662
+ metric: "couplingRatio",
2663
+ value: m.couplingRatio,
2664
+ threshold: thresholds.couplingRatio.warn,
2665
+ tier: 2,
2666
+ severity: "warning",
2667
+ message: `Coupling ratio is ${m.couplingRatio.toFixed(2)} (threshold: ${thresholds.couplingRatio.warn})`
2668
+ });
2669
+ }
2670
+ if (thresholds.transitiveDependencyDepth.info !== void 0 && m.transitiveDepth > thresholds.transitiveDependencyDepth.info) {
2671
+ violations.push({
2672
+ file: m.file,
2673
+ metric: "transitiveDependencyDepth",
2674
+ value: m.transitiveDepth,
2675
+ threshold: thresholds.transitiveDependencyDepth.info,
2676
+ tier: 3,
2677
+ severity: "info",
2678
+ message: `Transitive dependency depth is ${m.transitiveDepth} (threshold: ${thresholds.transitiveDependencyDepth.info})`
2679
+ });
2680
+ }
2681
+ }
2682
+ return violations;
2683
+ }
2684
+ async function detectCouplingViolations(snapshot, config, graphData) {
2685
+ let metrics;
2686
+ if (graphData) {
2687
+ metrics = graphData.files.map((f) => ({
2688
+ file: f.file,
2689
+ fanIn: f.fanIn,
2690
+ fanOut: f.fanOut,
2691
+ couplingRatio: f.couplingRatio,
2692
+ transitiveDepth: f.transitiveDepth
2693
+ }));
2694
+ } else {
2695
+ metrics = computeMetricsFromSnapshot(snapshot);
2696
+ }
2697
+ const violations = checkViolations(metrics, config);
2698
+ const warningCount = violations.filter((v) => v.severity === "warning").length;
2699
+ const infoCount = violations.filter((v) => v.severity === "info").length;
2700
+ return Ok({
2701
+ violations,
2702
+ stats: {
2703
+ filesAnalyzed: metrics.length,
2704
+ violationCount: violations.length,
2705
+ warningCount,
2706
+ infoCount
2707
+ }
2708
+ });
2709
+ }
2710
+
2711
+ // src/entropy/detectors/size-budget.ts
2712
+ import { readdirSync, statSync } from "fs";
2713
+ import { join as join4 } from "path";
2714
+ function parseSize(size) {
2715
+ const match = size.trim().match(/^(\d+(?:\.\d+)?)\s*(KB|MB|GB|B)?$/i);
2716
+ if (!match) return 0;
2717
+ const value = parseFloat(match[1]);
2718
+ const unit = (match[2] || "B").toUpperCase();
2719
+ switch (unit) {
2720
+ case "KB":
2721
+ return Math.round(value * 1024);
2722
+ case "MB":
2723
+ return Math.round(value * 1024 * 1024);
2724
+ case "GB":
2725
+ return Math.round(value * 1024 * 1024 * 1024);
2726
+ default:
2727
+ return Math.round(value);
2728
+ }
2729
+ }
2730
+ function dirSize(dirPath) {
2731
+ let total = 0;
2732
+ let entries;
2733
+ try {
2734
+ entries = readdirSync(dirPath);
2735
+ } catch {
2736
+ return 0;
2737
+ }
2738
+ for (const entry of entries) {
2739
+ if (entry === "node_modules" || entry === ".git") continue;
2740
+ const fullPath = join4(dirPath, entry);
2741
+ try {
2742
+ const stat = statSync(fullPath);
2743
+ if (stat.isDirectory()) {
2744
+ total += dirSize(fullPath);
2745
+ } else if (stat.isFile()) {
2746
+ total += stat.size;
2747
+ }
2748
+ } catch {
2749
+ continue;
2750
+ }
2751
+ }
2752
+ return total;
2753
+ }
2754
+ async function detectSizeBudgetViolations(rootDir, config) {
2755
+ const budgets = config?.budgets ?? {};
2756
+ const violations = [];
2757
+ let packagesChecked = 0;
2758
+ for (const [pkgPath, budget] of Object.entries(budgets)) {
2759
+ packagesChecked++;
2760
+ const distPath = join4(rootDir, pkgPath, "dist");
2761
+ const currentSize = dirSize(distPath);
2762
+ if (budget.warn) {
2763
+ const budgetBytes = parseSize(budget.warn);
2764
+ if (budgetBytes > 0 && currentSize > budgetBytes) {
2765
+ violations.push({
2766
+ package: pkgPath,
2767
+ currentSize,
2768
+ budgetSize: budgetBytes,
2769
+ unit: "bytes",
2770
+ tier: 2,
2771
+ severity: "warning"
2772
+ });
2773
+ }
2774
+ }
2775
+ }
2776
+ const warningCount = violations.filter((v) => v.severity === "warning").length;
2777
+ const infoCount = violations.filter((v) => v.severity === "info").length;
2778
+ return Ok({
2779
+ violations,
2780
+ stats: {
2781
+ packagesChecked,
2782
+ violationCount: violations.length,
2783
+ warningCount,
2784
+ infoCount
2785
+ }
2786
+ });
2787
+ }
2788
+
2290
2789
  // src/entropy/fixers/suggestions.ts
2291
2790
  function generateDeadCodeSuggestions(report) {
2292
2791
  const suggestions = [];
@@ -2472,12 +2971,57 @@ var EntropyAnalyzer = class {
2472
2971
  analysisErrors.push({ analyzer: "patterns", error: result.error });
2473
2972
  }
2474
2973
  }
2974
+ let complexityReport;
2975
+ if (this.config.analyze.complexity) {
2976
+ const complexityConfig = typeof this.config.analyze.complexity === "object" ? this.config.analyze.complexity : {};
2977
+ const result = await detectComplexityViolations(
2978
+ this.snapshot,
2979
+ complexityConfig,
2980
+ graphOptions?.graphComplexityData
2981
+ );
2982
+ if (result.ok) {
2983
+ complexityReport = result.value;
2984
+ } else {
2985
+ analysisErrors.push({ analyzer: "complexity", error: result.error });
2986
+ }
2987
+ }
2988
+ let couplingReport;
2989
+ if (this.config.analyze.coupling) {
2990
+ const couplingConfig = typeof this.config.analyze.coupling === "object" ? this.config.analyze.coupling : {};
2991
+ const result = await detectCouplingViolations(
2992
+ this.snapshot,
2993
+ couplingConfig,
2994
+ graphOptions?.graphCouplingData
2995
+ );
2996
+ if (result.ok) {
2997
+ couplingReport = result.value;
2998
+ } else {
2999
+ analysisErrors.push({ analyzer: "coupling", error: result.error });
3000
+ }
3001
+ }
3002
+ let sizeBudgetReport;
3003
+ if (this.config.analyze.sizeBudget) {
3004
+ const sizeBudgetConfig = typeof this.config.analyze.sizeBudget === "object" ? this.config.analyze.sizeBudget : {};
3005
+ const result = await detectSizeBudgetViolations(this.config.rootDir, sizeBudgetConfig);
3006
+ if (result.ok) {
3007
+ sizeBudgetReport = result.value;
3008
+ } else {
3009
+ analysisErrors.push({ analyzer: "sizeBudget", error: result.error });
3010
+ }
3011
+ }
2475
3012
  const driftIssues = driftReport?.drifts.length || 0;
2476
3013
  const deadCodeIssues = (deadCodeReport?.deadExports.length || 0) + (deadCodeReport?.deadFiles.length || 0) + (deadCodeReport?.unusedImports.length || 0);
2477
3014
  const patternIssues = patternReport?.violations.length || 0;
2478
3015
  const patternErrors = patternReport?.stats.errorCount || 0;
2479
3016
  const patternWarnings = patternReport?.stats.warningCount || 0;
2480
- const totalIssues = driftIssues + deadCodeIssues + patternIssues;
3017
+ const complexityIssues = complexityReport?.violations.length || 0;
3018
+ const couplingIssues = couplingReport?.violations.length || 0;
3019
+ const sizeBudgetIssues = sizeBudgetReport?.violations.length || 0;
3020
+ const complexityErrors = complexityReport?.stats.errorCount || 0;
3021
+ const complexityWarnings = complexityReport?.stats.warningCount || 0;
3022
+ const couplingWarnings = couplingReport?.stats.warningCount || 0;
3023
+ const sizeBudgetWarnings = sizeBudgetReport?.stats.warningCount || 0;
3024
+ const totalIssues = driftIssues + deadCodeIssues + patternIssues + complexityIssues + couplingIssues + sizeBudgetIssues;
2481
3025
  const fixableCount = (deadCodeReport?.deadFiles.length || 0) + (deadCodeReport?.unusedImports.length || 0);
2482
3026
  const suggestions = generateSuggestions(deadCodeReport, driftReport, patternReport);
2483
3027
  const duration = Date.now() - startTime;
@@ -2486,8 +3030,8 @@ var EntropyAnalyzer = class {
2486
3030
  analysisErrors,
2487
3031
  summary: {
2488
3032
  totalIssues,
2489
- errors: patternErrors,
2490
- warnings: patternWarnings + driftIssues,
3033
+ errors: patternErrors + complexityErrors,
3034
+ warnings: patternWarnings + driftIssues + complexityWarnings + couplingWarnings + sizeBudgetWarnings,
2491
3035
  fixableCount,
2492
3036
  suggestionCount: suggestions.suggestions.length
2493
3037
  },
@@ -2503,6 +3047,15 @@ var EntropyAnalyzer = class {
2503
3047
  if (patternReport) {
2504
3048
  report.patterns = patternReport;
2505
3049
  }
3050
+ if (complexityReport) {
3051
+ report.complexity = complexityReport;
3052
+ }
3053
+ if (couplingReport) {
3054
+ report.coupling = couplingReport;
3055
+ }
3056
+ if (sizeBudgetReport) {
3057
+ report.sizeBudget = sizeBudgetReport;
3058
+ }
2506
3059
  this.report = report;
2507
3060
  return Ok(report);
2508
3061
  }
@@ -2585,8 +3138,8 @@ var EntropyAnalyzer = class {
2585
3138
  // src/entropy/fixers/safe-fixes.ts
2586
3139
  import * as fs from "fs";
2587
3140
  import { promisify as promisify2 } from "util";
2588
- import { dirname as dirname6, basename as basename4, join as join4 } from "path";
2589
- var readFile3 = promisify2(fs.readFile);
3141
+ import { dirname as dirname6, basename as basename4, join as join5 } from "path";
3142
+ var readFile4 = promisify2(fs.readFile);
2590
3143
  var writeFile2 = promisify2(fs.writeFile);
2591
3144
  var unlink2 = promisify2(fs.unlink);
2592
3145
  var mkdir2 = promisify2(fs.mkdir);
@@ -2618,6 +3171,40 @@ function createUnusedImportFixes(deadCodeReport) {
2618
3171
  reversible: true
2619
3172
  }));
2620
3173
  }
3174
+ function createDeadExportFixes(deadCodeReport) {
3175
+ return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => ({
3176
+ type: "dead-exports",
3177
+ file: exp.file,
3178
+ description: `Remove export keyword from ${exp.name} (${exp.reason})`,
3179
+ action: "replace",
3180
+ oldContent: exp.isDefault ? `export default ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `export ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3181
+ newContent: exp.isDefault ? `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3182
+ safe: true,
3183
+ reversible: true
3184
+ }));
3185
+ }
3186
+ function createCommentedCodeFixes(blocks) {
3187
+ return blocks.map((block) => ({
3188
+ type: "commented-code",
3189
+ file: block.file,
3190
+ description: `Remove commented-out code block (lines ${block.startLine}-${block.endLine})`,
3191
+ action: "replace",
3192
+ oldContent: block.content,
3193
+ newContent: "",
3194
+ safe: true,
3195
+ reversible: true
3196
+ }));
3197
+ }
3198
+ function createOrphanedDepFixes(deps) {
3199
+ return deps.map((dep) => ({
3200
+ type: "orphaned-deps",
3201
+ file: dep.packageJsonPath,
3202
+ description: `Remove orphaned dependency: ${dep.name}`,
3203
+ action: "replace",
3204
+ safe: true,
3205
+ reversible: true
3206
+ }));
3207
+ }
2621
3208
  function createFixes(deadCodeReport, config) {
2622
3209
  const fullConfig = { ...DEFAULT_FIX_CONFIG, ...config };
2623
3210
  const fixes = [];
@@ -2627,6 +3214,9 @@ function createFixes(deadCodeReport, config) {
2627
3214
  if (fullConfig.fixTypes.includes("unused-imports")) {
2628
3215
  fixes.push(...createUnusedImportFixes(deadCodeReport));
2629
3216
  }
3217
+ if (fullConfig.fixTypes.includes("dead-exports")) {
3218
+ fixes.push(...createDeadExportFixes(deadCodeReport));
3219
+ }
2630
3220
  return fixes;
2631
3221
  }
2632
3222
  function previewFix(fix) {
@@ -2647,7 +3237,7 @@ function previewFix(fix) {
2647
3237
  }
2648
3238
  }
2649
3239
  async function createBackup(filePath, backupDir) {
2650
- const backupPath = join4(backupDir, `${Date.now()}-${basename4(filePath)}`);
3240
+ const backupPath = join5(backupDir, `${Date.now()}-${basename4(filePath)}`);
2651
3241
  try {
2652
3242
  await mkdir2(dirname6(backupPath), { recursive: true });
2653
3243
  await copyFile2(filePath, backupPath);
@@ -2680,7 +3270,7 @@ async function applySingleFix(fix, config) {
2680
3270
  break;
2681
3271
  case "delete-lines":
2682
3272
  if (fix.line !== void 0) {
2683
- const content = await readFile3(fix.file, "utf-8");
3273
+ const content = await readFile4(fix.file, "utf-8");
2684
3274
  const lines = content.split("\n");
2685
3275
  lines.splice(fix.line - 1, 1);
2686
3276
  await writeFile2(fix.file, lines.join("\n"));
@@ -2688,14 +3278,14 @@ async function applySingleFix(fix, config) {
2688
3278
  break;
2689
3279
  case "replace":
2690
3280
  if (fix.oldContent && fix.newContent !== void 0) {
2691
- const content = await readFile3(fix.file, "utf-8");
3281
+ const content = await readFile4(fix.file, "utf-8");
2692
3282
  const newContent = content.replace(fix.oldContent, fix.newContent);
2693
3283
  await writeFile2(fix.file, newContent);
2694
3284
  }
2695
3285
  break;
2696
3286
  case "insert":
2697
3287
  if (fix.line !== void 0 && fix.newContent) {
2698
- const content = await readFile3(fix.file, "utf-8");
3288
+ const content = await readFile4(fix.file, "utf-8");
2699
3289
  const lines = content.split("\n");
2700
3290
  lines.splice(fix.line - 1, 0, fix.newContent);
2701
3291
  await writeFile2(fix.file, lines.join("\n"));
@@ -2747,6 +3337,133 @@ async function applyFixes(fixes, config) {
2747
3337
  });
2748
3338
  }
2749
3339
 
3340
+ // src/entropy/fixers/architecture-fixes.ts
3341
+ function createForbiddenImportFixes(violations) {
3342
+ return violations.filter((v) => v.alternative !== void 0).map((v) => ({
3343
+ type: "forbidden-import-replacement",
3344
+ file: v.file,
3345
+ description: `Replace forbidden import '${v.forbiddenImport}' with '${v.alternative}'`,
3346
+ action: "replace",
3347
+ line: v.line,
3348
+ oldContent: `from '${v.forbiddenImport}'`,
3349
+ newContent: `from '${v.alternative}'`,
3350
+ safe: true,
3351
+ reversible: true
3352
+ }));
3353
+ }
3354
+
3355
+ // src/entropy/fixers/cleanup-finding.ts
3356
+ var ALWAYS_UNSAFE_TYPES = /* @__PURE__ */ new Set([
3357
+ "upward-dependency",
3358
+ "skip-layer-dependency",
3359
+ "circular-dependency",
3360
+ "dead-internal"
3361
+ ]);
3362
+ var idCounter = 0;
3363
+ function classifyFinding(input) {
3364
+ idCounter++;
3365
+ const id = `${input.concern === "dead-code" ? "dc" : "arch"}-${idCounter}`;
3366
+ let safety;
3367
+ let safetyReason;
3368
+ let fixAction;
3369
+ let suggestion;
3370
+ if (ALWAYS_UNSAFE_TYPES.has(input.type)) {
3371
+ safety = "unsafe";
3372
+ safetyReason = `${input.type} requires human judgment`;
3373
+ suggestion = "Review and refactor manually";
3374
+ } else if (input.concern === "dead-code") {
3375
+ if (input.isPublicApi) {
3376
+ safety = "unsafe";
3377
+ safetyReason = "Public API export may have external consumers";
3378
+ suggestion = "Deprecate before removing";
3379
+ } else if (input.type === "dead-export" || input.type === "unused-import" || input.type === "commented-code" || input.type === "dead-file") {
3380
+ safety = "safe";
3381
+ safetyReason = "zero importers, non-public";
3382
+ fixAction = input.type === "dead-export" ? "Remove export keyword" : input.type === "dead-file" ? "Delete file" : input.type === "commented-code" ? "Delete commented block" : "Remove import";
3383
+ suggestion = fixAction;
3384
+ } else if (input.type === "orphaned-dep") {
3385
+ safety = "probably-safe";
3386
+ safetyReason = "No imports found, but needs install+test verification";
3387
+ fixAction = "Remove from package.json";
3388
+ suggestion = fixAction;
3389
+ } else {
3390
+ safety = "unsafe";
3391
+ safetyReason = "Unknown dead code type";
3392
+ suggestion = "Manual review required";
3393
+ }
3394
+ } else {
3395
+ if (input.type === "import-ordering") {
3396
+ safety = "safe";
3397
+ safetyReason = "Mechanical reorder, no semantic change";
3398
+ fixAction = "Reorder imports";
3399
+ suggestion = fixAction;
3400
+ } else if (input.type === "forbidden-import" && input.hasAlternative) {
3401
+ safety = "probably-safe";
3402
+ safetyReason = "Alternative configured, needs typecheck+test";
3403
+ fixAction = "Replace with configured alternative";
3404
+ suggestion = fixAction;
3405
+ } else {
3406
+ safety = "unsafe";
3407
+ safetyReason = `${input.type} requires structural changes`;
3408
+ suggestion = "Restructure code to fix violation";
3409
+ }
3410
+ }
3411
+ return {
3412
+ id,
3413
+ concern: input.concern,
3414
+ file: input.file,
3415
+ ...input.line !== void 0 ? { line: input.line } : {},
3416
+ type: input.type,
3417
+ description: input.description,
3418
+ safety,
3419
+ safetyReason,
3420
+ hotspotDowngraded: false,
3421
+ ...fixAction !== void 0 ? { fixAction } : {},
3422
+ suggestion
3423
+ };
3424
+ }
3425
+ function applyHotspotDowngrade(finding, hotspot) {
3426
+ if (finding.safety !== "safe") return finding;
3427
+ const churn = hotspot.churnMap.get(finding.file) ?? 0;
3428
+ if (churn >= hotspot.topPercentileThreshold) {
3429
+ return {
3430
+ ...finding,
3431
+ safety: "probably-safe",
3432
+ safetyReason: `${finding.safetyReason}; downgraded due to high churn (${churn} commits)`,
3433
+ hotspotDowngraded: true
3434
+ };
3435
+ }
3436
+ return finding;
3437
+ }
3438
+ function deduplicateCleanupFindings(findings) {
3439
+ const byFileAndLine = /* @__PURE__ */ new Map();
3440
+ for (const f of findings) {
3441
+ const key = `${f.file}:${f.line ?? "none"}`;
3442
+ const group = byFileAndLine.get(key) ?? [];
3443
+ group.push(f);
3444
+ byFileAndLine.set(key, group);
3445
+ }
3446
+ const result = [];
3447
+ for (const group of byFileAndLine.values()) {
3448
+ if (group.length === 1) {
3449
+ result.push(group[0]);
3450
+ continue;
3451
+ }
3452
+ const deadCode = group.find((f) => f.concern === "dead-code");
3453
+ const arch = group.find((f) => f.concern === "architecture");
3454
+ if (deadCode && arch) {
3455
+ result.push({
3456
+ ...deadCode,
3457
+ description: `${deadCode.description} (also violates architecture: ${arch.type})`,
3458
+ suggestion: deadCode.fixAction ? `${deadCode.fixAction} (resolves both dead code and architecture violation)` : deadCode.suggestion
3459
+ });
3460
+ } else {
3461
+ result.push(...group);
3462
+ }
3463
+ }
3464
+ return result;
3465
+ }
3466
+
2750
3467
  // src/entropy/config/schema.ts
2751
3468
  import { z } from "zod";
2752
3469
  var MustExportRuleSchema = z.object({
@@ -2856,33 +3573,382 @@ function validatePatternConfig(config) {
2856
3573
  return Ok(result.data);
2857
3574
  }
2858
3575
 
2859
- // src/feedback/telemetry/noop.ts
2860
- var NoOpTelemetryAdapter = class {
2861
- name = "noop";
2862
- async health() {
2863
- return Ok({ available: true, message: "NoOp adapter - no real telemetry" });
2864
- }
2865
- async getMetrics() {
2866
- return Ok([]);
3576
+ // src/performance/baseline-manager.ts
3577
+ import { readFileSync, writeFileSync, mkdirSync, existsSync } from "fs";
3578
+ import { join as join6, dirname as dirname7 } from "path";
3579
+ var BaselineManager = class {
3580
+ baselinesPath;
3581
+ constructor(projectRoot) {
3582
+ this.baselinesPath = join6(projectRoot, ".harness", "perf", "baselines.json");
2867
3583
  }
2868
- async getTraces() {
2869
- return Ok([]);
3584
+ /**
3585
+ * Load the baselines file from disk.
3586
+ * Returns null if the file does not exist or contains invalid JSON.
3587
+ */
3588
+ load() {
3589
+ if (!existsSync(this.baselinesPath)) {
3590
+ return null;
3591
+ }
3592
+ try {
3593
+ const raw = readFileSync(this.baselinesPath, "utf-8");
3594
+ return JSON.parse(raw);
3595
+ } catch {
3596
+ return null;
3597
+ }
2870
3598
  }
2871
- async getLogs() {
2872
- return Ok([]);
3599
+ /**
3600
+ * Save benchmark results to disk, merging with any existing baselines.
3601
+ * Each result is keyed by `${file}::${name}`.
3602
+ */
3603
+ save(results, commitHash) {
3604
+ const existing = this.load();
3605
+ const now = (/* @__PURE__ */ new Date()).toISOString();
3606
+ const benchmarks = existing?.benchmarks ? { ...existing.benchmarks } : {};
3607
+ for (const result of results) {
3608
+ const key = `${result.file}::${result.name}`;
3609
+ benchmarks[key] = {
3610
+ opsPerSec: result.opsPerSec,
3611
+ meanMs: result.meanMs,
3612
+ p99Ms: result.p99Ms,
3613
+ marginOfError: result.marginOfError
3614
+ };
3615
+ }
3616
+ const file = {
3617
+ version: 1,
3618
+ updatedAt: now,
3619
+ updatedFrom: commitHash,
3620
+ benchmarks
3621
+ };
3622
+ const dir = dirname7(this.baselinesPath);
3623
+ if (!existsSync(dir)) {
3624
+ mkdirSync(dir, { recursive: true });
3625
+ }
3626
+ writeFileSync(this.baselinesPath, JSON.stringify(file, null, 2));
2873
3627
  }
2874
- };
2875
-
3628
+ /**
3629
+ * Remove baselines whose file prefix does not match any of the given bench files.
3630
+ * This cleans up entries for deleted benchmark files.
3631
+ */
3632
+ prune(existingBenchFiles) {
3633
+ const existing = this.load();
3634
+ if (!existing) {
3635
+ return;
3636
+ }
3637
+ const fileSet = new Set(existingBenchFiles);
3638
+ const pruned = {};
3639
+ for (const [key, baseline] of Object.entries(existing.benchmarks)) {
3640
+ const filePrefix = key.split("::")[0];
3641
+ if (fileSet.has(filePrefix)) {
3642
+ pruned[key] = baseline;
3643
+ }
3644
+ }
3645
+ existing.benchmarks = pruned;
3646
+ writeFileSync(this.baselinesPath, JSON.stringify(existing, null, 2));
3647
+ }
3648
+ };
3649
+
3650
+ // src/performance/benchmark-runner.ts
3651
+ import { execFileSync } from "child_process";
3652
+ var BenchmarkRunner = class {
3653
+ /**
3654
+ * Discover .bench.ts files matching the glob pattern.
3655
+ */
3656
+ discover(cwd, glob2) {
3657
+ try {
3658
+ const result = execFileSync(
3659
+ "find",
3660
+ [
3661
+ cwd,
3662
+ "-name",
3663
+ "*.bench.ts",
3664
+ "-not",
3665
+ "-path",
3666
+ "*/node_modules/*",
3667
+ "-not",
3668
+ "-path",
3669
+ "*/dist/*"
3670
+ ],
3671
+ { encoding: "utf-8", timeout: 5e3 }
3672
+ ).trim();
3673
+ if (!result) return [];
3674
+ const files = result.split("\n").filter(Boolean);
3675
+ if (glob2 && glob2 !== "**/*.bench.ts") {
3676
+ return files.filter((f) => f.includes(glob2.replace(/\*/g, "")));
3677
+ }
3678
+ return files;
3679
+ } catch {
3680
+ return [];
3681
+ }
3682
+ }
3683
+ /**
3684
+ * Run benchmarks via vitest bench and capture results.
3685
+ * Returns parsed BenchmarkResult[] from vitest bench JSON output.
3686
+ */
3687
+ async run(options = {}) {
3688
+ const cwd = options.cwd ?? process.cwd();
3689
+ const timeout = options.timeout ?? 12e4;
3690
+ const glob2 = options.glob;
3691
+ const args = ["vitest", "bench", "--run"];
3692
+ if (glob2) {
3693
+ args.push(glob2);
3694
+ }
3695
+ args.push("--reporter=json");
3696
+ try {
3697
+ const rawOutput = execFileSync("npx", args, {
3698
+ cwd,
3699
+ encoding: "utf-8",
3700
+ timeout,
3701
+ stdio: ["pipe", "pipe", "pipe"]
3702
+ });
3703
+ const results = this.parseVitestBenchOutput(rawOutput);
3704
+ return { results, rawOutput, success: true };
3705
+ } catch (error) {
3706
+ const err = error;
3707
+ const output = err.stdout || err.message || "";
3708
+ const results = this.parseVitestBenchOutput(output);
3709
+ return {
3710
+ results,
3711
+ rawOutput: output,
3712
+ success: results.length > 0
3713
+ };
3714
+ }
3715
+ }
3716
+ /**
3717
+ * Parse vitest bench JSON reporter output into BenchmarkResult[].
3718
+ * Vitest bench JSON output contains testResults with benchmark data.
3719
+ */
3720
+ parseVitestBenchOutput(output) {
3721
+ const results = [];
3722
+ try {
3723
+ const jsonStart = output.indexOf("{");
3724
+ const jsonEnd = output.lastIndexOf("}");
3725
+ if (jsonStart === -1 || jsonEnd === -1) return results;
3726
+ const jsonStr = output.slice(jsonStart, jsonEnd + 1);
3727
+ const parsed = JSON.parse(jsonStr);
3728
+ if (parsed.testResults) {
3729
+ for (const testResult of parsed.testResults) {
3730
+ const file = testResult.name || testResult.filepath || "";
3731
+ if (testResult.assertionResults) {
3732
+ for (const assertion of testResult.assertionResults) {
3733
+ if (assertion.benchmark) {
3734
+ const bench = assertion.benchmark;
3735
+ results.push({
3736
+ name: assertion.fullName || assertion.title || "unknown",
3737
+ file: file.replace(process.cwd() + "/", ""),
3738
+ opsPerSec: Math.round(bench.hz || 0),
3739
+ meanMs: bench.mean ? bench.mean * 1e3 : 0,
3740
+ // p99: use actual p99 if available, otherwise estimate as 1.5× mean
3741
+ p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
3742
+ marginOfError: bench.rme ? bench.rme / 100 : 0.05
3743
+ });
3744
+ }
3745
+ }
3746
+ }
3747
+ }
3748
+ }
3749
+ } catch {
3750
+ }
3751
+ return results;
3752
+ }
3753
+ };
3754
+
3755
+ // src/performance/regression-detector.ts
3756
+ var RegressionDetector = class {
3757
+ detect(results, baselines, criticalPaths) {
3758
+ const regressions = [];
3759
+ const improvements = [];
3760
+ let newBenchmarks = 0;
3761
+ for (const current of results) {
3762
+ const key = `${current.file}::${current.name}`;
3763
+ const baseline = baselines[key];
3764
+ if (!baseline) {
3765
+ newBenchmarks++;
3766
+ continue;
3767
+ }
3768
+ const regressionPct = (baseline.opsPerSec - current.opsPerSec) / baseline.opsPerSec * 100;
3769
+ const noiseThreshold = (baseline.marginOfError + current.marginOfError) * 100;
3770
+ const withinNoise = Math.abs(regressionPct) <= noiseThreshold;
3771
+ if (regressionPct < 0) {
3772
+ improvements.push({ benchmark: key, improvementPct: Math.abs(regressionPct) });
3773
+ continue;
3774
+ }
3775
+ const isCriticalPath = criticalPaths.entries.some(
3776
+ (e) => current.file.includes(e.file) || current.name === e.function
3777
+ );
3778
+ let tier;
3779
+ let severity;
3780
+ if (isCriticalPath && regressionPct > 5 && !withinNoise) {
3781
+ tier = 1;
3782
+ severity = "error";
3783
+ } else if (regressionPct > 10 && !withinNoise) {
3784
+ tier = 2;
3785
+ severity = "warning";
3786
+ } else {
3787
+ tier = 3;
3788
+ severity = "info";
3789
+ }
3790
+ regressions.push({
3791
+ benchmark: key,
3792
+ current,
3793
+ baseline,
3794
+ regressionPct,
3795
+ isCriticalPath,
3796
+ tier,
3797
+ severity,
3798
+ withinNoise
3799
+ });
3800
+ }
3801
+ return {
3802
+ regressions,
3803
+ improvements,
3804
+ stats: {
3805
+ benchmarksCompared: results.length - newBenchmarks,
3806
+ regressionCount: regressions.filter((r) => !r.withinNoise).length,
3807
+ improvementCount: improvements.length,
3808
+ newBenchmarks
3809
+ }
3810
+ };
3811
+ }
3812
+ };
3813
+
3814
+ // src/performance/critical-path.ts
3815
+ import * as fs2 from "fs";
3816
+ import * as path from "path";
3817
+ var SKIP_DIRS = /* @__PURE__ */ new Set(["node_modules", "dist", ".git"]);
3818
+ var SOURCE_EXTENSIONS = /* @__PURE__ */ new Set([".ts", ".tsx", ".js", ".jsx"]);
3819
+ var FUNCTION_DECL_RE = /(?:export\s+)?(?:async\s+)?function\s+(\w+)/;
3820
+ var CONST_DECL_RE = /(?:export\s+)?(?:const|let)\s+(\w+)\s*=/;
3821
+ var CriticalPathResolver = class {
3822
+ projectRoot;
3823
+ constructor(projectRoot) {
3824
+ this.projectRoot = projectRoot;
3825
+ }
3826
+ async resolve(graphData) {
3827
+ const annotated = await this.scanAnnotations();
3828
+ const seen = /* @__PURE__ */ new Map();
3829
+ for (const entry of annotated) {
3830
+ const key = `${entry.file}::${entry.function}`;
3831
+ seen.set(key, entry);
3832
+ }
3833
+ let graphInferred = 0;
3834
+ if (graphData) {
3835
+ for (const item of graphData.highFanInFunctions) {
3836
+ const key = `${item.file}::${item.function}`;
3837
+ if (!seen.has(key)) {
3838
+ seen.set(key, {
3839
+ file: item.file,
3840
+ function: item.function,
3841
+ source: "graph-inferred",
3842
+ fanIn: item.fanIn
3843
+ });
3844
+ graphInferred++;
3845
+ }
3846
+ }
3847
+ }
3848
+ const entries = Array.from(seen.values());
3849
+ const annotatedCount = annotated.length;
3850
+ return {
3851
+ entries,
3852
+ stats: {
3853
+ annotated: annotatedCount,
3854
+ graphInferred,
3855
+ total: entries.length
3856
+ }
3857
+ };
3858
+ }
3859
+ async scanAnnotations() {
3860
+ const entries = [];
3861
+ this.walkDir(this.projectRoot, entries);
3862
+ return entries;
3863
+ }
3864
+ walkDir(dir, entries) {
3865
+ let items;
3866
+ try {
3867
+ items = fs2.readdirSync(dir, { withFileTypes: true });
3868
+ } catch {
3869
+ return;
3870
+ }
3871
+ for (const item of items) {
3872
+ if (item.isDirectory()) {
3873
+ if (SKIP_DIRS.has(item.name)) continue;
3874
+ this.walkDir(path.join(dir, item.name), entries);
3875
+ } else if (item.isFile() && SOURCE_EXTENSIONS.has(path.extname(item.name))) {
3876
+ this.scanFile(path.join(dir, item.name), entries);
3877
+ }
3878
+ }
3879
+ }
3880
+ scanFile(filePath, entries) {
3881
+ let content;
3882
+ try {
3883
+ content = fs2.readFileSync(filePath, "utf-8");
3884
+ } catch {
3885
+ return;
3886
+ }
3887
+ const lines = content.split("\n");
3888
+ const relativePath = path.relative(this.projectRoot, filePath).replace(/\\/g, "/");
3889
+ for (let i = 0; i < lines.length; i++) {
3890
+ const line = lines[i];
3891
+ if (!line.includes("@perf-critical")) continue;
3892
+ for (let j = i + 1; j < lines.length; j++) {
3893
+ const nextLine = lines[j].trim();
3894
+ if (nextLine === "" || nextLine === "*/" || nextLine === "*") continue;
3895
+ if (nextLine.startsWith("*") || nextLine.startsWith("//")) continue;
3896
+ const funcMatch = nextLine.match(FUNCTION_DECL_RE);
3897
+ if (funcMatch && funcMatch[1]) {
3898
+ entries.push({
3899
+ file: relativePath,
3900
+ function: funcMatch[1],
3901
+ source: "annotation"
3902
+ });
3903
+ } else {
3904
+ const constMatch = nextLine.match(CONST_DECL_RE);
3905
+ if (constMatch && constMatch[1]) {
3906
+ entries.push({
3907
+ file: relativePath,
3908
+ function: constMatch[1],
3909
+ source: "annotation"
3910
+ });
3911
+ }
3912
+ }
3913
+ break;
3914
+ }
3915
+ }
3916
+ }
3917
+ };
3918
+
3919
+ // src/feedback/telemetry/noop.ts
3920
+ var NoOpTelemetryAdapter = class {
3921
+ name = "noop";
3922
+ async health() {
3923
+ return Ok({ available: true, message: "NoOp adapter - no real telemetry" });
3924
+ }
3925
+ async getMetrics() {
3926
+ return Ok([]);
3927
+ }
3928
+ async getTraces() {
3929
+ return Ok([]);
3930
+ }
3931
+ async getLogs() {
3932
+ return Ok([]);
3933
+ }
3934
+ };
3935
+
2876
3936
  // src/shared/uuid.ts
2877
3937
  function generateId() {
2878
3938
  if (typeof globalThis !== "undefined" && "crypto" in globalThis && typeof globalThis.crypto.randomUUID === "function") {
2879
3939
  return globalThis.crypto.randomUUID();
2880
3940
  }
2881
- return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, function(c) {
2882
- const r = Math.random() * 16 | 0;
2883
- const v = c === "x" ? r : r & 3 | 8;
2884
- return v.toString(16);
2885
- });
3941
+ if (typeof globalThis.crypto?.getRandomValues !== "function") {
3942
+ throw new Error(
3943
+ "No cryptographic random source available \u2014 requires Node.js 15+ or a browser with Web Crypto API"
3944
+ );
3945
+ }
3946
+ const bytes = new Uint8Array(16);
3947
+ globalThis.crypto.getRandomValues(bytes);
3948
+ bytes[6] = bytes[6] & 15 | 64;
3949
+ bytes[8] = bytes[8] & 63 | 128;
3950
+ const hex = [...bytes].map((b) => b.toString(16).padStart(2, "0")).join("");
3951
+ return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20)}`;
2886
3952
  }
2887
3953
 
2888
3954
  // src/feedback/executor/noop.ts
@@ -3543,8 +4609,8 @@ async function requestMultiplePeerReviews(requests) {
3543
4609
  }
3544
4610
 
3545
4611
  // src/feedback/logging/file-sink.ts
3546
- import { appendFileSync, writeFileSync, existsSync, mkdirSync } from "fs";
3547
- import { dirname as dirname7 } from "path";
4612
+ import { appendFileSync, writeFileSync as writeFileSync2, existsSync as existsSync2, mkdirSync as mkdirSync2 } from "fs";
4613
+ import { dirname as dirname8 } from "path";
3548
4614
  var FileSink = class {
3549
4615
  name = "file";
3550
4616
  filePath;
@@ -3567,9 +4633,9 @@ var FileSink = class {
3567
4633
  }
3568
4634
  ensureDirectory() {
3569
4635
  if (!this.initialized) {
3570
- const dir = dirname7(this.filePath);
3571
- if (!existsSync(dir)) {
3572
- mkdirSync(dir, { recursive: true });
4636
+ const dir = dirname8(this.filePath);
4637
+ if (!existsSync2(dir)) {
4638
+ mkdirSync2(dir, { recursive: true });
3573
4639
  }
3574
4640
  this.initialized = true;
3575
4641
  }
@@ -3599,8 +4665,8 @@ var FileSink = class {
3599
4665
  this.ensureDirectory();
3600
4666
  const content = this.buffer.join("");
3601
4667
  this.buffer = [];
3602
- if (this.options.mode === "overwrite" && !existsSync(this.filePath)) {
3603
- writeFileSync(this.filePath, content);
4668
+ if (this.options.mode === "overwrite" && !existsSync2(this.filePath)) {
4669
+ writeFileSync2(this.filePath, content);
3604
4670
  } else {
3605
4671
  appendFileSync(this.filePath, content);
3606
4672
  }
@@ -3712,22 +4778,296 @@ var DEFAULT_STATE = {
3712
4778
  };
3713
4779
 
3714
4780
  // src/state/state-manager.ts
3715
- import * as fs2 from "fs";
3716
- import * as path from "path";
4781
+ import * as fs4 from "fs";
4782
+ import * as path3 from "path";
4783
+ import { execSync as execSync2 } from "child_process";
4784
+
4785
+ // src/state/stream-resolver.ts
4786
+ import * as fs3 from "fs";
4787
+ import * as path2 from "path";
3717
4788
  import { execSync } from "child_process";
4789
+
4790
+ // src/state/stream-types.ts
4791
+ import { z as z3 } from "zod";
4792
+ var StreamInfoSchema = z3.object({
4793
+ name: z3.string(),
4794
+ branch: z3.string().optional(),
4795
+ createdAt: z3.string(),
4796
+ lastActiveAt: z3.string()
4797
+ });
4798
+ var StreamIndexSchema = z3.object({
4799
+ schemaVersion: z3.literal(1),
4800
+ activeStream: z3.string().nullable(),
4801
+ streams: z3.record(StreamInfoSchema)
4802
+ });
4803
+ var DEFAULT_STREAM_INDEX = {
4804
+ schemaVersion: 1,
4805
+ activeStream: null,
4806
+ streams: {}
4807
+ };
4808
+
4809
+ // src/state/stream-resolver.ts
3718
4810
  var HARNESS_DIR = ".harness";
4811
+ var STREAMS_DIR = "streams";
4812
+ var INDEX_FILE = "index.json";
4813
+ var STREAM_NAME_REGEX = /^[a-z0-9][a-z0-9._-]*$/;
4814
+ function streamsDir(projectPath) {
4815
+ return path2.join(projectPath, HARNESS_DIR, STREAMS_DIR);
4816
+ }
4817
+ function indexPath(projectPath) {
4818
+ return path2.join(streamsDir(projectPath), INDEX_FILE);
4819
+ }
4820
+ function validateStreamName(name) {
4821
+ if (!STREAM_NAME_REGEX.test(name)) {
4822
+ return Err(
4823
+ new Error(
4824
+ `Invalid stream name '${name}'. Names must match [a-z0-9][a-z0-9._-]* (lowercase alphanumeric, dots, hyphens, underscores).`
4825
+ )
4826
+ );
4827
+ }
4828
+ return Ok(void 0);
4829
+ }
4830
+ async function loadStreamIndex(projectPath) {
4831
+ const idxPath = indexPath(projectPath);
4832
+ if (!fs3.existsSync(idxPath)) {
4833
+ return Ok({ ...DEFAULT_STREAM_INDEX, streams: {} });
4834
+ }
4835
+ try {
4836
+ const raw = fs3.readFileSync(idxPath, "utf-8");
4837
+ const parsed = JSON.parse(raw);
4838
+ const result = StreamIndexSchema.safeParse(parsed);
4839
+ if (!result.success) {
4840
+ return Err(new Error(`Invalid stream index: ${result.error.message}`));
4841
+ }
4842
+ return Ok(result.data);
4843
+ } catch (error) {
4844
+ return Err(
4845
+ new Error(
4846
+ `Failed to load stream index: ${error instanceof Error ? error.message : String(error)}`
4847
+ )
4848
+ );
4849
+ }
4850
+ }
4851
+ async function saveStreamIndex(projectPath, index) {
4852
+ const dir = streamsDir(projectPath);
4853
+ try {
4854
+ fs3.mkdirSync(dir, { recursive: true });
4855
+ fs3.writeFileSync(indexPath(projectPath), JSON.stringify(index, null, 2));
4856
+ return Ok(void 0);
4857
+ } catch (error) {
4858
+ return Err(
4859
+ new Error(
4860
+ `Failed to save stream index: ${error instanceof Error ? error.message : String(error)}`
4861
+ )
4862
+ );
4863
+ }
4864
+ }
4865
+ var branchCache = /* @__PURE__ */ new Map();
4866
+ var BRANCH_CACHE_TTL_MS = 3e4;
4867
+ function getCurrentBranch(projectPath) {
4868
+ const cached = branchCache.get(projectPath);
4869
+ if (cached && Date.now() - cached.timestamp < BRANCH_CACHE_TTL_MS) {
4870
+ return cached.branch;
4871
+ }
4872
+ try {
4873
+ const branch = execSync("git rev-parse --abbrev-ref HEAD", {
4874
+ cwd: projectPath,
4875
+ stdio: "pipe"
4876
+ }).toString().trim();
4877
+ branchCache.set(projectPath, { branch, timestamp: Date.now() });
4878
+ return branch;
4879
+ } catch {
4880
+ branchCache.set(projectPath, { branch: null, timestamp: Date.now() });
4881
+ return null;
4882
+ }
4883
+ }
4884
+ async function resolveStreamPath(projectPath, options) {
4885
+ const idxResult = await loadStreamIndex(projectPath);
4886
+ if (!idxResult.ok) return idxResult;
4887
+ const index = idxResult.value;
4888
+ if (options?.stream) {
4889
+ if (!index.streams[options.stream]) {
4890
+ return Err(
4891
+ new Error(
4892
+ `Stream '${options.stream}' not found. Known streams: ${Object.keys(index.streams).join(", ") || "none"}`
4893
+ )
4894
+ );
4895
+ }
4896
+ return Ok(path2.join(streamsDir(projectPath), options.stream));
4897
+ }
4898
+ const branch = getCurrentBranch(projectPath);
4899
+ if (branch && branch !== "main" && branch !== "master") {
4900
+ for (const [name, info] of Object.entries(index.streams)) {
4901
+ if (info.branch === branch) {
4902
+ return Ok(path2.join(streamsDir(projectPath), name));
4903
+ }
4904
+ }
4905
+ }
4906
+ if (index.activeStream && index.streams[index.activeStream]) {
4907
+ return Ok(path2.join(streamsDir(projectPath), index.activeStream));
4908
+ }
4909
+ return Err(
4910
+ new Error(
4911
+ `Cannot resolve stream. Specify --stream <name> or create a stream. Known streams: ${Object.keys(index.streams).join(", ") || "none"}`
4912
+ )
4913
+ );
4914
+ }
4915
+ async function touchStream(projectPath, name) {
4916
+ const idxResult = await loadStreamIndex(projectPath);
4917
+ if (!idxResult.ok) return idxResult;
4918
+ const index = idxResult.value;
4919
+ if (!index.streams[name]) {
4920
+ return Err(new Error(`Stream '${name}' not found`));
4921
+ }
4922
+ index.streams[name].lastActiveAt = (/* @__PURE__ */ new Date()).toISOString();
4923
+ index.activeStream = name;
4924
+ return saveStreamIndex(projectPath, index);
4925
+ }
4926
+ async function createStream(projectPath, name, branch) {
4927
+ const nameCheck = validateStreamName(name);
4928
+ if (!nameCheck.ok) return nameCheck;
4929
+ const idxResult = await loadStreamIndex(projectPath);
4930
+ if (!idxResult.ok) return idxResult;
4931
+ const index = idxResult.value;
4932
+ if (index.streams[name]) {
4933
+ return Err(new Error(`Stream '${name}' already exists`));
4934
+ }
4935
+ const streamPath = path2.join(streamsDir(projectPath), name);
4936
+ try {
4937
+ fs3.mkdirSync(streamPath, { recursive: true });
4938
+ } catch (error) {
4939
+ return Err(
4940
+ new Error(
4941
+ `Failed to create stream directory: ${error instanceof Error ? error.message : String(error)}`
4942
+ )
4943
+ );
4944
+ }
4945
+ const now = (/* @__PURE__ */ new Date()).toISOString();
4946
+ index.streams[name] = {
4947
+ name,
4948
+ branch,
4949
+ createdAt: now,
4950
+ lastActiveAt: now
4951
+ };
4952
+ const saveResult = await saveStreamIndex(projectPath, index);
4953
+ if (!saveResult.ok) return saveResult;
4954
+ return Ok(streamPath);
4955
+ }
4956
+ async function listStreams(projectPath) {
4957
+ const idxResult = await loadStreamIndex(projectPath);
4958
+ if (!idxResult.ok) return idxResult;
4959
+ return Ok(Object.values(idxResult.value.streams));
4960
+ }
4961
+ async function setActiveStream(projectPath, name) {
4962
+ const idxResult = await loadStreamIndex(projectPath);
4963
+ if (!idxResult.ok) return idxResult;
4964
+ const index = idxResult.value;
4965
+ if (!index.streams[name]) {
4966
+ return Err(new Error(`Stream '${name}' not found`));
4967
+ }
4968
+ index.activeStream = name;
4969
+ return saveStreamIndex(projectPath, index);
4970
+ }
4971
+ async function archiveStream(projectPath, name) {
4972
+ const idxResult = await loadStreamIndex(projectPath);
4973
+ if (!idxResult.ok) return idxResult;
4974
+ const index = idxResult.value;
4975
+ if (!index.streams[name]) {
4976
+ return Err(new Error(`Stream '${name}' not found`));
4977
+ }
4978
+ const streamPath = path2.join(streamsDir(projectPath), name);
4979
+ const archiveDir = path2.join(projectPath, HARNESS_DIR, "archive", "streams");
4980
+ try {
4981
+ fs3.mkdirSync(archiveDir, { recursive: true });
4982
+ const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
4983
+ fs3.renameSync(streamPath, path2.join(archiveDir, `${name}-${date}`));
4984
+ } catch (error) {
4985
+ return Err(
4986
+ new Error(
4987
+ `Failed to archive stream: ${error instanceof Error ? error.message : String(error)}`
4988
+ )
4989
+ );
4990
+ }
4991
+ delete index.streams[name];
4992
+ if (index.activeStream === name) {
4993
+ index.activeStream = null;
4994
+ }
4995
+ return saveStreamIndex(projectPath, index);
4996
+ }
4997
+ function getStreamForBranch(index, branch) {
4998
+ for (const [name, info] of Object.entries(index.streams)) {
4999
+ if (info.branch === branch) return name;
5000
+ }
5001
+ return null;
5002
+ }
5003
+ var STATE_FILES = ["state.json", "handoff.json", "learnings.md", "failures.md"];
5004
+ async function migrateToStreams(projectPath) {
5005
+ const harnessDir = path2.join(projectPath, HARNESS_DIR);
5006
+ if (fs3.existsSync(indexPath(projectPath))) {
5007
+ return Ok(void 0);
5008
+ }
5009
+ const filesToMove = STATE_FILES.filter((f) => fs3.existsSync(path2.join(harnessDir, f)));
5010
+ if (filesToMove.length === 0) {
5011
+ return Ok(void 0);
5012
+ }
5013
+ const defaultDir = path2.join(streamsDir(projectPath), "default");
5014
+ try {
5015
+ fs3.mkdirSync(defaultDir, { recursive: true });
5016
+ for (const file of filesToMove) {
5017
+ fs3.renameSync(path2.join(harnessDir, file), path2.join(defaultDir, file));
5018
+ }
5019
+ } catch (error) {
5020
+ return Err(
5021
+ new Error(`Migration failed: ${error instanceof Error ? error.message : String(error)}`)
5022
+ );
5023
+ }
5024
+ const now = (/* @__PURE__ */ new Date()).toISOString();
5025
+ const index = {
5026
+ schemaVersion: 1,
5027
+ activeStream: "default",
5028
+ streams: {
5029
+ default: {
5030
+ name: "default",
5031
+ createdAt: now,
5032
+ lastActiveAt: now
5033
+ }
5034
+ }
5035
+ };
5036
+ return saveStreamIndex(projectPath, index);
5037
+ }
5038
+
5039
+ // src/state/state-manager.ts
5040
+ var HARNESS_DIR2 = ".harness";
3719
5041
  var STATE_FILE = "state.json";
3720
5042
  var LEARNINGS_FILE = "learnings.md";
3721
5043
  var FAILURES_FILE = "failures.md";
3722
5044
  var HANDOFF_FILE = "handoff.json";
3723
5045
  var GATE_CONFIG_FILE = "gate.json";
3724
- async function loadState(projectPath) {
3725
- const statePath = path.join(projectPath, HARNESS_DIR, STATE_FILE);
3726
- if (!fs2.existsSync(statePath)) {
3727
- return Ok({ ...DEFAULT_STATE });
5046
+ var INDEX_FILE2 = "index.json";
5047
+ async function getStateDir(projectPath, stream) {
5048
+ const streamsIndexPath = path3.join(projectPath, HARNESS_DIR2, "streams", INDEX_FILE2);
5049
+ const hasStreams = fs4.existsSync(streamsIndexPath);
5050
+ if (stream || hasStreams) {
5051
+ const result = await resolveStreamPath(projectPath, stream ? { stream } : void 0);
5052
+ if (result.ok) {
5053
+ return result;
5054
+ }
5055
+ if (stream) {
5056
+ return result;
5057
+ }
3728
5058
  }
5059
+ return Ok(path3.join(projectPath, HARNESS_DIR2));
5060
+ }
5061
+ async function loadState(projectPath, stream) {
3729
5062
  try {
3730
- const raw = fs2.readFileSync(statePath, "utf-8");
5063
+ const dirResult = await getStateDir(projectPath, stream);
5064
+ if (!dirResult.ok) return dirResult;
5065
+ const stateDir = dirResult.value;
5066
+ const statePath = path3.join(stateDir, STATE_FILE);
5067
+ if (!fs4.existsSync(statePath)) {
5068
+ return Ok({ ...DEFAULT_STATE });
5069
+ }
5070
+ const raw = fs4.readFileSync(statePath, "utf-8");
3731
5071
  const parsed = JSON.parse(raw);
3732
5072
  const result = HarnessStateSchema.safeParse(parsed);
3733
5073
  if (!result.success) {
@@ -3736,18 +5076,18 @@ async function loadState(projectPath) {
3736
5076
  return Ok(result.data);
3737
5077
  } catch (error) {
3738
5078
  return Err(
3739
- new Error(
3740
- `Failed to load state from ${statePath}: ${error instanceof Error ? error.message : String(error)}`
3741
- )
5079
+ new Error(`Failed to load state: ${error instanceof Error ? error.message : String(error)}`)
3742
5080
  );
3743
5081
  }
3744
5082
  }
3745
- async function saveState(projectPath, state) {
3746
- const harnessDir = path.join(projectPath, HARNESS_DIR);
3747
- const statePath = path.join(harnessDir, STATE_FILE);
5083
+ async function saveState(projectPath, state, stream) {
3748
5084
  try {
3749
- fs2.mkdirSync(harnessDir, { recursive: true });
3750
- fs2.writeFileSync(statePath, JSON.stringify(state, null, 2));
5085
+ const dirResult = await getStateDir(projectPath, stream);
5086
+ if (!dirResult.ok) return dirResult;
5087
+ const stateDir = dirResult.value;
5088
+ const statePath = path3.join(stateDir, STATE_FILE);
5089
+ fs4.mkdirSync(stateDir, { recursive: true });
5090
+ fs4.writeFileSync(statePath, JSON.stringify(state, null, 2));
3751
5091
  return Ok(void 0);
3752
5092
  } catch (error) {
3753
5093
  return Err(
@@ -3755,11 +5095,13 @@ async function saveState(projectPath, state) {
3755
5095
  );
3756
5096
  }
3757
5097
  }
3758
- async function appendLearning(projectPath, learning, skillName, outcome) {
3759
- const harnessDir = path.join(projectPath, HARNESS_DIR);
3760
- const learningsPath = path.join(harnessDir, LEARNINGS_FILE);
5098
+ async function appendLearning(projectPath, learning, skillName, outcome, stream) {
3761
5099
  try {
3762
- fs2.mkdirSync(harnessDir, { recursive: true });
5100
+ const dirResult = await getStateDir(projectPath, stream);
5101
+ if (!dirResult.ok) return dirResult;
5102
+ const stateDir = dirResult.value;
5103
+ const learningsPath = path3.join(stateDir, LEARNINGS_FILE);
5104
+ fs4.mkdirSync(stateDir, { recursive: true });
3763
5105
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
3764
5106
  let entry;
3765
5107
  if (skillName && outcome) {
@@ -3775,11 +5117,11 @@ async function appendLearning(projectPath, learning, skillName, outcome) {
3775
5117
  - **${timestamp}:** ${learning}
3776
5118
  `;
3777
5119
  }
3778
- if (!fs2.existsSync(learningsPath)) {
3779
- fs2.writeFileSync(learningsPath, `# Learnings
5120
+ if (!fs4.existsSync(learningsPath)) {
5121
+ fs4.writeFileSync(learningsPath, `# Learnings
3780
5122
  ${entry}`);
3781
5123
  } else {
3782
- fs2.appendFileSync(learningsPath, entry);
5124
+ fs4.appendFileSync(learningsPath, entry);
3783
5125
  }
3784
5126
  return Ok(void 0);
3785
5127
  } catch (error) {
@@ -3790,13 +5132,16 @@ ${entry}`);
3790
5132
  );
3791
5133
  }
3792
5134
  }
3793
- async function loadRelevantLearnings(projectPath, skillName) {
3794
- const learningsPath = path.join(projectPath, HARNESS_DIR, LEARNINGS_FILE);
3795
- if (!fs2.existsSync(learningsPath)) {
3796
- return Ok([]);
3797
- }
5135
+ async function loadRelevantLearnings(projectPath, skillName, stream) {
3798
5136
  try {
3799
- const content = fs2.readFileSync(learningsPath, "utf-8");
5137
+ const dirResult = await getStateDir(projectPath, stream);
5138
+ if (!dirResult.ok) return dirResult;
5139
+ const stateDir = dirResult.value;
5140
+ const learningsPath = path3.join(stateDir, LEARNINGS_FILE);
5141
+ if (!fs4.existsSync(learningsPath)) {
5142
+ return Ok([]);
5143
+ }
5144
+ const content = fs4.readFileSync(learningsPath, "utf-8");
3800
5145
  const lines = content.split("\n");
3801
5146
  const entries = [];
3802
5147
  let currentBlock = [];
@@ -3830,20 +5175,22 @@ async function loadRelevantLearnings(projectPath, skillName) {
3830
5175
  }
3831
5176
  }
3832
5177
  var FAILURE_LINE_REGEX = /^- \*\*(\d{4}-\d{2}-\d{2}) \[skill:([^\]]+)\] \[type:([^\]]+)\]:\*\* (.+)$/;
3833
- async function appendFailure(projectPath, description, skillName, type) {
3834
- const harnessDir = path.join(projectPath, HARNESS_DIR);
3835
- const failuresPath = path.join(harnessDir, FAILURES_FILE);
5178
+ async function appendFailure(projectPath, description, skillName, type, stream) {
3836
5179
  try {
3837
- fs2.mkdirSync(harnessDir, { recursive: true });
5180
+ const dirResult = await getStateDir(projectPath, stream);
5181
+ if (!dirResult.ok) return dirResult;
5182
+ const stateDir = dirResult.value;
5183
+ const failuresPath = path3.join(stateDir, FAILURES_FILE);
5184
+ fs4.mkdirSync(stateDir, { recursive: true });
3838
5185
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
3839
5186
  const entry = `
3840
5187
  - **${timestamp} [skill:${skillName}] [type:${type}]:** ${description}
3841
5188
  `;
3842
- if (!fs2.existsSync(failuresPath)) {
3843
- fs2.writeFileSync(failuresPath, `# Failures
5189
+ if (!fs4.existsSync(failuresPath)) {
5190
+ fs4.writeFileSync(failuresPath, `# Failures
3844
5191
  ${entry}`);
3845
5192
  } else {
3846
- fs2.appendFileSync(failuresPath, entry);
5193
+ fs4.appendFileSync(failuresPath, entry);
3847
5194
  }
3848
5195
  return Ok(void 0);
3849
5196
  } catch (error) {
@@ -3854,13 +5201,16 @@ ${entry}`);
3854
5201
  );
3855
5202
  }
3856
5203
  }
3857
- async function loadFailures(projectPath) {
3858
- const failuresPath = path.join(projectPath, HARNESS_DIR, FAILURES_FILE);
3859
- if (!fs2.existsSync(failuresPath)) {
3860
- return Ok([]);
3861
- }
5204
+ async function loadFailures(projectPath, stream) {
3862
5205
  try {
3863
- const content = fs2.readFileSync(failuresPath, "utf-8");
5206
+ const dirResult = await getStateDir(projectPath, stream);
5207
+ if (!dirResult.ok) return dirResult;
5208
+ const stateDir = dirResult.value;
5209
+ const failuresPath = path3.join(stateDir, FAILURES_FILE);
5210
+ if (!fs4.existsSync(failuresPath)) {
5211
+ return Ok([]);
5212
+ }
5213
+ const content = fs4.readFileSync(failuresPath, "utf-8");
3864
5214
  const entries = [];
3865
5215
  for (const line of content.split("\n")) {
3866
5216
  const match = line.match(FAILURE_LINE_REGEX);
@@ -3882,23 +5232,25 @@ async function loadFailures(projectPath) {
3882
5232
  );
3883
5233
  }
3884
5234
  }
3885
- async function archiveFailures(projectPath) {
3886
- const harnessDir = path.join(projectPath, HARNESS_DIR);
3887
- const failuresPath = path.join(harnessDir, FAILURES_FILE);
3888
- if (!fs2.existsSync(failuresPath)) {
3889
- return Ok(void 0);
3890
- }
5235
+ async function archiveFailures(projectPath, stream) {
3891
5236
  try {
3892
- const archiveDir = path.join(harnessDir, "archive");
3893
- fs2.mkdirSync(archiveDir, { recursive: true });
5237
+ const dirResult = await getStateDir(projectPath, stream);
5238
+ if (!dirResult.ok) return dirResult;
5239
+ const stateDir = dirResult.value;
5240
+ const failuresPath = path3.join(stateDir, FAILURES_FILE);
5241
+ if (!fs4.existsSync(failuresPath)) {
5242
+ return Ok(void 0);
5243
+ }
5244
+ const archiveDir = path3.join(stateDir, "archive");
5245
+ fs4.mkdirSync(archiveDir, { recursive: true });
3894
5246
  const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
3895
5247
  let archiveName = `failures-${date}.md`;
3896
5248
  let counter = 2;
3897
- while (fs2.existsSync(path.join(archiveDir, archiveName))) {
5249
+ while (fs4.existsSync(path3.join(archiveDir, archiveName))) {
3898
5250
  archiveName = `failures-${date}-${counter}.md`;
3899
5251
  counter++;
3900
5252
  }
3901
- fs2.renameSync(failuresPath, path.join(archiveDir, archiveName));
5253
+ fs4.renameSync(failuresPath, path3.join(archiveDir, archiveName));
3902
5254
  return Ok(void 0);
3903
5255
  } catch (error) {
3904
5256
  return Err(
@@ -3908,12 +5260,14 @@ async function archiveFailures(projectPath) {
3908
5260
  );
3909
5261
  }
3910
5262
  }
3911
- async function saveHandoff(projectPath, handoff) {
3912
- const harnessDir = path.join(projectPath, HARNESS_DIR);
3913
- const handoffPath = path.join(harnessDir, HANDOFF_FILE);
5263
+ async function saveHandoff(projectPath, handoff, stream) {
3914
5264
  try {
3915
- fs2.mkdirSync(harnessDir, { recursive: true });
3916
- fs2.writeFileSync(handoffPath, JSON.stringify(handoff, null, 2));
5265
+ const dirResult = await getStateDir(projectPath, stream);
5266
+ if (!dirResult.ok) return dirResult;
5267
+ const stateDir = dirResult.value;
5268
+ const handoffPath = path3.join(stateDir, HANDOFF_FILE);
5269
+ fs4.mkdirSync(stateDir, { recursive: true });
5270
+ fs4.writeFileSync(handoffPath, JSON.stringify(handoff, null, 2));
3917
5271
  return Ok(void 0);
3918
5272
  } catch (error) {
3919
5273
  return Err(
@@ -3921,13 +5275,16 @@ async function saveHandoff(projectPath, handoff) {
3921
5275
  );
3922
5276
  }
3923
5277
  }
3924
- async function loadHandoff(projectPath) {
3925
- const handoffPath = path.join(projectPath, HARNESS_DIR, HANDOFF_FILE);
3926
- if (!fs2.existsSync(handoffPath)) {
3927
- return Ok(null);
3928
- }
5278
+ async function loadHandoff(projectPath, stream) {
3929
5279
  try {
3930
- const raw = fs2.readFileSync(handoffPath, "utf-8");
5280
+ const dirResult = await getStateDir(projectPath, stream);
5281
+ if (!dirResult.ok) return dirResult;
5282
+ const stateDir = dirResult.value;
5283
+ const handoffPath = path3.join(stateDir, HANDOFF_FILE);
5284
+ if (!fs4.existsSync(handoffPath)) {
5285
+ return Ok(null);
5286
+ }
5287
+ const raw = fs4.readFileSync(handoffPath, "utf-8");
3931
5288
  const parsed = JSON.parse(raw);
3932
5289
  const result = HandoffSchema.safeParse(parsed);
3933
5290
  if (!result.success) {
@@ -3941,40 +5298,51 @@ async function loadHandoff(projectPath) {
3941
5298
  }
3942
5299
  }
3943
5300
  async function runMechanicalGate(projectPath) {
3944
- const harnessDir = path.join(projectPath, HARNESS_DIR);
3945
- const gateConfigPath = path.join(harnessDir, GATE_CONFIG_FILE);
5301
+ const harnessDir = path3.join(projectPath, HARNESS_DIR2);
5302
+ const gateConfigPath = path3.join(harnessDir, GATE_CONFIG_FILE);
3946
5303
  try {
3947
5304
  let checks = [];
3948
- if (fs2.existsSync(gateConfigPath)) {
3949
- const raw = JSON.parse(fs2.readFileSync(gateConfigPath, "utf-8"));
5305
+ if (fs4.existsSync(gateConfigPath)) {
5306
+ const raw = JSON.parse(fs4.readFileSync(gateConfigPath, "utf-8"));
3950
5307
  const config = GateConfigSchema.safeParse(raw);
3951
5308
  if (config.success && config.data.checks) {
3952
5309
  checks = config.data.checks;
3953
5310
  }
3954
5311
  }
3955
5312
  if (checks.length === 0) {
3956
- const packageJsonPath = path.join(projectPath, "package.json");
3957
- if (fs2.existsSync(packageJsonPath)) {
3958
- const pkg = JSON.parse(fs2.readFileSync(packageJsonPath, "utf-8"));
5313
+ const packageJsonPath = path3.join(projectPath, "package.json");
5314
+ if (fs4.existsSync(packageJsonPath)) {
5315
+ const pkg = JSON.parse(fs4.readFileSync(packageJsonPath, "utf-8"));
3959
5316
  const scripts = pkg.scripts || {};
3960
5317
  if (scripts.test) checks.push({ name: "test", command: "npm test" });
3961
5318
  if (scripts.lint) checks.push({ name: "lint", command: "npm run lint" });
3962
5319
  if (scripts.typecheck) checks.push({ name: "typecheck", command: "npm run typecheck" });
3963
5320
  if (scripts.build) checks.push({ name: "build", command: "npm run build" });
3964
5321
  }
3965
- if (fs2.existsSync(path.join(projectPath, "go.mod"))) {
5322
+ if (fs4.existsSync(path3.join(projectPath, "go.mod"))) {
3966
5323
  checks.push({ name: "test", command: "go test ./..." });
3967
5324
  checks.push({ name: "build", command: "go build ./..." });
3968
5325
  }
3969
- if (fs2.existsSync(path.join(projectPath, "pyproject.toml")) || fs2.existsSync(path.join(projectPath, "setup.py"))) {
5326
+ if (fs4.existsSync(path3.join(projectPath, "pyproject.toml")) || fs4.existsSync(path3.join(projectPath, "setup.py"))) {
3970
5327
  checks.push({ name: "test", command: "python -m pytest" });
3971
5328
  }
3972
5329
  }
3973
5330
  const results = [];
5331
+ const SAFE_GATE_COMMAND = /^(?:npm|pnpm|yarn)\s+(?:test|run\s+[\w.-]+|run-script\s+[\w.-]+)$|^go\s+(?:test|build|vet|fmt)\s+[\w./ -]+$|^(?:python|python3)\s+-m\s+[\w.-]+$|^make\s+[\w.-]+$|^cargo\s+(?:test|build|check|clippy)(?:\s+[\w./ -]+)?$|^(?:gradle|mvn)\s+[\w:.-]+$/;
3974
5332
  for (const check of checks) {
5333
+ if (!SAFE_GATE_COMMAND.test(check.command)) {
5334
+ results.push({
5335
+ name: check.name,
5336
+ passed: false,
5337
+ command: check.command,
5338
+ output: `Blocked: command does not match safe gate pattern. Allowed prefixes: npm, npx, pnpm, yarn, go, python, python3, make, cargo, gradle, mvn`,
5339
+ duration: 0
5340
+ });
5341
+ continue;
5342
+ }
3975
5343
  const start = Date.now();
3976
5344
  try {
3977
- execSync(check.command, {
5345
+ execSync2(check.command, {
3978
5346
  cwd: projectPath,
3979
5347
  stdio: "pipe",
3980
5348
  timeout: 12e4
@@ -4157,34 +5525,610 @@ async function runMultiTurnPipeline(initialContext, turnExecutor, options) {
4157
5525
  };
4158
5526
  }
4159
5527
 
4160
- // src/ci/check-orchestrator.ts
4161
- import * as path2 from "path";
4162
- var ALL_CHECKS = ["validate", "deps", "docs", "entropy", "phase-gate"];
4163
- async function runSingleCheck(name, projectRoot, config) {
4164
- const start = Date.now();
4165
- const issues = [];
4166
- try {
4167
- switch (name) {
4168
- case "validate": {
4169
- const agentsPath = path2.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
4170
- const result = await validateAgentsMap(agentsPath);
4171
- if (!result.ok) {
4172
- issues.push({ severity: "error", message: result.error.message });
4173
- } else if (!result.value.valid) {
4174
- if (result.value.errors) {
4175
- for (const err of result.value.errors) {
4176
- issues.push({ severity: "error", message: err.message });
4177
- }
4178
- }
4179
- for (const section of result.value.missingSections) {
4180
- issues.push({ severity: "warning", message: `Missing section: ${section}` });
4181
- }
4182
- for (const link of result.value.brokenLinks) {
4183
- issues.push({
4184
- severity: "warning",
4185
- message: `Broken link: ${link.text} \u2192 ${link.path}`,
4186
- file: link.path
4187
- });
5528
+ // src/security/scanner.ts
5529
+ import * as fs6 from "fs/promises";
5530
+
5531
+ // src/security/rules/registry.ts
5532
+ var RuleRegistry = class {
5533
+ rules = /* @__PURE__ */ new Map();
5534
+ register(rule) {
5535
+ this.rules.set(rule.id, rule);
5536
+ }
5537
+ registerAll(rules) {
5538
+ for (const rule of rules) {
5539
+ this.register(rule);
5540
+ }
5541
+ }
5542
+ getById(id) {
5543
+ return this.rules.get(id);
5544
+ }
5545
+ getAll() {
5546
+ return Array.from(this.rules.values());
5547
+ }
5548
+ getByCategory(category) {
5549
+ return this.getAll().filter((r) => r.category === category);
5550
+ }
5551
+ getForStacks(stacks) {
5552
+ return this.getAll().filter((rule) => {
5553
+ if (!rule.stack || rule.stack.length === 0) return true;
5554
+ return rule.stack.some((s) => stacks.includes(s));
5555
+ });
5556
+ }
5557
+ };
5558
+
5559
+ // src/security/config.ts
5560
+ import { z as z4 } from "zod";
5561
+
5562
+ // src/security/types.ts
5563
+ var DEFAULT_SECURITY_CONFIG = {
5564
+ enabled: true,
5565
+ strict: false,
5566
+ rules: {},
5567
+ exclude: ["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]
5568
+ };
5569
+
5570
+ // src/security/config.ts
5571
+ var RuleOverrideSchema = z4.enum(["off", "error", "warning", "info"]);
5572
+ var SecurityConfigSchema = z4.object({
5573
+ enabled: z4.boolean().default(true),
5574
+ strict: z4.boolean().default(false),
5575
+ rules: z4.record(z4.string(), RuleOverrideSchema).optional().default({}),
5576
+ exclude: z4.array(z4.string()).optional().default(["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]),
5577
+ external: z4.object({
5578
+ semgrep: z4.object({
5579
+ enabled: z4.union([z4.literal("auto"), z4.boolean()]).default("auto"),
5580
+ rulesets: z4.array(z4.string()).optional()
5581
+ }).optional(),
5582
+ gitleaks: z4.object({
5583
+ enabled: z4.union([z4.literal("auto"), z4.boolean()]).default("auto")
5584
+ }).optional()
5585
+ }).optional()
5586
+ });
5587
+ function parseSecurityConfig(input) {
5588
+ if (input === void 0 || input === null) {
5589
+ return { ...DEFAULT_SECURITY_CONFIG };
5590
+ }
5591
+ const result = SecurityConfigSchema.safeParse(input);
5592
+ if (result.success) {
5593
+ return result.data;
5594
+ }
5595
+ return { ...DEFAULT_SECURITY_CONFIG };
5596
+ }
5597
+ function resolveRuleSeverity(ruleId, defaultSeverity, overrides, strict) {
5598
+ if (overrides[ruleId] !== void 0) {
5599
+ return overrides[ruleId];
5600
+ }
5601
+ for (const [pattern, override] of Object.entries(overrides)) {
5602
+ if (pattern.endsWith("*")) {
5603
+ const prefix = pattern.slice(0, -1);
5604
+ if (ruleId.startsWith(prefix)) {
5605
+ return override;
5606
+ }
5607
+ }
5608
+ }
5609
+ if (strict && (defaultSeverity === "warning" || defaultSeverity === "info")) {
5610
+ return "error";
5611
+ }
5612
+ return defaultSeverity;
5613
+ }
5614
+
5615
+ // src/security/stack-detector.ts
5616
+ import * as fs5 from "fs";
5617
+ import * as path4 from "path";
5618
+ function detectStack(projectRoot) {
5619
+ const stacks = [];
5620
+ const pkgJsonPath = path4.join(projectRoot, "package.json");
5621
+ if (fs5.existsSync(pkgJsonPath)) {
5622
+ stacks.push("node");
5623
+ try {
5624
+ const pkgJson = JSON.parse(fs5.readFileSync(pkgJsonPath, "utf-8"));
5625
+ const allDeps = {
5626
+ ...pkgJson.dependencies,
5627
+ ...pkgJson.devDependencies
5628
+ };
5629
+ if (allDeps.react || allDeps["react-dom"]) stacks.push("react");
5630
+ if (allDeps.express) stacks.push("express");
5631
+ if (allDeps.koa) stacks.push("koa");
5632
+ if (allDeps.fastify) stacks.push("fastify");
5633
+ if (allDeps.next) stacks.push("next");
5634
+ if (allDeps.vue) stacks.push("vue");
5635
+ if (allDeps.angular || allDeps["@angular/core"]) stacks.push("angular");
5636
+ } catch {
5637
+ }
5638
+ }
5639
+ const goModPath = path4.join(projectRoot, "go.mod");
5640
+ if (fs5.existsSync(goModPath)) {
5641
+ stacks.push("go");
5642
+ }
5643
+ const requirementsPath = path4.join(projectRoot, "requirements.txt");
5644
+ const pyprojectPath = path4.join(projectRoot, "pyproject.toml");
5645
+ if (fs5.existsSync(requirementsPath) || fs5.existsSync(pyprojectPath)) {
5646
+ stacks.push("python");
5647
+ }
5648
+ return stacks;
5649
+ }
5650
+
5651
+ // src/security/rules/secrets.ts
5652
+ var secretRules = [
5653
+ {
5654
+ id: "SEC-SEC-001",
5655
+ name: "AWS Access Key",
5656
+ category: "secrets",
5657
+ severity: "error",
5658
+ confidence: "high",
5659
+ patterns: [/(?:AKIA|ABIA|ACCA|ASIA)[0-9A-Z]{16}/],
5660
+ message: "Hardcoded AWS access key detected",
5661
+ remediation: "Use environment variables or a secrets manager",
5662
+ references: ["CWE-798"]
5663
+ },
5664
+ {
5665
+ id: "SEC-SEC-002",
5666
+ name: "Generic API Key/Secret Assignment",
5667
+ category: "secrets",
5668
+ severity: "error",
5669
+ confidence: "high",
5670
+ patterns: [
5671
+ /(?:api[_-]?key|api[_-]?secret|secret[_-]?key|access[_-]?token|auth[_-]?token)\s*[:=]\s*['"][^'"]{8,}['"]/i
5672
+ ],
5673
+ message: "Hardcoded API key or secret detected",
5674
+ remediation: "Use environment variables: process.env.API_KEY",
5675
+ references: ["CWE-798"]
5676
+ },
5677
+ {
5678
+ id: "SEC-SEC-003",
5679
+ name: "Private Key",
5680
+ category: "secrets",
5681
+ severity: "error",
5682
+ confidence: "high",
5683
+ patterns: [/-----BEGIN\s(?:RSA|DSA|EC|OPENSSH|PGP)\s(?:PRIVATE\s)?KEY-----/],
5684
+ message: "Private key detected in source code",
5685
+ remediation: "Store private keys in a secrets manager, never in source",
5686
+ references: ["CWE-321"]
5687
+ },
5688
+ {
5689
+ id: "SEC-SEC-004",
5690
+ name: "Password Assignment",
5691
+ category: "secrets",
5692
+ severity: "error",
5693
+ confidence: "high",
5694
+ patterns: [/(?:password|passwd|pwd)\s*[:=]\s*['"][^'"]{4,}['"]/i],
5695
+ message: "Hardcoded password detected",
5696
+ remediation: "Use environment variables or a secrets manager",
5697
+ references: ["CWE-259"]
5698
+ },
5699
+ {
5700
+ id: "SEC-SEC-005",
5701
+ name: "JWT/Bearer Token",
5702
+ category: "secrets",
5703
+ severity: "error",
5704
+ confidence: "high",
5705
+ patterns: [/eyJ[A-Za-z0-9_-]{10,}\.[A-Za-z0-9_-]{10,}\.[A-Za-z0-9_-]{10,}/],
5706
+ message: "Hardcoded JWT token detected",
5707
+ remediation: "Tokens should be fetched at runtime, not embedded in source",
5708
+ references: ["CWE-798"]
5709
+ }
5710
+ ];
5711
+
5712
+ // src/security/rules/injection.ts
5713
+ var injectionRules = [
5714
+ {
5715
+ id: "SEC-INJ-001",
5716
+ name: "eval/Function Constructor",
5717
+ category: "injection",
5718
+ severity: "error",
5719
+ confidence: "high",
5720
+ patterns: [/\beval\s*\(/, /new\s+Function\s*\(/],
5721
+ message: "eval() and Function constructor allow arbitrary code execution",
5722
+ remediation: "Use JSON.parse() for data, or a sandboxed interpreter for dynamic code",
5723
+ references: ["CWE-95"]
5724
+ },
5725
+ {
5726
+ id: "SEC-INJ-002",
5727
+ name: "SQL String Concatenation",
5728
+ category: "injection",
5729
+ severity: "error",
5730
+ confidence: "high",
5731
+ patterns: [
5732
+ /(?:query|execute|prepare)\s*\(\s*['"][^'"]*['"]\s*\+/,
5733
+ /(?:query|execute|prepare)\s*\(\s*`[^`]*\$\{/
5734
+ ],
5735
+ message: "SQL query built with string concatenation or template literals with interpolation",
5736
+ remediation: 'Use parameterized queries: query("SELECT * FROM users WHERE id = $1", [id])',
5737
+ references: ["CWE-89"]
5738
+ },
5739
+ {
5740
+ id: "SEC-INJ-003",
5741
+ name: "Command Injection",
5742
+ category: "injection",
5743
+ severity: "error",
5744
+ confidence: "high",
5745
+ patterns: [
5746
+ /\bexec\s*\(\s*['"][^'"]*['"]\s*\+/,
5747
+ /\bexec\s*\(\s*`[^`]*\$\{/,
5748
+ /\bexecSync\s*\(\s*['"][^'"]*['"]\s*\+/,
5749
+ /\bexecSync\s*\(\s*`[^`]*\$\{/
5750
+ ],
5751
+ message: "Shell command built with string concatenation",
5752
+ remediation: "Use execFile() with argument array instead of exec() with string",
5753
+ references: ["CWE-78"]
5754
+ }
5755
+ ];
5756
+
5757
+ // src/security/rules/xss.ts
5758
+ var xssRules = [
5759
+ {
5760
+ id: "SEC-XSS-001",
5761
+ name: "innerHTML Assignment",
5762
+ category: "xss",
5763
+ severity: "error",
5764
+ confidence: "high",
5765
+ patterns: [/\.innerHTML\s*=/],
5766
+ message: "Direct innerHTML assignment can lead to XSS",
5767
+ remediation: "Use textContent for text, or a sanitizer like DOMPurify for HTML",
5768
+ references: ["CWE-79"]
5769
+ },
5770
+ {
5771
+ id: "SEC-XSS-002",
5772
+ name: "dangerouslySetInnerHTML",
5773
+ category: "xss",
5774
+ severity: "error",
5775
+ confidence: "high",
5776
+ patterns: [/dangerouslySetInnerHTML/],
5777
+ message: "dangerouslySetInnerHTML bypasses React XSS protections",
5778
+ remediation: "Sanitize HTML with DOMPurify before passing to dangerouslySetInnerHTML",
5779
+ references: ["CWE-79"]
5780
+ },
5781
+ {
5782
+ id: "SEC-XSS-003",
5783
+ name: "document.write",
5784
+ category: "xss",
5785
+ severity: "error",
5786
+ confidence: "high",
5787
+ patterns: [/document\.write\s*\(/, /document\.writeln\s*\(/],
5788
+ message: "document.write can lead to XSS and is a legacy API",
5789
+ remediation: "Use DOM APIs: createElement, appendChild, textContent",
5790
+ references: ["CWE-79"]
5791
+ }
5792
+ ];
5793
+
5794
+ // src/security/rules/crypto.ts
5795
+ var cryptoRules = [
5796
+ {
5797
+ id: "SEC-CRY-001",
5798
+ name: "Weak Hash Algorithm",
5799
+ category: "crypto",
5800
+ severity: "error",
5801
+ confidence: "high",
5802
+ patterns: [/createHash\s*\(\s*['"](?:md5|sha1|md4|ripemd160)['"]\s*\)/],
5803
+ message: "MD5 and SHA1 are cryptographically broken for security use",
5804
+ remediation: 'Use SHA-256 or higher: createHash("sha256")',
5805
+ references: ["CWE-328"]
5806
+ },
5807
+ {
5808
+ id: "SEC-CRY-002",
5809
+ name: "Hardcoded Encryption Key",
5810
+ category: "crypto",
5811
+ severity: "error",
5812
+ confidence: "high",
5813
+ patterns: [
5814
+ /(?:encryption[_-]?key|cipher[_-]?key|aes[_-]?key|secret[_-]?key)\s*[:=]\s*['"][^'"]{4,}['"]/i
5815
+ ],
5816
+ message: "Hardcoded encryption key detected",
5817
+ remediation: "Load encryption keys from environment variables or a key management service",
5818
+ references: ["CWE-321"]
5819
+ }
5820
+ ];
5821
+
5822
+ // src/security/rules/path-traversal.ts
5823
+ var pathTraversalRules = [
5824
+ {
5825
+ id: "SEC-PTH-001",
5826
+ name: "Path Traversal Pattern",
5827
+ category: "path-traversal",
5828
+ severity: "warning",
5829
+ confidence: "medium",
5830
+ patterns: [
5831
+ /(?:readFile|readFileSync|writeFile|writeFileSync|createReadStream|createWriteStream|access|stat|unlink|rmdir|mkdir)\s*\([^)]*\.{2}[/\\]/,
5832
+ /(?:readFile|readFileSync|writeFile|writeFileSync)\s*\([^)]*\+/
5833
+ ],
5834
+ message: "Potential path traversal: file operation with ../ or string concatenation",
5835
+ remediation: "Use path.resolve() and validate the resolved path stays within the expected directory",
5836
+ references: ["CWE-22"]
5837
+ }
5838
+ ];
5839
+
5840
+ // src/security/rules/network.ts
5841
+ var networkRules = [
5842
+ {
5843
+ id: "SEC-NET-001",
5844
+ name: "CORS Wildcard Origin",
5845
+ category: "network",
5846
+ severity: "warning",
5847
+ confidence: "medium",
5848
+ patterns: [/origin\s*:\s*['"][*]['"]/],
5849
+ message: "CORS wildcard origin allows any website to make requests",
5850
+ remediation: "Restrict CORS to specific trusted origins",
5851
+ references: ["CWE-942"]
5852
+ },
5853
+ {
5854
+ id: "SEC-NET-002",
5855
+ name: "Disabled TLS Verification",
5856
+ category: "network",
5857
+ severity: "warning",
5858
+ confidence: "high",
5859
+ patterns: [/rejectUnauthorized\s*:\s*false/],
5860
+ message: "TLS certificate verification is disabled, enabling MITM attacks",
5861
+ remediation: "Remove rejectUnauthorized: false, or use a proper CA bundle",
5862
+ references: ["CWE-295"]
5863
+ },
5864
+ {
5865
+ id: "SEC-NET-003",
5866
+ name: "Hardcoded HTTP URL",
5867
+ category: "network",
5868
+ severity: "info",
5869
+ confidence: "low",
5870
+ patterns: [/['"]http:\/\/(?!localhost|127\.0\.0\.1|0\.0\.0\.0)[^'"]+['"]/],
5871
+ message: "Non-TLS HTTP URL detected (excluding localhost)",
5872
+ remediation: "Use HTTPS for all non-local connections",
5873
+ references: ["CWE-319"]
5874
+ }
5875
+ ];
5876
+
5877
+ // src/security/rules/deserialization.ts
5878
+ var deserializationRules = [
5879
+ {
5880
+ id: "SEC-DES-001",
5881
+ name: "Unvalidated JSON Parse",
5882
+ category: "deserialization",
5883
+ severity: "warning",
5884
+ confidence: "medium",
5885
+ patterns: [
5886
+ /JSON\.parse\s*\(\s*(?:req|request)\.body/,
5887
+ /JSON\.parse\s*\(\s*(?:event|data|payload|input|body)\b/
5888
+ ],
5889
+ message: "JSON.parse on potentially untrusted input without schema validation",
5890
+ remediation: "Validate parsed data with Zod, ajv, or joi before use",
5891
+ references: ["CWE-502"]
5892
+ }
5893
+ ];
5894
+
5895
+ // src/security/rules/stack/node.ts
5896
+ var nodeRules = [
5897
+ {
5898
+ id: "SEC-NODE-001",
5899
+ name: "Prototype Pollution",
5900
+ category: "injection",
5901
+ severity: "warning",
5902
+ confidence: "medium",
5903
+ patterns: [
5904
+ /__proto__/,
5905
+ /\bconstructor\s*\[/,
5906
+ /\bprototype\s*\[/,
5907
+ /Object\.assign\s*\(\s*\w+\s*,\s*(?:req|request|body|input|params|query)\b/
5908
+ ],
5909
+ stack: ["node"],
5910
+ message: "Potential prototype pollution via __proto__, constructor, or Object.assign with untrusted input",
5911
+ remediation: "Validate keys against a whitelist, use Object.create(null), or use Map instead of plain objects",
5912
+ references: ["CWE-1321"]
5913
+ },
5914
+ {
5915
+ id: "SEC-NODE-002",
5916
+ name: "NoSQL Injection",
5917
+ category: "injection",
5918
+ severity: "warning",
5919
+ confidence: "medium",
5920
+ patterns: [
5921
+ /\.find\s*\(\s*\{[^}]*\$(?:gt|gte|lt|lte|ne|in|nin|regex|where|exists)/,
5922
+ /\.find\s*\(\s*(?:req|request)\.(?:body|query|params)/
5923
+ ],
5924
+ stack: ["node"],
5925
+ message: "Potential NoSQL injection: MongoDB query operators in user input",
5926
+ remediation: "Sanitize input by stripping keys starting with $ before using in queries",
5927
+ references: ["CWE-943"]
5928
+ }
5929
+ ];
5930
+
5931
+ // src/security/rules/stack/express.ts
5932
+ var expressRules = [
5933
+ {
5934
+ id: "SEC-EXPRESS-001",
5935
+ name: "Missing Helmet",
5936
+ category: "network",
5937
+ severity: "info",
5938
+ confidence: "low",
5939
+ patterns: [/app\s*=\s*express\s*\(\)/],
5940
+ stack: ["express"],
5941
+ fileGlob: "**/app.{ts,js}",
5942
+ message: "Express app initialization detected \u2014 ensure helmet middleware is applied for security headers",
5943
+ remediation: "Add helmet middleware: app.use(helmet())",
5944
+ references: ["CWE-693"]
5945
+ },
5946
+ {
5947
+ id: "SEC-EXPRESS-002",
5948
+ name: "Unprotected Route with Body Parsing",
5949
+ category: "network",
5950
+ severity: "info",
5951
+ confidence: "low",
5952
+ patterns: [/app\.(?:post|put|patch)\s*\([^)]*,\s*(?:req|request)\s*(?:,|\))/],
5953
+ stack: ["express"],
5954
+ message: "Express route accepts request body \u2014 ensure input validation and rate limiting are applied",
5955
+ remediation: "Add express-rate-limit and validate request body with Zod/joi",
5956
+ references: ["CWE-770"]
5957
+ }
5958
+ ];
5959
+
5960
+ // src/security/rules/stack/react.ts
5961
+ var reactRules = [
5962
+ {
5963
+ id: "SEC-REACT-001",
5964
+ name: "Sensitive Data in Client Storage",
5965
+ category: "secrets",
5966
+ severity: "warning",
5967
+ confidence: "medium",
5968
+ patterns: [
5969
+ /localStorage\.setItem\s*\(\s*['"](?:token|jwt|auth|session|password|secret|key|credential)/i,
5970
+ /sessionStorage\.setItem\s*\(\s*['"](?:token|jwt|auth|session|password|secret|key|credential)/i
5971
+ ],
5972
+ stack: ["react"],
5973
+ message: "Storing sensitive data in browser storage is accessible to XSS attacks",
5974
+ remediation: "Use httpOnly cookies for auth tokens instead of localStorage",
5975
+ references: ["CWE-922"]
5976
+ }
5977
+ ];
5978
+
5979
+ // src/security/rules/stack/go.ts
5980
+ var goRules = [
5981
+ {
5982
+ id: "SEC-GO-001",
5983
+ name: "Unsafe Pointer Usage",
5984
+ category: "injection",
5985
+ severity: "warning",
5986
+ confidence: "medium",
5987
+ patterns: [/unsafe\.Pointer/],
5988
+ stack: ["go"],
5989
+ message: "unsafe.Pointer bypasses Go type safety",
5990
+ remediation: "Avoid unsafe.Pointer unless absolutely necessary; document justification",
5991
+ references: ["CWE-119"]
5992
+ },
5993
+ {
5994
+ id: "SEC-GO-002",
5995
+ name: "Format String Injection",
5996
+ category: "injection",
5997
+ severity: "warning",
5998
+ confidence: "medium",
5999
+ patterns: [/fmt\.Sprintf\s*\(\s*\w+[^,)]*\)/],
6000
+ stack: ["go"],
6001
+ message: "Format string may come from user input",
6002
+ remediation: 'Use fmt.Sprintf with a literal format string: fmt.Sprintf("%s", userInput)',
6003
+ references: ["CWE-134"]
6004
+ }
6005
+ ];
6006
+
6007
+ // src/security/scanner.ts
6008
+ var SecurityScanner = class {
6009
+ registry;
6010
+ config;
6011
+ activeRules = [];
6012
+ constructor(config = {}) {
6013
+ this.config = { ...DEFAULT_SECURITY_CONFIG, ...config };
6014
+ this.registry = new RuleRegistry();
6015
+ this.registry.registerAll([
6016
+ ...secretRules,
6017
+ ...injectionRules,
6018
+ ...xssRules,
6019
+ ...cryptoRules,
6020
+ ...pathTraversalRules,
6021
+ ...networkRules,
6022
+ ...deserializationRules
6023
+ ]);
6024
+ this.registry.registerAll([...nodeRules, ...expressRules, ...reactRules, ...goRules]);
6025
+ this.activeRules = this.registry.getAll();
6026
+ }
6027
+ configureForProject(projectRoot) {
6028
+ const stacks = detectStack(projectRoot);
6029
+ this.activeRules = this.registry.getForStacks(stacks.length > 0 ? stacks : []);
6030
+ }
6031
+ scanContent(content, filePath, startLine = 1) {
6032
+ if (!this.config.enabled) return [];
6033
+ const findings = [];
6034
+ const lines = content.split("\n");
6035
+ for (const rule of this.activeRules) {
6036
+ const resolved = resolveRuleSeverity(
6037
+ rule.id,
6038
+ rule.severity,
6039
+ this.config.rules ?? {},
6040
+ this.config.strict
6041
+ );
6042
+ if (resolved === "off") continue;
6043
+ for (let i = 0; i < lines.length; i++) {
6044
+ const line = lines[i] ?? "";
6045
+ if (line.includes("harness-ignore") && line.includes(rule.id)) continue;
6046
+ for (const pattern of rule.patterns) {
6047
+ pattern.lastIndex = 0;
6048
+ if (pattern.test(line)) {
6049
+ findings.push({
6050
+ ruleId: rule.id,
6051
+ ruleName: rule.name,
6052
+ category: rule.category,
6053
+ severity: resolved,
6054
+ confidence: rule.confidence,
6055
+ file: filePath,
6056
+ line: startLine + i,
6057
+ match: line.trim(),
6058
+ context: line,
6059
+ message: rule.message,
6060
+ remediation: rule.remediation,
6061
+ ...rule.references ? { references: rule.references } : {}
6062
+ });
6063
+ break;
6064
+ }
6065
+ }
6066
+ }
6067
+ }
6068
+ return findings;
6069
+ }
6070
+ async scanFile(filePath) {
6071
+ if (!this.config.enabled) return [];
6072
+ const content = await fs6.readFile(filePath, "utf-8");
6073
+ return this.scanContent(content, filePath, 1);
6074
+ }
6075
+ async scanFiles(filePaths) {
6076
+ const allFindings = [];
6077
+ let scannedCount = 0;
6078
+ for (const filePath of filePaths) {
6079
+ try {
6080
+ const findings = await this.scanFile(filePath);
6081
+ allFindings.push(...findings);
6082
+ scannedCount++;
6083
+ } catch {
6084
+ }
6085
+ }
6086
+ return {
6087
+ findings: allFindings,
6088
+ scannedFiles: scannedCount,
6089
+ rulesApplied: this.activeRules.length,
6090
+ externalToolsUsed: [],
6091
+ coverage: "baseline"
6092
+ };
6093
+ }
6094
+ };
6095
+
6096
+ // src/ci/check-orchestrator.ts
6097
+ import * as path5 from "path";
6098
+ var ALL_CHECKS = [
6099
+ "validate",
6100
+ "deps",
6101
+ "docs",
6102
+ "entropy",
6103
+ "security",
6104
+ "perf",
6105
+ "phase-gate"
6106
+ ];
6107
+ async function runSingleCheck(name, projectRoot, config) {
6108
+ const start = Date.now();
6109
+ const issues = [];
6110
+ try {
6111
+ switch (name) {
6112
+ case "validate": {
6113
+ const agentsPath = path5.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
6114
+ const result = await validateAgentsMap(agentsPath);
6115
+ if (!result.ok) {
6116
+ issues.push({ severity: "error", message: result.error.message });
6117
+ } else if (!result.value.valid) {
6118
+ if (result.value.errors) {
6119
+ for (const err of result.value.errors) {
6120
+ issues.push({ severity: "error", message: err.message });
6121
+ }
6122
+ }
6123
+ for (const section of result.value.missingSections) {
6124
+ issues.push({ severity: "warning", message: `Missing section: ${section}` });
6125
+ }
6126
+ for (const link of result.value.brokenLinks) {
6127
+ issues.push({
6128
+ severity: "warning",
6129
+ message: `Broken link: ${link.text} \u2192 ${link.path}`,
6130
+ file: link.path
6131
+ });
4188
6132
  }
4189
6133
  }
4190
6134
  break;
@@ -4221,7 +6165,7 @@ async function runSingleCheck(name, projectRoot, config) {
4221
6165
  break;
4222
6166
  }
4223
6167
  case "docs": {
4224
- const docsDir = path2.join(projectRoot, config.docsDir ?? "docs");
6168
+ const docsDir = path5.join(projectRoot, config.docsDir ?? "docs");
4225
6169
  const result = await checkDocCoverage("project", { docsDir });
4226
6170
  if (!result.ok) {
4227
6171
  issues.push({ severity: "warning", message: result.error.message });
@@ -4269,6 +6213,68 @@ async function runSingleCheck(name, projectRoot, config) {
4269
6213
  }
4270
6214
  break;
4271
6215
  }
6216
+ case "security": {
6217
+ const securityConfig = parseSecurityConfig(config.security);
6218
+ if (!securityConfig.enabled) break;
6219
+ const scanner = new SecurityScanner(securityConfig);
6220
+ scanner.configureForProject(projectRoot);
6221
+ const { glob: globFn } = await import("glob");
6222
+ const sourceFiles = await globFn("**/*.{ts,tsx,js,jsx,go,py}", {
6223
+ cwd: projectRoot,
6224
+ ignore: securityConfig.exclude ?? [
6225
+ "**/node_modules/**",
6226
+ "**/dist/**",
6227
+ "**/*.test.ts",
6228
+ "**/fixtures/**"
6229
+ ],
6230
+ absolute: true
6231
+ });
6232
+ const scanResult = await scanner.scanFiles(sourceFiles);
6233
+ for (const finding of scanResult.findings) {
6234
+ issues.push({
6235
+ severity: finding.severity === "info" ? "warning" : finding.severity,
6236
+ message: `[${finding.ruleId}] ${finding.message}: ${finding.match}`,
6237
+ file: finding.file,
6238
+ line: finding.line
6239
+ });
6240
+ }
6241
+ break;
6242
+ }
6243
+ case "perf": {
6244
+ const perfAnalyzer = new EntropyAnalyzer({
6245
+ rootDir: projectRoot,
6246
+ analyze: {
6247
+ complexity: true,
6248
+ coupling: true
6249
+ }
6250
+ });
6251
+ const perfResult = await perfAnalyzer.analyze();
6252
+ if (!perfResult.ok) {
6253
+ issues.push({ severity: "warning", message: perfResult.error.message });
6254
+ } else {
6255
+ const perfReport = perfResult.value;
6256
+ if (perfReport.complexity) {
6257
+ for (const v of perfReport.complexity.violations) {
6258
+ issues.push({
6259
+ severity: v.severity === "info" ? "warning" : v.severity,
6260
+ message: `[Tier ${v.tier}] ${v.metric}: ${v.function} in ${v.file} (${v.value} > ${v.threshold})`,
6261
+ file: v.file,
6262
+ line: v.line
6263
+ });
6264
+ }
6265
+ }
6266
+ if (perfReport.coupling) {
6267
+ for (const v of perfReport.coupling.violations) {
6268
+ issues.push({
6269
+ severity: v.severity === "info" ? "warning" : v.severity,
6270
+ message: `[Tier ${v.tier}] ${v.metric}: ${v.file} (${v.value} > ${v.threshold})`,
6271
+ file: v.file
6272
+ });
6273
+ }
6274
+ }
6275
+ }
6276
+ break;
6277
+ }
4272
6278
  case "phase-gate": {
4273
6279
  const phaseGates = config.phaseGates;
4274
6280
  if (!phaseGates?.enabled) {
@@ -4339,75 +6345,2197 @@ async function runCIChecks(input) {
4339
6345
  }
4340
6346
  }
4341
6347
 
6348
+ // src/review/mechanical-checks.ts
6349
+ import * as path6 from "path";
6350
+ async function runMechanicalChecks(options) {
6351
+ const { projectRoot, config, skip = [], changedFiles } = options;
6352
+ const findings = [];
6353
+ const statuses = {
6354
+ validate: "skip",
6355
+ "check-deps": "skip",
6356
+ "check-docs": "skip",
6357
+ "security-scan": "skip"
6358
+ };
6359
+ if (!skip.includes("validate")) {
6360
+ try {
6361
+ const agentsPath = path6.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
6362
+ const result = await validateAgentsMap(agentsPath);
6363
+ if (!result.ok) {
6364
+ statuses.validate = "fail";
6365
+ findings.push({
6366
+ tool: "validate",
6367
+ file: agentsPath,
6368
+ message: result.error.message,
6369
+ severity: "error"
6370
+ });
6371
+ } else if (!result.value.valid) {
6372
+ statuses.validate = "fail";
6373
+ if (result.value.errors) {
6374
+ for (const err of result.value.errors) {
6375
+ findings.push({
6376
+ tool: "validate",
6377
+ file: agentsPath,
6378
+ message: err.message,
6379
+ severity: "error"
6380
+ });
6381
+ }
6382
+ }
6383
+ for (const section of result.value.missingSections) {
6384
+ findings.push({
6385
+ tool: "validate",
6386
+ file: agentsPath,
6387
+ message: `Missing section: ${section}`,
6388
+ severity: "warning"
6389
+ });
6390
+ }
6391
+ } else {
6392
+ statuses.validate = "pass";
6393
+ }
6394
+ } catch (err) {
6395
+ statuses.validate = "fail";
6396
+ findings.push({
6397
+ tool: "validate",
6398
+ file: path6.join(projectRoot, "AGENTS.md"),
6399
+ message: err instanceof Error ? err.message : String(err),
6400
+ severity: "error"
6401
+ });
6402
+ }
6403
+ }
6404
+ if (!skip.includes("check-deps")) {
6405
+ try {
6406
+ const rawLayers = config.layers;
6407
+ if (rawLayers && rawLayers.length > 0) {
6408
+ const parser = new TypeScriptParser();
6409
+ const layers = rawLayers.map(
6410
+ (l) => defineLayer(
6411
+ l.name,
6412
+ Array.isArray(l.patterns) ? l.patterns : [l.pattern],
6413
+ l.allowedDependencies
6414
+ )
6415
+ );
6416
+ const result = await validateDependencies({
6417
+ layers,
6418
+ rootDir: projectRoot,
6419
+ parser
6420
+ });
6421
+ if (!result.ok) {
6422
+ statuses["check-deps"] = "fail";
6423
+ findings.push({
6424
+ tool: "check-deps",
6425
+ file: projectRoot,
6426
+ message: result.error.message,
6427
+ severity: "error"
6428
+ });
6429
+ } else if (result.value.violations.length > 0) {
6430
+ statuses["check-deps"] = "fail";
6431
+ for (const v of result.value.violations) {
6432
+ findings.push({
6433
+ tool: "check-deps",
6434
+ file: v.file,
6435
+ line: v.line,
6436
+ message: `Layer violation: ${v.fromLayer} -> ${v.toLayer}: ${v.reason}`,
6437
+ severity: "error"
6438
+ });
6439
+ }
6440
+ } else {
6441
+ statuses["check-deps"] = "pass";
6442
+ }
6443
+ } else {
6444
+ statuses["check-deps"] = "pass";
6445
+ }
6446
+ } catch (err) {
6447
+ statuses["check-deps"] = "fail";
6448
+ findings.push({
6449
+ tool: "check-deps",
6450
+ file: projectRoot,
6451
+ message: err instanceof Error ? err.message : String(err),
6452
+ severity: "error"
6453
+ });
6454
+ }
6455
+ }
6456
+ if (!skip.includes("check-docs")) {
6457
+ try {
6458
+ const docsDir = path6.join(projectRoot, config.docsDir ?? "docs");
6459
+ const result = await checkDocCoverage("project", { docsDir });
6460
+ if (!result.ok) {
6461
+ statuses["check-docs"] = "warn";
6462
+ findings.push({
6463
+ tool: "check-docs",
6464
+ file: docsDir,
6465
+ message: result.error.message,
6466
+ severity: "warning"
6467
+ });
6468
+ } else if (result.value.gaps && result.value.gaps.length > 0) {
6469
+ statuses["check-docs"] = "warn";
6470
+ for (const gap of result.value.gaps) {
6471
+ findings.push({
6472
+ tool: "check-docs",
6473
+ file: gap.file,
6474
+ message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
6475
+ severity: "warning"
6476
+ });
6477
+ }
6478
+ } else {
6479
+ statuses["check-docs"] = "pass";
6480
+ }
6481
+ } catch (err) {
6482
+ statuses["check-docs"] = "warn";
6483
+ findings.push({
6484
+ tool: "check-docs",
6485
+ file: path6.join(projectRoot, "docs"),
6486
+ message: err instanceof Error ? err.message : String(err),
6487
+ severity: "warning"
6488
+ });
6489
+ }
6490
+ }
6491
+ if (!skip.includes("security-scan")) {
6492
+ try {
6493
+ const securityConfig = parseSecurityConfig(config.security);
6494
+ if (!securityConfig.enabled) {
6495
+ statuses["security-scan"] = "skip";
6496
+ } else {
6497
+ const scanner = new SecurityScanner(securityConfig);
6498
+ scanner.configureForProject(projectRoot);
6499
+ const filesToScan = changedFiles ?? [];
6500
+ const scanResult = await scanner.scanFiles(filesToScan);
6501
+ if (scanResult.findings.length > 0) {
6502
+ statuses["security-scan"] = "warn";
6503
+ for (const f of scanResult.findings) {
6504
+ findings.push({
6505
+ tool: "security-scan",
6506
+ file: f.file,
6507
+ line: f.line,
6508
+ ruleId: f.ruleId,
6509
+ message: f.message,
6510
+ severity: f.severity === "info" ? "warning" : f.severity
6511
+ });
6512
+ }
6513
+ } else {
6514
+ statuses["security-scan"] = "pass";
6515
+ }
6516
+ }
6517
+ } catch (err) {
6518
+ statuses["security-scan"] = "warn";
6519
+ findings.push({
6520
+ tool: "security-scan",
6521
+ file: projectRoot,
6522
+ message: err instanceof Error ? err.message : String(err),
6523
+ severity: "warning"
6524
+ });
6525
+ }
6526
+ }
6527
+ const hasErrors = findings.some((f) => f.severity === "error");
6528
+ const stopPipeline = statuses.validate === "fail" || statuses["check-deps"] === "fail";
6529
+ return Ok({
6530
+ pass: !hasErrors,
6531
+ stopPipeline,
6532
+ findings,
6533
+ checks: {
6534
+ validate: statuses.validate,
6535
+ checkDeps: statuses["check-deps"],
6536
+ checkDocs: statuses["check-docs"],
6537
+ securityScan: statuses["security-scan"]
6538
+ }
6539
+ });
6540
+ }
6541
+
6542
+ // src/review/exclusion-set.ts
6543
+ var ExclusionSet = class {
6544
+ /** Findings indexed by file path for O(1) file lookup */
6545
+ byFile;
6546
+ allFindings;
6547
+ constructor(findings) {
6548
+ this.allFindings = [...findings];
6549
+ this.byFile = /* @__PURE__ */ new Map();
6550
+ for (const f of findings) {
6551
+ const existing = this.byFile.get(f.file);
6552
+ if (existing) {
6553
+ existing.push(f);
6554
+ } else {
6555
+ this.byFile.set(f.file, [f]);
6556
+ }
6557
+ }
6558
+ }
6559
+ /**
6560
+ * Returns true if any mechanical finding covers the given file + line range.
6561
+ *
6562
+ * A mechanical finding "covers" a range if:
6563
+ * - The file matches, AND
6564
+ * - The finding has no line (file-level finding — covers everything), OR
6565
+ * - The finding's line falls within [startLine, endLine] inclusive.
6566
+ */
6567
+ isExcluded(file, lineRange) {
6568
+ const fileFindings = this.byFile.get(file);
6569
+ if (!fileFindings) return false;
6570
+ const [start, end] = lineRange;
6571
+ return fileFindings.some((f) => {
6572
+ if (f.line === void 0) return true;
6573
+ return f.line >= start && f.line <= end;
6574
+ });
6575
+ }
6576
+ /** Number of findings in the set */
6577
+ get size() {
6578
+ return this.allFindings.length;
6579
+ }
6580
+ /** Returns a copy of all findings */
6581
+ getFindings() {
6582
+ return [...this.allFindings];
6583
+ }
6584
+ };
6585
+ function buildExclusionSet(findings) {
6586
+ return new ExclusionSet(findings);
6587
+ }
6588
+
6589
+ // src/review/change-type.ts
6590
+ var PREFIX_PATTERNS = [
6591
+ { pattern: /^(feat|feature)(\([^)]*\))?:/i, type: "feature" },
6592
+ { pattern: /^(fix|bugfix)(\([^)]*\))?:/i, type: "bugfix" },
6593
+ { pattern: /^refactor(\([^)]*\))?:/i, type: "refactor" },
6594
+ { pattern: /^docs?(\([^)]*\))?:/i, type: "docs" }
6595
+ ];
6596
+ var TEST_FILE_PATTERN = /\.(test|spec)\.(ts|tsx|js|jsx|mts|cts)$/;
6597
+ var MD_FILE_PATTERN = /\.md$/;
6598
+ function detectChangeType(commitMessage, diff) {
6599
+ const trimmed = commitMessage.trim();
6600
+ for (const { pattern, type } of PREFIX_PATTERNS) {
6601
+ if (pattern.test(trimmed)) {
6602
+ return type;
6603
+ }
6604
+ }
6605
+ if (diff.changedFiles.length > 0 && diff.changedFiles.every((f) => MD_FILE_PATTERN.test(f))) {
6606
+ return "docs";
6607
+ }
6608
+ const newNonTestFiles = diff.newFiles.filter((f) => !TEST_FILE_PATTERN.test(f));
6609
+ if (newNonTestFiles.length > 0) {
6610
+ return "feature";
6611
+ }
6612
+ const hasNewTestFile = diff.newFiles.some((f) => TEST_FILE_PATTERN.test(f));
6613
+ if (diff.totalDiffLines < 20 && hasNewTestFile) {
6614
+ return "bugfix";
6615
+ }
6616
+ return "feature";
6617
+ }
6618
+
6619
+ // src/review/context-scoper.ts
6620
+ import * as path7 from "path";
6621
+ var ALL_DOMAINS = ["compliance", "bug", "security", "architecture"];
6622
+ var SECURITY_PATTERNS = /auth|crypto|password|secret|token|session|cookie|hash|encrypt|decrypt|sql|shell|exec|eval/i;
6623
+ function computeContextBudget(diffLines) {
6624
+ if (diffLines < 20) return diffLines * 3;
6625
+ return diffLines;
6626
+ }
6627
+ function isWithinProject(absPath, projectRoot) {
6628
+ const resolvedRoot = path7.resolve(projectRoot) + path7.sep;
6629
+ const resolvedPath = path7.resolve(absPath);
6630
+ return resolvedPath.startsWith(resolvedRoot) || resolvedPath === path7.resolve(projectRoot);
6631
+ }
6632
+ async function readContextFile(projectRoot, filePath, reason) {
6633
+ const absPath = path7.isAbsolute(filePath) ? filePath : path7.join(projectRoot, filePath);
6634
+ if (!isWithinProject(absPath, projectRoot)) return null;
6635
+ const result = await readFileContent(absPath);
6636
+ if (!result.ok) return null;
6637
+ const content = result.value;
6638
+ const lines = content.split("\n").length;
6639
+ const relPath = path7.isAbsolute(filePath) ? path7.relative(projectRoot, filePath) : filePath;
6640
+ return { path: relPath, content, reason, lines };
6641
+ }
6642
+ function extractImportSources(content) {
6643
+ const sources = [];
6644
+ const importRegex = /(?:import\s+(?:.*?\s+from\s+)?['"]([^'"]+)['"]|require\(\s*['"]([^'"]+)['"]\s*\))/g;
6645
+ let match;
6646
+ while ((match = importRegex.exec(content)) !== null) {
6647
+ const source = match[1] ?? match[2];
6648
+ if (source) sources.push(source);
6649
+ }
6650
+ return sources;
6651
+ }
6652
+ async function resolveImportPath2(projectRoot, fromFile, importSource) {
6653
+ if (!importSource.startsWith(".")) return null;
6654
+ const fromDir = path7.dirname(path7.join(projectRoot, fromFile));
6655
+ const basePath = path7.resolve(fromDir, importSource);
6656
+ if (!isWithinProject(basePath, projectRoot)) return null;
6657
+ const relBase = path7.relative(projectRoot, basePath);
6658
+ const candidates = [
6659
+ relBase + ".ts",
6660
+ relBase + ".tsx",
6661
+ relBase + ".mts",
6662
+ path7.join(relBase, "index.ts")
6663
+ ];
6664
+ for (const candidate of candidates) {
6665
+ const absCandidate = path7.join(projectRoot, candidate);
6666
+ if (await fileExists(absCandidate)) {
6667
+ return candidate;
6668
+ }
6669
+ }
6670
+ return null;
6671
+ }
6672
+ async function findTestFiles(projectRoot, sourceFile) {
6673
+ const baseName = path7.basename(sourceFile, path7.extname(sourceFile));
6674
+ const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
6675
+ const results = await findFiles(pattern, projectRoot);
6676
+ return results.map((f) => path7.relative(projectRoot, f));
6677
+ }
6678
+ async function gatherImportContext(projectRoot, changedFiles, budget) {
6679
+ const contextFiles = [];
6680
+ let linesGathered = 0;
6681
+ const seen = new Set(changedFiles.map((f) => f.path));
6682
+ for (const cf of changedFiles) {
6683
+ if (linesGathered >= budget) break;
6684
+ const sources = extractImportSources(cf.content);
6685
+ for (const source of sources) {
6686
+ if (linesGathered >= budget) break;
6687
+ const resolved = await resolveImportPath2(projectRoot, cf.path, source);
6688
+ if (resolved && !seen.has(resolved)) {
6689
+ seen.add(resolved);
6690
+ const contextFile = await readContextFile(projectRoot, resolved, "import");
6691
+ if (contextFile) {
6692
+ contextFiles.push(contextFile);
6693
+ linesGathered += contextFile.lines;
6694
+ }
6695
+ }
6696
+ }
6697
+ }
6698
+ return contextFiles;
6699
+ }
6700
+ async function gatherGraphDependencyContext(projectRoot, changedFilePaths, graph, budget) {
6701
+ const contextFiles = [];
6702
+ let linesGathered = 0;
6703
+ const seen = new Set(changedFilePaths);
6704
+ for (const filePath of changedFilePaths) {
6705
+ if (linesGathered >= budget) break;
6706
+ let deps;
6707
+ try {
6708
+ deps = await graph.getDependencies(filePath);
6709
+ } catch {
6710
+ continue;
6711
+ }
6712
+ for (const dep of deps) {
6713
+ if (linesGathered >= budget) break;
6714
+ if (seen.has(dep)) continue;
6715
+ seen.add(dep);
6716
+ const contextFile = await readContextFile(projectRoot, dep, "graph-dependency");
6717
+ if (contextFile) {
6718
+ contextFiles.push(contextFile);
6719
+ linesGathered += contextFile.lines;
6720
+ }
6721
+ }
6722
+ }
6723
+ return contextFiles;
6724
+ }
6725
+ async function gatherTestContext(projectRoot, changedFilePaths, graph) {
6726
+ const testFiles = [];
6727
+ const seen = /* @__PURE__ */ new Set();
6728
+ if (graph) {
6729
+ for (const filePath of changedFilePaths) {
6730
+ let impact;
6731
+ try {
6732
+ impact = await graph.getImpact(filePath);
6733
+ } catch {
6734
+ continue;
6735
+ }
6736
+ for (const testFile of impact.tests) {
6737
+ if (seen.has(testFile)) continue;
6738
+ seen.add(testFile);
6739
+ const cf = await readContextFile(projectRoot, testFile, "test");
6740
+ if (cf) testFiles.push(cf);
6741
+ }
6742
+ }
6743
+ } else {
6744
+ for (const filePath of changedFilePaths) {
6745
+ const found = await findTestFiles(projectRoot, filePath);
6746
+ for (const testFile of found) {
6747
+ if (seen.has(testFile)) continue;
6748
+ seen.add(testFile);
6749
+ const cf = await readContextFile(projectRoot, testFile, "test");
6750
+ if (cf) testFiles.push(cf);
6751
+ }
6752
+ }
6753
+ }
6754
+ return testFiles;
6755
+ }
6756
+ async function scopeComplianceContext(projectRoot, _changedFiles, options) {
6757
+ const contextFiles = [];
6758
+ const conventionFiles = options.conventionFiles ?? ["CLAUDE.md", "AGENTS.md"];
6759
+ for (const cf of conventionFiles) {
6760
+ const file = await readContextFile(projectRoot, cf, "convention");
6761
+ if (file) contextFiles.push(file);
6762
+ }
6763
+ return contextFiles;
6764
+ }
6765
+ async function scopeBugContext(projectRoot, changedFiles, budget, options) {
6766
+ const contextFiles = [];
6767
+ const changedPaths = changedFiles.map((f) => f.path);
6768
+ if (options.graph) {
6769
+ const deps = await gatherGraphDependencyContext(
6770
+ projectRoot,
6771
+ changedPaths,
6772
+ options.graph,
6773
+ budget
6774
+ );
6775
+ contextFiles.push(...deps);
6776
+ } else {
6777
+ const deps = await gatherImportContext(projectRoot, changedFiles, budget);
6778
+ contextFiles.push(...deps);
6779
+ }
6780
+ const tests = await gatherTestContext(projectRoot, changedPaths, options.graph);
6781
+ contextFiles.push(...tests);
6782
+ return contextFiles;
6783
+ }
6784
+ async function scopeSecurityContext(projectRoot, changedFiles, budget, options) {
6785
+ const contextFiles = [];
6786
+ const changedPaths = changedFiles.map((f) => f.path);
6787
+ if (options.graph) {
6788
+ const allPaths = [];
6789
+ for (const filePath of changedPaths) {
6790
+ try {
6791
+ const deps = await options.graph.getDependencies(filePath);
6792
+ allPaths.push(...deps);
6793
+ } catch {
6794
+ continue;
6795
+ }
6796
+ }
6797
+ const uniquePaths = [...new Set(allPaths)];
6798
+ const securityFirst = uniquePaths.sort((a, b) => {
6799
+ const aMatch = SECURITY_PATTERNS.test(a) ? 0 : 1;
6800
+ const bMatch = SECURITY_PATTERNS.test(b) ? 0 : 1;
6801
+ return aMatch - bMatch;
6802
+ });
6803
+ for (const depPath of securityFirst) {
6804
+ if (contextFiles.reduce((sum, f) => sum + f.lines, 0) >= budget) break;
6805
+ const cf = await readContextFile(projectRoot, depPath, "graph-dependency");
6806
+ if (cf) contextFiles.push(cf);
6807
+ }
6808
+ } else {
6809
+ const deps = await gatherImportContext(projectRoot, changedFiles, budget);
6810
+ contextFiles.push(...deps);
6811
+ }
6812
+ return contextFiles;
6813
+ }
6814
+ async function scopeArchitectureContext(projectRoot, changedFiles, budget, options) {
6815
+ const contextFiles = [];
6816
+ const changedPaths = changedFiles.map((f) => f.path);
6817
+ if (options.graph) {
6818
+ let linesGathered = 0;
6819
+ for (const filePath of changedPaths) {
6820
+ if (linesGathered >= budget) break;
6821
+ let impact;
6822
+ try {
6823
+ impact = await options.graph.getImpact(filePath);
6824
+ } catch {
6825
+ continue;
6826
+ }
6827
+ for (const codePath of impact.code) {
6828
+ if (linesGathered >= budget) break;
6829
+ const cf = await readContextFile(projectRoot, codePath, "graph-impact");
6830
+ if (cf) {
6831
+ contextFiles.push(cf);
6832
+ linesGathered += cf.lines;
6833
+ }
6834
+ }
6835
+ }
6836
+ } else {
6837
+ const deps = await gatherImportContext(projectRoot, changedFiles, budget);
6838
+ contextFiles.push(...deps);
6839
+ if (options.checkDepsOutput) {
6840
+ contextFiles.push({
6841
+ path: "harness-check-deps-output",
6842
+ content: options.checkDepsOutput,
6843
+ lines: options.checkDepsOutput.split("\n").length,
6844
+ reason: "convention"
6845
+ });
6846
+ }
6847
+ }
6848
+ return contextFiles;
6849
+ }
6850
+ async function scopeContext(options) {
6851
+ const { projectRoot, diff, commitMessage } = options;
6852
+ const changeType = detectChangeType(commitMessage, diff);
6853
+ const budget = computeContextBudget(diff.totalDiffLines);
6854
+ const changedFiles = [];
6855
+ for (const filePath of diff.changedFiles) {
6856
+ const cf = await readContextFile(projectRoot, filePath, "changed");
6857
+ if (cf) changedFiles.push(cf);
6858
+ }
6859
+ const scopers = {
6860
+ compliance: () => scopeComplianceContext(projectRoot, changedFiles, options),
6861
+ bug: () => scopeBugContext(projectRoot, changedFiles, budget, options),
6862
+ security: () => scopeSecurityContext(projectRoot, changedFiles, budget, options),
6863
+ architecture: () => scopeArchitectureContext(projectRoot, changedFiles, budget, options)
6864
+ };
6865
+ const bundles = [];
6866
+ for (const domain of ALL_DOMAINS) {
6867
+ const contextFiles = await scopers[domain]();
6868
+ const contextLines = contextFiles.reduce((sum, f) => sum + f.lines, 0);
6869
+ bundles.push({
6870
+ domain,
6871
+ changeType,
6872
+ changedFiles: [...changedFiles],
6873
+ contextFiles,
6874
+ commitHistory: options.commitHistory ?? [],
6875
+ diffLines: diff.totalDiffLines,
6876
+ contextLines
6877
+ });
6878
+ }
6879
+ return bundles;
6880
+ }
6881
+
6882
+ // src/review/constants.ts
6883
+ var SEVERITY_RANK = {
6884
+ suggestion: 0,
6885
+ important: 1,
6886
+ critical: 2
6887
+ };
6888
+ var SEVERITY_ORDER = ["critical", "important", "suggestion"];
6889
+ var SEVERITY_LABELS = {
6890
+ critical: "Critical",
6891
+ important: "Important",
6892
+ suggestion: "Suggestion"
6893
+ };
6894
+ var VALIDATED_BY_RANK = {
6895
+ mechanical: 0,
6896
+ heuristic: 1,
6897
+ graph: 2
6898
+ };
6899
+ function makeFindingId(domain, file, line, title) {
6900
+ const hash = title.slice(0, 20).replace(/[^a-zA-Z0-9]/g, "");
6901
+ return `${domain}-${file.replace(/[^a-zA-Z0-9]/g, "-")}-${line}-${hash}`;
6902
+ }
6903
+
6904
+ // src/review/agents/compliance-agent.ts
6905
+ var COMPLIANCE_DESCRIPTOR = {
6906
+ domain: "compliance",
6907
+ tier: "standard",
6908
+ displayName: "Compliance",
6909
+ focusAreas: [
6910
+ "Spec alignment \u2014 implementation matches design doc",
6911
+ "API surface \u2014 new public interfaces are minimal and well-named",
6912
+ "Backward compatibility \u2014 no breaking changes without migration path",
6913
+ "Convention adherence \u2014 project conventions from CLAUDE.md/AGENTS.md followed",
6914
+ "Documentation completeness \u2014 all public interfaces documented"
6915
+ ]
6916
+ };
6917
+ function extractConventionRules(bundle) {
6918
+ const rules = [];
6919
+ const conventionFiles = bundle.contextFiles.filter((f) => f.reason === "convention");
6920
+ for (const file of conventionFiles) {
6921
+ const lines = file.content.split("\n");
6922
+ for (const line of lines) {
6923
+ const trimmed = line.trim();
6924
+ if (trimmed.startsWith("- ") || trimmed.startsWith("* ")) {
6925
+ rules.push({ text: trimmed.slice(2).trim(), source: file.path });
6926
+ }
6927
+ }
6928
+ }
6929
+ return rules;
6930
+ }
6931
+ function findMissingJsDoc(bundle) {
6932
+ const missing = [];
6933
+ for (const cf of bundle.changedFiles) {
6934
+ const lines = cf.content.split("\n");
6935
+ for (let i = 0; i < lines.length; i++) {
6936
+ const line = lines[i];
6937
+ const exportMatch = line.match(
6938
+ /export\s+(?:async\s+)?(?:function|const|class|interface|type)\s+(\w+)/
6939
+ );
6940
+ if (exportMatch) {
6941
+ let hasJsDoc = false;
6942
+ for (let j = i - 1; j >= 0; j--) {
6943
+ const prev = lines[j].trim();
6944
+ if (prev === "") continue;
6945
+ if (prev.endsWith("*/")) {
6946
+ hasJsDoc = true;
6947
+ }
6948
+ break;
6949
+ }
6950
+ if (!hasJsDoc) {
6951
+ missing.push({
6952
+ file: cf.path,
6953
+ line: i + 1,
6954
+ exportName: exportMatch[1]
6955
+ });
6956
+ }
6957
+ }
6958
+ }
6959
+ }
6960
+ return missing;
6961
+ }
6962
+ function runComplianceAgent(bundle) {
6963
+ const findings = [];
6964
+ const rules = extractConventionRules(bundle);
6965
+ const jsDocRuleExists = rules.some((r) => r.text.toLowerCase().includes("jsdoc"));
6966
+ if (jsDocRuleExists) {
6967
+ const missingDocs = findMissingJsDoc(bundle);
6968
+ for (const m of missingDocs) {
6969
+ findings.push({
6970
+ id: makeFindingId("compliance", m.file, m.line, `Missing JSDoc ${m.exportName}`),
6971
+ file: m.file,
6972
+ lineRange: [m.line, m.line],
6973
+ domain: "compliance",
6974
+ severity: "important",
6975
+ title: `Missing JSDoc on exported \`${m.exportName}\``,
6976
+ rationale: `Convention requires all exports to have JSDoc comments (from ${rules.find((r) => r.text.toLowerCase().includes("jsdoc"))?.source ?? "conventions"}).`,
6977
+ suggestion: `Add a JSDoc comment above the export of \`${m.exportName}\`.`,
6978
+ evidence: [
6979
+ `changeType: ${bundle.changeType}`,
6980
+ `Convention rule: "${rules.find((r) => r.text.toLowerCase().includes("jsdoc"))?.text ?? ""}"`
6981
+ ],
6982
+ validatedBy: "heuristic"
6983
+ });
6984
+ }
6985
+ }
6986
+ switch (bundle.changeType) {
6987
+ case "feature": {
6988
+ const hasSpecContext = bundle.contextFiles.some(
6989
+ (f) => f.reason === "spec" || f.reason === "convention"
6990
+ );
6991
+ if (!hasSpecContext && bundle.changedFiles.length > 0) {
6992
+ const firstFile = bundle.changedFiles[0];
6993
+ findings.push({
6994
+ id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
6995
+ file: firstFile.path,
6996
+ lineRange: [1, 1],
6997
+ domain: "compliance",
6998
+ severity: "suggestion",
6999
+ title: "No spec/design doc found for feature change",
7000
+ rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
7001
+ evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
7002
+ validatedBy: "heuristic"
7003
+ });
7004
+ }
7005
+ break;
7006
+ }
7007
+ case "bugfix": {
7008
+ if (bundle.commitHistory.length === 0 && bundle.changedFiles.length > 0) {
7009
+ const firstFile = bundle.changedFiles[0];
7010
+ findings.push({
7011
+ id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
7012
+ file: firstFile.path,
7013
+ lineRange: [1, 1],
7014
+ domain: "compliance",
7015
+ severity: "suggestion",
7016
+ title: "Bugfix without commit history context",
7017
+ rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
7018
+ evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
7019
+ validatedBy: "heuristic"
7020
+ });
7021
+ }
7022
+ break;
7023
+ }
7024
+ case "refactor": {
7025
+ break;
7026
+ }
7027
+ case "docs": {
7028
+ break;
7029
+ }
7030
+ }
7031
+ const resultTypeRule = rules.find((r) => r.text.toLowerCase().includes("result type"));
7032
+ if (resultTypeRule) {
7033
+ for (const cf of bundle.changedFiles) {
7034
+ const hasTryCatch = cf.content.includes("try {") || cf.content.includes("try{");
7035
+ const usesResult = cf.content.includes("Result<") || cf.content.includes("Result >") || cf.content.includes(": Result");
7036
+ if (hasTryCatch && !usesResult) {
7037
+ findings.push({
7038
+ id: makeFindingId("compliance", cf.path, 1, "try-catch not Result"),
7039
+ file: cf.path,
7040
+ lineRange: [1, cf.lines],
7041
+ domain: "compliance",
7042
+ severity: "suggestion",
7043
+ title: "Fallible operation uses try/catch instead of Result type",
7044
+ rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
7045
+ suggestion: "Refactor error handling to use the Result type pattern.",
7046
+ evidence: [
7047
+ `changeType: ${bundle.changeType}`,
7048
+ `Convention rule: "${resultTypeRule.text}"`
7049
+ ],
7050
+ validatedBy: "heuristic"
7051
+ });
7052
+ }
7053
+ }
7054
+ }
7055
+ return findings;
7056
+ }
7057
+
7058
+ // src/review/agents/bug-agent.ts
7059
+ var BUG_DETECTION_DESCRIPTOR = {
7060
+ domain: "bug",
7061
+ tier: "strong",
7062
+ displayName: "Bug Detection",
7063
+ focusAreas: [
7064
+ "Edge cases \u2014 boundary conditions, empty input, max values, null, concurrent access",
7065
+ "Error handling \u2014 errors handled at appropriate level, no silent swallowing",
7066
+ "Logic errors \u2014 off-by-one, incorrect boolean logic, missing early returns",
7067
+ "Race conditions \u2014 concurrent access to shared state",
7068
+ "Resource leaks \u2014 unclosed handles, missing cleanup in error paths",
7069
+ "Type safety \u2014 type mismatches, unsafe casts, missing null checks",
7070
+ "Test coverage \u2014 tests for happy path, error paths, and edge cases"
7071
+ ]
7072
+ };
7073
+ function detectDivisionByZero(bundle) {
7074
+ const findings = [];
7075
+ for (const cf of bundle.changedFiles) {
7076
+ const lines = cf.content.split("\n");
7077
+ for (let i = 0; i < lines.length; i++) {
7078
+ const line = lines[i];
7079
+ if (line.match(/[^=!<>]\s*\/\s*[a-zA-Z_]\w*/) && !line.includes("//")) {
7080
+ const preceding = lines.slice(Math.max(0, i - 3), i).join("\n");
7081
+ if (!preceding.includes("=== 0") && !preceding.includes("!== 0") && !preceding.includes("== 0") && !preceding.includes("!= 0")) {
7082
+ findings.push({
7083
+ id: makeFindingId("bug", cf.path, i + 1, "division by zero"),
7084
+ file: cf.path,
7085
+ lineRange: [i + 1, i + 1],
7086
+ domain: "bug",
7087
+ severity: "important",
7088
+ title: "Potential division by zero without guard",
7089
+ rationale: "Division operation found without a preceding zero check on the divisor. This can cause Infinity or NaN at runtime.",
7090
+ suggestion: "Add a check for zero before dividing, or use a safe division utility.",
7091
+ evidence: [`Line ${i + 1}: ${line.trim()}`],
7092
+ validatedBy: "heuristic"
7093
+ });
7094
+ }
7095
+ }
7096
+ }
7097
+ }
7098
+ return findings;
7099
+ }
7100
+ function detectEmptyCatch(bundle) {
7101
+ const findings = [];
7102
+ for (const cf of bundle.changedFiles) {
7103
+ const lines = cf.content.split("\n");
7104
+ for (let i = 0; i < lines.length; i++) {
7105
+ const line = lines[i];
7106
+ if (line.match(/catch\s*\([^)]*\)\s*\{\s*\}/) || line.match(/catch\s*\([^)]*\)\s*\{/) && i + 1 < lines.length && lines[i + 1].trim() === "}") {
7107
+ findings.push({
7108
+ id: makeFindingId("bug", cf.path, i + 1, "empty catch block"),
7109
+ file: cf.path,
7110
+ lineRange: [i + 1, i + 2],
7111
+ domain: "bug",
7112
+ severity: "important",
7113
+ title: "Empty catch block silently swallows error",
7114
+ rationale: "Catching an error without handling, logging, or re-throwing it hides failures and makes debugging difficult.",
7115
+ suggestion: "Log the error, re-throw it, or handle it explicitly. If intentionally ignoring, add a comment explaining why.",
7116
+ evidence: [`Line ${i + 1}: ${line.trim()}`],
7117
+ validatedBy: "heuristic"
7118
+ });
7119
+ }
7120
+ }
7121
+ }
7122
+ return findings;
7123
+ }
7124
/**
 * Emit a single suggestion-level finding when the review context contains no
 * test files yet the change touches non-test source files.
 *
 * @param bundle - Context bundle with `changedFiles` and `contextFiles`.
 * @returns Zero or one heuristic finding, anchored to the first source file.
 */
function detectMissingTests(bundle) {
  const hasTestFiles = bundle.contextFiles.some((f) => f.reason === "test");
  if (hasTestFiles) {
    return [];
  }
  const sourceFiles = bundle.changedFiles.filter(
    (f) => !f.path.match(/\.(test|spec)\.(ts|tsx|js|jsx)$/)
  );
  if (sourceFiles.length === 0) {
    return [];
  }
  const [firstFile] = sourceFiles;
  // One aggregate finding only; all untested files are listed in evidence.
  return [{
    id: makeFindingId("bug", firstFile.path, 1, "no test files"),
    file: firstFile.path,
    lineRange: [1, 1],
    domain: "bug",
    severity: "suggestion",
    title: "No test files found for changed source files",
    rationale: "Changed source files should have corresponding test files. No test files were found in the review context.",
    evidence: [`Source files without tests: ${sourceFiles.map((f) => f.path).join(", ")}`],
    validatedBy: "heuristic"
  }];
}
7148
/**
 * Run every bug-domain heuristic over the bundle and concatenate findings.
 */
function runBugDetectionAgent(bundle) {
  return [
    ...detectDivisionByZero(bundle),
    ...detectEmptyCatch(bundle),
    ...detectMissingTests(bundle)
  ];
}
7155
+
7156
+ // src/review/agents/security-agent.ts
7157
// Descriptor advertised by the security agent: review domain, the model tier
// it requests ("strong"), and the focus areas fed into its prompt.
var SECURITY_DESCRIPTOR = {
  domain: "security",
  tier: "strong",
  displayName: "Security",
  focusAreas: [
    "Input validation \u2014 user input flowing to dangerous sinks (SQL, shell, HTML)",
    "Authorization \u2014 missing auth checks on new/modified endpoints",
    "Data exposure \u2014 sensitive data in logs, error messages, API responses",
    "Authentication bypass \u2014 paths introduced by the change",
    "Insecure defaults \u2014 new configuration options with unsafe defaults",
    "Node.js specific \u2014 prototype pollution, ReDoS, path traversal"
  ]
};
// Matches eval( or new Function( — both execute a string as code (CWE-94).
var EVAL_PATTERN = /\beval\s*\(|new\s+Function\s*\(/;
// Hardcoded-credential heuristics: a key/secret/password-style assignment
// with a quoted value of 8+ chars, or a quoted string shaped like a provider
// API key (e.g. "sk-..."). Case-insensitive; detectHardcodedSecrets applies
// these to the non-comment part of each line.
var SECRET_PATTERNS = [
  /(?:api[_-]?key|secret|password|token|private[_-]?key)\s*=\s*["'][^"']{8,}/i,
  /["'](?:sk|pk|api|key|secret|token|password)[-_][a-zA-Z0-9]{10,}["']/i
];
// SQL keyword followed by `+ identifier` concatenation, or a template literal
// interpolating into SQL text — both suggest string-built queries (CWE-89).
var SQL_CONCAT_PATTERN = /(?:SELECT|INSERT|UPDATE|DELETE|DROP|CREATE|ALTER)\s+.*?\+\s*\w+|`[^`]*\$\{[^}]*\}[^`]*(?:SELECT|INSERT|UPDATE|DELETE|WHERE)/i;
// child_process exec/spawn called with an interpolating template literal —
// a shell command built from dynamic data (CWE-78).
var SHELL_EXEC_PATTERN = /(?:exec|execSync|spawn|spawnSync)\s*\(\s*`[^`]*\$\{/;
7177
/**
 * Flag lines matching EVAL_PATTERN (eval( / new Function() — both execute
 * strings as code and enable RCE if user input reaches them, CWE-94).
 *
 * @param bundle - Context bundle with `changedFiles` ({ path, content }).
 * @returns Critical-severity heuristic findings, one per matching line.
 */
function detectEvalUsage(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    for (const [index, line] of cf.content.split("\n").entries()) {
      if (!EVAL_PATTERN.test(line)) continue;
      const lineNo = index + 1;
      findings.push({
        id: makeFindingId("security", cf.path, lineNo, "eval usage CWE-94"),
        file: cf.path,
        lineRange: [lineNo, lineNo],
        domain: "security",
        severity: "critical",
        title: "Dangerous eval() or new Function() usage",
        rationale: "eval() and new Function() execute arbitrary code. If user input reaches these calls, it enables Remote Code Execution (CWE-94).",
        suggestion: "Replace eval/Function with a safe alternative (JSON.parse for data, a sandboxed evaluator for expressions).",
        evidence: [`Line ${lineNo}: ${line.trim()}`],
        validatedBy: "heuristic",
        cweId: "CWE-94",
        owaspCategory: "A03:2021 Injection",
        confidence: "high",
        remediation: "Replace eval/Function with a safe alternative (JSON.parse for data, a sandboxed evaluator for expressions).",
        references: [
          "https://cwe.mitre.org/data/definitions/94.html",
          "https://owasp.org/Top10/A03_2021-Injection/"
        ]
      });
    }
  }
  return findings;
}
7209
/**
 * Flag lines whose non-comment portion matches any SECRET_PATTERNS entry
 * (hardcoded credentials, CWE-798). At most one finding per line; the
 * matched value is never echoed into evidence.
 *
 * @param bundle - Context bundle with `changedFiles` ({ path, content }).
 * @returns Critical-severity heuristic findings.
 */
function detectHardcodedSecrets(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    const lines = cf.content.split("\n");
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      // Strip a trailing // comment so commented-out examples do not match.
      const commentStart = line.indexOf("//");
      const codePart = commentStart === -1 ? line : line.slice(0, commentStart);
      const matched = SECRET_PATTERNS.some((pattern) => pattern.test(codePart));
      if (!matched) continue;
      findings.push({
        id: makeFindingId("security", cf.path, i + 1, "hardcoded secret CWE-798"),
        file: cf.path,
        lineRange: [i + 1, i + 1],
        domain: "security",
        severity: "critical",
        title: "Hardcoded secret or API key detected",
        rationale: "Hardcoded secrets in source code can be extracted from version history even after removal. Use environment variables or a secrets manager (CWE-798).",
        suggestion: "Move the secret to an environment variable and access it via process.env.",
        evidence: [`Line ${i + 1}: [secret detected \u2014 value redacted]`],
        validatedBy: "heuristic",
        cweId: "CWE-798",
        owaspCategory: "A07:2021 Identification and Authentication Failures",
        confidence: "high",
        remediation: "Move the secret to an environment variable and access it via process.env.",
        references: [
          "https://cwe.mitre.org/data/definitions/798.html",
          "https://owasp.org/Top10/A07_2021-Identification_and_Authentication_Failures/"
        ]
      });
    }
  }
  return findings;
}
7245
/**
 * Flag lines matching SQL_CONCAT_PATTERN — SQL built via concatenation or
 * template-literal interpolation (CWE-89).
 *
 * @param bundle - Context bundle with `changedFiles` ({ path, content }).
 * @returns Critical-severity heuristic findings, one per matching line.
 */
function detectSqlInjection(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    for (const [index, line] of cf.content.split("\n").entries()) {
      if (!SQL_CONCAT_PATTERN.test(line)) continue;
      const lineNo = index + 1;
      findings.push({
        id: makeFindingId("security", cf.path, lineNo, "SQL injection CWE-89"),
        file: cf.path,
        lineRange: [lineNo, lineNo],
        domain: "security",
        severity: "critical",
        title: "Potential SQL injection via string concatenation",
        rationale: "Building SQL queries with string concatenation or template literals allows attackers to inject malicious SQL (CWE-89).",
        suggestion: "Use parameterized queries or a query builder (e.g., Knex, Prisma) instead of string concatenation.",
        evidence: [`Line ${lineNo}: ${line.trim()}`],
        validatedBy: "heuristic",
        cweId: "CWE-89",
        owaspCategory: "A03:2021 Injection",
        confidence: "high",
        remediation: "Use parameterized queries or a query builder (e.g., Knex, Prisma) instead of string concatenation.",
        references: [
          "https://cwe.mitre.org/data/definitions/89.html",
          "https://owasp.org/Top10/A03_2021-Injection/"
        ]
      });
    }
  }
  return findings;
}
7277
/**
 * Flag lines matching SHELL_EXEC_PATTERN — exec/spawn invoked with an
 * interpolating template literal (shell command injection, CWE-78).
 *
 * @param bundle - Context bundle with `changedFiles` ({ path, content }).
 * @returns Critical-severity heuristic findings, one per matching line.
 */
function detectCommandInjection(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    for (const [index, line] of cf.content.split("\n").entries()) {
      if (!SHELL_EXEC_PATTERN.test(line)) continue;
      const lineNo = index + 1;
      findings.push({
        id: makeFindingId("security", cf.path, lineNo, "command injection CWE-78"),
        file: cf.path,
        lineRange: [lineNo, lineNo],
        domain: "security",
        severity: "critical",
        title: "Potential command injection via shell exec with interpolation",
        rationale: "Using exec/spawn with template literal interpolation allows attackers to inject shell commands (CWE-78).",
        suggestion: "Use execFile or spawn with an arguments array instead of shell string interpolation.",
        evidence: [`Line ${lineNo}: ${line.trim()}`],
        validatedBy: "heuristic",
        cweId: "CWE-78",
        owaspCategory: "A03:2021 Injection",
        confidence: "high",
        remediation: "Use execFile or spawn with an arguments array instead of shell string interpolation.",
        references: [
          "https://cwe.mitre.org/data/definitions/78.html",
          "https://owasp.org/Top10/A03_2021-Injection/"
        ]
      });
    }
  }
  return findings;
}
7309
/**
 * Run every security-domain heuristic over the bundle and concatenate
 * findings.
 */
function runSecurityAgent(bundle) {
  return [
    ...detectEvalUsage(bundle),
    ...detectHardcodedSecrets(bundle),
    ...detectSqlInjection(bundle),
    ...detectCommandInjection(bundle)
  ];
}
7317
+
7318
+ // src/review/agents/architecture-agent.ts
7319
// Descriptor advertised by the architecture agent: review domain, requested
// model tier ("standard"), and the focus areas fed into its prompt.
var ARCHITECTURE_DESCRIPTOR = {
  domain: "architecture",
  tier: "standard",
  displayName: "Architecture",
  focusAreas: [
    "Layer compliance \u2014 imports flow in the correct direction per architectural layers",
    "Dependency direction \u2014 modules depend on abstractions, not concretions",
    "Single Responsibility \u2014 each module has one reason to change",
    "Pattern consistency \u2014 code follows established codebase patterns",
    "Separation of concerns \u2014 business logic separated from infrastructure",
    "DRY violations \u2014 duplicated logic that should be extracted (excluding intentional duplication)"
  ]
};
// Changed files longer than this many lines are flagged by detectLargeFiles
// as potential Single Responsibility violations.
var LARGE_FILE_THRESHOLD = 300;
7333
/**
 * Surface layer-boundary violations reported by an external check-deps run.
 * Reads the context file named "harness-check-deps-output" and emits one
 * critical finding per line mentioning "violation" or "layer", extracting a
 * file:line location from the text when one is present.
 *
 * @param bundle - Context bundle with `contextFiles` and `changedFiles`.
 * @returns Critical-severity heuristic findings (empty if no output file).
 */
function detectLayerViolations(bundle) {
  const findings = [];
  const checkDepsFile = bundle.contextFiles.find((f) => f.path === "harness-check-deps-output");
  if (!checkDepsFile) return findings;
  for (const line of checkDepsFile.content.split("\n")) {
    const lowered = line.toLowerCase();
    if (!lowered.includes("violation") && !lowered.includes("layer")) continue;
    const fileMatch = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
    // Fall back to the first changed file, then "unknown", when the tool
    // output does not name a file.
    const file = fileMatch?.[1] ?? bundle.changedFiles[0]?.path ?? "unknown";
    const lineNum = fileMatch?.[2] ? parseInt(fileMatch[2], 10) : 1;
    findings.push({
      id: makeFindingId("arch", file, lineNum, "layer violation"),
      file,
      lineRange: [lineNum, lineNum],
      domain: "architecture",
      severity: "critical",
      title: "Layer boundary violation detected by check-deps",
      rationale: `Architectural layer violation: ${line.trim()}. Imports must flow in the correct direction per the project's layer definitions.`,
      suggestion: "Route the dependency through the correct intermediate layer (e.g., routes -> services -> db, not routes -> db).",
      evidence: [line.trim()],
      validatedBy: "heuristic"
    });
  }
  return findings;
}
7359
/**
 * Flag changed files whose line count exceeds LARGE_FILE_THRESHOLD as
 * potential Single Responsibility violations (suggestion severity).
 *
 * @param bundle - Context bundle with `changedFiles` ({ path, lines }).
 * @returns One heuristic finding per oversized file.
 */
function detectLargeFiles(bundle) {
  const oversized = bundle.changedFiles.filter((cf) => cf.lines > LARGE_FILE_THRESHOLD);
  return oversized.map((cf) => ({
    id: makeFindingId("arch", cf.path, 1, "large file SRP"),
    file: cf.path,
    lineRange: [1, cf.lines],
    domain: "architecture",
    severity: "suggestion",
    title: `Large file (${cf.lines} lines) may violate Single Responsibility`,
    rationale: `Files over ${LARGE_FILE_THRESHOLD} lines often contain multiple responsibilities. Consider splitting into focused modules.`,
    suggestion: "Identify distinct responsibilities and extract them into separate modules.",
    evidence: [`File has ${cf.lines} lines (threshold: ${LARGE_FILE_THRESHOLD})`],
    validatedBy: "heuristic"
  }));
}
7379
// Heuristic detection of circular imports between a changed file and the
// context files gathered for it. NOTE(review): matching is name-based (file
// basenames inside import specifiers), not resolver-based, so false
// positives are possible when unrelated files share a basename — treat
// results as hints, not proof.
function detectCircularImports(bundle) {
  const findings = [];
  const changedPaths = new Set(bundle.changedFiles.map((f) => f.path));
  for (const cf of bundle.changedFiles) {
    const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
    let match;
    // Relative import specifiers of the changed file, with one leading "./"
    // or "../" stripped (deeper "../../" prefixes are only partially
    // stripped — an accepted looseness of this heuristic).
    const imports = /* @__PURE__ */ new Set();
    while ((match = importRegex.exec(cf.content)) !== null) {
      const source = match[1];
      if (source.startsWith(".")) {
        imports.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
      }
    }
    // Only context files pulled in because of an import or graph dependency
    // are candidates for the other half of a cycle.
    for (const ctxFile of bundle.contextFiles) {
      if (ctxFile.reason !== "import" && ctxFile.reason !== "graph-dependency") continue;
      const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
      let ctxMatch;
      while ((ctxMatch = ctxImportRegex.exec(ctxFile.content)) !== null) {
        const ctxSource = ctxMatch[1];
        if (ctxSource.startsWith(".")) {
          for (const changedPath of changedPaths) {
            // Basename of the changed file: directory and extension removed.
            const baseName = changedPath.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
            // Cycle claim: the context file imports the changed file's
            // basename AND the changed file imports the context file's
            // basename.
            if (ctxSource.includes(baseName) && imports.has(ctxFile.path.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, ""))) {
              findings.push({
                id: makeFindingId("arch", cf.path, 1, `circular ${ctxFile.path}`),
                file: cf.path,
                lineRange: [1, 1],
                domain: "architecture",
                severity: "important",
                title: `Potential circular import between ${cf.path} and ${ctxFile.path}`,
                rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
                suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
                evidence: [`${cf.path} imports from a module that also imports from ${cf.path}`],
                validatedBy: "heuristic"
              });
            }
          }
        }
      }
    }
  }
  return findings;
}
7422
/**
 * Run every architecture-domain heuristic over the bundle and concatenate
 * findings.
 */
function runArchitectureAgent(bundle) {
  return [
    ...detectLayerViolations(bundle),
    ...detectLargeFiles(bundle),
    ...detectCircularImports(bundle)
  ];
}
7429
+
7430
+ // src/review/agents/index.ts
7431
// src/review/agents/index.ts
// Maps each review domain to its agent descriptor (domain, tier, focus
// areas). COMPLIANCE_DESCRIPTOR and BUG_DETECTION_DESCRIPTOR are defined
// earlier in this bundle.
var AGENT_DESCRIPTORS = {
  compliance: COMPLIANCE_DESCRIPTOR,
  bug: BUG_DETECTION_DESCRIPTOR,
  security: SECURITY_DESCRIPTOR,
  architecture: ARCHITECTURE_DESCRIPTOR
};

// src/review/fan-out.ts
// Maps each review domain to the function that runs its heuristics over a
// context bundle. Keys must stay in sync with AGENT_DESCRIPTORS.
var AGENT_RUNNERS = {
  compliance: runComplianceAgent,
  bug: runBugDetectionAgent,
  security: runSecurityAgent,
  architecture: runArchitectureAgent
};
7445
/**
 * Run the agent registered for the bundle's domain and time the run.
 *
 * @param bundle - Context bundle; `bundle.domain` selects the runner.
 * @returns { domain, findings, durationMs }.
 */
async function runAgent(bundle) {
  const startedAt = Date.now();
  const findings = AGENT_RUNNERS[bundle.domain](bundle);
  return {
    domain: bundle.domain,
    findings,
    durationMs: Date.now() - startedAt
  };
}
/**
 * Run all bundles' agents concurrently and collect their results in order.
 *
 * @param options - { bundles }.
 * @returns Array of per-agent results (empty for an empty bundle list).
 */
async function fanOutReview(options) {
  const { bundles } = options;
  if (bundles.length === 0) return [];
  return Promise.all(bundles.map((bundle) => runAgent(bundle)));
}
7462
+
7463
+ // src/review/validate-findings.ts
7464
+ import * as path8 from "path";
7465
// One-step severity downgrade applied by validateFindings to cross-file
// findings whose dependency claim could not be confirmed. "suggestion" is
// already the floor, so it maps to itself.
var DOWNGRADE_MAP = {
  critical: "important",
  important: "suggestion",
  suggestion: "suggestion"
};
7470
+ function extractCrossFileRefs(finding) {
7471
+ const refs = [];
7472
+ const crossFilePattern = /([^\s]+\.(?:ts|tsx|js|jsx))\s+affects\s+([^\s]+\.(?:ts|tsx|js|jsx))/i;
7473
+ for (const ev of finding.evidence) {
7474
+ const match = ev.match(crossFilePattern);
7475
+ if (match) {
7476
+ refs.push({ from: match[1], to: match[2] });
7477
+ }
7478
+ }
7479
+ return refs;
7480
+ }
7481
/**
 * Normalize a file path for comparison: convert backslashes to forward
 * slashes, strip the project root prefix from absolute paths, and drop a
 * leading "./".
 *
 * @param filePath - Path as reported by a finding (may be absolute or use \).
 * @param projectRoot - Project root; trailing slash optional.
 * @returns Root-relative, forward-slashed path when possible.
 */
function normalizePath(filePath, projectRoot) {
  // Forward slashes make Windows and POSIX paths comparable.
  let result = filePath.replace(/\\/g, "/");
  const slashedRoot = projectRoot.replace(/\\/g, "/");
  if (path8.isAbsolute(result)) {
    const rootPrefix = slashedRoot.endsWith("/") ? slashedRoot : `${slashedRoot}/`;
    if (result.startsWith(rootPrefix)) {
      result = result.slice(rootPrefix.length);
    }
  }
  return result.startsWith("./") ? result.slice(2) : result;
}
7496
/**
 * Breadth-first walk of relative `import ... from` edges starting at
 * fromFile, bounded by maxDepth. Extensionless specifiers are assumed to be
 * ".ts". Files absent from fileContents are still marked reachable but not
 * expanded further.
 *
 * @param fromFile - Starting path (key into fileContents).
 * @param fileContents - Map of path -> source text.
 * @param maxDepth - Maximum hop count from fromFile (default 2).
 * @returns Set of reachable paths, excluding fromFile itself.
 */
function followImportChain(fromFile, fileContents, maxDepth = 2) {
  const seen = /* @__PURE__ */ new Set();
  const pending = [{ file: fromFile, depth: 0 }];
  while (pending.length > 0) {
    const { file, depth } = pending.shift();
    if (seen.has(file) || depth > maxDepth) continue;
    seen.add(file);
    const source = fileContents.get(file);
    if (!source) continue;
    const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
    for (const match of source.matchAll(importRegex)) {
      const spec = match[1];
      if (!spec.startsWith(".")) continue;
      // Resolve the specifier relative to the importing file.
      let resolved = path8.join(path8.dirname(file), spec).replace(/\\/g, "/");
      if (!/\.(ts|tsx|js|jsx)$/.test(resolved)) {
        resolved += ".ts";
      }
      resolved = path8.normalize(resolved).replace(/\\/g, "/");
      if (!seen.has(resolved) && depth + 1 <= maxDepth) {
        pending.push({ file: resolved, depth: depth + 1 });
      }
    }
  }
  seen.delete(fromFile);
  return seen;
}
7524
// Post-process raw agent findings:
//  1. Drop findings inside mechanically-excluded regions (checked under the
//     normalized, as-reported, and absolute forms of the file path).
//  2. Findings with no cross-file claim in their evidence pass through
//     unchanged.
//  3. Cross-file claims are confirmed via the dependency graph when one is
//     provided (validatedBy: "graph"); a finding the graph refutes is
//     dropped entirely. Graph errors fall back to step 4.
//  4. Fallback: a bounded (depth-2) import-chain walk over fileContents.
//     Confirmed findings keep their severity; unconfirmed ones are kept but
//     downgraded one step via DOWNGRADE_MAP.
async function validateFindings(options) {
  const { findings, exclusionSet, graph, projectRoot, fileContents } = options;
  const validated = [];
  for (const finding of findings) {
    const normalizedFile = normalizePath(finding.file, projectRoot);
    // Exclusion check under both the normalized and the original path.
    if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
      continue;
    }
    // Third exclusion check under the absolute form of the path.
    const absoluteFile = path8.isAbsolute(finding.file) ? finding.file : path8.join(projectRoot, finding.file).replace(/\\/g, "/");
    if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
      continue;
    }
    const crossFileRefs = extractCrossFileRefs(finding);
    if (crossFileRefs.length === 0) {
      // No cross-file claim: nothing to validate.
      validated.push({ ...finding });
      continue;
    }
    if (graph) {
      try {
        // Every claimed edge must be reachable for graph validation to pass.
        let allReachable = true;
        for (const ref of crossFileRefs) {
          const reachable = await graph.isReachable(ref.from, ref.to);
          if (!reachable) {
            allReachable = false;
            break;
          }
        }
        if (allReachable) {
          validated.push({ ...finding, validatedBy: "graph" });
        }
        // NOTE: a graph-refuted finding is intentionally dropped here.
        continue;
      } catch {
        // Graph errors are non-fatal: fall through to the import-chain
        // heuristic below.
      }
    }
    {
      let chainValidated = false;
      if (fileContents) {
        // Accept the finding if ANY claimed edge is reachable within two
        // import hops.
        for (const ref of crossFileRefs) {
          const normalizedFrom = normalizePath(ref.from, projectRoot);
          const reachable = followImportChain(normalizedFrom, fileContents, 2);
          const normalizedTo = normalizePath(ref.to, projectRoot);
          if (reachable.has(normalizedTo)) {
            chainValidated = true;
            break;
          }
        }
      }
      if (chainValidated) {
        validated.push({ ...finding, validatedBy: "heuristic" });
      } else {
        // Unconfirmed cross-file claim: keep, but downgrade one severity
        // step.
        validated.push({
          ...finding,
          severity: DOWNGRADE_MAP[finding.severity],
          validatedBy: "heuristic"
        });
      }
    }
  }
  return validated;
}
7584
+
7585
+ // src/review/deduplicate-findings.ts
7586
/**
 * True when two inclusive [start, end] line ranges overlap or sit within
 * `gap` lines of each other.
 */
function rangesOverlap(a, b, gap) {
  const [aStart, aEnd] = a;
  const [bStart, bEnd] = b;
  return aStart <= bEnd + gap && bStart <= aEnd + gap;
}
7589
// Merge two findings on the same file into one. Policy: keep the highest
// severity and highest-ranked validatedBy, the longer rationale/suggestion/
// remediation, the union of evidence and references, and the bounding line
// range. The higher-severity finding is "primary" (ties favor `a`) and
// supplies the id, domain, title, and security metadata; the title is
// re-prefixed with the sorted list of both domains.
function mergeFindings(a, b) {
  const highestSeverity = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a.severity : b.severity;
  // Unknown validatedBy values rank 0.
  const highestValidatedBy = (VALIDATED_BY_RANK[a.validatedBy] ?? 0) >= (VALIDATED_BY_RANK[b.validatedBy] ?? 0) ? a.validatedBy : b.validatedBy;
  const longestRationale = a.rationale.length >= b.rationale.length ? a.rationale : b.rationale;
  const evidenceSet = /* @__PURE__ */ new Set([...a.evidence, ...b.evidence]);
  // Bounding range covering both findings.
  const lineRange = [
    Math.min(a.lineRange[0], b.lineRange[0]),
    Math.max(a.lineRange[1], b.lineRange[1])
  ];
  const domains = /* @__PURE__ */ new Set();
  domains.add(a.domain);
  domains.add(b.domain);
  // Longer suggestion wins when both exist; otherwise whichever is present.
  const suggestion = a.suggestion && b.suggestion ? a.suggestion.length >= b.suggestion.length ? a.suggestion : b.suggestion : a.suggestion ?? b.suggestion;
  const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
  const domainList = [...domains].sort().join(", ");
  // Strip any existing "[...]" prefix before re-prefixing with the merged
  // domain list, so repeated merges do not stack brackets.
  const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
  const title = `[${domainList}] ${cleanTitle}`;
  const merged = {
    id: primaryFinding.id,
    file: a.file,
    // same file for all merged findings
    lineRange,
    domain: primaryFinding.domain,
    severity: highestSeverity,
    title,
    rationale: longestRationale,
    evidence: [...evidenceSet],
    validatedBy: highestValidatedBy
  };
  if (suggestion !== void 0) {
    merged.suggestion = suggestion;
  }
  // Optional security metadata: primary finding wins, then a, then b.
  const cweId = primaryFinding.cweId ?? a.cweId ?? b.cweId;
  const owaspCategory = primaryFinding.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
  const confidence = primaryFinding.confidence ?? a.confidence ?? b.confidence;
  const remediation = a.remediation && b.remediation ? a.remediation.length >= b.remediation.length ? a.remediation : b.remediation : a.remediation ?? b.remediation;
  const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
  if (cweId !== void 0) merged.cweId = cweId;
  if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
  if (confidence !== void 0) merged.confidence = confidence;
  if (remediation !== void 0) merged.remediation = remediation;
  if (mergedRefs.length > 0) merged.references = mergedRefs;
  return merged;
}
7633
/**
 * Collapse findings on the same file whose line ranges overlap (within
 * `lineGap` lines) into merged findings. Findings are grouped per file,
 * sorted by start line, then folded left-to-right with mergeFindings.
 *
 * @param options - { findings, lineGap = 3 }.
 * @returns Deduplicated findings, per-file clusters in ascending order.
 */
function deduplicateFindings(options) {
  const { findings, lineGap = 3 } = options;
  if (findings.length === 0) return [];
  const byFile = /* @__PURE__ */ new Map();
  for (const finding of findings) {
    const bucket = byFile.get(finding.file) ?? [];
    bucket.push(finding);
    byFile.set(finding.file, bucket);
  }
  const result = [];
  for (const bucket of byFile.values()) {
    const sorted = [...bucket].sort((x, y) => x.lineRange[0] - y.lineRange[0]);
    // Fold neighbors into `current` while they overlap; flush on each gap.
    let current = sorted[0];
    for (const next of sorted.slice(1)) {
      if (rangesOverlap(current.lineRange, next.lineRange, lineGap)) {
        current = mergeFindings(current, next);
      } else {
        result.push(current);
        current = next;
      }
    }
    result.push(current);
  }
  return result;
}
7664
+
7665
+ // src/review/eligibility-gate.ts
7666
/**
 * Decide whether a PR should be reviewed. Outside CI mode everything is
 * eligible. In CI mode, closed/merged/draft PRs, docs-only changes, and
 * already-reviewed head SHAs are skipped with a reason.
 *
 * @param pr - { state, isDraft, changedFiles, priorReviews, headSha }.
 * @param ciMode - Whether the gate should be enforced.
 * @returns { eligible } or { eligible: false, reason }.
 */
function checkEligibility(pr, ciMode) {
  if (!ciMode) {
    return { eligible: true };
  }
  const skip = (reason) => ({ eligible: false, reason });
  switch (pr.state) {
    case "closed":
      return skip("PR is closed");
    case "merged":
      return skip("PR is merged");
  }
  if (pr.isDraft) {
    return skip("PR is a draft");
  }
  const docsOnly = pr.changedFiles.length > 0 && pr.changedFiles.every((f) => f.endsWith(".md"));
  if (docsOnly) {
    return skip("Trivial change: documentation only");
  }
  const priorMatch = pr.priorReviews.find((r) => r.headSha === pr.headSha);
  if (priorMatch) {
    return skip(`Already reviewed at ${priorMatch.headSha}`);
  }
  return { eligible: true };
}
7688
+
7689
+ // src/review/model-tier-resolver.ts
7690
// Built-in model names per provider, keyed by tier. Explicit config values
// override these; see resolveModelTier.
var DEFAULT_PROVIDER_TIERS = {
  claude: {
    fast: "haiku",
    standard: "sonnet",
    strong: "opus"
  },
  openai: {
    fast: "gpt-4o-mini",
    standard: "gpt-4o",
    strong: "o1"
  },
  gemini: {
    fast: "gemini-flash",
    standard: "gemini-pro",
    strong: "gemini-ultra"
  }
};
/**
 * Resolve the model name for a tier: explicit config wins, then the
 * provider's built-in defaults, then undefined when neither has an entry.
 *
 * @param tier - "fast" | "standard" | "strong".
 * @param config - Optional tier -> model overrides.
 * @param provider - Optional key into DEFAULT_PROVIDER_TIERS.
 * @returns Model name or undefined.
 */
function resolveModelTier(tier, config, provider) {
  const fromConfig = config?.[tier];
  if (fromConfig !== void 0) {
    return fromConfig;
  }
  if (!provider) {
    return void 0;
  }
  return DEFAULT_PROVIDER_TIERS[provider][tier];
}
7721
+
7722
+ // src/review/output/assessment.ts
7723
/**
 * Map the highest severity among the findings to an overall assessment:
 * critical -> "request-changes", important -> "comment", otherwise (or when
 * there are no findings) "approve".
 */
function determineAssessment(findings) {
  if (findings.length === 0) return "approve";
  let maxSeverity = "suggestion";
  for (const finding of findings) {
    if (SEVERITY_RANK[finding.severity] > SEVERITY_RANK[maxSeverity]) {
      maxSeverity = finding.severity;
    }
  }
  const assessmentBySeverity = {
    critical: "request-changes",
    important: "comment",
    suggestion: "approve"
  };
  return assessmentBySeverity[maxSeverity];
}
7740
/**
 * Process exit code for an assessment: 1 only for "request-changes".
 */
function getExitCode(assessment) {
  if (assessment === "request-changes") {
    return 1;
  }
  return 0;
}
7743
+
7744
+ // src/review/output/format-terminal.ts
7745
/**
 * Render one finding as an indented plain-text block for terminal output:
 * tag/title, location, rationale, and (when present) suggestion lines.
 */
function formatFindingBlock(finding) {
  const [startLine, endLine] = finding.lineRange;
  const rows = [
    ` [${finding.domain}] ${finding.title}`,
    ` Location: ${finding.file}:L${startLine}-${endLine}`,
    ` Rationale: ${finding.rationale}`
  ];
  if (finding.suggestion) {
    rows.push(` Suggestion: ${finding.suggestion}`);
  }
  return rows.join("\n");
}
7756
/**
 * Render the full review as plain text: Strengths, Issues grouped by
 * severity (via formatFindingBlock), the overall Assessment, and a one-line
 * summary of issue counts.
 *
 * @param options - { findings, strengths }.
 * @returns Newline-joined report string.
 */
function formatTerminalOutput(options) {
  const { findings, strengths } = options;
  const sections = [];
  sections.push("## Strengths\n");
  if (strengths.length === 0) {
    sections.push(" No specific strengths noted.\n");
  } else {
    for (const strength of strengths) {
      const prefix = strength.file ? `${strength.file}: ` : "";
      sections.push(` + ${prefix}${strength.description}`);
    }
    sections.push("");
  }
  sections.push("## Issues\n");
  let hasIssues = false;
  for (const severity of SEVERITY_ORDER) {
    const group = findings.filter((f) => f.severity === severity);
    if (group.length === 0) continue;
    hasIssues = true;
    sections.push(`### ${SEVERITY_LABELS[severity]} (${group.length})\n`);
    for (const finding of group) {
      sections.push(formatFindingBlock(finding), "");
    }
  }
  if (!hasIssues) {
    sections.push(" No issues found.\n");
  }
  const assessment = determineAssessment(findings);
  const labels = { approve: "Approve", comment: "Comment", "request-changes": "Request Changes" };
  sections.push(`## Assessment: ${labels[assessment]}\n`);
  const countOf = (severity) => findings.filter((f) => f.severity === severity).length;
  const criticalCount = countOf("critical");
  const importantCount = countOf("important");
  const suggestionCount = countOf("suggestion");
  if (findings.length === 0) {
    sections.push(" No issues found. The changes look good.");
  } else {
    const parts = [];
    if (criticalCount > 0) parts.push(`${criticalCount} critical`);
    if (importantCount > 0) parts.push(`${importantCount} important`);
    if (suggestionCount > 0) parts.push(`${suggestionCount} suggestion(s)`);
    sections.push(` Found ${findings.length} issue(s): ${parts.join(", ")}.`);
  }
  return sections.join("\n");
}
7804
+
7805
+ // src/review/output/format-github.ts
7806
// Suggestions shorter than this many lines are rendered as inline GitHub
// ```suggestion blocks; longer ones fall back to a prose description.
var SMALL_SUGGESTION_LINE_LIMIT = 10;
/**
 * Escape angle brackets so finding text cannot inject HTML into a comment.
 */
function sanitizeMarkdown(text) {
  return text.replace(/[<>]/g, (ch) => ch === "<" ? "&lt;" : "&gt;");
}
/**
 * True when a suggestion exists and is short enough for an inline
 * suggestion block.
 */
function isSmallSuggestion(suggestion) {
  if (!suggestion) return false;
  return suggestion.split("\n").length < SMALL_SUGGESTION_LINE_LIMIT;
}
/**
 * Build a GitHub review comment for one finding, anchored to the end line of
 * its range on the RIGHT (new) side of the diff. Small suggestions become
 * one-click applicable ```suggestion blocks; the suggestion body itself is
 * deliberately not escaped since it must be valid code.
 */
function formatGitHubComment(finding) {
  const header = `**${finding.severity.toUpperCase()}** [${finding.domain}] ${sanitizeMarkdown(finding.title)}`;
  let body;
  if (isSmallSuggestion(finding.suggestion)) {
    body = [
      header,
      "",
      sanitizeMarkdown(finding.rationale),
      "",
      "```suggestion",
      finding.suggestion,
      "```"
    ].join("\n");
  } else {
    const parts = [header, "", `**Rationale:** ${sanitizeMarkdown(finding.rationale)}`];
    if (finding.suggestion) {
      parts.push("", `**Suggested approach:** ${sanitizeMarkdown(finding.suggestion)}`);
    }
    body = parts.join("\n");
  }
  return {
    path: finding.file,
    line: finding.lineRange[1],
    // Comment on end line of range
    side: "RIGHT",
    body
  };
}
7844
/**
 * Render the review as a Markdown summary for a GitHub PR comment:
 * Strengths, Issues grouped by severity with code-formatted locations, and
 * the overall Assessment. All finding text passes through sanitizeMarkdown.
 *
 * @param options - { findings, strengths }.
 * @returns Newline-joined Markdown string.
 */
function formatGitHubSummary(options) {
  const { findings, strengths } = options;
  const sections = [];
  sections.push("## Strengths\n");
  if (strengths.length === 0) {
    sections.push("No specific strengths noted.\n");
  } else {
    for (const strength of strengths) {
      const prefix = strength.file ? `**${strength.file}:** ` : "";
      sections.push(`- ${prefix}${sanitizeMarkdown(strength.description)}`);
    }
    sections.push("");
  }
  sections.push("## Issues\n");
  let hasIssues = false;
  for (const severity of SEVERITY_ORDER) {
    const group = findings.filter((f) => f.severity === severity);
    if (group.length === 0) continue;
    hasIssues = true;
    sections.push(`### ${SEVERITY_LABELS[severity]} (${group.length})\n`);
    for (const finding of group) {
      const location = `\`${finding.file}:L${finding.lineRange[0]}-${finding.lineRange[1]}\``;
      sections.push(`- **${sanitizeMarkdown(finding.title)}** at ${location}`);
      sections.push(` ${sanitizeMarkdown(finding.rationale)}`);
      sections.push("");
    }
  }
  if (!hasIssues) {
    sections.push("No issues found.\n");
  }
  const assessment = determineAssessment(findings);
  const labels = { approve: "Approve", comment: "Comment", "request-changes": "Request Changes" };
  sections.push(`## Assessment: ${labels[assessment]}`);
  return sections.join("\n");
}
7880
+
7881
+ // src/review/pipeline-orchestrator.ts
7882
// End-to-end review pipeline. Stages:
//   1. CI eligibility gate (skips closed/merged/draft/docs-only/dup PRs).
//   2. Mechanical checks (lint/etc.), building an exclusion set; a
//      stopPipeline result short-circuits with "request-changes".
//   3. Context scoping into per-domain bundles (with a minimal fallback on
//      error), then fan-out to the domain agents.
//   4. Finding validation (exclusions + cross-file confirmation),
//      deduplication, assessment, and terminal/GitHub formatting.
async function runReviewPipeline(options) {
  const {
    projectRoot,
    diff,
    commitMessage,
    flags,
    graph,
    prMetadata,
    conventionFiles,
    checkDepsOutput,
    config = {},
    commitHistory
  } = options;
  // Stage 1: in CI mode, bail out early for ineligible PRs (exit code 0).
  if (flags.ci && prMetadata) {
    const eligibility = checkEligibility(prMetadata, true);
    if (!eligibility.eligible) {
      return {
        skipped: true,
        ...eligibility.reason != null ? { skipReason: eligibility.reason } : {},
        stoppedByMechanical: false,
        findings: [],
        strengths: [],
        terminalOutput: `Review skipped: ${eligibility.reason ?? "ineligible"}`,
        githubComments: [],
        exitCode: 0
      };
    }
  }
  // Stage 2: mechanical checks. Any failure path degrades to an empty
  // exclusion set rather than aborting the review.
  let mechanicalResult;
  let exclusionSet;
  if (flags.noMechanical) {
    exclusionSet = buildExclusionSet([]);
  } else {
    try {
      const mechResult = await runMechanicalChecks({
        projectRoot,
        config,
        changedFiles: diff.changedFiles
      });
      if (mechResult.ok) {
        mechanicalResult = mechResult.value;
        exclusionSet = buildExclusionSet(mechResult.value.findings);
        if (mechResult.value.stopPipeline) {
          // Hard stop: render error-level mechanical findings and request
          // changes without running the AI agents.
          const mechFindings = mechResult.value.findings.filter((f) => f.severity === "error").map((f) => ` x ${f.tool}: ${f.file}${f.line ? `:${f.line}` : ""} - ${f.message}`).join("\n");
          const terminalOutput2 = [
            "## Strengths\n",
            " No AI review performed (mechanical checks failed).\n",
            "## Issues\n",
            "### Critical (mechanical)\n",
            mechFindings,
            "\n## Assessment: Request Changes\n",
            " Mechanical checks must pass before AI review."
          ].join("\n");
          return {
            skipped: false,
            stoppedByMechanical: true,
            assessment: "request-changes",
            findings: [],
            strengths: [],
            terminalOutput: terminalOutput2,
            githubComments: [],
            exitCode: 1,
            mechanicalResult
          };
        }
      } else {
        exclusionSet = buildExclusionSet([]);
      }
    } catch {
      // Mechanical tooling errors are non-fatal; continue with no exclusions.
      exclusionSet = buildExclusionSet([]);
    }
  }
  // Stage 3: scope context per domain; on failure, fall back to minimal
  // empty bundles so the agents still run.
  let contextBundles;
  try {
    contextBundles = await scopeContext({
      projectRoot,
      diff,
      commitMessage,
      ...graph != null ? { graph } : {},
      ...conventionFiles != null ? { conventionFiles } : {},
      ...checkDepsOutput != null ? { checkDepsOutput } : {},
      ...commitHistory != null ? { commitHistory } : {}
    });
  } catch {
    contextBundles = ["compliance", "bug", "security", "architecture"].map((domain) => ({
      domain,
      changeType: "feature",
      changedFiles: [],
      contextFiles: [],
      commitHistory: [],
      diffLines: diff.totalDiffLines,
      contextLines: 0
    }));
  }
  const agentResults = await fanOutReview({ bundles: contextBundles });
  const rawFindings = agentResults.flatMap((r) => r.findings);
  // Stage 4: validate (diff contents feed the import-chain fallback),
  // deduplicate, assess, and format.
  const fileContents = /* @__PURE__ */ new Map();
  for (const [file, content] of diff.fileDiffs) {
    fileContents.set(file, content);
  }
  const validatedFindings = await validateFindings({
    findings: rawFindings,
    exclusionSet,
    ...graph != null ? { graph } : {},
    projectRoot,
    fileContents
  });
  const dedupedFindings = deduplicateFindings({ findings: validatedFindings });
  // NOTE(review): strengths are currently never populated by this pipeline.
  const strengths = [];
  const assessment = determineAssessment(dedupedFindings);
  const exitCode = getExitCode(assessment);
  const terminalOutput = formatTerminalOutput({
    findings: dedupedFindings,
    strengths
  });
  // Inline PR comments are only produced when the --comment flag is set.
  let githubComments = [];
  if (flags.comment) {
    githubComments = dedupedFindings.map((f) => formatGitHubComment(f));
  }
  return {
    skipped: false,
    stoppedByMechanical: false,
    assessment,
    findings: dedupedFindings,
    strengths,
    terminalOutput,
    githubComments,
    exitCode,
    ...mechanicalResult !== void 0 ? { mechanicalResult } : {}
  };
}
8013
+
8014
+ // src/roadmap/parse.ts
8015
+ import { Ok as Ok2, Err as Err2 } from "@harness-engineering/types";
8016
// The closed set of statuses a roadmap feature may carry; parseFeatureFields
// rejects anything not listed here.
var VALID_STATUSES = /* @__PURE__ */ new Set(
  ["backlog", "planned", "in-progress", "done", "blocked"]
);
// Em dash — the placeholder written for an "empty" roadmap field value.
var EM_DASH = "\u2014";
8024
// Parse a ROADMAP.md document into { frontmatter, milestones }.
// Returns Err2 on a missing/malformed frontmatter block or on any
// downstream parse failure (the first error short-circuits).
function parseRoadmap(markdown) {
  // Frontmatter must open the document: ---\n ... \n---
  const frontmatterMatch = markdown.match(/^---\n([\s\S]*?)\n---/);
  if (frontmatterMatch === null) {
    return Err2(new Error("Missing or malformed YAML frontmatter"));
  }
  const [fullMatch, rawFrontmatter] = frontmatterMatch;
  const frontmatter = parseFrontmatter(rawFrontmatter);
  if (!frontmatter.ok) return frontmatter;
  // Everything after the frontmatter block is the milestone body.
  const milestones = parseMilestones(markdown.slice(fullMatch.length));
  if (!milestones.ok) return milestones;
  return Ok2({ frontmatter: frontmatter.value, milestones: milestones.value });
}
8039
// Minimal "key: value" parser for the roadmap frontmatter block.
// Lines without a colon are ignored; a value keeps everything after the
// FIRST colon, so ISO timestamps like 2024-01-01T00:00:00 survive intact.
// Requires project, version (integer), last_synced and last_manual_edit;
// returns Err2 when any is missing or version is not numeric.
function parseFrontmatter(raw) {
  const map = /* @__PURE__ */ new Map();
  for (const line of raw.split("\n")) {
    const idx = line.indexOf(":");
    if (idx === -1) continue;
    map.set(line.slice(0, idx).trim(), line.slice(idx + 1).trim());
  }
  const project = map.get("project");
  const versionStr = map.get("version");
  const lastSynced = map.get("last_synced");
  const lastManualEdit = map.get("last_manual_edit");
  if (!project || !versionStr || !lastSynced || !lastManualEdit) {
    return Err2(
      new Error(
        "Frontmatter missing required fields: project, version, last_synced, last_manual_edit"
      )
    );
  }
  const version = Number.parseInt(versionStr, 10);
  // Number.isNaN instead of the coercing global isNaN: we only want to
  // reject an actual failed parse, never a coerced non-number.
  if (Number.isNaN(version)) {
    return Err2(new Error("Frontmatter version must be a number"));
  }
  return Ok2({ project, version, lastSynced, lastManualEdit });
}
8066
// Split the roadmap body on H2 headings and parse each section's features.
// "## Backlog" is the special backlog bucket; other headings have an
// optional "Milestone: " prefix stripped from the milestone name.
function parseMilestones(body) {
  const headingPattern = /^## (.+)$/gm;
  const sections = [];
  for (let m = headingPattern.exec(body); m !== null; m = headingPattern.exec(body)) {
    sections.push({ heading: m[1], startIndex: m.index });
  }
  const milestones = [];
  for (const [i, section] of sections.entries()) {
    const end = i + 1 < sections.length ? sections[i + 1].startIndex : body.length;
    // Skip "## " (3 chars), the heading text, and its trailing newline (+1).
    const start = section.startIndex + section.heading.length + 4;
    const parsed = parseFeatures(body.slice(start, end));
    if (!parsed.ok) return parsed;
    const isBacklog = section.heading === "Backlog";
    milestones.push({
      name: isBacklog ? "Backlog" : section.heading.replace(/^Milestone:\s*/, ""),
      isBacklog,
      features: parsed.value
    });
  }
  return Ok2(milestones);
}
8090
// Split a milestone section on "### Feature: <name>" headings and parse
// the field list under each one. Fails fast on the first invalid feature.
function parseFeatures(sectionBody) {
  const headerPattern = /^### Feature: (.+)$/gm;
  const headers = [];
  let m;
  while ((m = headerPattern.exec(sectionBody)) !== null) {
    headers.push({ name: m[1], startIndex: m.index });
  }
  const features = [];
  for (let i = 0; i < headers.length; i++) {
    const { name, startIndex } = headers[i];
    const end = i + 1 < headers.length ? headers[i + 1].startIndex : sectionBody.length;
    // Body starts immediately after the heading text (leading newline kept).
    const bodyStart = startIndex + `### Feature: ${name}`.length;
    const result = parseFeatureFields(name, sectionBody.slice(bodyStart, end));
    if (!result.ok) return result;
    features.push(result.value);
  }
  return Ok2(features);
}
8111
// Parse the "- **Field:** value" bullet list under a feature heading.
// Status is mandatory and must be in VALID_STATUSES; Spec/Plans/Blocked by
// treat the em dash as "empty"; Summary defaults to "".
// Fix: comma-separated lists now drop empty segments (e.g. a trailing
// comma). Previously an empty plan string leaked through and, because the
// sync code matches plans with planPath.endsWith(p), an empty string
// matched EVERY phase.
function parseFeatureFields(name, body) {
  const fieldMap = /* @__PURE__ */ new Map();
  const fieldPattern = /^- \*\*(.+?):\*\* (.+)$/gm;
  let match;
  while ((match = fieldPattern.exec(body)) !== null) {
    fieldMap.set(match[1], match[2]);
  }
  const statusRaw = fieldMap.get("Status");
  if (!statusRaw || !VALID_STATUSES.has(statusRaw)) {
    return Err2(
      new Error(
        `Feature "${name}" has invalid status: "${statusRaw ?? "(missing)"}". Valid statuses: ${[...VALID_STATUSES].join(", ")}`
      )
    );
  }
  const status = statusRaw;
  const specRaw = fieldMap.get("Spec") ?? EM_DASH;
  const spec = specRaw === EM_DASH ? null : specRaw;
  const plansRaw = fieldMap.get("Plans") ?? EM_DASH;
  const plans = plansRaw === EM_DASH ? [] : plansRaw.split(",").map((p) => p.trim()).filter((p) => p.length > 0);
  const blockedByRaw = fieldMap.get("Blocked by") ?? EM_DASH;
  const blockedBy = blockedByRaw === EM_DASH ? [] : blockedByRaw.split(",").map((b) => b.trim()).filter((b) => b.length > 0);
  const summary = fieldMap.get("Summary") ?? "";
  return Ok2({ name, status, spec, plans, blockedBy, summary });
}
8136
+
8137
+ // src/roadmap/serialize.ts
8138
// Em dash placeholder the serializer writes for empty Spec/Plans/Blocked-by
// fields (mirrors EM_DASH in the parser).
var EM_DASH2 = "\u2014";
8139
// Render a roadmap object back to markdown: frontmatter block, a
// "# Project Roadmap" title, then each milestone and its features,
// each preceded by a blank line. Ends with a trailing newline.
function serializeRoadmap(roadmap) {
  const { frontmatter, milestones } = roadmap;
  const out = [
    "---",
    `project: ${frontmatter.project}`,
    `version: ${frontmatter.version}`,
    `last_synced: ${frontmatter.lastSynced}`,
    `last_manual_edit: ${frontmatter.lastManualEdit}`,
    "---",
    "",
    "# Project Roadmap"
  ];
  for (const milestone of milestones) {
    out.push("", serializeMilestoneHeading(milestone));
    for (const feature of milestone.features) {
      out.push("", ...serializeFeature(feature));
    }
  }
  out.push("");
  return out.join("\n");
}
8160
// H2 heading for a milestone; the backlog bucket has a fixed heading.
function serializeMilestoneHeading(milestone) {
  if (milestone.isBacklog) {
    return "## Backlog";
  }
  return `## Milestone: ${milestone.name}`;
}
8163
// Render one feature as its "### Feature:" heading plus the field bullets.
// Empty Spec (null) and empty Plans/Blocked-by lists serialize as an em dash.
function serializeFeature(feature) {
  const joinOrDash = (items) => (items.length > 0 ? items.join(", ") : EM_DASH2);
  return [
    `### Feature: ${feature.name}`,
    `- **Status:** ${feature.status}`,
    `- **Spec:** ${feature.spec ?? EM_DASH2}`,
    `- **Plans:** ${joinOrDash(feature.plans)}`,
    `- **Blocked by:** ${joinOrDash(feature.blockedBy)}`,
    `- **Summary:** ${feature.summary}`
  ];
}
8176
+
8177
+ // src/roadmap/sync.ts
8178
+ import * as fs7 from "fs";
8179
+ import * as path9 from "path";
8180
+ import { Ok as Ok3 } from "@harness-engineering/types";
8181
// Infer a roadmap feature's status from on-disk harness state.
// Precedence:
//   1. Any blocker missing or not "done"            -> "blocked"
//   2. No linked plans                              -> null (nothing to infer)
//   3. Task/phase statuses gathered from state files:
//        all "complete" -> "done"; any started -> "in-progress"; else null.
// All fs/JSON failures are swallowed on purpose: sync is best-effort and
// must never fail the caller.
function inferStatus(feature, projectPath, allFeatures) {
  if (feature.blockedBy.length > 0) {
    // Blocker names are matched case-insensitively against feature names;
    // an unknown blocker counts as "not done".
    const blockerNotDone = feature.blockedBy.some((blockerName) => {
      const blocker = allFeatures.find((f) => f.name.toLowerCase() === blockerName.toLowerCase());
      return !blocker || blocker.status !== "done";
    });
    if (blockerNotDone) return "blocked";
  }
  if (feature.plans.length === 0) return null;
  const allTaskStatuses = [];
  // Only trust the root state file when at most one feature has plans;
  // otherwise root progress can't be attributed to this feature.
  const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
  const useRootState = featuresWithPlans.length <= 1;
  if (useRootState) {
    const rootStatePath = path9.join(projectPath, ".harness", "state.json");
    if (fs7.existsSync(rootStatePath)) {
      try {
        const raw = fs7.readFileSync(rootStatePath, "utf-8");
        const state = JSON.parse(raw);
        if (state.progress) {
          // presumably progress maps task-id -> status string — TODO confirm
          for (const status of Object.values(state.progress)) {
            allTaskStatuses.push(status);
          }
        }
      } catch {
      }
    }
  }
  // Scan every session's autopilot state for phases linked to this
  // feature's plans (exact path match or suffix match).
  const sessionsDir = path9.join(projectPath, ".harness", "sessions");
  if (fs7.existsSync(sessionsDir)) {
    try {
      const sessionDirs = fs7.readdirSync(sessionsDir, { withFileTypes: true });
      for (const entry of sessionDirs) {
        if (!entry.isDirectory()) continue;
        const autopilotPath = path9.join(sessionsDir, entry.name, "autopilot-state.json");
        if (!fs7.existsSync(autopilotPath)) continue;
        try {
          const raw = fs7.readFileSync(autopilotPath, "utf-8");
          const autopilot = JSON.parse(raw);
          if (!autopilot.phases) continue;
          const linkedPhases = autopilot.phases.filter(
            (phase) => phase.planPath ? feature.plans.some((p) => p === phase.planPath || phase.planPath.endsWith(p)) : false
          );
          if (linkedPhases.length > 0) {
            // Normalize phase status to the three buckets used below;
            // anything that is neither complete nor pending counts as started.
            for (const phase of linkedPhases) {
              if (phase.status === "complete") {
                allTaskStatuses.push("complete");
              } else if (phase.status === "pending") {
                allTaskStatuses.push("pending");
              } else {
                allTaskStatuses.push("in_progress");
              }
            }
          }
        } catch {
        }
      }
    } catch {
    }
  }
  if (allTaskStatuses.length === 0) return null;
  const allComplete = allTaskStatuses.every((s) => s === "complete");
  if (allComplete) return "done";
  const anyStarted = allTaskStatuses.some((s) => s === "in_progress" || s === "complete");
  if (anyStarted) return "in-progress";
  return null;
}
8247
// Compute the status changes a sync would apply to the roadmap.
// If the file was manually edited after the last sync, manual edits win and
// no changes are proposed unless forceSync is set. Returns Ok3 with the
// (possibly empty) change list; never mutates the roadmap itself.
function syncRoadmap(options) {
  const { projectPath, roadmap, forceSync } = options;
  const { lastManualEdit, lastSynced } = roadmap.frontmatter;
  const manuallyEdited = new Date(lastManualEdit) > new Date(lastSynced);
  const changes = [];
  if (manuallyEdited && !forceSync) {
    // Manual edits take precedence: propose nothing.
    return Ok3(changes);
  }
  const allFeatures = roadmap.milestones.flatMap((m) => m.features);
  for (const feature of allFeatures) {
    const inferred = inferStatus(feature, projectPath, allFeatures);
    if (inferred === null || inferred === feature.status) continue;
    changes.push({ feature: feature.name, from: feature.status, to: inferred });
  }
  return Ok3(changes);
}
8266
+
8267
+ // src/interaction/types.ts
8268
+ import { z as z5 } from "zod";
8269
// Discriminator for the three interaction payload shapes defined below.
var InteractionTypeSchema = z5.enum(["question", "confirmation", "transition"]);
// A question for the user: free text, optionally with fixed choices and a
// suggested default answer.
var QuestionSchema = z5.object({
  text: z5.string(),
  options: z5.array(z5.string()).optional(),
  default: z5.string().optional()
});
// A confirmation prompt plus the context explaining why it is asked.
var ConfirmationSchema = z5.object({
  text: z5.string(),
  context: z5.string()
});
// Announcement that a phase finished: what completed, the suggested next
// step and why, produced artifacts, and whether the user must confirm.
var TransitionSchema = z5.object({
  completedPhase: z5.string(),
  suggestedNext: z5.string(),
  reason: z5.string(),
  artifacts: z5.array(z5.string()),
  requiresConfirmation: z5.boolean(),
  summary: z5.string()
});
// Input for emitting an interaction to `path`. Only the payload matching
// `type` is meant to be present; the schema itself does not enforce that
// pairing (NOTE(review): confirm callers validate type/payload agreement).
var EmitInteractionInputSchema = z5.object({
  path: z5.string(),
  type: InteractionTypeSchema,
  stream: z5.string().optional(),
  question: QuestionSchema.optional(),
  confirmation: ConfirmationSchema.optional(),
  transition: TransitionSchema.optional()
});
8295
+
8296
+ // src/update-checker.ts
8297
+ import * as fs8 from "fs";
8298
+ import * as path10 from "path";
8299
+ import * as os from "os";
8300
+ import { spawn } from "child_process";
8301
// Absolute path of the cached update-check state file.
// $HOME is preferred over os.homedir() so tests/CI can redirect it.
function getStatePath() {
  const homeDir = process.env["HOME"] || os.homedir();
  const stateFile = path10.join(homeDir, ".harness", "update-check.json");
  return stateFile;
}
8305
// Whether update checking is allowed: disabled by the
// HARNESS_NO_UPDATE_CHECK=1 env var or a configured interval of exactly 0.
function isUpdateCheckEnabled(configInterval) {
  const optedOut = process.env["HARNESS_NO_UPDATE_CHECK"] === "1";
  return !optedOut && configInterval !== 0;
}
8310
// True when a new update check is due: either no check was ever recorded,
// or the configured interval has elapsed since the last one.
function shouldRunCheck(state, intervalMs) {
  if (state === null) {
    return true;
  }
  const nextDueAt = state.lastCheckTime + intervalMs;
  return Date.now() >= nextDueAt;
}
8314
// Read and validate the cached update-check state file.
// Returns null for a missing, unreadable, or malformed file; latestVersion
// is normalized to null when it is not a string.
function readCheckState() {
  try {
    const parsed = JSON.parse(fs8.readFileSync(getStatePath(), "utf-8"));
    const looksValid =
      typeof parsed === "object" &&
      parsed !== null &&
      "lastCheckTime" in parsed &&
      typeof parsed.lastCheckTime === "number" &&
      "currentVersion" in parsed &&
      typeof parsed.currentVersion === "string";
    if (!looksValid) {
      return null;
    }
    return {
      lastCheckTime: parsed.lastCheckTime,
      latestVersion: typeof parsed.latestVersion === "string" ? parsed.latestVersion : null,
      currentVersion: parsed.currentVersion
    };
  } catch {
    return null;
  }
}
8331
// Fire-and-forget version check: spawn a detached node child that asks npm
// for the latest published CLI version and writes the result to the state
// file. The parent never waits on it; every failure (spawn or inside the
// child) is silently ignored so startup is never slowed or broken.
function spawnBackgroundCheck(currentVersion) {
  const statePath = getStatePath();
  const stateDir = path10.dirname(statePath);
  // The child's program is built as a string and run via `node -e`.
  // statePath/stateDir/currentVersion are baked in with JSON.stringify so
  // they are safely quoted inside the generated source. The write goes to a
  // random temp file first, then renameSync — an atomic replace, so readers
  // never observe a half-written state file.
  const script = `
const { execSync } = require('child_process');
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
try {
  const latest = execSync('npm view @harness-engineering/cli dist-tags.latest', {
    encoding: 'utf-8',
    timeout: 15000,
    stdio: ['ignore', 'pipe', 'ignore'],
  }).trim();
  const stateDir = ${JSON.stringify(stateDir)};
  const statePath = ${JSON.stringify(statePath)};
  fs.mkdirSync(stateDir, { recursive: true });
  const tmpFile = path.join(stateDir, '.update-check-' + crypto.randomBytes(4).toString('hex') + '.tmp');
  fs.writeFileSync(tmpFile, JSON.stringify({
    lastCheckTime: Date.now(),
    latestVersion: latest || null,
    currentVersion: ${JSON.stringify(currentVersion)},
  }), { mode: 0o644 });
  fs.renameSync(tmpFile, statePath);
} catch (_) {}
`.trim();
  try {
    // detached + unref: the parent process may exit while the check runs.
    const child = spawn(process.execPath, ["-e", script], {
      detached: true,
      stdio: "ignore"
    });
    child.unref();
  } catch {
  }
}
8366
// Compare two dotted version strings numerically.
// Returns 1 if a > b, -1 if a < b, 0 if equal. Missing components count as
// 0 ("1.0" == "1.0.0"). Generalized from a fixed 3-component loop to cover
// versions with more components ("1.2.3.1" > "1.2.3"); behavior for
// standard semver triples is unchanged.
// Non-numeric components (e.g. prerelease tags) become NaN, whose
// comparisons are all false — such components compare as equal, matching
// the original behavior.
function compareVersions(a, b) {
  const pa = a.split(".").map(Number);
  const pb = b.split(".").map(Number);
  const len = Math.max(pa.length, pb.length, 3);
  for (let i = 0; i < len; i++) {
    const na = pa[i] ?? 0;
    const nb = pb[i] ?? 0;
    if (na > nb) return 1;
    if (na < nb) return -1;
  }
  return 0;
}
8377
// Build the "update available" banner, or return null when up to date.
// Reads only the cached state written by spawnBackgroundCheck — this
// function itself never touches the network.
function getUpdateNotification(currentVersion) {
  const state = readCheckState();
  if (!state) return null;
  if (!state.latestVersion) return null;
  // <= 0: the cached latest version is not newer than what we're running.
  if (compareVersions(state.latestVersion, currentVersion) <= 0) return null;
  // NOTE(review): the literal's second line (including its leading
  // whitespace) is part of the user-facing message — confirm exact indent.
  return `Update available: v${currentVersion} -> v${state.latestVersion}
  Run "harness update" to upgrade.`;
}
8385
+
4342
8386
  // src/index.ts
4343
- var VERSION = "0.6.0";
8387
+ var VERSION = "0.8.0";
4344
8388
  export {
8389
+ AGENT_DESCRIPTORS,
8390
+ ARCHITECTURE_DESCRIPTOR,
4345
8391
  AgentActionEmitter,
8392
+ BUG_DETECTION_DESCRIPTOR,
8393
+ BaselineManager,
8394
+ BenchmarkRunner,
8395
+ COMPLIANCE_DESCRIPTOR,
4346
8396
  ChecklistBuilder,
8397
+ ConfirmationSchema,
4347
8398
  ConsoleSink,
8399
+ CriticalPathResolver,
8400
+ DEFAULT_PROVIDER_TIERS,
8401
+ DEFAULT_SECURITY_CONFIG,
4348
8402
  DEFAULT_STATE,
8403
+ DEFAULT_STREAM_INDEX,
8404
+ EmitInteractionInputSchema,
4349
8405
  EntropyAnalyzer,
4350
8406
  EntropyConfigSchema,
8407
+ ExclusionSet,
4351
8408
  FailureEntrySchema,
4352
8409
  FileSink,
4353
8410
  GateConfigSchema,
4354
8411
  GateResultSchema,
4355
8412
  HandoffSchema,
4356
8413
  HarnessStateSchema,
8414
+ InteractionTypeSchema,
4357
8415
  NoOpExecutor,
4358
8416
  NoOpSink,
4359
8417
  NoOpTelemetryAdapter,
4360
8418
  PatternConfigSchema,
8419
+ QuestionSchema,
4361
8420
  REQUIRED_SECTIONS,
8421
+ RegressionDetector,
8422
+ RuleRegistry,
8423
+ SECURITY_DESCRIPTOR,
8424
+ SecurityConfigSchema,
8425
+ SecurityScanner,
8426
+ StreamIndexSchema,
8427
+ StreamInfoSchema,
8428
+ TransitionSchema,
4362
8429
  TypeScriptParser,
4363
8430
  VERSION,
4364
8431
  analyzeDiff,
4365
8432
  appendFailure,
4366
8433
  appendLearning,
4367
8434
  applyFixes,
8435
+ applyHotspotDowngrade,
4368
8436
  archiveFailures,
8437
+ archiveStream,
4369
8438
  buildDependencyGraph,
8439
+ buildExclusionSet,
4370
8440
  buildSnapshot,
4371
8441
  checkDocCoverage,
8442
+ checkEligibility,
8443
+ classifyFinding,
4372
8444
  configureFeedback,
4373
8445
  contextBudget,
4374
8446
  contextFilter,
4375
8447
  createBoundaryValidator,
8448
+ createCommentedCodeFixes,
4376
8449
  createError,
4377
8450
  createFixes,
8451
+ createForbiddenImportFixes,
8452
+ createOrphanedDepFixes,
4378
8453
  createParseError,
4379
8454
  createSelfReview,
8455
+ createStream,
8456
+ cryptoRules,
8457
+ deduplicateCleanupFindings,
8458
+ deduplicateFindings,
4380
8459
  defineLayer,
8460
+ deserializationRules,
8461
+ detectChangeType,
4381
8462
  detectCircularDeps,
4382
8463
  detectCircularDepsInFiles,
8464
+ detectComplexityViolations,
8465
+ detectCouplingViolations,
4383
8466
  detectDeadCode,
4384
8467
  detectDocDrift,
4385
8468
  detectPatternViolations,
8469
+ detectSizeBudgetViolations,
8470
+ detectStack,
8471
+ determineAssessment,
4386
8472
  executeWorkflow,
8473
+ expressRules,
4387
8474
  extractMarkdownLinks,
4388
8475
  extractSections,
8476
+ fanOutReview,
8477
+ formatFindingBlock,
8478
+ formatGitHubComment,
8479
+ formatGitHubSummary,
8480
+ formatTerminalOutput,
4389
8481
  generateAgentsMap,
4390
8482
  generateSuggestions,
4391
8483
  getActionEmitter,
8484
+ getExitCode,
4392
8485
  getFeedbackConfig,
4393
8486
  getPhaseCategories,
8487
+ getStreamForBranch,
8488
+ getUpdateNotification,
8489
+ goRules,
8490
+ injectionRules,
8491
+ isSmallSuggestion,
8492
+ isUpdateCheckEnabled,
8493
+ listStreams,
4394
8494
  loadFailures,
4395
8495
  loadHandoff,
4396
8496
  loadRelevantLearnings,
4397
8497
  loadState,
8498
+ loadStreamIndex,
4398
8499
  logAgentAction,
8500
+ migrateToStreams,
8501
+ networkRules,
8502
+ nodeRules,
4399
8503
  parseDiff,
8504
+ parseRoadmap,
8505
+ parseSecurityConfig,
8506
+ parseSize,
8507
+ pathTraversalRules,
4400
8508
  previewFix,
8509
+ reactRules,
8510
+ readCheckState,
4401
8511
  requestMultiplePeerReviews,
4402
8512
  requestPeerReview,
4403
8513
  resetFeedbackConfig,
4404
8514
  resolveFileToLayer,
8515
+ resolveModelTier,
8516
+ resolveRuleSeverity,
8517
+ resolveStreamPath,
8518
+ runArchitectureAgent,
8519
+ runBugDetectionAgent,
4405
8520
  runCIChecks,
8521
+ runComplianceAgent,
8522
+ runMechanicalChecks,
4406
8523
  runMechanicalGate,
4407
8524
  runMultiTurnPipeline,
4408
8525
  runPipeline,
8526
+ runReviewPipeline,
8527
+ runSecurityAgent,
4409
8528
  saveHandoff,
4410
8529
  saveState,
8530
+ saveStreamIndex,
8531
+ scopeContext,
8532
+ secretRules,
8533
+ serializeRoadmap,
8534
+ setActiveStream,
8535
+ shouldRunCheck,
8536
+ spawnBackgroundCheck,
8537
+ syncRoadmap,
8538
+ touchStream,
4411
8539
  trackAction,
4412
8540
  validateAgentsMap,
4413
8541
  validateBoundaries,
@@ -4415,6 +8543,8 @@ export {
4415
8543
  validateConfig,
4416
8544
  validateDependencies,
4417
8545
  validateFileStructure,
8546
+ validateFindings,
4418
8547
  validateKnowledgeMap,
4419
- validatePatternConfig
8548
+ validatePatternConfig,
8549
+ xssRules
4420
8550
  };