@harness-engineering/core 0.8.0 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -18,17 +18,17 @@ import { promisify } from "util";
18
18
  import { glob } from "glob";
19
19
  var accessAsync = promisify(access);
20
20
  var readFileAsync = promisify(readFile);
21
- async function fileExists(path3) {
21
+ async function fileExists(path11) {
22
22
  try {
23
- await accessAsync(path3, constants.F_OK);
23
+ await accessAsync(path11, constants.F_OK);
24
24
  return true;
25
25
  } catch {
26
26
  return false;
27
27
  }
28
28
  }
29
- async function readFileContent(path3) {
29
+ async function readFileContent(path11) {
30
30
  try {
31
- const content = await readFileAsync(path3, "utf-8");
31
+ const content = await readFileAsync(path11, "utf-8");
32
32
  return Ok(content);
33
33
  } catch (error) {
34
34
  return Err(error);
@@ -76,15 +76,15 @@ function validateConfig(data, schema) {
76
76
  let message = "Configuration validation failed";
77
77
  const suggestions = [];
78
78
  if (firstError) {
79
- const path3 = firstError.path.join(".");
80
- const pathDisplay = path3 ? ` at "${path3}"` : "";
79
+ const path11 = firstError.path.join(".");
80
+ const pathDisplay = path11 ? ` at "${path11}"` : "";
81
81
  if (firstError.code === "invalid_type") {
82
82
  const received = firstError.received;
83
83
  const expected = firstError.expected;
84
84
  if (received === "undefined") {
85
85
  code = "MISSING_FIELD";
86
86
  message = `Missing required field${pathDisplay}: ${firstError.message}`;
87
- suggestions.push(`Field "${path3}" is required and must be of type "${expected}"`);
87
+ suggestions.push(`Field "${path11}" is required and must be of type "${expected}"`);
88
88
  } else {
89
89
  code = "INVALID_TYPE";
90
90
  message = `Invalid type${pathDisplay}: ${firstError.message}`;
@@ -297,30 +297,30 @@ function extractSections(content) {
297
297
  return result;
298
298
  });
299
299
  }
300
- function isExternalLink(path3) {
301
- return path3.startsWith("http://") || path3.startsWith("https://") || path3.startsWith("#") || path3.startsWith("mailto:");
300
+ function isExternalLink(path11) {
301
+ return path11.startsWith("http://") || path11.startsWith("https://") || path11.startsWith("#") || path11.startsWith("mailto:");
302
302
  }
303
303
  function resolveLinkPath(linkPath, baseDir) {
304
304
  return linkPath.startsWith(".") ? join(baseDir, linkPath) : linkPath;
305
305
  }
306
- async function validateAgentsMap(path3 = "./AGENTS.md") {
306
+ async function validateAgentsMap(path11 = "./AGENTS.md") {
307
307
  console.warn(
308
308
  "[harness] validateAgentsMap() is deprecated. Use graph-based validation via Assembler.checkCoverage() from @harness-engineering/graph"
309
309
  );
310
- const contentResult = await readFileContent(path3);
310
+ const contentResult = await readFileContent(path11);
311
311
  if (!contentResult.ok) {
312
312
  return Err(
313
313
  createError(
314
314
  "PARSE_ERROR",
315
315
  `Failed to read AGENTS.md: ${contentResult.error.message}`,
316
- { path: path3 },
316
+ { path: path11 },
317
317
  ["Ensure the file exists", "Check file permissions"]
318
318
  )
319
319
  );
320
320
  }
321
321
  const content = contentResult.value;
322
322
  const sections = extractSections(content);
323
- const baseDir = dirname(path3);
323
+ const baseDir = dirname(path11);
324
324
  const sectionTitles = sections.map((s) => s.title);
325
325
  const missingSections = REQUIRED_SECTIONS.filter(
326
326
  (required) => !sectionTitles.some((title) => title.toLowerCase().includes(required.toLowerCase()))
@@ -462,8 +462,8 @@ async function checkDocCoverage(domain, options = {}) {
462
462
 
463
463
  // src/context/knowledge-map.ts
464
464
  import { join as join2, basename as basename2, relative as relative2 } from "path";
465
- function suggestFix(path3, existingFiles) {
466
- const targetName = basename2(path3).toLowerCase();
465
+ function suggestFix(path11, existingFiles) {
466
+ const targetName = basename2(path11).toLowerCase();
467
467
  const similar = existingFiles.find((file) => {
468
468
  const fileName = basename2(file).toLowerCase();
469
469
  return fileName.includes(targetName) || targetName.includes(fileName);
@@ -471,7 +471,7 @@ function suggestFix(path3, existingFiles) {
471
471
  if (similar) {
472
472
  return `Did you mean "${similar}"?`;
473
473
  }
474
- return `Create the file "${path3}" or remove the link`;
474
+ return `Create the file "${path11}" or remove the link`;
475
475
  }
476
476
  async function validateKnowledgeMap(rootDir = process.cwd()) {
477
477
  console.warn(
@@ -1065,8 +1065,8 @@ function createBoundaryValidator(schema, name) {
1065
1065
  return Ok(result.data);
1066
1066
  }
1067
1067
  const suggestions = result.error.issues.map((issue) => {
1068
- const path3 = issue.path.join(".");
1069
- return path3 ? `${path3}: ${issue.message}` : issue.message;
1068
+ const path11 = issue.path.join(".");
1069
+ return path11 ? `${path11}: ${issue.message}` : issue.message;
1070
1070
  });
1071
1071
  return Err(
1072
1072
  createError(
@@ -1135,11 +1135,11 @@ function walk(node, visitor) {
1135
1135
  var TypeScriptParser = class {
1136
1136
  name = "typescript";
1137
1137
  extensions = [".ts", ".tsx", ".mts", ".cts"];
1138
- async parseFile(path3) {
1139
- const contentResult = await readFileContent(path3);
1138
+ async parseFile(path11) {
1139
+ const contentResult = await readFileContent(path11);
1140
1140
  if (!contentResult.ok) {
1141
1141
  return Err(
1142
- createParseError("NOT_FOUND", `File not found: ${path3}`, { path: path3 }, [
1142
+ createParseError("NOT_FOUND", `File not found: ${path11}`, { path: path11 }, [
1143
1143
  "Check that the file exists",
1144
1144
  "Verify the path is correct"
1145
1145
  ])
@@ -1149,7 +1149,7 @@ var TypeScriptParser = class {
1149
1149
  const ast = parse(contentResult.value, {
1150
1150
  loc: true,
1151
1151
  range: true,
1152
- jsx: path3.endsWith(".tsx"),
1152
+ jsx: path11.endsWith(".tsx"),
1153
1153
  errorOnUnknownASTType: false
1154
1154
  });
1155
1155
  return Ok({
@@ -1160,7 +1160,7 @@ var TypeScriptParser = class {
1160
1160
  } catch (e) {
1161
1161
  const error = e;
1162
1162
  return Err(
1163
- createParseError("SYNTAX_ERROR", `Failed to parse ${path3}: ${error.message}`, { path: path3 }, [
1163
+ createParseError("SYNTAX_ERROR", `Failed to parse ${path11}: ${error.message}`, { path: path11 }, [
1164
1164
  "Check for syntax errors in the file",
1165
1165
  "Ensure valid TypeScript syntax"
1166
1166
  ])
@@ -1444,22 +1444,22 @@ function extractInlineRefs(content) {
1444
1444
  }
1445
1445
  return refs;
1446
1446
  }
1447
- async function parseDocumentationFile(path3) {
1448
- const contentResult = await readFileContent(path3);
1447
+ async function parseDocumentationFile(path11) {
1448
+ const contentResult = await readFileContent(path11);
1449
1449
  if (!contentResult.ok) {
1450
1450
  return Err(
1451
1451
  createEntropyError(
1452
1452
  "PARSE_ERROR",
1453
- `Failed to read documentation file: ${path3}`,
1454
- { file: path3 },
1453
+ `Failed to read documentation file: ${path11}`,
1454
+ { file: path11 },
1455
1455
  ["Check that the file exists"]
1456
1456
  )
1457
1457
  );
1458
1458
  }
1459
1459
  const content = contentResult.value;
1460
- const type = path3.endsWith(".md") ? "markdown" : "text";
1460
+ const type = path11.endsWith(".md") ? "markdown" : "text";
1461
1461
  return Ok({
1462
- path: path3,
1462
+ path: path11,
1463
1463
  type,
1464
1464
  content,
1465
1465
  codeBlocks: extractCodeBlocks(content),
@@ -2287,6 +2287,496 @@ async function detectPatternViolations(snapshot, config) {
2287
2287
  });
2288
2288
  }
2289
2289
 
2290
+ // src/entropy/detectors/complexity.ts
2291
+ import { readFile as readFile2 } from "fs/promises";
2292
+ var DEFAULT_THRESHOLDS = {
2293
+ cyclomaticComplexity: { error: 15, warn: 10 },
2294
+ nestingDepth: { warn: 4 },
2295
+ functionLength: { warn: 50 },
2296
+ parameterCount: { warn: 5 },
2297
+ fileLength: { info: 300 },
2298
+ hotspotPercentile: { error: 95 }
2299
+ };
2300
+ function extractFunctions(content) {
2301
+ const functions = [];
2302
+ const lines = content.split("\n");
2303
+ const patterns = [
2304
+ // function declarations: function name(params) {
2305
+ /^\s*(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(([^)]*)\)/,
2306
+ // method declarations: name(params) {
2307
+ /^\s*(?:async\s+)?(\w+)\s*\(([^)]*)\)\s*(?::\s*[^{]+)?\s*\{/,
2308
+ // arrow functions assigned to const/let/var: const name = (params) =>
2309
+ /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?\(([^)]*)\)\s*(?::\s*[^=]+)?\s*=>/,
2310
+ // arrow functions assigned to const/let/var with single param: const name = param =>
2311
+ /^\s*(?:export\s+)?(?:const|let|var)\s+(\w+)\s*=\s*(?:async\s+)?(\w+)\s*=>/
2312
+ ];
2313
+ for (let i = 0; i < lines.length; i++) {
2314
+ const line = lines[i];
2315
+ for (const pattern of patterns) {
2316
+ const match = line.match(pattern);
2317
+ if (match) {
2318
+ const name = match[1] ?? "anonymous";
2319
+ const paramsStr = match[2] || "";
2320
+ const params = paramsStr.trim() === "" ? 0 : paramsStr.split(",").length;
2321
+ const endLine = findFunctionEnd(lines, i);
2322
+ const body = lines.slice(i, endLine + 1).join("\n");
2323
+ functions.push({
2324
+ name,
2325
+ line: i + 1,
2326
+ params,
2327
+ startLine: i + 1,
2328
+ endLine: endLine + 1,
2329
+ body
2330
+ });
2331
+ break;
2332
+ }
2333
+ }
2334
+ }
2335
+ return functions;
2336
+ }
2337
+ function findFunctionEnd(lines, startIdx) {
2338
+ let depth = 0;
2339
+ let foundOpen = false;
2340
+ for (let i = startIdx; i < lines.length; i++) {
2341
+ const line = lines[i];
2342
+ for (const ch of line) {
2343
+ if (ch === "{") {
2344
+ depth++;
2345
+ foundOpen = true;
2346
+ } else if (ch === "}") {
2347
+ depth--;
2348
+ if (foundOpen && depth === 0) {
2349
+ return i;
2350
+ }
2351
+ }
2352
+ }
2353
+ }
2354
+ return lines.length - 1;
2355
+ }
2356
+ function computeCyclomaticComplexity(body) {
2357
+ let complexity = 1;
2358
+ const decisionPatterns = [
2359
+ /\bif\s*\(/g,
2360
+ /\belse\s+if\s*\(/g,
2361
+ /\bwhile\s*\(/g,
2362
+ /\bfor\s*\(/g,
2363
+ /\bcase\s+/g,
2364
+ /&&/g,
2365
+ /\|\|/g,
2366
+ /\?(?!=)/g,
2367
+ // Ternary ? but not ?. or ??
2368
+ /\bcatch\s*\(/g
2369
+ ];
2370
+ for (const pattern of decisionPatterns) {
2371
+ const matches = body.match(pattern);
2372
+ if (matches) {
2373
+ complexity += matches.length;
2374
+ }
2375
+ }
2376
+ const elseIfMatches = body.match(/\belse\s+if\s*\(/g);
2377
+ if (elseIfMatches) {
2378
+ complexity -= elseIfMatches.length;
2379
+ }
2380
+ return complexity;
2381
+ }
2382
+ function computeNestingDepth(body) {
2383
+ let maxDepth = 0;
2384
+ let currentDepth = 0;
2385
+ let functionBodyStarted = false;
2386
+ for (const ch of body) {
2387
+ if (ch === "{") {
2388
+ if (!functionBodyStarted) {
2389
+ functionBodyStarted = true;
2390
+ continue;
2391
+ }
2392
+ currentDepth++;
2393
+ if (currentDepth > maxDepth) {
2394
+ maxDepth = currentDepth;
2395
+ }
2396
+ } else if (ch === "}") {
2397
+ if (currentDepth > 0) {
2398
+ currentDepth--;
2399
+ }
2400
+ }
2401
+ }
2402
+ return maxDepth;
2403
+ }
2404
+ async function detectComplexityViolations(snapshot, config, graphData) {
2405
+ const violations = [];
2406
+ const thresholds = {
2407
+ cyclomaticComplexity: {
2408
+ error: config?.thresholds?.cyclomaticComplexity?.error ?? DEFAULT_THRESHOLDS.cyclomaticComplexity.error,
2409
+ warn: config?.thresholds?.cyclomaticComplexity?.warn ?? DEFAULT_THRESHOLDS.cyclomaticComplexity.warn
2410
+ },
2411
+ nestingDepth: {
2412
+ warn: config?.thresholds?.nestingDepth?.warn ?? DEFAULT_THRESHOLDS.nestingDepth.warn
2413
+ },
2414
+ functionLength: {
2415
+ warn: config?.thresholds?.functionLength?.warn ?? DEFAULT_THRESHOLDS.functionLength.warn
2416
+ },
2417
+ parameterCount: {
2418
+ warn: config?.thresholds?.parameterCount?.warn ?? DEFAULT_THRESHOLDS.parameterCount.warn
2419
+ },
2420
+ fileLength: {
2421
+ info: config?.thresholds?.fileLength?.info ?? DEFAULT_THRESHOLDS.fileLength.info
2422
+ }
2423
+ };
2424
+ let totalFunctions = 0;
2425
+ for (const file of snapshot.files) {
2426
+ let content;
2427
+ try {
2428
+ content = await readFile2(file.path, "utf-8");
2429
+ } catch {
2430
+ continue;
2431
+ }
2432
+ const lines = content.split("\n");
2433
+ if (lines.length > thresholds.fileLength.info) {
2434
+ violations.push({
2435
+ file: file.path,
2436
+ function: "<file>",
2437
+ line: 1,
2438
+ metric: "fileLength",
2439
+ value: lines.length,
2440
+ threshold: thresholds.fileLength.info,
2441
+ tier: 3,
2442
+ severity: "info",
2443
+ message: `File has ${lines.length} lines (threshold: ${thresholds.fileLength.info})`
2444
+ });
2445
+ }
2446
+ const functions = extractFunctions(content);
2447
+ totalFunctions += functions.length;
2448
+ for (const fn of functions) {
2449
+ const complexity = computeCyclomaticComplexity(fn.body);
2450
+ if (complexity > thresholds.cyclomaticComplexity.error) {
2451
+ violations.push({
2452
+ file: file.path,
2453
+ function: fn.name,
2454
+ line: fn.line,
2455
+ metric: "cyclomaticComplexity",
2456
+ value: complexity,
2457
+ threshold: thresholds.cyclomaticComplexity.error,
2458
+ tier: 1,
2459
+ severity: "error",
2460
+ message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (error threshold: ${thresholds.cyclomaticComplexity.error})`
2461
+ });
2462
+ } else if (complexity > thresholds.cyclomaticComplexity.warn) {
2463
+ violations.push({
2464
+ file: file.path,
2465
+ function: fn.name,
2466
+ line: fn.line,
2467
+ metric: "cyclomaticComplexity",
2468
+ value: complexity,
2469
+ threshold: thresholds.cyclomaticComplexity.warn,
2470
+ tier: 2,
2471
+ severity: "warning",
2472
+ message: `Function "${fn.name}" has cyclomatic complexity of ${complexity} (warning threshold: ${thresholds.cyclomaticComplexity.warn})`
2473
+ });
2474
+ }
2475
+ const nestingDepth = computeNestingDepth(fn.body);
2476
+ if (nestingDepth > thresholds.nestingDepth.warn) {
2477
+ violations.push({
2478
+ file: file.path,
2479
+ function: fn.name,
2480
+ line: fn.line,
2481
+ metric: "nestingDepth",
2482
+ value: nestingDepth,
2483
+ threshold: thresholds.nestingDepth.warn,
2484
+ tier: 2,
2485
+ severity: "warning",
2486
+ message: `Function "${fn.name}" has nesting depth of ${nestingDepth} (threshold: ${thresholds.nestingDepth.warn})`
2487
+ });
2488
+ }
2489
+ const fnLength = fn.endLine - fn.startLine + 1;
2490
+ if (fnLength > thresholds.functionLength.warn) {
2491
+ violations.push({
2492
+ file: file.path,
2493
+ function: fn.name,
2494
+ line: fn.line,
2495
+ metric: "functionLength",
2496
+ value: fnLength,
2497
+ threshold: thresholds.functionLength.warn,
2498
+ tier: 2,
2499
+ severity: "warning",
2500
+ message: `Function "${fn.name}" is ${fnLength} lines long (threshold: ${thresholds.functionLength.warn})`
2501
+ });
2502
+ }
2503
+ if (fn.params > thresholds.parameterCount.warn) {
2504
+ violations.push({
2505
+ file: file.path,
2506
+ function: fn.name,
2507
+ line: fn.line,
2508
+ metric: "parameterCount",
2509
+ value: fn.params,
2510
+ threshold: thresholds.parameterCount.warn,
2511
+ tier: 2,
2512
+ severity: "warning",
2513
+ message: `Function "${fn.name}" has ${fn.params} parameters (threshold: ${thresholds.parameterCount.warn})`
2514
+ });
2515
+ }
2516
+ if (graphData) {
2517
+ const hotspot = graphData.hotspots.find(
2518
+ (h) => h.file === file.path && h.function === fn.name
2519
+ );
2520
+ if (hotspot && hotspot.hotspotScore > graphData.percentile95Score) {
2521
+ violations.push({
2522
+ file: file.path,
2523
+ function: fn.name,
2524
+ line: fn.line,
2525
+ metric: "hotspotScore",
2526
+ value: hotspot.hotspotScore,
2527
+ threshold: graphData.percentile95Score,
2528
+ tier: 1,
2529
+ severity: "error",
2530
+ message: `Function "${fn.name}" is a complexity hotspot (score: ${hotspot.hotspotScore}, p95: ${graphData.percentile95Score})`
2531
+ });
2532
+ }
2533
+ }
2534
+ }
2535
+ }
2536
+ const errorCount = violations.filter((v) => v.severity === "error").length;
2537
+ const warningCount = violations.filter((v) => v.severity === "warning").length;
2538
+ const infoCount = violations.filter((v) => v.severity === "info").length;
2539
+ return Ok({
2540
+ violations,
2541
+ stats: {
2542
+ filesAnalyzed: snapshot.files.length,
2543
+ functionsAnalyzed: totalFunctions,
2544
+ violationCount: violations.length,
2545
+ errorCount,
2546
+ warningCount,
2547
+ infoCount
2548
+ }
2549
+ });
2550
+ }
2551
+
2552
+ // src/entropy/detectors/coupling.ts
2553
+ var DEFAULT_THRESHOLDS2 = {
2554
+ fanOut: { warn: 15 },
2555
+ fanIn: { info: 20 },
2556
+ couplingRatio: { warn: 0.7 },
2557
+ transitiveDependencyDepth: { info: 30 }
2558
+ };
2559
+ function computeMetricsFromSnapshot(snapshot) {
2560
+ const fanInMap = /* @__PURE__ */ new Map();
2561
+ for (const file of snapshot.files) {
2562
+ for (const imp of file.imports) {
2563
+ const resolved = resolveImportSource(imp.source, file.path, snapshot);
2564
+ if (resolved) {
2565
+ fanInMap.set(resolved, (fanInMap.get(resolved) || 0) + 1);
2566
+ }
2567
+ }
2568
+ }
2569
+ return snapshot.files.map((file) => {
2570
+ const fanOut = file.imports.length;
2571
+ const fanIn = fanInMap.get(file.path) || 0;
2572
+ const total = fanIn + fanOut;
2573
+ const couplingRatio = total > 0 ? fanOut / total : 0;
2574
+ return {
2575
+ file: file.path,
2576
+ fanIn,
2577
+ fanOut,
2578
+ couplingRatio,
2579
+ transitiveDepth: 0
2580
+ };
2581
+ });
2582
+ }
2583
+ function resolveRelativePath(from, source) {
2584
+ const dir = from.includes("/") ? from.substring(0, from.lastIndexOf("/")) : ".";
2585
+ const parts = dir.split("/");
2586
+ for (const segment of source.split("/")) {
2587
+ if (segment === ".") continue;
2588
+ if (segment === "..") {
2589
+ parts.pop();
2590
+ } else {
2591
+ parts.push(segment);
2592
+ }
2593
+ }
2594
+ return parts.join("/");
2595
+ }
2596
+ function resolveImportSource(source, fromFile, snapshot) {
2597
+ if (!source.startsWith(".") && !source.startsWith("/")) {
2598
+ return void 0;
2599
+ }
2600
+ const resolved = resolveRelativePath(fromFile, source);
2601
+ const filePaths = snapshot.files.map((f) => f.path);
2602
+ const candidates = [
2603
+ resolved,
2604
+ `${resolved}.ts`,
2605
+ `${resolved}.tsx`,
2606
+ `${resolved}/index.ts`,
2607
+ `${resolved}/index.tsx`
2608
+ ];
2609
+ for (const candidate of candidates) {
2610
+ const match = filePaths.find((fp) => fp === candidate);
2611
+ if (match) return match;
2612
+ }
2613
+ return void 0;
2614
+ }
2615
+ function checkViolations(metrics, config) {
2616
+ const thresholds = {
2617
+ fanOut: { ...DEFAULT_THRESHOLDS2.fanOut, ...config?.thresholds?.fanOut },
2618
+ fanIn: { ...DEFAULT_THRESHOLDS2.fanIn, ...config?.thresholds?.fanIn },
2619
+ couplingRatio: { ...DEFAULT_THRESHOLDS2.couplingRatio, ...config?.thresholds?.couplingRatio },
2620
+ transitiveDependencyDepth: {
2621
+ ...DEFAULT_THRESHOLDS2.transitiveDependencyDepth,
2622
+ ...config?.thresholds?.transitiveDependencyDepth
2623
+ }
2624
+ };
2625
+ const violations = [];
2626
+ for (const m of metrics) {
2627
+ if (thresholds.fanOut.warn !== void 0 && m.fanOut > thresholds.fanOut.warn) {
2628
+ violations.push({
2629
+ file: m.file,
2630
+ metric: "fanOut",
2631
+ value: m.fanOut,
2632
+ threshold: thresholds.fanOut.warn,
2633
+ tier: 2,
2634
+ severity: "warning",
2635
+ message: `File has ${m.fanOut} imports (threshold: ${thresholds.fanOut.warn})`
2636
+ });
2637
+ }
2638
+ if (thresholds.fanIn.info !== void 0 && m.fanIn > thresholds.fanIn.info) {
2639
+ violations.push({
2640
+ file: m.file,
2641
+ metric: "fanIn",
2642
+ value: m.fanIn,
2643
+ threshold: thresholds.fanIn.info,
2644
+ tier: 3,
2645
+ severity: "info",
2646
+ message: `File is imported by ${m.fanIn} files (threshold: ${thresholds.fanIn.info})`
2647
+ });
2648
+ }
2649
+ const totalConnections = m.fanIn + m.fanOut;
2650
+ if (totalConnections > 5 && thresholds.couplingRatio.warn !== void 0 && m.couplingRatio > thresholds.couplingRatio.warn) {
2651
+ violations.push({
2652
+ file: m.file,
2653
+ metric: "couplingRatio",
2654
+ value: m.couplingRatio,
2655
+ threshold: thresholds.couplingRatio.warn,
2656
+ tier: 2,
2657
+ severity: "warning",
2658
+ message: `Coupling ratio is ${m.couplingRatio.toFixed(2)} (threshold: ${thresholds.couplingRatio.warn})`
2659
+ });
2660
+ }
2661
+ if (thresholds.transitiveDependencyDepth.info !== void 0 && m.transitiveDepth > thresholds.transitiveDependencyDepth.info) {
2662
+ violations.push({
2663
+ file: m.file,
2664
+ metric: "transitiveDependencyDepth",
2665
+ value: m.transitiveDepth,
2666
+ threshold: thresholds.transitiveDependencyDepth.info,
2667
+ tier: 3,
2668
+ severity: "info",
2669
+ message: `Transitive dependency depth is ${m.transitiveDepth} (threshold: ${thresholds.transitiveDependencyDepth.info})`
2670
+ });
2671
+ }
2672
+ }
2673
+ return violations;
2674
+ }
2675
+ async function detectCouplingViolations(snapshot, config, graphData) {
2676
+ let metrics;
2677
+ if (graphData) {
2678
+ metrics = graphData.files.map((f) => ({
2679
+ file: f.file,
2680
+ fanIn: f.fanIn,
2681
+ fanOut: f.fanOut,
2682
+ couplingRatio: f.couplingRatio,
2683
+ transitiveDepth: f.transitiveDepth
2684
+ }));
2685
+ } else {
2686
+ metrics = computeMetricsFromSnapshot(snapshot);
2687
+ }
2688
+ const violations = checkViolations(metrics, config);
2689
+ const warningCount = violations.filter((v) => v.severity === "warning").length;
2690
+ const infoCount = violations.filter((v) => v.severity === "info").length;
2691
+ return Ok({
2692
+ violations,
2693
+ stats: {
2694
+ filesAnalyzed: metrics.length,
2695
+ violationCount: violations.length,
2696
+ warningCount,
2697
+ infoCount
2698
+ }
2699
+ });
2700
+ }
2701
+
2702
+ // src/entropy/detectors/size-budget.ts
2703
+ import { readdirSync, statSync } from "fs";
2704
+ import { join as join4 } from "path";
2705
+ function parseSize(size) {
2706
+ const match = size.trim().match(/^(\d+(?:\.\d+)?)\s*(KB|MB|GB|B)?$/i);
2707
+ if (!match) return 0;
2708
+ const value = parseFloat(match[1]);
2709
+ const unit = (match[2] || "B").toUpperCase();
2710
+ switch (unit) {
2711
+ case "KB":
2712
+ return Math.round(value * 1024);
2713
+ case "MB":
2714
+ return Math.round(value * 1024 * 1024);
2715
+ case "GB":
2716
+ return Math.round(value * 1024 * 1024 * 1024);
2717
+ default:
2718
+ return Math.round(value);
2719
+ }
2720
+ }
2721
+ function dirSize(dirPath) {
2722
+ let total = 0;
2723
+ let entries;
2724
+ try {
2725
+ entries = readdirSync(dirPath);
2726
+ } catch {
2727
+ return 0;
2728
+ }
2729
+ for (const entry of entries) {
2730
+ if (entry === "node_modules" || entry === ".git") continue;
2731
+ const fullPath = join4(dirPath, entry);
2732
+ try {
2733
+ const stat = statSync(fullPath);
2734
+ if (stat.isDirectory()) {
2735
+ total += dirSize(fullPath);
2736
+ } else if (stat.isFile()) {
2737
+ total += stat.size;
2738
+ }
2739
+ } catch {
2740
+ continue;
2741
+ }
2742
+ }
2743
+ return total;
2744
+ }
2745
+ async function detectSizeBudgetViolations(rootDir, config) {
2746
+ const budgets = config?.budgets ?? {};
2747
+ const violations = [];
2748
+ let packagesChecked = 0;
2749
+ for (const [pkgPath, budget] of Object.entries(budgets)) {
2750
+ packagesChecked++;
2751
+ const distPath = join4(rootDir, pkgPath, "dist");
2752
+ const currentSize = dirSize(distPath);
2753
+ if (budget.warn) {
2754
+ const budgetBytes = parseSize(budget.warn);
2755
+ if (budgetBytes > 0 && currentSize > budgetBytes) {
2756
+ violations.push({
2757
+ package: pkgPath,
2758
+ currentSize,
2759
+ budgetSize: budgetBytes,
2760
+ unit: "bytes",
2761
+ tier: 2,
2762
+ severity: "warning"
2763
+ });
2764
+ }
2765
+ }
2766
+ }
2767
+ const warningCount = violations.filter((v) => v.severity === "warning").length;
2768
+ const infoCount = violations.filter((v) => v.severity === "info").length;
2769
+ return Ok({
2770
+ violations,
2771
+ stats: {
2772
+ packagesChecked,
2773
+ violationCount: violations.length,
2774
+ warningCount,
2775
+ infoCount
2776
+ }
2777
+ });
2778
+ }
2779
+
2290
2780
  // src/entropy/fixers/suggestions.ts
2291
2781
  function generateDeadCodeSuggestions(report) {
2292
2782
  const suggestions = [];
@@ -2472,12 +2962,57 @@ var EntropyAnalyzer = class {
2472
2962
  analysisErrors.push({ analyzer: "patterns", error: result.error });
2473
2963
  }
2474
2964
  }
2965
+ let complexityReport;
2966
+ if (this.config.analyze.complexity) {
2967
+ const complexityConfig = typeof this.config.analyze.complexity === "object" ? this.config.analyze.complexity : {};
2968
+ const result = await detectComplexityViolations(
2969
+ this.snapshot,
2970
+ complexityConfig,
2971
+ graphOptions?.graphComplexityData
2972
+ );
2973
+ if (result.ok) {
2974
+ complexityReport = result.value;
2975
+ } else {
2976
+ analysisErrors.push({ analyzer: "complexity", error: result.error });
2977
+ }
2978
+ }
2979
+ let couplingReport;
2980
+ if (this.config.analyze.coupling) {
2981
+ const couplingConfig = typeof this.config.analyze.coupling === "object" ? this.config.analyze.coupling : {};
2982
+ const result = await detectCouplingViolations(
2983
+ this.snapshot,
2984
+ couplingConfig,
2985
+ graphOptions?.graphCouplingData
2986
+ );
2987
+ if (result.ok) {
2988
+ couplingReport = result.value;
2989
+ } else {
2990
+ analysisErrors.push({ analyzer: "coupling", error: result.error });
2991
+ }
2992
+ }
2993
+ let sizeBudgetReport;
2994
+ if (this.config.analyze.sizeBudget) {
2995
+ const sizeBudgetConfig = typeof this.config.analyze.sizeBudget === "object" ? this.config.analyze.sizeBudget : {};
2996
+ const result = await detectSizeBudgetViolations(this.config.rootDir, sizeBudgetConfig);
2997
+ if (result.ok) {
2998
+ sizeBudgetReport = result.value;
2999
+ } else {
3000
+ analysisErrors.push({ analyzer: "sizeBudget", error: result.error });
3001
+ }
3002
+ }
2475
3003
  const driftIssues = driftReport?.drifts.length || 0;
2476
3004
  const deadCodeIssues = (deadCodeReport?.deadExports.length || 0) + (deadCodeReport?.deadFiles.length || 0) + (deadCodeReport?.unusedImports.length || 0);
2477
3005
  const patternIssues = patternReport?.violations.length || 0;
2478
3006
  const patternErrors = patternReport?.stats.errorCount || 0;
2479
3007
  const patternWarnings = patternReport?.stats.warningCount || 0;
2480
- const totalIssues = driftIssues + deadCodeIssues + patternIssues;
3008
+ const complexityIssues = complexityReport?.violations.length || 0;
3009
+ const couplingIssues = couplingReport?.violations.length || 0;
3010
+ const sizeBudgetIssues = sizeBudgetReport?.violations.length || 0;
3011
+ const complexityErrors = complexityReport?.stats.errorCount || 0;
3012
+ const complexityWarnings = complexityReport?.stats.warningCount || 0;
3013
+ const couplingWarnings = couplingReport?.stats.warningCount || 0;
3014
+ const sizeBudgetWarnings = sizeBudgetReport?.stats.warningCount || 0;
3015
+ const totalIssues = driftIssues + deadCodeIssues + patternIssues + complexityIssues + couplingIssues + sizeBudgetIssues;
2481
3016
  const fixableCount = (deadCodeReport?.deadFiles.length || 0) + (deadCodeReport?.unusedImports.length || 0);
2482
3017
  const suggestions = generateSuggestions(deadCodeReport, driftReport, patternReport);
2483
3018
  const duration = Date.now() - startTime;
@@ -2486,8 +3021,8 @@ var EntropyAnalyzer = class {
2486
3021
  analysisErrors,
2487
3022
  summary: {
2488
3023
  totalIssues,
2489
- errors: patternErrors,
2490
- warnings: patternWarnings + driftIssues,
3024
+ errors: patternErrors + complexityErrors,
3025
+ warnings: patternWarnings + driftIssues + complexityWarnings + couplingWarnings + sizeBudgetWarnings,
2491
3026
  fixableCount,
2492
3027
  suggestionCount: suggestions.suggestions.length
2493
3028
  },
@@ -2503,6 +3038,15 @@ var EntropyAnalyzer = class {
2503
3038
  if (patternReport) {
2504
3039
  report.patterns = patternReport;
2505
3040
  }
3041
+ if (complexityReport) {
3042
+ report.complexity = complexityReport;
3043
+ }
3044
+ if (couplingReport) {
3045
+ report.coupling = couplingReport;
3046
+ }
3047
+ if (sizeBudgetReport) {
3048
+ report.sizeBudget = sizeBudgetReport;
3049
+ }
2506
3050
  this.report = report;
2507
3051
  return Ok(report);
2508
3052
  }
@@ -2585,8 +3129,8 @@ var EntropyAnalyzer = class {
2585
3129
  // src/entropy/fixers/safe-fixes.ts
2586
3130
  import * as fs from "fs";
2587
3131
  import { promisify as promisify2 } from "util";
2588
- import { dirname as dirname6, basename as basename4, join as join4 } from "path";
2589
- var readFile3 = promisify2(fs.readFile);
3132
+ import { dirname as dirname6, basename as basename4, join as join5 } from "path";
3133
+ var readFile4 = promisify2(fs.readFile);
2590
3134
  var writeFile2 = promisify2(fs.writeFile);
2591
3135
  var unlink2 = promisify2(fs.unlink);
2592
3136
  var mkdir2 = promisify2(fs.mkdir);
@@ -2618,6 +3162,40 @@ function createUnusedImportFixes(deadCodeReport) {
2618
3162
  reversible: true
2619
3163
  }));
2620
3164
  }
3165
+ function createDeadExportFixes(deadCodeReport) {
3166
+ return deadCodeReport.deadExports.filter((exp) => exp.reason === "NO_IMPORTERS").map((exp) => ({
3167
+ type: "dead-exports",
3168
+ file: exp.file,
3169
+ description: `Remove export keyword from ${exp.name} (${exp.reason})`,
3170
+ action: "replace",
3171
+ oldContent: exp.isDefault ? `export default ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `export ${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3172
+ newContent: exp.isDefault ? `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : ""} ${exp.name}` : `${exp.type === "class" ? "class" : exp.type === "function" ? "function" : exp.type === "variable" ? "const" : exp.type === "type" ? "type" : exp.type === "interface" ? "interface" : "enum"} ${exp.name}`,
3173
+ safe: true,
3174
+ reversible: true
3175
+ }));
3176
+ }
3177
+ function createCommentedCodeFixes(blocks) {
3178
+ return blocks.map((block) => ({
3179
+ type: "commented-code",
3180
+ file: block.file,
3181
+ description: `Remove commented-out code block (lines ${block.startLine}-${block.endLine})`,
3182
+ action: "replace",
3183
+ oldContent: block.content,
3184
+ newContent: "",
3185
+ safe: true,
3186
+ reversible: true
3187
+ }));
3188
+ }
3189
+ function createOrphanedDepFixes(deps) {
3190
+ return deps.map((dep) => ({
3191
+ type: "orphaned-deps",
3192
+ file: dep.packageJsonPath,
3193
+ description: `Remove orphaned dependency: ${dep.name}`,
3194
+ action: "replace",
3195
+ safe: true,
3196
+ reversible: true
3197
+ }));
3198
+ }
2621
3199
  function createFixes(deadCodeReport, config) {
2622
3200
  const fullConfig = { ...DEFAULT_FIX_CONFIG, ...config };
2623
3201
  const fixes = [];
@@ -2627,6 +3205,9 @@ function createFixes(deadCodeReport, config) {
2627
3205
  if (fullConfig.fixTypes.includes("unused-imports")) {
2628
3206
  fixes.push(...createUnusedImportFixes(deadCodeReport));
2629
3207
  }
3208
+ if (fullConfig.fixTypes.includes("dead-exports")) {
3209
+ fixes.push(...createDeadExportFixes(deadCodeReport));
3210
+ }
2630
3211
  return fixes;
2631
3212
  }
2632
3213
  function previewFix(fix) {
@@ -2647,7 +3228,7 @@ function previewFix(fix) {
2647
3228
  }
2648
3229
  }
2649
3230
  async function createBackup(filePath, backupDir) {
2650
- const backupPath = join4(backupDir, `${Date.now()}-${basename4(filePath)}`);
3231
+ const backupPath = join5(backupDir, `${Date.now()}-${basename4(filePath)}`);
2651
3232
  try {
2652
3233
  await mkdir2(dirname6(backupPath), { recursive: true });
2653
3234
  await copyFile2(filePath, backupPath);
@@ -2680,7 +3261,7 @@ async function applySingleFix(fix, config) {
2680
3261
  break;
2681
3262
  case "delete-lines":
2682
3263
  if (fix.line !== void 0) {
2683
- const content = await readFile3(fix.file, "utf-8");
3264
+ const content = await readFile4(fix.file, "utf-8");
2684
3265
  const lines = content.split("\n");
2685
3266
  lines.splice(fix.line - 1, 1);
2686
3267
  await writeFile2(fix.file, lines.join("\n"));
@@ -2688,14 +3269,14 @@ async function applySingleFix(fix, config) {
2688
3269
  break;
2689
3270
  case "replace":
2690
3271
  if (fix.oldContent && fix.newContent !== void 0) {
2691
- const content = await readFile3(fix.file, "utf-8");
3272
+ const content = await readFile4(fix.file, "utf-8");
2692
3273
  const newContent = content.replace(fix.oldContent, fix.newContent);
2693
3274
  await writeFile2(fix.file, newContent);
2694
3275
  }
2695
3276
  break;
2696
3277
  case "insert":
2697
3278
  if (fix.line !== void 0 && fix.newContent) {
2698
- const content = await readFile3(fix.file, "utf-8");
3279
+ const content = await readFile4(fix.file, "utf-8");
2699
3280
  const lines = content.split("\n");
2700
3281
  lines.splice(fix.line - 1, 0, fix.newContent);
2701
3282
  await writeFile2(fix.file, lines.join("\n"));
@@ -2747,6 +3328,133 @@ async function applyFixes(fixes, config) {
2747
3328
  });
2748
3329
  }
2749
3330
 
3331
+ // src/entropy/fixers/architecture-fixes.ts
3332
+ function createForbiddenImportFixes(violations) {
3333
+ return violations.filter((v) => v.alternative !== void 0).map((v) => ({
3334
+ type: "forbidden-import-replacement",
3335
+ file: v.file,
3336
+ description: `Replace forbidden import '${v.forbiddenImport}' with '${v.alternative}'`,
3337
+ action: "replace",
3338
+ line: v.line,
3339
+ oldContent: `from '${v.forbiddenImport}'`,
3340
+ newContent: `from '${v.alternative}'`,
3341
+ safe: true,
3342
+ reversible: true
3343
+ }));
3344
+ }
3345
+
3346
+ // src/entropy/fixers/cleanup-finding.ts
3347
+ var ALWAYS_UNSAFE_TYPES = /* @__PURE__ */ new Set([
3348
+ "upward-dependency",
3349
+ "skip-layer-dependency",
3350
+ "circular-dependency",
3351
+ "dead-internal"
3352
+ ]);
3353
+ var idCounter = 0;
3354
+ function classifyFinding(input) {
3355
+ idCounter++;
3356
+ const id = `${input.concern === "dead-code" ? "dc" : "arch"}-${idCounter}`;
3357
+ let safety;
3358
+ let safetyReason;
3359
+ let fixAction;
3360
+ let suggestion;
3361
+ if (ALWAYS_UNSAFE_TYPES.has(input.type)) {
3362
+ safety = "unsafe";
3363
+ safetyReason = `${input.type} requires human judgment`;
3364
+ suggestion = "Review and refactor manually";
3365
+ } else if (input.concern === "dead-code") {
3366
+ if (input.isPublicApi) {
3367
+ safety = "unsafe";
3368
+ safetyReason = "Public API export may have external consumers";
3369
+ suggestion = "Deprecate before removing";
3370
+ } else if (input.type === "dead-export" || input.type === "unused-import" || input.type === "commented-code" || input.type === "dead-file") {
3371
+ safety = "safe";
3372
+ safetyReason = "zero importers, non-public";
3373
+ fixAction = input.type === "dead-export" ? "Remove export keyword" : input.type === "dead-file" ? "Delete file" : input.type === "commented-code" ? "Delete commented block" : "Remove import";
3374
+ suggestion = fixAction;
3375
+ } else if (input.type === "orphaned-dep") {
3376
+ safety = "probably-safe";
3377
+ safetyReason = "No imports found, but needs install+test verification";
3378
+ fixAction = "Remove from package.json";
3379
+ suggestion = fixAction;
3380
+ } else {
3381
+ safety = "unsafe";
3382
+ safetyReason = "Unknown dead code type";
3383
+ suggestion = "Manual review required";
3384
+ }
3385
+ } else {
3386
+ if (input.type === "import-ordering") {
3387
+ safety = "safe";
3388
+ safetyReason = "Mechanical reorder, no semantic change";
3389
+ fixAction = "Reorder imports";
3390
+ suggestion = fixAction;
3391
+ } else if (input.type === "forbidden-import" && input.hasAlternative) {
3392
+ safety = "probably-safe";
3393
+ safetyReason = "Alternative configured, needs typecheck+test";
3394
+ fixAction = "Replace with configured alternative";
3395
+ suggestion = fixAction;
3396
+ } else {
3397
+ safety = "unsafe";
3398
+ safetyReason = `${input.type} requires structural changes`;
3399
+ suggestion = "Restructure code to fix violation";
3400
+ }
3401
+ }
3402
+ return {
3403
+ id,
3404
+ concern: input.concern,
3405
+ file: input.file,
3406
+ ...input.line !== void 0 ? { line: input.line } : {},
3407
+ type: input.type,
3408
+ description: input.description,
3409
+ safety,
3410
+ safetyReason,
3411
+ hotspotDowngraded: false,
3412
+ ...fixAction !== void 0 ? { fixAction } : {},
3413
+ suggestion
3414
+ };
3415
+ }
3416
+ function applyHotspotDowngrade(finding, hotspot) {
3417
+ if (finding.safety !== "safe") return finding;
3418
+ const churn = hotspot.churnMap.get(finding.file) ?? 0;
3419
+ if (churn >= hotspot.topPercentileThreshold) {
3420
+ return {
3421
+ ...finding,
3422
+ safety: "probably-safe",
3423
+ safetyReason: `${finding.safetyReason}; downgraded due to high churn (${churn} commits)`,
3424
+ hotspotDowngraded: true
3425
+ };
3426
+ }
3427
+ return finding;
3428
+ }
3429
+ function deduplicateCleanupFindings(findings) {
3430
+ const byFileAndLine = /* @__PURE__ */ new Map();
3431
+ for (const f of findings) {
3432
+ const key = `${f.file}:${f.line ?? "none"}`;
3433
+ const group = byFileAndLine.get(key) ?? [];
3434
+ group.push(f);
3435
+ byFileAndLine.set(key, group);
3436
+ }
3437
+ const result = [];
3438
+ for (const group of byFileAndLine.values()) {
3439
+ if (group.length === 1) {
3440
+ result.push(group[0]);
3441
+ continue;
3442
+ }
3443
+ const deadCode = group.find((f) => f.concern === "dead-code");
3444
+ const arch = group.find((f) => f.concern === "architecture");
3445
+ if (deadCode && arch) {
3446
+ result.push({
3447
+ ...deadCode,
3448
+ description: `${deadCode.description} (also violates architecture: ${arch.type})`,
3449
+ suggestion: deadCode.fixAction ? `${deadCode.fixAction} (resolves both dead code and architecture violation)` : deadCode.suggestion
3450
+ });
3451
+ } else {
3452
+ result.push(...group);
3453
+ }
3454
+ }
3455
+ return result;
3456
+ }
3457
+
2750
3458
  // src/entropy/config/schema.ts
2751
3459
  import { z } from "zod";
2752
3460
  var MustExportRuleSchema = z.object({
@@ -2856,33 +3564,382 @@ function validatePatternConfig(config) {
2856
3564
  return Ok(result.data);
2857
3565
  }
2858
3566
 
2859
- // src/feedback/telemetry/noop.ts
2860
- var NoOpTelemetryAdapter = class {
2861
- name = "noop";
2862
- async health() {
2863
- return Ok({ available: true, message: "NoOp adapter - no real telemetry" });
3567
+ // src/performance/baseline-manager.ts
3568
+ import { readFileSync, writeFileSync, mkdirSync, existsSync } from "fs";
3569
+ import { join as join6, dirname as dirname7 } from "path";
3570
+ var BaselineManager = class {
3571
+ baselinesPath;
3572
+ constructor(projectRoot) {
3573
+ this.baselinesPath = join6(projectRoot, ".harness", "perf", "baselines.json");
2864
3574
  }
2865
- async getMetrics() {
2866
- return Ok([]);
3575
+ /**
3576
+ * Load the baselines file from disk.
3577
+ * Returns null if the file does not exist or contains invalid JSON.
3578
+ */
3579
+ load() {
3580
+ if (!existsSync(this.baselinesPath)) {
3581
+ return null;
3582
+ }
3583
+ try {
3584
+ const raw = readFileSync(this.baselinesPath, "utf-8");
3585
+ return JSON.parse(raw);
3586
+ } catch {
3587
+ return null;
3588
+ }
2867
3589
  }
2868
- async getTraces() {
2869
- return Ok([]);
3590
+ /**
3591
+ * Save benchmark results to disk, merging with any existing baselines.
3592
+ * Each result is keyed by `${file}::${name}`.
3593
+ */
3594
+ save(results, commitHash) {
3595
+ const existing = this.load();
3596
+ const now = (/* @__PURE__ */ new Date()).toISOString();
3597
+ const benchmarks = existing?.benchmarks ? { ...existing.benchmarks } : {};
3598
+ for (const result of results) {
3599
+ const key = `${result.file}::${result.name}`;
3600
+ benchmarks[key] = {
3601
+ opsPerSec: result.opsPerSec,
3602
+ meanMs: result.meanMs,
3603
+ p99Ms: result.p99Ms,
3604
+ marginOfError: result.marginOfError
3605
+ };
3606
+ }
3607
+ const file = {
3608
+ version: 1,
3609
+ updatedAt: now,
3610
+ updatedFrom: commitHash,
3611
+ benchmarks
3612
+ };
3613
+ const dir = dirname7(this.baselinesPath);
3614
+ if (!existsSync(dir)) {
3615
+ mkdirSync(dir, { recursive: true });
3616
+ }
3617
+ writeFileSync(this.baselinesPath, JSON.stringify(file, null, 2));
2870
3618
  }
2871
- async getLogs() {
2872
- return Ok([]);
3619
+ /**
3620
+ * Remove baselines whose file prefix does not match any of the given bench files.
3621
+ * This cleans up entries for deleted benchmark files.
3622
+ */
3623
+ prune(existingBenchFiles) {
3624
+ const existing = this.load();
3625
+ if (!existing) {
3626
+ return;
3627
+ }
3628
+ const fileSet = new Set(existingBenchFiles);
3629
+ const pruned = {};
3630
+ for (const [key, baseline] of Object.entries(existing.benchmarks)) {
3631
+ const filePrefix = key.split("::")[0];
3632
+ if (fileSet.has(filePrefix)) {
3633
+ pruned[key] = baseline;
3634
+ }
3635
+ }
3636
+ existing.benchmarks = pruned;
3637
+ writeFileSync(this.baselinesPath, JSON.stringify(existing, null, 2));
2873
3638
  }
2874
3639
  };
2875
3640
 
2876
- // src/shared/uuid.ts
2877
- function generateId() {
2878
- if (typeof globalThis !== "undefined" && "crypto" in globalThis && typeof globalThis.crypto.randomUUID === "function") {
2879
- return globalThis.crypto.randomUUID();
3641
+ // src/performance/benchmark-runner.ts
3642
+ import { execFileSync } from "child_process";
3643
+ var BenchmarkRunner = class {
3644
+ /**
3645
+ * Discover .bench.ts files matching the glob pattern.
3646
+ */
3647
+ discover(cwd, glob2) {
3648
+ try {
3649
+ const result = execFileSync(
3650
+ "find",
3651
+ [
3652
+ cwd,
3653
+ "-name",
3654
+ "*.bench.ts",
3655
+ "-not",
3656
+ "-path",
3657
+ "*/node_modules/*",
3658
+ "-not",
3659
+ "-path",
3660
+ "*/dist/*"
3661
+ ],
3662
+ { encoding: "utf-8", timeout: 5e3 }
3663
+ ).trim();
3664
+ if (!result) return [];
3665
+ const files = result.split("\n").filter(Boolean);
3666
+ if (glob2 && glob2 !== "**/*.bench.ts") {
3667
+ return files.filter((f) => f.includes(glob2.replace(/\*/g, "")));
3668
+ }
3669
+ return files;
3670
+ } catch {
3671
+ return [];
3672
+ }
2880
3673
  }
2881
- return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, function(c) {
2882
- const r = Math.random() * 16 | 0;
2883
- const v = c === "x" ? r : r & 3 | 8;
2884
- return v.toString(16);
2885
- });
3674
+ /**
3675
+ * Run benchmarks via vitest bench and capture results.
3676
+ * Returns parsed BenchmarkResult[] from vitest bench JSON output.
3677
+ */
3678
+ async run(options = {}) {
3679
+ const cwd = options.cwd ?? process.cwd();
3680
+ const timeout = options.timeout ?? 12e4;
3681
+ const glob2 = options.glob;
3682
+ const args = ["vitest", "bench", "--run"];
3683
+ if (glob2) {
3684
+ args.push(glob2);
3685
+ }
3686
+ args.push("--reporter=json");
3687
+ try {
3688
+ const rawOutput = execFileSync("npx", args, {
3689
+ cwd,
3690
+ encoding: "utf-8",
3691
+ timeout,
3692
+ stdio: ["pipe", "pipe", "pipe"]
3693
+ });
3694
+ const results = this.parseVitestBenchOutput(rawOutput);
3695
+ return { results, rawOutput, success: true };
3696
+ } catch (error) {
3697
+ const err = error;
3698
+ const output = err.stdout || err.message || "";
3699
+ const results = this.parseVitestBenchOutput(output);
3700
+ return {
3701
+ results,
3702
+ rawOutput: output,
3703
+ success: results.length > 0
3704
+ };
3705
+ }
3706
+ }
3707
+ /**
3708
+ * Parse vitest bench JSON reporter output into BenchmarkResult[].
3709
+ * Vitest bench JSON output contains testResults with benchmark data.
3710
+ */
3711
+ parseVitestBenchOutput(output) {
3712
+ const results = [];
3713
+ try {
3714
+ const jsonStart = output.indexOf("{");
3715
+ const jsonEnd = output.lastIndexOf("}");
3716
+ if (jsonStart === -1 || jsonEnd === -1) return results;
3717
+ const jsonStr = output.slice(jsonStart, jsonEnd + 1);
3718
+ const parsed = JSON.parse(jsonStr);
3719
+ if (parsed.testResults) {
3720
+ for (const testResult of parsed.testResults) {
3721
+ const file = testResult.name || testResult.filepath || "";
3722
+ if (testResult.assertionResults) {
3723
+ for (const assertion of testResult.assertionResults) {
3724
+ if (assertion.benchmark) {
3725
+ const bench = assertion.benchmark;
3726
+ results.push({
3727
+ name: assertion.fullName || assertion.title || "unknown",
3728
+ file: file.replace(process.cwd() + "/", ""),
3729
+ opsPerSec: Math.round(bench.hz || 0),
3730
+ meanMs: bench.mean ? bench.mean * 1e3 : 0,
3731
+ // p99: use actual p99 if available, otherwise estimate as 1.5× mean
3732
+ p99Ms: bench.p99 ? bench.p99 * 1e3 : bench.mean ? bench.mean * 1e3 * 1.5 : 0,
3733
+ marginOfError: bench.rme ? bench.rme / 100 : 0.05
3734
+ });
3735
+ }
3736
+ }
3737
+ }
3738
+ }
3739
+ }
3740
+ } catch {
3741
+ }
3742
+ return results;
3743
+ }
3744
+ };
3745
+
3746
+ // src/performance/regression-detector.ts
3747
+ var RegressionDetector = class {
3748
+ detect(results, baselines, criticalPaths) {
3749
+ const regressions = [];
3750
+ const improvements = [];
3751
+ let newBenchmarks = 0;
3752
+ for (const current of results) {
3753
+ const key = `${current.file}::${current.name}`;
3754
+ const baseline = baselines[key];
3755
+ if (!baseline) {
3756
+ newBenchmarks++;
3757
+ continue;
3758
+ }
3759
+ const regressionPct = (baseline.opsPerSec - current.opsPerSec) / baseline.opsPerSec * 100;
3760
+ const noiseThreshold = (baseline.marginOfError + current.marginOfError) * 100;
3761
+ const withinNoise = Math.abs(regressionPct) <= noiseThreshold;
3762
+ if (regressionPct < 0) {
3763
+ improvements.push({ benchmark: key, improvementPct: Math.abs(regressionPct) });
3764
+ continue;
3765
+ }
3766
+ const isCriticalPath = criticalPaths.entries.some(
3767
+ (e) => current.file.includes(e.file) || current.name === e.function
3768
+ );
3769
+ let tier;
3770
+ let severity;
3771
+ if (isCriticalPath && regressionPct > 5 && !withinNoise) {
3772
+ tier = 1;
3773
+ severity = "error";
3774
+ } else if (regressionPct > 10 && !withinNoise) {
3775
+ tier = 2;
3776
+ severity = "warning";
3777
+ } else {
3778
+ tier = 3;
3779
+ severity = "info";
3780
+ }
3781
+ regressions.push({
3782
+ benchmark: key,
3783
+ current,
3784
+ baseline,
3785
+ regressionPct,
3786
+ isCriticalPath,
3787
+ tier,
3788
+ severity,
3789
+ withinNoise
3790
+ });
3791
+ }
3792
+ return {
3793
+ regressions,
3794
+ improvements,
3795
+ stats: {
3796
+ benchmarksCompared: results.length - newBenchmarks,
3797
+ regressionCount: regressions.filter((r) => !r.withinNoise).length,
3798
+ improvementCount: improvements.length,
3799
+ newBenchmarks
3800
+ }
3801
+ };
3802
+ }
3803
+ };
3804
+
3805
+ // src/performance/critical-path.ts
3806
+ import * as fs2 from "fs";
3807
+ import * as path from "path";
3808
+ var SKIP_DIRS = /* @__PURE__ */ new Set(["node_modules", "dist", ".git"]);
3809
+ var SOURCE_EXTENSIONS = /* @__PURE__ */ new Set([".ts", ".tsx", ".js", ".jsx"]);
3810
+ var FUNCTION_DECL_RE = /(?:export\s+)?(?:async\s+)?function\s+(\w+)/;
3811
+ var CONST_DECL_RE = /(?:export\s+)?(?:const|let)\s+(\w+)\s*=/;
3812
+ var CriticalPathResolver = class {
3813
+ projectRoot;
3814
+ constructor(projectRoot) {
3815
+ this.projectRoot = projectRoot;
3816
+ }
3817
+ async resolve(graphData) {
3818
+ const annotated = await this.scanAnnotations();
3819
+ const seen = /* @__PURE__ */ new Map();
3820
+ for (const entry of annotated) {
3821
+ const key = `${entry.file}::${entry.function}`;
3822
+ seen.set(key, entry);
3823
+ }
3824
+ let graphInferred = 0;
3825
+ if (graphData) {
3826
+ for (const item of graphData.highFanInFunctions) {
3827
+ const key = `${item.file}::${item.function}`;
3828
+ if (!seen.has(key)) {
3829
+ seen.set(key, {
3830
+ file: item.file,
3831
+ function: item.function,
3832
+ source: "graph-inferred",
3833
+ fanIn: item.fanIn
3834
+ });
3835
+ graphInferred++;
3836
+ }
3837
+ }
3838
+ }
3839
+ const entries = Array.from(seen.values());
3840
+ const annotatedCount = annotated.length;
3841
+ return {
3842
+ entries,
3843
+ stats: {
3844
+ annotated: annotatedCount,
3845
+ graphInferred,
3846
+ total: entries.length
3847
+ }
3848
+ };
3849
+ }
3850
+ async scanAnnotations() {
3851
+ const entries = [];
3852
+ this.walkDir(this.projectRoot, entries);
3853
+ return entries;
3854
+ }
3855
+ walkDir(dir, entries) {
3856
+ let items;
3857
+ try {
3858
+ items = fs2.readdirSync(dir, { withFileTypes: true });
3859
+ } catch {
3860
+ return;
3861
+ }
3862
+ for (const item of items) {
3863
+ if (item.isDirectory()) {
3864
+ if (SKIP_DIRS.has(item.name)) continue;
3865
+ this.walkDir(path.join(dir, item.name), entries);
3866
+ } else if (item.isFile() && SOURCE_EXTENSIONS.has(path.extname(item.name))) {
3867
+ this.scanFile(path.join(dir, item.name), entries);
3868
+ }
3869
+ }
3870
+ }
3871
+ scanFile(filePath, entries) {
3872
+ let content;
3873
+ try {
3874
+ content = fs2.readFileSync(filePath, "utf-8");
3875
+ } catch {
3876
+ return;
3877
+ }
3878
+ const lines = content.split("\n");
3879
+ const relativePath = path.relative(this.projectRoot, filePath);
3880
+ for (let i = 0; i < lines.length; i++) {
3881
+ const line = lines[i];
3882
+ if (!line.includes("@perf-critical")) continue;
3883
+ for (let j = i + 1; j < lines.length; j++) {
3884
+ const nextLine = lines[j].trim();
3885
+ if (nextLine === "" || nextLine === "*/" || nextLine === "*") continue;
3886
+ if (nextLine.startsWith("*") || nextLine.startsWith("//")) continue;
3887
+ const funcMatch = nextLine.match(FUNCTION_DECL_RE);
3888
+ if (funcMatch && funcMatch[1]) {
3889
+ entries.push({
3890
+ file: relativePath,
3891
+ function: funcMatch[1],
3892
+ source: "annotation"
3893
+ });
3894
+ } else {
3895
+ const constMatch = nextLine.match(CONST_DECL_RE);
3896
+ if (constMatch && constMatch[1]) {
3897
+ entries.push({
3898
+ file: relativePath,
3899
+ function: constMatch[1],
3900
+ source: "annotation"
3901
+ });
3902
+ }
3903
+ }
3904
+ break;
3905
+ }
3906
+ }
3907
+ }
3908
+ };
3909
+
3910
+ // src/feedback/telemetry/noop.ts
3911
+ var NoOpTelemetryAdapter = class {
3912
+ name = "noop";
3913
+ async health() {
3914
+ return Ok({ available: true, message: "NoOp adapter - no real telemetry" });
3915
+ }
3916
+ async getMetrics() {
3917
+ return Ok([]);
3918
+ }
3919
+ async getTraces() {
3920
+ return Ok([]);
3921
+ }
3922
+ async getLogs() {
3923
+ return Ok([]);
3924
+ }
3925
+ };
3926
+
3927
+ // src/shared/uuid.ts
3928
+ function generateId() {
3929
+ if (typeof globalThis !== "undefined" && "crypto" in globalThis && typeof globalThis.crypto.randomUUID === "function") {
3930
+ return globalThis.crypto.randomUUID();
3931
+ }
3932
+ if (typeof globalThis.crypto?.getRandomValues !== "function") {
3933
+ throw new Error(
3934
+ "No cryptographic random source available \u2014 requires Node.js 15+ or a browser with Web Crypto API"
3935
+ );
3936
+ }
3937
+ const bytes = new Uint8Array(16);
3938
+ globalThis.crypto.getRandomValues(bytes);
3939
+ bytes[6] = bytes[6] & 15 | 64;
3940
+ bytes[8] = bytes[8] & 63 | 128;
3941
+ const hex = [...bytes].map((b) => b.toString(16).padStart(2, "0")).join("");
3942
+ return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-${hex.slice(12, 16)}-${hex.slice(16, 20)}-${hex.slice(20)}`;
2886
3943
  }
2887
3944
 
2888
3945
  // src/feedback/executor/noop.ts
@@ -3543,8 +4600,8 @@ async function requestMultiplePeerReviews(requests) {
3543
4600
  }
3544
4601
 
3545
4602
  // src/feedback/logging/file-sink.ts
3546
- import { appendFileSync, writeFileSync, existsSync, mkdirSync } from "fs";
3547
- import { dirname as dirname7 } from "path";
4603
+ import { appendFileSync, writeFileSync as writeFileSync2, existsSync as existsSync2, mkdirSync as mkdirSync2 } from "fs";
4604
+ import { dirname as dirname8 } from "path";
3548
4605
  var FileSink = class {
3549
4606
  name = "file";
3550
4607
  filePath;
@@ -3567,9 +4624,9 @@ var FileSink = class {
3567
4624
  }
3568
4625
  ensureDirectory() {
3569
4626
  if (!this.initialized) {
3570
- const dir = dirname7(this.filePath);
3571
- if (!existsSync(dir)) {
3572
- mkdirSync(dir, { recursive: true });
4627
+ const dir = dirname8(this.filePath);
4628
+ if (!existsSync2(dir)) {
4629
+ mkdirSync2(dir, { recursive: true });
3573
4630
  }
3574
4631
  this.initialized = true;
3575
4632
  }
@@ -3599,8 +4656,8 @@ var FileSink = class {
3599
4656
  this.ensureDirectory();
3600
4657
  const content = this.buffer.join("");
3601
4658
  this.buffer = [];
3602
- if (this.options.mode === "overwrite" && !existsSync(this.filePath)) {
3603
- writeFileSync(this.filePath, content);
4659
+ if (this.options.mode === "overwrite" && !existsSync2(this.filePath)) {
4660
+ writeFileSync2(this.filePath, content);
3604
4661
  } else {
3605
4662
  appendFileSync(this.filePath, content);
3606
4663
  }
@@ -3712,22 +4769,296 @@ var DEFAULT_STATE = {
3712
4769
  };
3713
4770
 
3714
4771
  // src/state/state-manager.ts
3715
- import * as fs2 from "fs";
3716
- import * as path from "path";
4772
+ import * as fs4 from "fs";
4773
+ import * as path3 from "path";
4774
+ import { execSync as execSync2 } from "child_process";
4775
+
4776
+ // src/state/stream-resolver.ts
4777
+ import * as fs3 from "fs";
4778
+ import * as path2 from "path";
3717
4779
  import { execSync } from "child_process";
4780
+
4781
+ // src/state/stream-types.ts
4782
+ import { z as z3 } from "zod";
4783
+ var StreamInfoSchema = z3.object({
4784
+ name: z3.string(),
4785
+ branch: z3.string().optional(),
4786
+ createdAt: z3.string(),
4787
+ lastActiveAt: z3.string()
4788
+ });
4789
+ var StreamIndexSchema = z3.object({
4790
+ schemaVersion: z3.literal(1),
4791
+ activeStream: z3.string().nullable(),
4792
+ streams: z3.record(StreamInfoSchema)
4793
+ });
4794
+ var DEFAULT_STREAM_INDEX = {
4795
+ schemaVersion: 1,
4796
+ activeStream: null,
4797
+ streams: {}
4798
+ };
4799
+
4800
+ // src/state/stream-resolver.ts
3718
4801
  var HARNESS_DIR = ".harness";
4802
+ var STREAMS_DIR = "streams";
4803
+ var INDEX_FILE = "index.json";
4804
+ var STREAM_NAME_REGEX = /^[a-z0-9][a-z0-9._-]*$/;
4805
+ function streamsDir(projectPath) {
4806
+ return path2.join(projectPath, HARNESS_DIR, STREAMS_DIR);
4807
+ }
4808
+ function indexPath(projectPath) {
4809
+ return path2.join(streamsDir(projectPath), INDEX_FILE);
4810
+ }
4811
+ function validateStreamName(name) {
4812
+ if (!STREAM_NAME_REGEX.test(name)) {
4813
+ return Err(
4814
+ new Error(
4815
+ `Invalid stream name '${name}'. Names must match [a-z0-9][a-z0-9._-]* (lowercase alphanumeric, dots, hyphens, underscores).`
4816
+ )
4817
+ );
4818
+ }
4819
+ return Ok(void 0);
4820
+ }
4821
+ async function loadStreamIndex(projectPath) {
4822
+ const idxPath = indexPath(projectPath);
4823
+ if (!fs3.existsSync(idxPath)) {
4824
+ return Ok({ ...DEFAULT_STREAM_INDEX, streams: {} });
4825
+ }
4826
+ try {
4827
+ const raw = fs3.readFileSync(idxPath, "utf-8");
4828
+ const parsed = JSON.parse(raw);
4829
+ const result = StreamIndexSchema.safeParse(parsed);
4830
+ if (!result.success) {
4831
+ return Err(new Error(`Invalid stream index: ${result.error.message}`));
4832
+ }
4833
+ return Ok(result.data);
4834
+ } catch (error) {
4835
+ return Err(
4836
+ new Error(
4837
+ `Failed to load stream index: ${error instanceof Error ? error.message : String(error)}`
4838
+ )
4839
+ );
4840
+ }
4841
+ }
4842
+ async function saveStreamIndex(projectPath, index) {
4843
+ const dir = streamsDir(projectPath);
4844
+ try {
4845
+ fs3.mkdirSync(dir, { recursive: true });
4846
+ fs3.writeFileSync(indexPath(projectPath), JSON.stringify(index, null, 2));
4847
+ return Ok(void 0);
4848
+ } catch (error) {
4849
+ return Err(
4850
+ new Error(
4851
+ `Failed to save stream index: ${error instanceof Error ? error.message : String(error)}`
4852
+ )
4853
+ );
4854
+ }
4855
+ }
4856
+ var branchCache = /* @__PURE__ */ new Map();
4857
+ var BRANCH_CACHE_TTL_MS = 3e4;
4858
+ function getCurrentBranch(projectPath) {
4859
+ const cached = branchCache.get(projectPath);
4860
+ if (cached && Date.now() - cached.timestamp < BRANCH_CACHE_TTL_MS) {
4861
+ return cached.branch;
4862
+ }
4863
+ try {
4864
+ const branch = execSync("git rev-parse --abbrev-ref HEAD", {
4865
+ cwd: projectPath,
4866
+ stdio: "pipe"
4867
+ }).toString().trim();
4868
+ branchCache.set(projectPath, { branch, timestamp: Date.now() });
4869
+ return branch;
4870
+ } catch {
4871
+ branchCache.set(projectPath, { branch: null, timestamp: Date.now() });
4872
+ return null;
4873
+ }
4874
+ }
4875
+ async function resolveStreamPath(projectPath, options) {
4876
+ const idxResult = await loadStreamIndex(projectPath);
4877
+ if (!idxResult.ok) return idxResult;
4878
+ const index = idxResult.value;
4879
+ if (options?.stream) {
4880
+ if (!index.streams[options.stream]) {
4881
+ return Err(
4882
+ new Error(
4883
+ `Stream '${options.stream}' not found. Known streams: ${Object.keys(index.streams).join(", ") || "none"}`
4884
+ )
4885
+ );
4886
+ }
4887
+ return Ok(path2.join(streamsDir(projectPath), options.stream));
4888
+ }
4889
+ const branch = getCurrentBranch(projectPath);
4890
+ if (branch && branch !== "main" && branch !== "master") {
4891
+ for (const [name, info] of Object.entries(index.streams)) {
4892
+ if (info.branch === branch) {
4893
+ return Ok(path2.join(streamsDir(projectPath), name));
4894
+ }
4895
+ }
4896
+ }
4897
+ if (index.activeStream && index.streams[index.activeStream]) {
4898
+ return Ok(path2.join(streamsDir(projectPath), index.activeStream));
4899
+ }
4900
+ return Err(
4901
+ new Error(
4902
+ `Cannot resolve stream. Specify --stream <name> or create a stream. Known streams: ${Object.keys(index.streams).join(", ") || "none"}`
4903
+ )
4904
+ );
4905
+ }
4906
+ async function touchStream(projectPath, name) {
4907
+ const idxResult = await loadStreamIndex(projectPath);
4908
+ if (!idxResult.ok) return idxResult;
4909
+ const index = idxResult.value;
4910
+ if (!index.streams[name]) {
4911
+ return Err(new Error(`Stream '${name}' not found`));
4912
+ }
4913
+ index.streams[name].lastActiveAt = (/* @__PURE__ */ new Date()).toISOString();
4914
+ index.activeStream = name;
4915
+ return saveStreamIndex(projectPath, index);
4916
+ }
4917
+ async function createStream(projectPath, name, branch) {
4918
+ const nameCheck = validateStreamName(name);
4919
+ if (!nameCheck.ok) return nameCheck;
4920
+ const idxResult = await loadStreamIndex(projectPath);
4921
+ if (!idxResult.ok) return idxResult;
4922
+ const index = idxResult.value;
4923
+ if (index.streams[name]) {
4924
+ return Err(new Error(`Stream '${name}' already exists`));
4925
+ }
4926
+ const streamPath = path2.join(streamsDir(projectPath), name);
4927
+ try {
4928
+ fs3.mkdirSync(streamPath, { recursive: true });
4929
+ } catch (error) {
4930
+ return Err(
4931
+ new Error(
4932
+ `Failed to create stream directory: ${error instanceof Error ? error.message : String(error)}`
4933
+ )
4934
+ );
4935
+ }
4936
+ const now = (/* @__PURE__ */ new Date()).toISOString();
4937
+ index.streams[name] = {
4938
+ name,
4939
+ branch,
4940
+ createdAt: now,
4941
+ lastActiveAt: now
4942
+ };
4943
+ const saveResult = await saveStreamIndex(projectPath, index);
4944
+ if (!saveResult.ok) return saveResult;
4945
+ return Ok(streamPath);
4946
+ }
4947
+ async function listStreams(projectPath) {
4948
+ const idxResult = await loadStreamIndex(projectPath);
4949
+ if (!idxResult.ok) return idxResult;
4950
+ return Ok(Object.values(idxResult.value.streams));
4951
+ }
4952
+ async function setActiveStream(projectPath, name) {
4953
+ const idxResult = await loadStreamIndex(projectPath);
4954
+ if (!idxResult.ok) return idxResult;
4955
+ const index = idxResult.value;
4956
+ if (!index.streams[name]) {
4957
+ return Err(new Error(`Stream '${name}' not found`));
4958
+ }
4959
+ index.activeStream = name;
4960
+ return saveStreamIndex(projectPath, index);
4961
+ }
4962
+ async function archiveStream(projectPath, name) {
4963
+ const idxResult = await loadStreamIndex(projectPath);
4964
+ if (!idxResult.ok) return idxResult;
4965
+ const index = idxResult.value;
4966
+ if (!index.streams[name]) {
4967
+ return Err(new Error(`Stream '${name}' not found`));
4968
+ }
4969
+ const streamPath = path2.join(streamsDir(projectPath), name);
4970
+ const archiveDir = path2.join(projectPath, HARNESS_DIR, "archive", "streams");
4971
+ try {
4972
+ fs3.mkdirSync(archiveDir, { recursive: true });
4973
+ const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
4974
+ fs3.renameSync(streamPath, path2.join(archiveDir, `${name}-${date}`));
4975
+ } catch (error) {
4976
+ return Err(
4977
+ new Error(
4978
+ `Failed to archive stream: ${error instanceof Error ? error.message : String(error)}`
4979
+ )
4980
+ );
4981
+ }
4982
+ delete index.streams[name];
4983
+ if (index.activeStream === name) {
4984
+ index.activeStream = null;
4985
+ }
4986
+ return saveStreamIndex(projectPath, index);
4987
+ }
4988
+ function getStreamForBranch(index, branch) {
4989
+ for (const [name, info] of Object.entries(index.streams)) {
4990
+ if (info.branch === branch) return name;
4991
+ }
4992
+ return null;
4993
+ }
4994
+ var STATE_FILES = ["state.json", "handoff.json", "learnings.md", "failures.md"];
4995
+ async function migrateToStreams(projectPath) {
4996
+ const harnessDir = path2.join(projectPath, HARNESS_DIR);
4997
+ if (fs3.existsSync(indexPath(projectPath))) {
4998
+ return Ok(void 0);
4999
+ }
5000
+ const filesToMove = STATE_FILES.filter((f) => fs3.existsSync(path2.join(harnessDir, f)));
5001
+ if (filesToMove.length === 0) {
5002
+ return Ok(void 0);
5003
+ }
5004
+ const defaultDir = path2.join(streamsDir(projectPath), "default");
5005
+ try {
5006
+ fs3.mkdirSync(defaultDir, { recursive: true });
5007
+ for (const file of filesToMove) {
5008
+ fs3.renameSync(path2.join(harnessDir, file), path2.join(defaultDir, file));
5009
+ }
5010
+ } catch (error) {
5011
+ return Err(
5012
+ new Error(`Migration failed: ${error instanceof Error ? error.message : String(error)}`)
5013
+ );
5014
+ }
5015
+ const now = (/* @__PURE__ */ new Date()).toISOString();
5016
+ const index = {
5017
+ schemaVersion: 1,
5018
+ activeStream: "default",
5019
+ streams: {
5020
+ default: {
5021
+ name: "default",
5022
+ createdAt: now,
5023
+ lastActiveAt: now
5024
+ }
5025
+ }
5026
+ };
5027
+ return saveStreamIndex(projectPath, index);
5028
+ }
5029
+
5030
+ // src/state/state-manager.ts
5031
+ var HARNESS_DIR2 = ".harness";
3719
5032
  var STATE_FILE = "state.json";
3720
5033
  var LEARNINGS_FILE = "learnings.md";
3721
5034
  var FAILURES_FILE = "failures.md";
3722
5035
  var HANDOFF_FILE = "handoff.json";
3723
5036
  var GATE_CONFIG_FILE = "gate.json";
3724
- async function loadState(projectPath) {
3725
- const statePath = path.join(projectPath, HARNESS_DIR, STATE_FILE);
3726
- if (!fs2.existsSync(statePath)) {
3727
- return Ok({ ...DEFAULT_STATE });
5037
+ var INDEX_FILE2 = "index.json";
5038
+ async function getStateDir(projectPath, stream) {
5039
+ const streamsIndexPath = path3.join(projectPath, HARNESS_DIR2, "streams", INDEX_FILE2);
5040
+ const hasStreams = fs4.existsSync(streamsIndexPath);
5041
+ if (stream || hasStreams) {
5042
+ const result = await resolveStreamPath(projectPath, stream ? { stream } : void 0);
5043
+ if (result.ok) {
5044
+ return result;
5045
+ }
5046
+ if (stream) {
5047
+ return result;
5048
+ }
3728
5049
  }
5050
+ return Ok(path3.join(projectPath, HARNESS_DIR2));
5051
+ }
5052
+ async function loadState(projectPath, stream) {
3729
5053
  try {
3730
- const raw = fs2.readFileSync(statePath, "utf-8");
5054
+ const dirResult = await getStateDir(projectPath, stream);
5055
+ if (!dirResult.ok) return dirResult;
5056
+ const stateDir = dirResult.value;
5057
+ const statePath = path3.join(stateDir, STATE_FILE);
5058
+ if (!fs4.existsSync(statePath)) {
5059
+ return Ok({ ...DEFAULT_STATE });
5060
+ }
5061
+ const raw = fs4.readFileSync(statePath, "utf-8");
3731
5062
  const parsed = JSON.parse(raw);
3732
5063
  const result = HarnessStateSchema.safeParse(parsed);
3733
5064
  if (!result.success) {
@@ -3736,18 +5067,18 @@ async function loadState(projectPath) {
3736
5067
  return Ok(result.data);
3737
5068
  } catch (error) {
3738
5069
  return Err(
3739
- new Error(
3740
- `Failed to load state from ${statePath}: ${error instanceof Error ? error.message : String(error)}`
3741
- )
5070
+ new Error(`Failed to load state: ${error instanceof Error ? error.message : String(error)}`)
3742
5071
  );
3743
5072
  }
3744
5073
  }
3745
- async function saveState(projectPath, state) {
3746
- const harnessDir = path.join(projectPath, HARNESS_DIR);
3747
- const statePath = path.join(harnessDir, STATE_FILE);
5074
+ async function saveState(projectPath, state, stream) {
3748
5075
  try {
3749
- fs2.mkdirSync(harnessDir, { recursive: true });
3750
- fs2.writeFileSync(statePath, JSON.stringify(state, null, 2));
5076
+ const dirResult = await getStateDir(projectPath, stream);
5077
+ if (!dirResult.ok) return dirResult;
5078
+ const stateDir = dirResult.value;
5079
+ const statePath = path3.join(stateDir, STATE_FILE);
5080
+ fs4.mkdirSync(stateDir, { recursive: true });
5081
+ fs4.writeFileSync(statePath, JSON.stringify(state, null, 2));
3751
5082
  return Ok(void 0);
3752
5083
  } catch (error) {
3753
5084
  return Err(
@@ -3755,11 +5086,13 @@ async function saveState(projectPath, state) {
3755
5086
  );
3756
5087
  }
3757
5088
  }
3758
- async function appendLearning(projectPath, learning, skillName, outcome) {
3759
- const harnessDir = path.join(projectPath, HARNESS_DIR);
3760
- const learningsPath = path.join(harnessDir, LEARNINGS_FILE);
5089
+ async function appendLearning(projectPath, learning, skillName, outcome, stream) {
3761
5090
  try {
3762
- fs2.mkdirSync(harnessDir, { recursive: true });
5091
+ const dirResult = await getStateDir(projectPath, stream);
5092
+ if (!dirResult.ok) return dirResult;
5093
+ const stateDir = dirResult.value;
5094
+ const learningsPath = path3.join(stateDir, LEARNINGS_FILE);
5095
+ fs4.mkdirSync(stateDir, { recursive: true });
3763
5096
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
3764
5097
  let entry;
3765
5098
  if (skillName && outcome) {
@@ -3775,11 +5108,11 @@ async function appendLearning(projectPath, learning, skillName, outcome) {
3775
5108
  - **${timestamp}:** ${learning}
3776
5109
  `;
3777
5110
  }
3778
- if (!fs2.existsSync(learningsPath)) {
3779
- fs2.writeFileSync(learningsPath, `# Learnings
5111
+ if (!fs4.existsSync(learningsPath)) {
5112
+ fs4.writeFileSync(learningsPath, `# Learnings
3780
5113
  ${entry}`);
3781
5114
  } else {
3782
- fs2.appendFileSync(learningsPath, entry);
5115
+ fs4.appendFileSync(learningsPath, entry);
3783
5116
  }
3784
5117
  return Ok(void 0);
3785
5118
  } catch (error) {
@@ -3790,13 +5123,16 @@ ${entry}`);
3790
5123
  );
3791
5124
  }
3792
5125
  }
3793
- async function loadRelevantLearnings(projectPath, skillName) {
3794
- const learningsPath = path.join(projectPath, HARNESS_DIR, LEARNINGS_FILE);
3795
- if (!fs2.existsSync(learningsPath)) {
3796
- return Ok([]);
3797
- }
5126
+ async function loadRelevantLearnings(projectPath, skillName, stream) {
3798
5127
  try {
3799
- const content = fs2.readFileSync(learningsPath, "utf-8");
5128
+ const dirResult = await getStateDir(projectPath, stream);
5129
+ if (!dirResult.ok) return dirResult;
5130
+ const stateDir = dirResult.value;
5131
+ const learningsPath = path3.join(stateDir, LEARNINGS_FILE);
5132
+ if (!fs4.existsSync(learningsPath)) {
5133
+ return Ok([]);
5134
+ }
5135
+ const content = fs4.readFileSync(learningsPath, "utf-8");
3800
5136
  const lines = content.split("\n");
3801
5137
  const entries = [];
3802
5138
  let currentBlock = [];
@@ -3830,20 +5166,22 @@ async function loadRelevantLearnings(projectPath, skillName) {
3830
5166
  }
3831
5167
  }
3832
5168
  var FAILURE_LINE_REGEX = /^- \*\*(\d{4}-\d{2}-\d{2}) \[skill:([^\]]+)\] \[type:([^\]]+)\]:\*\* (.+)$/;
3833
- async function appendFailure(projectPath, description, skillName, type) {
3834
- const harnessDir = path.join(projectPath, HARNESS_DIR);
3835
- const failuresPath = path.join(harnessDir, FAILURES_FILE);
5169
+ async function appendFailure(projectPath, description, skillName, type, stream) {
3836
5170
  try {
3837
- fs2.mkdirSync(harnessDir, { recursive: true });
5171
+ const dirResult = await getStateDir(projectPath, stream);
5172
+ if (!dirResult.ok) return dirResult;
5173
+ const stateDir = dirResult.value;
5174
+ const failuresPath = path3.join(stateDir, FAILURES_FILE);
5175
+ fs4.mkdirSync(stateDir, { recursive: true });
3838
5176
  const timestamp = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
3839
5177
  const entry = `
3840
5178
  - **${timestamp} [skill:${skillName}] [type:${type}]:** ${description}
3841
5179
  `;
3842
- if (!fs2.existsSync(failuresPath)) {
3843
- fs2.writeFileSync(failuresPath, `# Failures
5180
+ if (!fs4.existsSync(failuresPath)) {
5181
+ fs4.writeFileSync(failuresPath, `# Failures
3844
5182
  ${entry}`);
3845
5183
  } else {
3846
- fs2.appendFileSync(failuresPath, entry);
5184
+ fs4.appendFileSync(failuresPath, entry);
3847
5185
  }
3848
5186
  return Ok(void 0);
3849
5187
  } catch (error) {
@@ -3854,13 +5192,16 @@ ${entry}`);
3854
5192
  );
3855
5193
  }
3856
5194
  }
3857
- async function loadFailures(projectPath) {
3858
- const failuresPath = path.join(projectPath, HARNESS_DIR, FAILURES_FILE);
3859
- if (!fs2.existsSync(failuresPath)) {
3860
- return Ok([]);
3861
- }
5195
+ async function loadFailures(projectPath, stream) {
3862
5196
  try {
3863
- const content = fs2.readFileSync(failuresPath, "utf-8");
5197
+ const dirResult = await getStateDir(projectPath, stream);
5198
+ if (!dirResult.ok) return dirResult;
5199
+ const stateDir = dirResult.value;
5200
+ const failuresPath = path3.join(stateDir, FAILURES_FILE);
5201
+ if (!fs4.existsSync(failuresPath)) {
5202
+ return Ok([]);
5203
+ }
5204
+ const content = fs4.readFileSync(failuresPath, "utf-8");
3864
5205
  const entries = [];
3865
5206
  for (const line of content.split("\n")) {
3866
5207
  const match = line.match(FAILURE_LINE_REGEX);
@@ -3882,23 +5223,25 @@ async function loadFailures(projectPath) {
3882
5223
  );
3883
5224
  }
3884
5225
  }
3885
- async function archiveFailures(projectPath) {
3886
- const harnessDir = path.join(projectPath, HARNESS_DIR);
3887
- const failuresPath = path.join(harnessDir, FAILURES_FILE);
3888
- if (!fs2.existsSync(failuresPath)) {
3889
- return Ok(void 0);
3890
- }
5226
+ async function archiveFailures(projectPath, stream) {
3891
5227
  try {
3892
- const archiveDir = path.join(harnessDir, "archive");
3893
- fs2.mkdirSync(archiveDir, { recursive: true });
5228
+ const dirResult = await getStateDir(projectPath, stream);
5229
+ if (!dirResult.ok) return dirResult;
5230
+ const stateDir = dirResult.value;
5231
+ const failuresPath = path3.join(stateDir, FAILURES_FILE);
5232
+ if (!fs4.existsSync(failuresPath)) {
5233
+ return Ok(void 0);
5234
+ }
5235
+ const archiveDir = path3.join(stateDir, "archive");
5236
+ fs4.mkdirSync(archiveDir, { recursive: true });
3894
5237
  const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
3895
5238
  let archiveName = `failures-${date}.md`;
3896
5239
  let counter = 2;
3897
- while (fs2.existsSync(path.join(archiveDir, archiveName))) {
5240
+ while (fs4.existsSync(path3.join(archiveDir, archiveName))) {
3898
5241
  archiveName = `failures-${date}-${counter}.md`;
3899
5242
  counter++;
3900
5243
  }
3901
- fs2.renameSync(failuresPath, path.join(archiveDir, archiveName));
5244
+ fs4.renameSync(failuresPath, path3.join(archiveDir, archiveName));
3902
5245
  return Ok(void 0);
3903
5246
  } catch (error) {
3904
5247
  return Err(
@@ -3908,12 +5251,14 @@ async function archiveFailures(projectPath) {
3908
5251
  );
3909
5252
  }
3910
5253
  }
3911
- async function saveHandoff(projectPath, handoff) {
3912
- const harnessDir = path.join(projectPath, HARNESS_DIR);
3913
- const handoffPath = path.join(harnessDir, HANDOFF_FILE);
5254
+ async function saveHandoff(projectPath, handoff, stream) {
3914
5255
  try {
3915
- fs2.mkdirSync(harnessDir, { recursive: true });
3916
- fs2.writeFileSync(handoffPath, JSON.stringify(handoff, null, 2));
5256
+ const dirResult = await getStateDir(projectPath, stream);
5257
+ if (!dirResult.ok) return dirResult;
5258
+ const stateDir = dirResult.value;
5259
+ const handoffPath = path3.join(stateDir, HANDOFF_FILE);
5260
+ fs4.mkdirSync(stateDir, { recursive: true });
5261
+ fs4.writeFileSync(handoffPath, JSON.stringify(handoff, null, 2));
3917
5262
  return Ok(void 0);
3918
5263
  } catch (error) {
3919
5264
  return Err(
@@ -3921,13 +5266,16 @@ async function saveHandoff(projectPath, handoff) {
3921
5266
  );
3922
5267
  }
3923
5268
  }
3924
- async function loadHandoff(projectPath) {
3925
- const handoffPath = path.join(projectPath, HARNESS_DIR, HANDOFF_FILE);
3926
- if (!fs2.existsSync(handoffPath)) {
3927
- return Ok(null);
3928
- }
5269
+ async function loadHandoff(projectPath, stream) {
3929
5270
  try {
3930
- const raw = fs2.readFileSync(handoffPath, "utf-8");
5271
+ const dirResult = await getStateDir(projectPath, stream);
5272
+ if (!dirResult.ok) return dirResult;
5273
+ const stateDir = dirResult.value;
5274
+ const handoffPath = path3.join(stateDir, HANDOFF_FILE);
5275
+ if (!fs4.existsSync(handoffPath)) {
5276
+ return Ok(null);
5277
+ }
5278
+ const raw = fs4.readFileSync(handoffPath, "utf-8");
3931
5279
  const parsed = JSON.parse(raw);
3932
5280
  const result = HandoffSchema.safeParse(parsed);
3933
5281
  if (!result.success) {
@@ -3941,40 +5289,51 @@ async function loadHandoff(projectPath) {
3941
5289
  }
3942
5290
  }
3943
5291
  async function runMechanicalGate(projectPath) {
3944
- const harnessDir = path.join(projectPath, HARNESS_DIR);
3945
- const gateConfigPath = path.join(harnessDir, GATE_CONFIG_FILE);
5292
+ const harnessDir = path3.join(projectPath, HARNESS_DIR2);
5293
+ const gateConfigPath = path3.join(harnessDir, GATE_CONFIG_FILE);
3946
5294
  try {
3947
5295
  let checks = [];
3948
- if (fs2.existsSync(gateConfigPath)) {
3949
- const raw = JSON.parse(fs2.readFileSync(gateConfigPath, "utf-8"));
5296
+ if (fs4.existsSync(gateConfigPath)) {
5297
+ const raw = JSON.parse(fs4.readFileSync(gateConfigPath, "utf-8"));
3950
5298
  const config = GateConfigSchema.safeParse(raw);
3951
5299
  if (config.success && config.data.checks) {
3952
5300
  checks = config.data.checks;
3953
5301
  }
3954
5302
  }
3955
5303
  if (checks.length === 0) {
3956
- const packageJsonPath = path.join(projectPath, "package.json");
3957
- if (fs2.existsSync(packageJsonPath)) {
3958
- const pkg = JSON.parse(fs2.readFileSync(packageJsonPath, "utf-8"));
5304
+ const packageJsonPath = path3.join(projectPath, "package.json");
5305
+ if (fs4.existsSync(packageJsonPath)) {
5306
+ const pkg = JSON.parse(fs4.readFileSync(packageJsonPath, "utf-8"));
3959
5307
  const scripts = pkg.scripts || {};
3960
5308
  if (scripts.test) checks.push({ name: "test", command: "npm test" });
3961
5309
  if (scripts.lint) checks.push({ name: "lint", command: "npm run lint" });
3962
5310
  if (scripts.typecheck) checks.push({ name: "typecheck", command: "npm run typecheck" });
3963
5311
  if (scripts.build) checks.push({ name: "build", command: "npm run build" });
3964
5312
  }
3965
- if (fs2.existsSync(path.join(projectPath, "go.mod"))) {
5313
+ if (fs4.existsSync(path3.join(projectPath, "go.mod"))) {
3966
5314
  checks.push({ name: "test", command: "go test ./..." });
3967
5315
  checks.push({ name: "build", command: "go build ./..." });
3968
5316
  }
3969
- if (fs2.existsSync(path.join(projectPath, "pyproject.toml")) || fs2.existsSync(path.join(projectPath, "setup.py"))) {
5317
+ if (fs4.existsSync(path3.join(projectPath, "pyproject.toml")) || fs4.existsSync(path3.join(projectPath, "setup.py"))) {
3970
5318
  checks.push({ name: "test", command: "python -m pytest" });
3971
5319
  }
3972
5320
  }
3973
5321
  const results = [];
5322
+ const SAFE_GATE_COMMAND = /^(?:npm|pnpm|yarn)\s+(?:test|run\s+[\w.-]+|run-script\s+[\w.-]+)$|^go\s+(?:test|build|vet|fmt)\s+[\w./ -]+$|^(?:python|python3)\s+-m\s+[\w.-]+$|^make\s+[\w.-]+$|^cargo\s+(?:test|build|check|clippy)(?:\s+[\w./ -]+)?$|^(?:gradle|mvn)\s+[\w:.-]+$/;
3974
5323
  for (const check of checks) {
5324
+ if (!SAFE_GATE_COMMAND.test(check.command)) {
5325
+ results.push({
5326
+ name: check.name,
5327
+ passed: false,
5328
+ command: check.command,
5329
+ output: `Blocked: command does not match safe gate pattern. Allowed prefixes: npm, npx, pnpm, yarn, go, python, python3, make, cargo, gradle, mvn`,
5330
+ duration: 0
5331
+ });
5332
+ continue;
5333
+ }
3975
5334
  const start = Date.now();
3976
5335
  try {
3977
- execSync(check.command, {
5336
+ execSync2(check.command, {
3978
5337
  cwd: projectPath,
3979
5338
  stdio: "pipe",
3980
5339
  timeout: 12e4
@@ -4157,37 +5516,613 @@ async function runMultiTurnPipeline(initialContext, turnExecutor, options) {
4157
5516
  };
4158
5517
  }
4159
5518
 
4160
- // src/ci/check-orchestrator.ts
4161
- import * as path2 from "path";
4162
- var ALL_CHECKS = ["validate", "deps", "docs", "entropy", "phase-gate"];
4163
- async function runSingleCheck(name, projectRoot, config) {
4164
- const start = Date.now();
4165
- const issues = [];
4166
- try {
4167
- switch (name) {
4168
- case "validate": {
4169
- const agentsPath = path2.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
4170
- const result = await validateAgentsMap(agentsPath);
4171
- if (!result.ok) {
4172
- issues.push({ severity: "error", message: result.error.message });
4173
- } else if (!result.value.valid) {
4174
- if (result.value.errors) {
4175
- for (const err of result.value.errors) {
4176
- issues.push({ severity: "error", message: err.message });
4177
- }
4178
- }
4179
- for (const section of result.value.missingSections) {
4180
- issues.push({ severity: "warning", message: `Missing section: ${section}` });
4181
- }
4182
- for (const link of result.value.brokenLinks) {
4183
- issues.push({
4184
- severity: "warning",
4185
- message: `Broken link: ${link.text} \u2192 ${link.path}`,
4186
- file: link.path
4187
- });
4188
- }
4189
- }
4190
- break;
5519
+ // src/security/scanner.ts
5520
+ import * as fs6 from "fs/promises";
5521
+
5522
+ // src/security/rules/registry.ts
5523
+ var RuleRegistry = class {
5524
+ rules = /* @__PURE__ */ new Map();
5525
+ register(rule) {
5526
+ this.rules.set(rule.id, rule);
5527
+ }
5528
+ registerAll(rules) {
5529
+ for (const rule of rules) {
5530
+ this.register(rule);
5531
+ }
5532
+ }
5533
+ getById(id) {
5534
+ return this.rules.get(id);
5535
+ }
5536
+ getAll() {
5537
+ return Array.from(this.rules.values());
5538
+ }
5539
+ getByCategory(category) {
5540
+ return this.getAll().filter((r) => r.category === category);
5541
+ }
5542
+ getForStacks(stacks) {
5543
+ return this.getAll().filter((rule) => {
5544
+ if (!rule.stack || rule.stack.length === 0) return true;
5545
+ return rule.stack.some((s) => stacks.includes(s));
5546
+ });
5547
+ }
5548
+ };
5549
+
5550
+ // src/security/config.ts
5551
+ import { z as z4 } from "zod";
5552
+
5553
+ // src/security/types.ts
5554
+ var DEFAULT_SECURITY_CONFIG = {
5555
+ enabled: true,
5556
+ strict: false,
5557
+ rules: {},
5558
+ exclude: ["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]
5559
+ };
5560
+
5561
+ // src/security/config.ts
5562
+ var RuleOverrideSchema = z4.enum(["off", "error", "warning", "info"]);
5563
+ var SecurityConfigSchema = z4.object({
5564
+ enabled: z4.boolean().default(true),
5565
+ strict: z4.boolean().default(false),
5566
+ rules: z4.record(z4.string(), RuleOverrideSchema).optional().default({}),
5567
+ exclude: z4.array(z4.string()).optional().default(["**/node_modules/**", "**/dist/**", "**/*.test.ts", "**/fixtures/**"]),
5568
+ external: z4.object({
5569
+ semgrep: z4.object({
5570
+ enabled: z4.union([z4.literal("auto"), z4.boolean()]).default("auto"),
5571
+ rulesets: z4.array(z4.string()).optional()
5572
+ }).optional(),
5573
+ gitleaks: z4.object({
5574
+ enabled: z4.union([z4.literal("auto"), z4.boolean()]).default("auto")
5575
+ }).optional()
5576
+ }).optional()
5577
+ });
5578
+ function parseSecurityConfig(input) {
5579
+ if (input === void 0 || input === null) {
5580
+ return { ...DEFAULT_SECURITY_CONFIG };
5581
+ }
5582
+ const result = SecurityConfigSchema.safeParse(input);
5583
+ if (result.success) {
5584
+ return result.data;
5585
+ }
5586
+ return { ...DEFAULT_SECURITY_CONFIG };
5587
+ }
5588
+ function resolveRuleSeverity(ruleId, defaultSeverity, overrides, strict) {
5589
+ if (overrides[ruleId] !== void 0) {
5590
+ return overrides[ruleId];
5591
+ }
5592
+ for (const [pattern, override] of Object.entries(overrides)) {
5593
+ if (pattern.endsWith("*")) {
5594
+ const prefix = pattern.slice(0, -1);
5595
+ if (ruleId.startsWith(prefix)) {
5596
+ return override;
5597
+ }
5598
+ }
5599
+ }
5600
+ if (strict && (defaultSeverity === "warning" || defaultSeverity === "info")) {
5601
+ return "error";
5602
+ }
5603
+ return defaultSeverity;
5604
+ }
5605
+
5606
+ // src/security/stack-detector.ts
5607
+ import * as fs5 from "fs";
5608
+ import * as path4 from "path";
5609
+ function detectStack(projectRoot) {
5610
+ const stacks = [];
5611
+ const pkgJsonPath = path4.join(projectRoot, "package.json");
5612
+ if (fs5.existsSync(pkgJsonPath)) {
5613
+ stacks.push("node");
5614
+ try {
5615
+ const pkgJson = JSON.parse(fs5.readFileSync(pkgJsonPath, "utf-8"));
5616
+ const allDeps = {
5617
+ ...pkgJson.dependencies,
5618
+ ...pkgJson.devDependencies
5619
+ };
5620
+ if (allDeps.react || allDeps["react-dom"]) stacks.push("react");
5621
+ if (allDeps.express) stacks.push("express");
5622
+ if (allDeps.koa) stacks.push("koa");
5623
+ if (allDeps.fastify) stacks.push("fastify");
5624
+ if (allDeps.next) stacks.push("next");
5625
+ if (allDeps.vue) stacks.push("vue");
5626
+ if (allDeps.angular || allDeps["@angular/core"]) stacks.push("angular");
5627
+ } catch {
5628
+ }
5629
+ }
5630
+ const goModPath = path4.join(projectRoot, "go.mod");
5631
+ if (fs5.existsSync(goModPath)) {
5632
+ stacks.push("go");
5633
+ }
5634
+ const requirementsPath = path4.join(projectRoot, "requirements.txt");
5635
+ const pyprojectPath = path4.join(projectRoot, "pyproject.toml");
5636
+ if (fs5.existsSync(requirementsPath) || fs5.existsSync(pyprojectPath)) {
5637
+ stacks.push("python");
5638
+ }
5639
+ return stacks;
5640
+ }
5641
+
5642
+ // src/security/rules/secrets.ts
5643
+ var secretRules = [
5644
+ {
5645
+ id: "SEC-SEC-001",
5646
+ name: "AWS Access Key",
5647
+ category: "secrets",
5648
+ severity: "error",
5649
+ confidence: "high",
5650
+ patterns: [/(?:AKIA|ABIA|ACCA|ASIA)[0-9A-Z]{16}/],
5651
+ message: "Hardcoded AWS access key detected",
5652
+ remediation: "Use environment variables or a secrets manager",
5653
+ references: ["CWE-798"]
5654
+ },
5655
+ {
5656
+ id: "SEC-SEC-002",
5657
+ name: "Generic API Key/Secret Assignment",
5658
+ category: "secrets",
5659
+ severity: "error",
5660
+ confidence: "high",
5661
+ patterns: [
5662
+ /(?:api[_-]?key|api[_-]?secret|secret[_-]?key|access[_-]?token|auth[_-]?token)\s*[:=]\s*['"][^'"]{8,}['"]/i
5663
+ ],
5664
+ message: "Hardcoded API key or secret detected",
5665
+ remediation: "Use environment variables: process.env.API_KEY",
5666
+ references: ["CWE-798"]
5667
+ },
5668
+ {
5669
+ id: "SEC-SEC-003",
5670
+ name: "Private Key",
5671
+ category: "secrets",
5672
+ severity: "error",
5673
+ confidence: "high",
5674
+ patterns: [/-----BEGIN\s(?:RSA|DSA|EC|OPENSSH|PGP)\s(?:PRIVATE\s)?KEY-----/],
5675
+ message: "Private key detected in source code",
5676
+ remediation: "Store private keys in a secrets manager, never in source",
5677
+ references: ["CWE-321"]
5678
+ },
5679
+ {
5680
+ id: "SEC-SEC-004",
5681
+ name: "Password Assignment",
5682
+ category: "secrets",
5683
+ severity: "error",
5684
+ confidence: "high",
5685
+ patterns: [/(?:password|passwd|pwd)\s*[:=]\s*['"][^'"]{4,}['"]/i],
5686
+ message: "Hardcoded password detected",
5687
+ remediation: "Use environment variables or a secrets manager",
5688
+ references: ["CWE-259"]
5689
+ },
5690
+ {
5691
+ id: "SEC-SEC-005",
5692
+ name: "JWT/Bearer Token",
5693
+ category: "secrets",
5694
+ severity: "error",
5695
+ confidence: "high",
5696
+ patterns: [/eyJ[A-Za-z0-9_-]{10,}\.[A-Za-z0-9_-]{10,}\.[A-Za-z0-9_-]{10,}/],
5697
+ message: "Hardcoded JWT token detected",
5698
+ remediation: "Tokens should be fetched at runtime, not embedded in source",
5699
+ references: ["CWE-798"]
5700
+ }
5701
+ ];
5702
+
5703
+ // src/security/rules/injection.ts
5704
+ var injectionRules = [
5705
+ {
5706
+ id: "SEC-INJ-001",
5707
+ name: "eval/Function Constructor",
5708
+ category: "injection",
5709
+ severity: "error",
5710
+ confidence: "high",
5711
+ patterns: [/\beval\s*\(/, /new\s+Function\s*\(/],
5712
+ message: "eval() and Function constructor allow arbitrary code execution",
5713
+ remediation: "Use JSON.parse() for data, or a sandboxed interpreter for dynamic code",
5714
+ references: ["CWE-95"]
5715
+ },
5716
+ {
5717
+ id: "SEC-INJ-002",
5718
+ name: "SQL String Concatenation",
5719
+ category: "injection",
5720
+ severity: "error",
5721
+ confidence: "high",
5722
+ patterns: [
5723
+ /(?:query|execute|prepare)\s*\(\s*['"][^'"]*['"]\s*\+/,
5724
+ /(?:query|execute|prepare)\s*\(\s*`[^`]*\$\{/
5725
+ ],
5726
+ message: "SQL query built with string concatenation or template literals with interpolation",
5727
+ remediation: 'Use parameterized queries: query("SELECT * FROM users WHERE id = $1", [id])',
5728
+ references: ["CWE-89"]
5729
+ },
5730
+ {
5731
+ id: "SEC-INJ-003",
5732
+ name: "Command Injection",
5733
+ category: "injection",
5734
+ severity: "error",
5735
+ confidence: "high",
5736
+ patterns: [
5737
+ /\bexec\s*\(\s*['"][^'"]*['"]\s*\+/,
5738
+ /\bexec\s*\(\s*`[^`]*\$\{/,
5739
+ /\bexecSync\s*\(\s*['"][^'"]*['"]\s*\+/,
5740
+ /\bexecSync\s*\(\s*`[^`]*\$\{/
5741
+ ],
5742
+ message: "Shell command built with string concatenation",
5743
+ remediation: "Use execFile() with argument array instead of exec() with string",
5744
+ references: ["CWE-78"]
5745
+ }
5746
+ ];
5747
+
5748
+ // src/security/rules/xss.ts
5749
+ var xssRules = [
5750
+ {
5751
+ id: "SEC-XSS-001",
5752
+ name: "innerHTML Assignment",
5753
+ category: "xss",
5754
+ severity: "error",
5755
+ confidence: "high",
5756
+ patterns: [/\.innerHTML\s*=/],
5757
+ message: "Direct innerHTML assignment can lead to XSS",
5758
+ remediation: "Use textContent for text, or a sanitizer like DOMPurify for HTML",
5759
+ references: ["CWE-79"]
5760
+ },
5761
+ {
5762
+ id: "SEC-XSS-002",
5763
+ name: "dangerouslySetInnerHTML",
5764
+ category: "xss",
5765
+ severity: "error",
5766
+ confidence: "high",
5767
+ patterns: [/dangerouslySetInnerHTML/],
5768
+ message: "dangerouslySetInnerHTML bypasses React XSS protections",
5769
+ remediation: "Sanitize HTML with DOMPurify before passing to dangerouslySetInnerHTML",
5770
+ references: ["CWE-79"]
5771
+ },
5772
+ {
5773
+ id: "SEC-XSS-003",
5774
+ name: "document.write",
5775
+ category: "xss",
5776
+ severity: "error",
5777
+ confidence: "high",
5778
+ patterns: [/document\.write\s*\(/, /document\.writeln\s*\(/],
5779
+ message: "document.write can lead to XSS and is a legacy API",
5780
+ remediation: "Use DOM APIs: createElement, appendChild, textContent",
5781
+ references: ["CWE-79"]
5782
+ }
5783
+ ];
5784
+
5785
+ // src/security/rules/crypto.ts
5786
+ var cryptoRules = [
5787
+ {
5788
+ id: "SEC-CRY-001",
5789
+ name: "Weak Hash Algorithm",
5790
+ category: "crypto",
5791
+ severity: "error",
5792
+ confidence: "high",
5793
+ patterns: [/createHash\s*\(\s*['"](?:md5|sha1|md4|ripemd160)['"]\s*\)/],
5794
+ message: "MD5 and SHA1 are cryptographically broken for security use",
5795
+ remediation: 'Use SHA-256 or higher: createHash("sha256")',
5796
+ references: ["CWE-328"]
5797
+ },
5798
+ {
5799
+ id: "SEC-CRY-002",
5800
+ name: "Hardcoded Encryption Key",
5801
+ category: "crypto",
5802
+ severity: "error",
5803
+ confidence: "high",
5804
+ patterns: [
5805
+ /(?:encryption[_-]?key|cipher[_-]?key|aes[_-]?key|secret[_-]?key)\s*[:=]\s*['"][^'"]{4,}['"]/i
5806
+ ],
5807
+ message: "Hardcoded encryption key detected",
5808
+ remediation: "Load encryption keys from environment variables or a key management service",
5809
+ references: ["CWE-321"]
5810
+ }
5811
+ ];
5812
+
5813
+ // src/security/rules/path-traversal.ts
5814
+ var pathTraversalRules = [
5815
+ {
5816
+ id: "SEC-PTH-001",
5817
+ name: "Path Traversal Pattern",
5818
+ category: "path-traversal",
5819
+ severity: "warning",
5820
+ confidence: "medium",
5821
+ patterns: [
5822
+ /(?:readFile|readFileSync|writeFile|writeFileSync|createReadStream|createWriteStream|access|stat|unlink|rmdir|mkdir)\s*\([^)]*\.{2}[/\\]/,
5823
+ /(?:readFile|readFileSync|writeFile|writeFileSync)\s*\([^)]*\+/
5824
+ ],
5825
+ message: "Potential path traversal: file operation with ../ or string concatenation",
5826
+ remediation: "Use path.resolve() and validate the resolved path stays within the expected directory",
5827
+ references: ["CWE-22"]
5828
+ }
5829
+ ];
5830
+
5831
+ // src/security/rules/network.ts
5832
+ var networkRules = [
5833
+ {
5834
+ id: "SEC-NET-001",
5835
+ name: "CORS Wildcard Origin",
5836
+ category: "network",
5837
+ severity: "warning",
5838
+ confidence: "medium",
5839
+ patterns: [/origin\s*:\s*['"][*]['"]/],
5840
+ message: "CORS wildcard origin allows any website to make requests",
5841
+ remediation: "Restrict CORS to specific trusted origins",
5842
+ references: ["CWE-942"]
5843
+ },
5844
+ {
5845
+ id: "SEC-NET-002",
5846
+ name: "Disabled TLS Verification",
5847
+ category: "network",
5848
+ severity: "warning",
5849
+ confidence: "high",
5850
+ patterns: [/rejectUnauthorized\s*:\s*false/],
5851
+ message: "TLS certificate verification is disabled, enabling MITM attacks",
5852
+ remediation: "Remove rejectUnauthorized: false, or use a proper CA bundle",
5853
+ references: ["CWE-295"]
5854
+ },
5855
+ {
5856
+ id: "SEC-NET-003",
5857
+ name: "Hardcoded HTTP URL",
5858
+ category: "network",
5859
+ severity: "info",
5860
+ confidence: "low",
5861
+ patterns: [/['"]http:\/\/(?!localhost|127\.0\.0\.1|0\.0\.0\.0)[^'"]+['"]/],
5862
+ message: "Non-TLS HTTP URL detected (excluding localhost)",
5863
+ remediation: "Use HTTPS for all non-local connections",
5864
+ references: ["CWE-319"]
5865
+ }
5866
+ ];
5867
+
5868
+ // src/security/rules/deserialization.ts
5869
+ var deserializationRules = [
5870
+ {
5871
+ id: "SEC-DES-001",
5872
+ name: "Unvalidated JSON Parse",
5873
+ category: "deserialization",
5874
+ severity: "warning",
5875
+ confidence: "medium",
5876
+ patterns: [
5877
+ /JSON\.parse\s*\(\s*(?:req|request)\.body/,
5878
+ /JSON\.parse\s*\(\s*(?:event|data|payload|input|body)\b/
5879
+ ],
5880
+ message: "JSON.parse on potentially untrusted input without schema validation",
5881
+ remediation: "Validate parsed data with Zod, ajv, or joi before use",
5882
+ references: ["CWE-502"]
5883
+ }
5884
+ ];
5885
+
5886
+ // src/security/rules/stack/node.ts
5887
+ var nodeRules = [
5888
+ {
5889
+ id: "SEC-NODE-001",
5890
+ name: "Prototype Pollution",
5891
+ category: "injection",
5892
+ severity: "warning",
5893
+ confidence: "medium",
5894
+ patterns: [
5895
+ /__proto__/,
5896
+ /\bconstructor\s*\[/,
5897
+ /\bprototype\s*\[/,
5898
+ /Object\.assign\s*\(\s*\w+\s*,\s*(?:req|request|body|input|params|query)\b/
5899
+ ],
5900
+ stack: ["node"],
5901
+ message: "Potential prototype pollution via __proto__, constructor, or Object.assign with untrusted input",
5902
+ remediation: "Validate keys against a whitelist, use Object.create(null), or use Map instead of plain objects",
5903
+ references: ["CWE-1321"]
5904
+ },
5905
+ {
5906
+ id: "SEC-NODE-002",
5907
+ name: "NoSQL Injection",
5908
+ category: "injection",
5909
+ severity: "warning",
5910
+ confidence: "medium",
5911
+ patterns: [
5912
+ /\.find\s*\(\s*\{[^}]*\$(?:gt|gte|lt|lte|ne|in|nin|regex|where|exists)/,
5913
+ /\.find\s*\(\s*(?:req|request)\.(?:body|query|params)/
5914
+ ],
5915
+ stack: ["node"],
5916
+ message: "Potential NoSQL injection: MongoDB query operators in user input",
5917
+ remediation: "Sanitize input by stripping keys starting with $ before using in queries",
5918
+ references: ["CWE-943"]
5919
+ }
5920
+ ];
5921
+
5922
+ // src/security/rules/stack/express.ts
5923
+ var expressRules = [
5924
+ {
5925
+ id: "SEC-EXPRESS-001",
5926
+ name: "Missing Helmet",
5927
+ category: "network",
5928
+ severity: "info",
5929
+ confidence: "low",
5930
+ patterns: [/app\s*=\s*express\s*\(\)/],
5931
+ stack: ["express"],
5932
+ fileGlob: "**/app.{ts,js}",
5933
+ message: "Express app initialization detected \u2014 ensure helmet middleware is applied for security headers",
5934
+ remediation: "Add helmet middleware: app.use(helmet())",
5935
+ references: ["CWE-693"]
5936
+ },
5937
+ {
5938
+ id: "SEC-EXPRESS-002",
5939
+ name: "Unprotected Route with Body Parsing",
5940
+ category: "network",
5941
+ severity: "info",
5942
+ confidence: "low",
5943
+ patterns: [/app\.(?:post|put|patch)\s*\([^)]*,\s*(?:req|request)\s*(?:,|\))/],
5944
+ stack: ["express"],
5945
+ message: "Express route accepts request body \u2014 ensure input validation and rate limiting are applied",
5946
+ remediation: "Add express-rate-limit and validate request body with Zod/joi",
5947
+ references: ["CWE-770"]
5948
+ }
5949
+ ];
5950
+
5951
+ // src/security/rules/stack/react.ts
5952
+ var reactRules = [
5953
+ {
5954
+ id: "SEC-REACT-001",
5955
+ name: "Sensitive Data in Client Storage",
5956
+ category: "secrets",
5957
+ severity: "warning",
5958
+ confidence: "medium",
5959
+ patterns: [
5960
+ /localStorage\.setItem\s*\(\s*['"](?:token|jwt|auth|session|password|secret|key|credential)/i,
5961
+ /sessionStorage\.setItem\s*\(\s*['"](?:token|jwt|auth|session|password|secret|key|credential)/i
5962
+ ],
5963
+ stack: ["react"],
5964
+ message: "Storing sensitive data in browser storage is accessible to XSS attacks",
5965
+ remediation: "Use httpOnly cookies for auth tokens instead of localStorage",
5966
+ references: ["CWE-922"]
5967
+ }
5968
+ ];
5969
+
5970
+ // src/security/rules/stack/go.ts
5971
+ var goRules = [
5972
+ {
5973
+ id: "SEC-GO-001",
5974
+ name: "Unsafe Pointer Usage",
5975
+ category: "injection",
5976
+ severity: "warning",
5977
+ confidence: "medium",
5978
+ patterns: [/unsafe\.Pointer/],
5979
+ stack: ["go"],
5980
+ message: "unsafe.Pointer bypasses Go type safety",
5981
+ remediation: "Avoid unsafe.Pointer unless absolutely necessary; document justification",
5982
+ references: ["CWE-119"]
5983
+ },
5984
+ {
5985
+ id: "SEC-GO-002",
5986
+ name: "Format String Injection",
5987
+ category: "injection",
5988
+ severity: "warning",
5989
+ confidence: "medium",
5990
+ patterns: [/fmt\.Sprintf\s*\(\s*\w+[^,)]*\)/],
5991
+ stack: ["go"],
5992
+ message: "Format string may come from user input",
5993
+ remediation: 'Use fmt.Sprintf with a literal format string: fmt.Sprintf("%s", userInput)',
5994
+ references: ["CWE-134"]
5995
+ }
5996
+ ];
5997
+
5998
+ // src/security/scanner.ts
5999
+ var SecurityScanner = class {
6000
+ registry;
6001
+ config;
6002
+ activeRules = [];
6003
+ constructor(config = {}) {
6004
+ this.config = { ...DEFAULT_SECURITY_CONFIG, ...config };
6005
+ this.registry = new RuleRegistry();
6006
+ this.registry.registerAll([
6007
+ ...secretRules,
6008
+ ...injectionRules,
6009
+ ...xssRules,
6010
+ ...cryptoRules,
6011
+ ...pathTraversalRules,
6012
+ ...networkRules,
6013
+ ...deserializationRules
6014
+ ]);
6015
+ this.registry.registerAll([...nodeRules, ...expressRules, ...reactRules, ...goRules]);
6016
+ this.activeRules = this.registry.getAll();
6017
+ }
6018
+ configureForProject(projectRoot) {
6019
+ const stacks = detectStack(projectRoot);
6020
+ this.activeRules = this.registry.getForStacks(stacks.length > 0 ? stacks : []);
6021
+ }
6022
+ scanContent(content, filePath, startLine = 1) {
6023
+ if (!this.config.enabled) return [];
6024
+ const findings = [];
6025
+ const lines = content.split("\n");
6026
+ for (const rule of this.activeRules) {
6027
+ const resolved = resolveRuleSeverity(
6028
+ rule.id,
6029
+ rule.severity,
6030
+ this.config.rules ?? {},
6031
+ this.config.strict
6032
+ );
6033
+ if (resolved === "off") continue;
6034
+ for (let i = 0; i < lines.length; i++) {
6035
+ const line = lines[i] ?? "";
6036
+ if (line.includes("harness-ignore") && line.includes(rule.id)) continue;
6037
+ for (const pattern of rule.patterns) {
6038
+ pattern.lastIndex = 0;
6039
+ if (pattern.test(line)) {
6040
+ findings.push({
6041
+ ruleId: rule.id,
6042
+ ruleName: rule.name,
6043
+ category: rule.category,
6044
+ severity: resolved,
6045
+ confidence: rule.confidence,
6046
+ file: filePath,
6047
+ line: startLine + i,
6048
+ match: line.trim(),
6049
+ context: line,
6050
+ message: rule.message,
6051
+ remediation: rule.remediation,
6052
+ ...rule.references ? { references: rule.references } : {}
6053
+ });
6054
+ break;
6055
+ }
6056
+ }
6057
+ }
6058
+ }
6059
+ return findings;
6060
+ }
6061
+ async scanFile(filePath) {
6062
+ if (!this.config.enabled) return [];
6063
+ const content = await fs6.readFile(filePath, "utf-8");
6064
+ return this.scanContent(content, filePath, 1);
6065
+ }
6066
+ async scanFiles(filePaths) {
6067
+ const allFindings = [];
6068
+ let scannedCount = 0;
6069
+ for (const filePath of filePaths) {
6070
+ try {
6071
+ const findings = await this.scanFile(filePath);
6072
+ allFindings.push(...findings);
6073
+ scannedCount++;
6074
+ } catch {
6075
+ }
6076
+ }
6077
+ return {
6078
+ findings: allFindings,
6079
+ scannedFiles: scannedCount,
6080
+ rulesApplied: this.activeRules.length,
6081
+ externalToolsUsed: [],
6082
+ coverage: "baseline"
6083
+ };
6084
+ }
6085
+ };
6086
+
6087
+ // src/ci/check-orchestrator.ts
6088
+ import * as path5 from "path";
6089
+ var ALL_CHECKS = [
6090
+ "validate",
6091
+ "deps",
6092
+ "docs",
6093
+ "entropy",
6094
+ "security",
6095
+ "perf",
6096
+ "phase-gate"
6097
+ ];
6098
+ async function runSingleCheck(name, projectRoot, config) {
6099
+ const start = Date.now();
6100
+ const issues = [];
6101
+ try {
6102
+ switch (name) {
6103
+ case "validate": {
6104
+ const agentsPath = path5.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
6105
+ const result = await validateAgentsMap(agentsPath);
6106
+ if (!result.ok) {
6107
+ issues.push({ severity: "error", message: result.error.message });
6108
+ } else if (!result.value.valid) {
6109
+ if (result.value.errors) {
6110
+ for (const err of result.value.errors) {
6111
+ issues.push({ severity: "error", message: err.message });
6112
+ }
6113
+ }
6114
+ for (const section of result.value.missingSections) {
6115
+ issues.push({ severity: "warning", message: `Missing section: ${section}` });
6116
+ }
6117
+ for (const link of result.value.brokenLinks) {
6118
+ issues.push({
6119
+ severity: "warning",
6120
+ message: `Broken link: ${link.text} \u2192 ${link.path}`,
6121
+ file: link.path
6122
+ });
6123
+ }
6124
+ }
6125
+ break;
4191
6126
  }
4192
6127
  case "deps": {
4193
6128
  const rawLayers = config.layers;
@@ -4221,7 +6156,7 @@ async function runSingleCheck(name, projectRoot, config) {
4221
6156
  break;
4222
6157
  }
4223
6158
  case "docs": {
4224
- const docsDir = path2.join(projectRoot, config.docsDir ?? "docs");
6159
+ const docsDir = path5.join(projectRoot, config.docsDir ?? "docs");
4225
6160
  const result = await checkDocCoverage("project", { docsDir });
4226
6161
  if (!result.ok) {
4227
6162
  issues.push({ severity: "warning", message: result.error.message });
@@ -4269,6 +6204,68 @@ async function runSingleCheck(name, projectRoot, config) {
4269
6204
  }
4270
6205
  break;
4271
6206
  }
6207
+ case "security": {
6208
+ const securityConfig = parseSecurityConfig(config.security);
6209
+ if (!securityConfig.enabled) break;
6210
+ const scanner = new SecurityScanner(securityConfig);
6211
+ scanner.configureForProject(projectRoot);
6212
+ const { glob: globFn } = await import("glob");
6213
+ const sourceFiles = await globFn("**/*.{ts,tsx,js,jsx,go,py}", {
6214
+ cwd: projectRoot,
6215
+ ignore: securityConfig.exclude ?? [
6216
+ "**/node_modules/**",
6217
+ "**/dist/**",
6218
+ "**/*.test.ts",
6219
+ "**/fixtures/**"
6220
+ ],
6221
+ absolute: true
6222
+ });
6223
+ const scanResult = await scanner.scanFiles(sourceFiles);
6224
+ for (const finding of scanResult.findings) {
6225
+ issues.push({
6226
+ severity: finding.severity === "info" ? "warning" : finding.severity,
6227
+ message: `[${finding.ruleId}] ${finding.message}: ${finding.match}`,
6228
+ file: finding.file,
6229
+ line: finding.line
6230
+ });
6231
+ }
6232
+ break;
6233
+ }
6234
+ case "perf": {
6235
+ const perfAnalyzer = new EntropyAnalyzer({
6236
+ rootDir: projectRoot,
6237
+ analyze: {
6238
+ complexity: true,
6239
+ coupling: true
6240
+ }
6241
+ });
6242
+ const perfResult = await perfAnalyzer.analyze();
6243
+ if (!perfResult.ok) {
6244
+ issues.push({ severity: "warning", message: perfResult.error.message });
6245
+ } else {
6246
+ const perfReport = perfResult.value;
6247
+ if (perfReport.complexity) {
6248
+ for (const v of perfReport.complexity.violations) {
6249
+ issues.push({
6250
+ severity: v.severity === "info" ? "warning" : v.severity,
6251
+ message: `[Tier ${v.tier}] ${v.metric}: ${v.function} in ${v.file} (${v.value} > ${v.threshold})`,
6252
+ file: v.file,
6253
+ line: v.line
6254
+ });
6255
+ }
6256
+ }
6257
+ if (perfReport.coupling) {
6258
+ for (const v of perfReport.coupling.violations) {
6259
+ issues.push({
6260
+ severity: v.severity === "info" ? "warning" : v.severity,
6261
+ message: `[Tier ${v.tier}] ${v.metric}: ${v.file} (${v.value} > ${v.threshold})`,
6262
+ file: v.file
6263
+ });
6264
+ }
6265
+ }
6266
+ }
6267
+ break;
6268
+ }
4272
6269
  case "phase-gate": {
4273
6270
  const phaseGates = config.phaseGates;
4274
6271
  if (!phaseGates?.enabled) {
@@ -4339,75 +6336,2194 @@ async function runCIChecks(input) {
4339
6336
  }
4340
6337
  }
4341
6338
 
6339
+ // src/review/mechanical-checks.ts
6340
+ import * as path6 from "path";
6341
/**
 * Runs the deterministic "mechanical" review checks — AGENTS.md validation,
 * layer-dependency rules, doc coverage, and the security scan — and
 * aggregates their findings into a single result.
 *
 * @param options - { projectRoot, config, skip?, changedFiles? }. Check names
 *   listed in `skip` are not run; `changedFiles` limits the security scan to
 *   those paths (no files are scanned when it is absent).
 * @returns Ok({ pass, stopPipeline, findings, checks }). `pass` is false when
 *   any error-severity finding was produced; `stopPipeline` is true when the
 *   validate or check-deps check failed.
 */
async function runMechanicalChecks(options) {
  const { projectRoot, config, skip = [], changedFiles } = options;
  const findings = [];
  // Per-check status; stays "skip" until the check actually runs.
  const statuses = {
    validate: "skip",
    "check-deps": "skip",
    "check-docs": "skip",
    "security-scan": "skip"
  };
  // --- validate: AGENTS.md structure check (errors are fatal to pipeline) ---
  if (!skip.includes("validate")) {
    try {
      const agentsPath = path6.join(projectRoot, config.agentsMapPath ?? "AGENTS.md");
      const result = await validateAgentsMap(agentsPath);
      if (!result.ok) {
        statuses.validate = "fail";
        findings.push({
          tool: "validate",
          file: agentsPath,
          message: result.error.message,
          severity: "error"
        });
      } else if (!result.value.valid) {
        statuses.validate = "fail";
        if (result.value.errors) {
          for (const err of result.value.errors) {
            findings.push({
              tool: "validate",
              file: agentsPath,
              message: err.message,
              severity: "error"
            });
          }
        }
        // Missing sections are reported but only at warning severity.
        for (const section of result.value.missingSections) {
          findings.push({
            tool: "validate",
            file: agentsPath,
            message: `Missing section: ${section}`,
            severity: "warning"
          });
        }
      } else {
        statuses.validate = "pass";
      }
    } catch (err) {
      statuses.validate = "fail";
      findings.push({
        tool: "validate",
        file: path6.join(projectRoot, "AGENTS.md"),
        message: err instanceof Error ? err.message : String(err),
        severity: "error"
      });
    }
  }
  // --- check-deps: layer dependency rules (passes when no layers configured) ---
  if (!skip.includes("check-deps")) {
    try {
      const rawLayers = config.layers;
      if (rawLayers && rawLayers.length > 0) {
        const parser = new TypeScriptParser();
        // Accept either `patterns` (array) or legacy single `pattern`.
        const layers = rawLayers.map(
          (l) => defineLayer(
            l.name,
            Array.isArray(l.patterns) ? l.patterns : [l.pattern],
            l.allowedDependencies
          )
        );
        const result = await validateDependencies({
          layers,
          rootDir: projectRoot,
          parser
        });
        if (!result.ok) {
          statuses["check-deps"] = "fail";
          findings.push({
            tool: "check-deps",
            file: projectRoot,
            message: result.error.message,
            severity: "error"
          });
        } else if (result.value.violations.length > 0) {
          statuses["check-deps"] = "fail";
          for (const v of result.value.violations) {
            findings.push({
              tool: "check-deps",
              file: v.file,
              line: v.line,
              message: `Layer violation: ${v.fromLayer} -> ${v.toLayer}: ${v.reason}`,
              severity: "error"
            });
          }
        } else {
          statuses["check-deps"] = "pass";
        }
      } else {
        // No layer config means nothing to violate.
        statuses["check-deps"] = "pass";
      }
    } catch (err) {
      statuses["check-deps"] = "fail";
      findings.push({
        tool: "check-deps",
        file: projectRoot,
        message: err instanceof Error ? err.message : String(err),
        severity: "error"
      });
    }
  }
  // --- check-docs: doc coverage (warn-only; never fails the pipeline) ---
  if (!skip.includes("check-docs")) {
    try {
      const docsDir = path6.join(projectRoot, config.docsDir ?? "docs");
      const result = await checkDocCoverage("project", { docsDir });
      if (!result.ok) {
        statuses["check-docs"] = "warn";
        findings.push({
          tool: "check-docs",
          file: docsDir,
          message: result.error.message,
          severity: "warning"
        });
      } else if (result.value.gaps && result.value.gaps.length > 0) {
        statuses["check-docs"] = "warn";
        for (const gap of result.value.gaps) {
          findings.push({
            tool: "check-docs",
            file: gap.file,
            message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
            severity: "warning"
          });
        }
      } else {
        statuses["check-docs"] = "pass";
      }
    } catch (err) {
      statuses["check-docs"] = "warn";
      findings.push({
        tool: "check-docs",
        file: path6.join(projectRoot, "docs"),
        message: err instanceof Error ? err.message : String(err),
        severity: "warning"
      });
    }
  }
  // --- security-scan: pattern scan of changed files (warn-only) ---
  if (!skip.includes("security-scan")) {
    try {
      const securityConfig = parseSecurityConfig(config.security);
      if (!securityConfig.enabled) {
        statuses["security-scan"] = "skip";
      } else {
        const scanner = new SecurityScanner(securityConfig);
        scanner.configureForProject(projectRoot);
        // Only changed files are scanned; absent list means scan nothing.
        const filesToScan = changedFiles ?? [];
        const scanResult = await scanner.scanFiles(filesToScan);
        if (scanResult.findings.length > 0) {
          statuses["security-scan"] = "warn";
          for (const f of scanResult.findings) {
            findings.push({
              tool: "security-scan",
              file: f.file,
              line: f.line,
              ruleId: f.ruleId,
              message: f.message,
              // "info" scanner findings are promoted to "warning" here.
              severity: f.severity === "info" ? "warning" : f.severity
            });
          }
        } else {
          statuses["security-scan"] = "pass";
        }
      }
    } catch (err) {
      statuses["security-scan"] = "warn";
      findings.push({
        tool: "security-scan",
        file: projectRoot,
        message: err instanceof Error ? err.message : String(err),
        severity: "warning"
      });
    }
  }
  const hasErrors = findings.some((f) => f.severity === "error");
  // Only validate/check-deps failures stop the pipeline; docs and security
  // checks are advisory.
  const stopPipeline = statuses.validate === "fail" || statuses["check-deps"] === "fail";
  return Ok({
    pass: !hasErrors,
    stopPipeline,
    findings,
    checks: {
      validate: statuses.validate,
      checkDeps: statuses["check-deps"],
      checkDocs: statuses["check-docs"],
      securityScan: statuses["security-scan"]
    }
  });
}
6532
+
6533
+ // src/review/exclusion-set.ts
6534
// src/review/exclusion-set.ts
var ExclusionSet = class {
  /** Findings grouped by file path, so per-file lookup is O(1). */
  byFile;
  allFindings;
  constructor(findings) {
    this.allFindings = [...findings];
    this.byFile = new Map();
    for (const finding of this.allFindings) {
      const bucket = this.byFile.get(finding.file);
      if (bucket === void 0) {
        this.byFile.set(finding.file, [finding]);
      } else {
        bucket.push(finding);
      }
    }
  }
  /**
   * Returns true if any mechanical finding covers the given file + line range.
   *
   * A finding "covers" the range when its file matches and either it has no
   * line (a file-level finding covers the whole file) or its line falls
   * within [startLine, endLine] inclusive.
   */
  isExcluded(file, lineRange) {
    const candidates = this.byFile.get(file);
    if (!candidates) return false;
    const [startLine, endLine] = lineRange;
    for (const finding of candidates) {
      if (finding.line === void 0) return true;
      if (finding.line >= startLine && finding.line <= endLine) return true;
    }
    return false;
  }
  /** Number of findings in the set. */
  get size() {
    return this.allFindings.length;
  }
  /** Returns a defensive copy of all findings. */
  getFindings() {
    return [...this.allFindings];
  }
};
/** Convenience factory wrapping the ExclusionSet constructor. */
function buildExclusionSet(findings) {
  return new ExclusionSet(findings);
}
6579
+
6580
+ // src/review/change-type.ts
6581
// src/review/change-type.ts
/** Conventional-commit prefixes mapped to review change types. */
var PREFIX_PATTERNS = [
  { pattern: /^(feat|feature)(\([^)]*\))?:/i, type: "feature" },
  { pattern: /^(fix|bugfix)(\([^)]*\))?:/i, type: "bugfix" },
  { pattern: /^refactor(\([^)]*\))?:/i, type: "refactor" },
  { pattern: /^docs?(\([^)]*\))?:/i, type: "docs" }
];
var TEST_FILE_PATTERN = /\.(test|spec)\.(ts|tsx|js|jsx|mts|cts)$/;
var MD_FILE_PATTERN = /\.md$/;
/**
 * Classifies a change as feature/bugfix/refactor/docs.
 *
 * Order of precedence: explicit commit-message prefix; all changed files are
 * markdown → docs; any new non-test file → feature; small diff (< 20 lines)
 * with a new test file → bugfix; otherwise feature.
 */
function detectChangeType(commitMessage, diff) {
  const message = commitMessage.trim();
  const prefixed = PREFIX_PATTERNS.find(({ pattern }) => pattern.test(message));
  if (prefixed) {
    return prefixed.type;
  }
  const { changedFiles, newFiles, totalDiffLines } = diff;
  const onlyMarkdown = changedFiles.length > 0 && changedFiles.every((f) => MD_FILE_PATTERN.test(f));
  if (onlyMarkdown) {
    return "docs";
  }
  if (newFiles.some((f) => !TEST_FILE_PATTERN.test(f))) {
    return "feature";
  }
  if (totalDiffLines < 20 && newFiles.some((f) => TEST_FILE_PATTERN.test(f))) {
    return "bugfix";
  }
  return "feature";
}
6609
+
6610
+ // src/review/context-scoper.ts
6611
+ import * as path7 from "path";
6612
// The review domains a context bundle is produced for (one bundle each).
var ALL_DOMAINS = ["compliance", "bug", "security", "architecture"];
// Path heuristic: dependency paths matching this are prioritized when
// gathering context for the security domain.
var SECURITY_PATTERNS = /auth|crypto|password|secret|token|session|cookie|hash|encrypt|decrypt|sql|shell|exec|eval/i;
6614
/**
 * Computes the context-line budget for a review from the diff size.
 * Diffs under 20 lines get triple their size (tiny changes still deserve
 * meaningful surrounding context); larger diffs get a 1:1 budget.
 */
function computeContextBudget(diffLines) {
  return diffLines < 20 ? diffLines * 3 : diffLines;
}
6618
/**
 * Returns true when `absPath` resolves to `projectRoot` itself or to a path
 * strictly inside it. The separator-suffixed comparison prevents sibling
 * directories with a shared prefix (e.g. "/proj" vs "/proj-extra") from
 * matching.
 */
function isWithinProject(absPath, projectRoot) {
  const root = path7.resolve(projectRoot);
  const candidate = path7.resolve(absPath);
  if (candidate === root) {
    return true;
  }
  return candidate.startsWith(root + path7.sep);
}
6623
/**
 * Reads a file for inclusion in a review context bundle.
 *
 * Resolves `filePath` against `projectRoot` when relative, refuses paths
 * outside the project (returns null), and returns null on read failure.
 * On success returns { path, content, reason, lines } where `path` is
 * project-relative and `lines` is the newline-split line count.
 */
async function readContextFile(projectRoot, filePath, reason) {
  const absPath = path7.isAbsolute(filePath) ? filePath : path7.join(projectRoot, filePath);
  // Path-traversal guard: never read outside the project root.
  if (!isWithinProject(absPath, projectRoot)) return null;
  const result = await readFileContent(absPath);
  if (!result.ok) return null;
  const content = result.value;
  const lines = content.split("\n").length;
  const relPath = path7.isAbsolute(filePath) ? path7.relative(projectRoot, filePath) : filePath;
  return { path: relPath, content, reason, lines };
}
6633
/**
 * Extracts module specifiers from ESM `import` statements (including bare
 * side-effect imports) and CommonJS `require(...)` calls in `content`.
 * Returns the specifiers in source order.
 */
function extractImportSources(content) {
  const importRegex = /(?:import\s+(?:.*?\s+from\s+)?['"]([^'"]+)['"]|require\(\s*['"]([^'"]+)['"]\s*\))/g;
  const sources = [];
  for (const match of content.matchAll(importRegex)) {
    // Group 1 is the import form, group 2 the require form.
    const source = match[1] ?? match[2];
    if (source) {
      sources.push(source);
    }
  }
  return sources;
}
6643
/**
 * Resolves a relative import specifier to a project-relative file path.
 *
 * Only relative specifiers ("./", "../") are handled; bare package names
 * return null. Tries the extensions .ts, .tsx, .mts and then an index.ts
 * inside the target directory, returning the first candidate that exists.
 * Paths resolving outside the project root return null.
 */
async function resolveImportPath2(projectRoot, fromFile, importSource) {
  if (!importSource.startsWith(".")) return null;
  const fromDir = path7.dirname(path7.join(projectRoot, fromFile));
  const basePath = path7.resolve(fromDir, importSource);
  if (!isWithinProject(basePath, projectRoot)) return null;
  const relBase = path7.relative(projectRoot, basePath);
  // Candidate resolution order mirrors TypeScript's: extensions first,
  // then directory index.
  const candidates = [
    relBase + ".ts",
    relBase + ".tsx",
    relBase + ".mts",
    path7.join(relBase, "index.ts")
  ];
  for (const candidate of candidates) {
    const absCandidate = path7.join(projectRoot, candidate);
    if (await fileExists(absCandidate)) {
      return candidate;
    }
  }
  return null;
}
6663
/**
 * Finds test files named after `sourceFile` anywhere under `projectRoot`
 * (e.g. "foo.ts" matches "foo.test.ts" / "foo.spec.tsx" in any directory).
 * Returns project-relative paths.
 */
async function findTestFiles(projectRoot, sourceFile) {
  const baseName = path7.basename(sourceFile, path7.extname(sourceFile));
  const pattern = `**/${baseName}.{test,spec}.{ts,tsx,mts}`;
  const results = await findFiles(pattern, projectRoot);
  return results.map((f) => path7.relative(projectRoot, f));
}
6669
/**
 * Gathers context files by following the import statements of each changed
 * file, stopping once `budget` lines of context have been collected.
 *
 * Files already in the changed set (or already gathered) are skipped via
 * `seen`. Note the budget is checked before each read, so the final total
 * may exceed `budget` by the size of the last file added.
 */
async function gatherImportContext(projectRoot, changedFiles, budget) {
  const contextFiles = [];
  let linesGathered = 0;
  const seen = new Set(changedFiles.map((f) => f.path));
  for (const cf of changedFiles) {
    if (linesGathered >= budget) break;
    const sources = extractImportSources(cf.content);
    for (const source of sources) {
      if (linesGathered >= budget) break;
      const resolved = await resolveImportPath2(projectRoot, cf.path, source);
      if (resolved && !seen.has(resolved)) {
        seen.add(resolved);
        const contextFile = await readContextFile(projectRoot, resolved, "import");
        if (contextFile) {
          contextFiles.push(contextFile);
          linesGathered += contextFile.lines;
        }
      }
    }
  }
  return contextFiles;
}
6691
/**
 * Gathers context files from the project dependency graph: for each changed
 * file, reads its graph dependencies until `budget` lines are collected.
 *
 * Graph lookups that throw are skipped (best-effort). Changed files and
 * already-gathered dependencies are deduplicated via `seen`.
 */
async function gatherGraphDependencyContext(projectRoot, changedFilePaths, graph, budget) {
  const contextFiles = [];
  let linesGathered = 0;
  const seen = new Set(changedFilePaths);
  for (const filePath of changedFilePaths) {
    if (linesGathered >= budget) break;
    let deps;
    try {
      deps = await graph.getDependencies(filePath);
    } catch {
      // Best-effort: a file missing from the graph contributes no context.
      continue;
    }
    for (const dep of deps) {
      if (linesGathered >= budget) break;
      if (seen.has(dep)) continue;
      seen.add(dep);
      const contextFile = await readContextFile(projectRoot, dep, "graph-dependency");
      if (contextFile) {
        contextFiles.push(contextFile);
        linesGathered += contextFile.lines;
      }
    }
  }
  return contextFiles;
}
6716
/**
 * Gathers test files related to the changed files (no line budget applies).
 *
 * With a graph, uses each file's impact set (`getImpact(...).tests`); graph
 * lookups that throw are skipped. Without a graph, falls back to a filename
 * glob via `findTestFiles`. Results are deduplicated across changed files.
 */
async function gatherTestContext(projectRoot, changedFilePaths, graph) {
  const testFiles = [];
  const seen = /* @__PURE__ */ new Set();
  if (graph) {
    for (const filePath of changedFilePaths) {
      let impact;
      try {
        impact = await graph.getImpact(filePath);
      } catch {
        // Best-effort: files unknown to the graph contribute no tests.
        continue;
      }
      for (const testFile of impact.tests) {
        if (seen.has(testFile)) continue;
        seen.add(testFile);
        const cf = await readContextFile(projectRoot, testFile, "test");
        if (cf) testFiles.push(cf);
      }
    }
  } else {
    for (const filePath of changedFilePaths) {
      const found = await findTestFiles(projectRoot, filePath);
      for (const testFile of found) {
        if (seen.has(testFile)) continue;
        seen.add(testFile);
        const cf = await readContextFile(projectRoot, testFile, "test");
        if (cf) testFiles.push(cf);
      }
    }
  }
  return testFiles;
}
6747
/**
 * Context scoper for the compliance domain: reads the project convention
 * files (default CLAUDE.md and AGENTS.md, overridable via
 * `options.conventionFiles`). Changed files are ignored for this domain.
 * Missing convention files are silently skipped.
 */
async function scopeComplianceContext(projectRoot, _changedFiles, options) {
  const contextFiles = [];
  const conventionFiles = options.conventionFiles ?? ["CLAUDE.md", "AGENTS.md"];
  for (const cf of conventionFiles) {
    const file = await readContextFile(projectRoot, cf, "convention");
    if (file) contextFiles.push(file);
  }
  return contextFiles;
}
6756
/**
 * Context scoper for the bug domain: dependency context (graph-based when a
 * graph is available, otherwise import-following) within `budget`, plus all
 * related test files (tests are not counted against the budget).
 */
async function scopeBugContext(projectRoot, changedFiles, budget, options) {
  const contextFiles = [];
  const changedPaths = changedFiles.map((f) => f.path);
  if (options.graph) {
    const deps = await gatherGraphDependencyContext(
      projectRoot,
      changedPaths,
      options.graph,
      budget
    );
    contextFiles.push(...deps);
  } else {
    const deps = await gatherImportContext(projectRoot, changedFiles, budget);
    contextFiles.push(...deps);
  }
  const tests = await gatherTestContext(projectRoot, changedPaths, options.graph);
  contextFiles.push(...tests);
  return contextFiles;
}
6775
/**
 * Context scoper for the security domain.
 *
 * With a graph: collects the dependencies of every changed file, sorts
 * security-relevant paths (matching SECURITY_PATTERNS) ahead of generic
 * ones, and reads them until `budget` lines are gathered. Without a graph:
 * falls back to import-following context.
 */
async function scopeSecurityContext(projectRoot, changedFiles, budget, options) {
  const contextFiles = [];
  const changedPaths = changedFiles.map((f) => f.path);
  if (options.graph) {
    const allPaths = [];
    for (const filePath of changedPaths) {
      try {
        const deps = await options.graph.getDependencies(filePath);
        allPaths.push(...deps);
      } catch {
        // Best-effort: files unknown to the graph contribute no paths.
        continue;
      }
    }
    const uniquePaths = [...new Set(allPaths)];
    // Stable priority sort: security-relevant paths first.
    const securityFirst = uniquePaths.sort((a, b) => {
      const aMatch = SECURITY_PATTERNS.test(a) ? 0 : 1;
      const bMatch = SECURITY_PATTERNS.test(b) ? 0 : 1;
      return aMatch - bMatch;
    });
    // Track gathered lines with a running counter (matches the sibling
    // gather* helpers) instead of re-summing the whole contextFiles array
    // with reduce on every iteration, which was accidentally O(n^2).
    let linesGathered = 0;
    for (const depPath of securityFirst) {
      if (linesGathered >= budget) break;
      const cf = await readContextFile(projectRoot, depPath, "graph-dependency");
      if (cf) {
        contextFiles.push(cf);
        linesGathered += cf.lines;
      }
    }
  } else {
    const deps = await gatherImportContext(projectRoot, changedFiles, budget);
    contextFiles.push(...deps);
  }
  return contextFiles;
}
6805
/**
 * Context scoper for the architecture domain.
 *
 * With a graph: reads the code-impact set of each changed file (files that
 * depend on it) up to `budget` lines; graph lookups that throw are skipped.
 * Without a graph: import-following context, plus the raw check-deps output
 * (when provided) appended as a synthetic "convention" context entry.
 */
async function scopeArchitectureContext(projectRoot, changedFiles, budget, options) {
  const contextFiles = [];
  const changedPaths = changedFiles.map((f) => f.path);
  if (options.graph) {
    let linesGathered = 0;
    for (const filePath of changedPaths) {
      if (linesGathered >= budget) break;
      let impact;
      try {
        impact = await options.graph.getImpact(filePath);
      } catch {
        // Best-effort: files unknown to the graph contribute no impact.
        continue;
      }
      for (const codePath of impact.code) {
        if (linesGathered >= budget) break;
        const cf = await readContextFile(projectRoot, codePath, "graph-impact");
        if (cf) {
          contextFiles.push(cf);
          linesGathered += cf.lines;
        }
      }
    }
  } else {
    const deps = await gatherImportContext(projectRoot, changedFiles, budget);
    contextFiles.push(...deps);
    if (options.checkDepsOutput) {
      // Synthetic entry: lets the reviewer see layer-rule output inline.
      contextFiles.push({
        path: "harness-check-deps-output",
        content: options.checkDepsOutput,
        lines: options.checkDepsOutput.split("\n").length,
        reason: "convention"
      });
    }
  }
  return contextFiles;
}
6841
/**
 * Builds one context bundle per review domain (compliance, bug, security,
 * architecture) for a change.
 *
 * Detects the change type from the commit message and diff, computes the
 * line budget from the diff size, reads every changed file once, then runs
 * each domain's scoper to collect its extra context files.
 *
 * @returns array of bundles, one per entry in ALL_DOMAINS, each carrying
 *   the shared changed files plus domain-specific context files and counts.
 */
async function scopeContext(options) {
  const { projectRoot, diff, commitMessage } = options;
  const changeType = detectChangeType(commitMessage, diff);
  const budget = computeContextBudget(diff.totalDiffLines);
  const changedFiles = [];
  for (const filePath of diff.changedFiles) {
    const cf = await readContextFile(projectRoot, filePath, "changed");
    if (cf) changedFiles.push(cf);
  }
  // Lazy per-domain scopers; invoked one at a time below.
  const scopers = {
    compliance: () => scopeComplianceContext(projectRoot, changedFiles, options),
    bug: () => scopeBugContext(projectRoot, changedFiles, budget, options),
    security: () => scopeSecurityContext(projectRoot, changedFiles, budget, options),
    architecture: () => scopeArchitectureContext(projectRoot, changedFiles, budget, options)
  };
  const bundles = [];
  for (const domain of ALL_DOMAINS) {
    const contextFiles = await scopers[domain]();
    const contextLines = contextFiles.reduce((sum, f) => sum + f.lines, 0);
    bundles.push({
      domain,
      changeType,
      // Each bundle gets its own copy so downstream mutation can't leak.
      changedFiles: [...changedFiles],
      contextFiles,
      commitHistory: options.commitHistory ?? [],
      diffLines: diff.totalDiffLines,
      contextLines
    });
  }
  return bundles;
}
6872
+
6873
+ // src/review/constants.ts
6874
+ var SEVERITY_RANK = {
6875
+ suggestion: 0,
6876
+ important: 1,
6877
+ critical: 2
6878
+ };
6879
+ var SEVERITY_ORDER = ["critical", "important", "suggestion"];
6880
+ var SEVERITY_LABELS = {
6881
+ critical: "Critical",
6882
+ important: "Important",
6883
+ suggestion: "Suggestion"
6884
+ };
6885
+ var VALIDATED_BY_RANK = {
6886
+ mechanical: 0,
6887
+ heuristic: 1,
6888
+ graph: 2
6889
+ };
6890
+ function makeFindingId(domain, file, line, title) {
6891
+ const hash = title.slice(0, 20).replace(/[^a-zA-Z0-9]/g, "");
6892
+ return `${domain}-${file.replace(/[^a-zA-Z0-9]/g, "-")}-${line}-${hash}`;
6893
+ }
6894
+
6895
+ // src/review/agents/compliance-agent.ts
6896
// Descriptor for the compliance review agent: its domain, model tier, and
// the focus areas surfaced to reviewers.
var COMPLIANCE_DESCRIPTOR = {
  domain: "compliance",
  tier: "standard",
  displayName: "Compliance",
  focusAreas: [
    "Spec alignment \u2014 implementation matches design doc",
    "API surface \u2014 new public interfaces are minimal and well-named",
    "Backward compatibility \u2014 no breaking changes without migration path",
    "Convention adherence \u2014 project conventions from CLAUDE.md/AGENTS.md followed",
    "Documentation completeness \u2014 all public interfaces documented"
  ]
};
6908
/**
 * Extracts convention rules from the bundle's convention context files.
 * A rule is any line that, after trimming, starts with a "- " or "* "
 * bullet; the bullet marker is stripped and the source file recorded.
 */
function extractConventionRules(bundle) {
  const rules = [];
  for (const file of bundle.contextFiles) {
    if (file.reason !== "convention") continue;
    for (const rawLine of file.content.split("\n")) {
      const trimmed = rawLine.trim();
      const isBullet = trimmed.startsWith("- ") || trimmed.startsWith("* ");
      if (isBullet) {
        rules.push({ text: trimmed.slice(2).trim(), source: file.path });
      }
    }
  }
  return rules;
}
6922
/**
 * Finds exported declarations in the changed files that lack a JSDoc block.
 * An export counts as documented when the nearest non-blank line above it
 * ends with a closing block comment. Returns { file, line, exportName }
 * for each undocumented export (1-based line numbers).
 */
function findMissingJsDoc(bundle) {
  const exportRe = /export\s+(?:async\s+)?(?:function|const|class|interface|type)\s+(\w+)/;
  const missing = [];
  for (const cf of bundle.changedFiles) {
    const lines = cf.content.split("\n");
    lines.forEach((line, idx) => {
      const exportMatch = line.match(exportRe);
      if (!exportMatch) return;
      // Walk upward past blank lines to the nearest preceding content.
      let precedingIdx = idx - 1;
      while (precedingIdx >= 0 && lines[precedingIdx].trim() === "") {
        precedingIdx--;
      }
      const documented = precedingIdx >= 0 && lines[precedingIdx].trim().endsWith("*/");
      if (!documented) {
        missing.push({
          file: cf.path,
          line: idx + 1,
          exportName: exportMatch[1]
        });
      }
    });
  }
  return missing;
}
6953
/**
 * Heuristic compliance review agent.
 *
 * Produces findings from three sources:
 * 1. If a convention rule mentioning "jsdoc" exists, flags every exported
 *    declaration lacking a JSDoc block (severity "important").
 * 2. Change-type checks: feature changes with no spec/convention context,
 *    and bugfix changes with no commit history, each get one suggestion
 *    anchored to the first changed file. Refactor/docs add nothing.
 * 3. If a convention rule mentioning "result type" exists, flags changed
 *    files that contain try/catch but no Result-type usage (suggestion).
 *
 * All findings are heuristic (validatedBy: "heuristic").
 */
function runComplianceAgent(bundle) {
  const findings = [];
  const rules = extractConventionRules(bundle);
  const jsDocRuleExists = rules.some((r) => r.text.toLowerCase().includes("jsdoc"));
  if (jsDocRuleExists) {
    const missingDocs = findMissingJsDoc(bundle);
    for (const m of missingDocs) {
      findings.push({
        id: makeFindingId("compliance", m.file, m.line, `Missing JSDoc ${m.exportName}`),
        file: m.file,
        lineRange: [m.line, m.line],
        domain: "compliance",
        severity: "important",
        title: `Missing JSDoc on exported \`${m.exportName}\``,
        rationale: `Convention requires all exports to have JSDoc comments (from ${rules.find((r) => r.text.toLowerCase().includes("jsdoc"))?.source ?? "conventions"}).`,
        suggestion: `Add a JSDoc comment above the export of \`${m.exportName}\`.`,
        evidence: [
          `changeType: ${bundle.changeType}`,
          `Convention rule: "${rules.find((r) => r.text.toLowerCase().includes("jsdoc"))?.text ?? ""}"`
        ],
        validatedBy: "heuristic"
      });
    }
  }
  switch (bundle.changeType) {
    case "feature": {
      // Spec or convention context is treated as evidence the feature was
      // reviewed against a design.
      const hasSpecContext = bundle.contextFiles.some(
        (f) => f.reason === "spec" || f.reason === "convention"
      );
      if (!hasSpecContext && bundle.changedFiles.length > 0) {
        const firstFile = bundle.changedFiles[0];
        findings.push({
          id: makeFindingId("compliance", firstFile.path, 1, "No spec for feature"),
          file: firstFile.path,
          lineRange: [1, 1],
          domain: "compliance",
          severity: "suggestion",
          title: "No spec/design doc found for feature change",
          rationale: "Feature changes should reference a spec or design doc to verify alignment. No spec context was included in the review bundle.",
          evidence: [`changeType: feature`, `contextFiles count: ${bundle.contextFiles.length}`],
          validatedBy: "heuristic"
        });
      }
      break;
    }
    case "bugfix": {
      if (bundle.commitHistory.length === 0 && bundle.changedFiles.length > 0) {
        const firstFile = bundle.changedFiles[0];
        findings.push({
          id: makeFindingId("compliance", firstFile.path, 1, "Bugfix no history"),
          file: firstFile.path,
          lineRange: [1, 1],
          domain: "compliance",
          severity: "suggestion",
          title: "Bugfix without commit history context",
          rationale: "Bugfix changes benefit from commit history to verify the root cause is addressed, not just the symptom. No commit history was provided.",
          evidence: [`changeType: bugfix`, `commitHistory entries: ${bundle.commitHistory.length}`],
          validatedBy: "heuristic"
        });
      }
      break;
    }
    case "refactor": {
      // No refactor-specific compliance checks.
      break;
    }
    case "docs": {
      // No docs-specific compliance checks.
      break;
    }
  }
  const resultTypeRule = rules.find((r) => r.text.toLowerCase().includes("result type"));
  if (resultTypeRule) {
    for (const cf of bundle.changedFiles) {
      // Substring heuristics only — no parsing; may miss unusual spacing.
      const hasTryCatch = cf.content.includes("try {") || cf.content.includes("try{");
      const usesResult = cf.content.includes("Result<") || cf.content.includes("Result >") || cf.content.includes(": Result");
      if (hasTryCatch && !usesResult) {
        findings.push({
          id: makeFindingId("compliance", cf.path, 1, "try-catch not Result"),
          file: cf.path,
          lineRange: [1, cf.lines],
          domain: "compliance",
          severity: "suggestion",
          title: "Fallible operation uses try/catch instead of Result type",
          rationale: `Convention requires using Result type for fallible operations (from ${resultTypeRule.source}).`,
          suggestion: "Refactor error handling to use the Result type pattern.",
          evidence: [
            `changeType: ${bundle.changeType}`,
            `Convention rule: "${resultTypeRule.text}"`
          ],
          validatedBy: "heuristic"
        });
      }
    }
  }
  return findings;
}
7048
+
7049
+ // src/review/agents/bug-agent.ts
7050
// Descriptor for the bug-detection review agent: its domain, model tier,
// and the focus areas surfaced to reviewers.
var BUG_DETECTION_DESCRIPTOR = {
  domain: "bug",
  tier: "strong",
  displayName: "Bug Detection",
  focusAreas: [
    "Edge cases \u2014 boundary conditions, empty input, max values, null, concurrent access",
    "Error handling \u2014 errors handled at appropriate level, no silent swallowing",
    "Logic errors \u2014 off-by-one, incorrect boolean logic, missing early returns",
    "Race conditions \u2014 concurrent access to shared state",
    "Resource leaks \u2014 unclosed handles, missing cleanup in error paths",
    "Type safety \u2014 type mismatches, unsafe casts, missing null checks",
    "Test coverage \u2014 tests for happy path, error paths, and edge cases"
  ]
};
7064
/**
 * Heuristic: flags division expressions whose divisor is an identifier when
 * none of the three preceding lines contains a zero comparison.
 *
 * Lines containing "//" anywhere are skipped entirely (coarse comment
 * filter — this also skips code lines that merely contain "//", e.g. URLs).
 * Emits one "important" finding per suspect line.
 */
function detectDivisionByZero(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    const lines = cf.content.split("\n");
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      // Divisor must be an identifier; the leading [^=!<>] avoids matching
      // comparison operators like "!=" or "<=" followed by "/".
      if (line.match(/[^=!<>]\s*\/\s*[a-zA-Z_]\w*/) && !line.includes("//")) {
        const preceding = lines.slice(Math.max(0, i - 3), i).join("\n");
        if (!preceding.includes("=== 0") && !preceding.includes("!== 0") && !preceding.includes("== 0") && !preceding.includes("!= 0")) {
          findings.push({
            id: makeFindingId("bug", cf.path, i + 1, "division by zero"),
            file: cf.path,
            lineRange: [i + 1, i + 1],
            domain: "bug",
            severity: "important",
            title: "Potential division by zero without guard",
            rationale: "Division operation found without a preceding zero check on the divisor. This can cause Infinity or NaN at runtime.",
            suggestion: "Add a check for zero before dividing, or use a safe division utility.",
            evidence: [`Line ${i + 1}: ${line.trim()}`],
            validatedBy: "heuristic"
          });
        }
      }
    }
  }
  return findings;
}
7091
/**
 * Heuristic: flags empty catch blocks, either inline ("catch (e) {}") or a
 * catch opener whose very next line is just "}".
 *
 * Note the condition relies on || binding looser than &&: the second
 * alternative (opener + next-line "}") only applies its extra checks to
 * the second match, which is the intended grouping.
 */
function detectEmptyCatch(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    const lines = cf.content.split("\n");
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      if (line.match(/catch\s*\([^)]*\)\s*\{\s*\}/) || line.match(/catch\s*\([^)]*\)\s*\{/) && i + 1 < lines.length && lines[i + 1].trim() === "}") {
        findings.push({
          id: makeFindingId("bug", cf.path, i + 1, "empty catch block"),
          file: cf.path,
          // Range spans two lines to cover the multi-line form.
          lineRange: [i + 1, i + 2],
          domain: "bug",
          severity: "important",
          title: "Empty catch block silently swallows error",
          rationale: "Catching an error without handling, logging, or re-throwing it hides failures and makes debugging difficult.",
          suggestion: "Log the error, re-throw it, or handle it explicitly. If intentionally ignoring, add a comment explaining why.",
          evidence: [`Line ${i + 1}: ${line.trim()}`],
          validatedBy: "heuristic"
        });
      }
    }
  }
  return findings;
}
7115
/**
 * Heuristic: when the bundle's context contains no test files at all, emits
 * a single suggestion (anchored to the first non-test changed file) listing
 * every changed source file that lacks tests. Emits nothing when all
 * changed files are themselves test files.
 */
function detectMissingTests(bundle) {
  const findings = [];
  const hasTestFiles = bundle.contextFiles.some((f) => f.reason === "test");
  if (!hasTestFiles) {
    const sourceFiles = bundle.changedFiles.filter(
      (f) => !f.path.match(/\.(test|spec)\.(ts|tsx|js|jsx)$/)
    );
    if (sourceFiles.length > 0) {
      const firstFile = sourceFiles[0];
      findings.push({
        id: makeFindingId("bug", firstFile.path, 1, "no test files"),
        file: firstFile.path,
        lineRange: [1, 1],
        domain: "bug",
        severity: "suggestion",
        title: "No test files found for changed source files",
        rationale: "Changed source files should have corresponding test files. No test files were found in the review context.",
        evidence: [`Source files without tests: ${sourceFiles.map((f) => f.path).join(", ")}`],
        validatedBy: "heuristic"
      });
    }
  }
  return findings;
}
7139
// Entry point for the bug-detection agent: runs every bug detector over the
// bundle and concatenates their findings in detector order.
function runBugDetectionAgent(bundle) {
  const detectors = [detectDivisionByZero, detectEmptyCatch, detectMissingTests];
  return detectors.flatMap((detect) => detect(bundle));
}
7146
+
7147
+ // src/review/agents/security-agent.ts
7148
// Descriptor for the security review agent. tier "strong" selects the most
// capable model tier (see resolveModelTier); focusAreas are human-readable
// prompts/labels describing what this agent looks for.
var SECURITY_DESCRIPTOR = {
  domain: "security",
  tier: "strong",
  displayName: "Security",
  focusAreas: [
    "Input validation \u2014 user input flowing to dangerous sinks (SQL, shell, HTML)",
    "Authorization \u2014 missing auth checks on new/modified endpoints",
    "Data exposure \u2014 sensitive data in logs, error messages, API responses",
    "Authentication bypass \u2014 paths introduced by the change",
    "Insecure defaults \u2014 new configuration options with unsafe defaults",
    "Node.js specific \u2014 prototype pollution, ReDoS, path traversal"
  ]
};
7161
// Heuristic regexes shared by the security detectors below.
// Matches eval(...) or new Function(...) — both execute arbitrary strings as code.
var EVAL_PATTERN = /\beval\s*\(|new\s+Function\s*\(/;
// Likely hardcoded credentials: `key = "..."`-style assignments, or quoted
// literals with a recognizable secret prefix (sk-..., api_..., etc.).
var SECRET_PATTERNS = [
  /(?:api[_-]?key|secret|password|token|private[_-]?key)\s*=\s*["'][^"']{8,}/i,
  /["'](?:sk|pk|api|key|secret|token|password)[-_][a-zA-Z0-9]{10,}["']/i
];
// SQL keywords combined with `+ identifier` concatenation, or a template
// literal mixing ${...} interpolation with SQL keywords.
var SQL_CONCAT_PATTERN = /(?:SELECT|INSERT|UPDATE|DELETE|DROP|CREATE|ALTER)\s+.*?\+\s*\w+|`[^`]*\$\{[^}]*\}[^`]*(?:SELECT|INSERT|UPDATE|DELETE|WHERE)/i;
// child_process exec/spawn invoked with a template literal that interpolates.
var SHELL_EXEC_PATTERN = /(?:exec|execSync|spawn|spawnSync)\s*\(\s*`[^`]*\$\{/;
7168
// Scans changed files line-by-line for eval()/new Function() and emits one
// critical CWE-94 finding per matching line.
// NOTE: the `${"eval"}` / `${"Function"}` indirection in the strings below is
// deliberate — it keeps the literal token "eval(" out of this bundle's own
// source so the detector does not flag itself. Do not "simplify" it.
function detectEvalUsage(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    const lines = cf.content.split("\n");
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      if (EVAL_PATTERN.test(line)) {
        findings.push({
          id: makeFindingId("security", cf.path, i + 1, "eval usage CWE-94"),
          file: cf.path,
          lineRange: [i + 1, i + 1],
          domain: "security",
          severity: "critical",
          title: `Dangerous ${"eval"}() or new ${"Function"}() usage`,
          rationale: `${"eval"}() and new ${"Function"}() execute arbitrary code. If user input reaches these calls, it enables Remote Code Execution (CWE-94).`,
          suggestion: "Replace eval/Function with a safe alternative (JSON.parse for data, a sandboxed evaluator for expressions).",
          evidence: [`Line ${i + 1}: ${line.trim()}`],
          validatedBy: "heuristic",
          cweId: "CWE-94",
          owaspCategory: "A03:2021 Injection",
          confidence: "high",
          remediation: "Replace eval/Function with a safe alternative (JSON.parse for data, a sandboxed evaluator for expressions).",
          references: [
            "https://cwe.mitre.org/data/definitions/94.html",
            "https://owasp.org/Top10/A03_2021-Injection/"
          ]
        });
      }
    }
  }
  return findings;
}
7200
// Scans changed files for hardcoded credentials (see SECRET_PATTERNS) and
// emits one critical CWE-798 finding per matching line. The matched value is
// never echoed into evidence — only a redaction marker.
function detectHardcodedSecrets(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    const lines = cf.content.split("\n");
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      // Strip a trailing `//` line comment before matching, so commented-out
      // examples are ignored. NOTE(review): only handles `//`, not /* */
      // block comments — presumably acceptable for this heuristic.
      const codePart = line.includes("//") ? line.slice(0, line.indexOf("//")) : line;
      for (const pattern of SECRET_PATTERNS) {
        if (pattern.test(codePart)) {
          findings.push({
            id: makeFindingId("security", cf.path, i + 1, "hardcoded secret CWE-798"),
            file: cf.path,
            lineRange: [i + 1, i + 1],
            domain: "security",
            severity: "critical",
            title: "Hardcoded secret or API key detected",
            rationale: "Hardcoded secrets in source code can be extracted from version history even after removal. Use environment variables or a secrets manager (CWE-798).",
            suggestion: "Move the secret to an environment variable and access it via process.env.",
            evidence: [`Line ${i + 1}: [secret detected \u2014 value redacted]`],
            validatedBy: "heuristic",
            cweId: "CWE-798",
            owaspCategory: "A07:2021 Identification and Authentication Failures",
            confidence: "high",
            remediation: "Move the secret to an environment variable and access it via process.env.",
            references: [
              "https://cwe.mitre.org/data/definitions/798.html",
              "https://owasp.org/Top10/A07_2021-Identification_and_Authentication_Failures/"
            ]
          });
          // At most one finding per line even if several patterns match.
          break;
        }
      }
    }
  }
  return findings;
}
7236
// Scans changed files for SQL built via string concatenation or template
// interpolation (see SQL_CONCAT_PATTERN) and emits one critical CWE-89
// finding per matching line.
function detectSqlInjection(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    const lines = cf.content.split("\n");
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      if (SQL_CONCAT_PATTERN.test(line)) {
        findings.push({
          id: makeFindingId("security", cf.path, i + 1, "SQL injection CWE-89"),
          file: cf.path,
          lineRange: [i + 1, i + 1],
          domain: "security",
          severity: "critical",
          title: "Potential SQL injection via string concatenation",
          rationale: "Building SQL queries with string concatenation or template literals allows attackers to inject malicious SQL (CWE-89).",
          suggestion: "Use parameterized queries or a query builder (e.g., Knex, Prisma) instead of string concatenation.",
          evidence: [`Line ${i + 1}: ${line.trim()}`],
          validatedBy: "heuristic",
          cweId: "CWE-89",
          owaspCategory: "A03:2021 Injection",
          confidence: "high",
          remediation: "Use parameterized queries or a query builder (e.g., Knex, Prisma) instead of string concatenation.",
          references: [
            "https://cwe.mitre.org/data/definitions/89.html",
            "https://owasp.org/Top10/A03_2021-Injection/"
          ]
        });
      }
    }
  }
  return findings;
}
7268
// Scans changed files for child_process exec/spawn calls whose command is a
// template literal with interpolation (see SHELL_EXEC_PATTERN) and emits one
// critical CWE-78 finding per matching line.
function detectCommandInjection(bundle) {
  const findings = [];
  for (const cf of bundle.changedFiles) {
    const lines = cf.content.split("\n");
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      if (SHELL_EXEC_PATTERN.test(line)) {
        findings.push({
          id: makeFindingId("security", cf.path, i + 1, "command injection CWE-78"),
          file: cf.path,
          lineRange: [i + 1, i + 1],
          domain: "security",
          severity: "critical",
          title: "Potential command injection via shell exec with interpolation",
          rationale: "Using exec/spawn with template literal interpolation allows attackers to inject shell commands (CWE-78).",
          suggestion: "Use execFile or spawn with an arguments array instead of shell string interpolation.",
          evidence: [`Line ${i + 1}: ${line.trim()}`],
          validatedBy: "heuristic",
          cweId: "CWE-78",
          owaspCategory: "A03:2021 Injection",
          confidence: "high",
          remediation: "Use execFile or spawn with an arguments array instead of shell string interpolation.",
          references: [
            "https://cwe.mitre.org/data/definitions/78.html",
            "https://owasp.org/Top10/A03_2021-Injection/"
          ]
        });
      }
    }
  }
  return findings;
}
7300
// Entry point for the security agent: runs every security detector over the
// bundle and concatenates their findings in detector order.
function runSecurityAgent(bundle) {
  const detectors = [
    detectEvalUsage,
    detectHardcodedSecrets,
    detectSqlInjection,
    detectCommandInjection
  ];
  return detectors.flatMap((detect) => detect(bundle));
}
7308
+
7309
+ // src/review/agents/architecture-agent.ts
7310
// Descriptor for the architecture review agent (standard model tier).
var ARCHITECTURE_DESCRIPTOR = {
  domain: "architecture",
  tier: "standard",
  displayName: "Architecture",
  focusAreas: [
    "Layer compliance \u2014 imports flow in the correct direction per architectural layers",
    "Dependency direction \u2014 modules depend on abstractions, not concretions",
    "Single Responsibility \u2014 each module has one reason to change",
    "Pattern consistency \u2014 code follows established codebase patterns",
    "Separation of concerns \u2014 business logic separated from infrastructure",
    "DRY violations \u2014 duplicated logic that should be extracted (excluding intentional duplication)"
  ]
};
// Files with more lines than this are flagged as possible SRP violations.
var LARGE_FILE_THRESHOLD = 300;
7324
// Turns check-deps output (provided as a synthetic context file named
// "harness-check-deps-output") into critical architecture findings: any output
// line mentioning "violation" or "layer" becomes one finding.
function detectLayerViolations(bundle) {
  const findings = [];
  const checkDepsFile = bundle.contextFiles.find((f) => f.path === "harness-check-deps-output");
  if (!checkDepsFile) return findings;
  const lines = checkDepsFile.content.split("\n");
  for (const line of lines) {
    if (line.toLowerCase().includes("violation") || line.toLowerCase().includes("layer")) {
      // Best-effort extraction of "path/to/file.ts:123" from the tool output;
      // falls back to the first changed file, then to "unknown".
      const fileMatch = line.match(/(?:in\s+)?(\S+\.(?:ts|tsx|js|jsx))(?::(\d+))?/);
      const file = fileMatch?.[1] ?? bundle.changedFiles[0]?.path ?? "unknown";
      const lineNum = fileMatch?.[2] ? parseInt(fileMatch[2], 10) : 1;
      findings.push({
        id: makeFindingId("arch", file, lineNum, "layer violation"),
        file,
        lineRange: [lineNum, lineNum],
        domain: "architecture",
        severity: "critical",
        title: "Layer boundary violation detected by check-deps",
        rationale: `Architectural layer violation: ${line.trim()}. Imports must flow in the correct direction per the project's layer definitions.`,
        suggestion: "Route the dependency through the correct intermediate layer (e.g., routes -> services -> db, not routes -> db).",
        evidence: [line.trim()],
        validatedBy: "heuristic"
      });
    }
  }
  return findings;
}
7350
// Flags each changed file whose line count exceeds LARGE_FILE_THRESHOLD as a
// possible Single Responsibility violation (severity: suggestion).
function detectLargeFiles(bundle) {
  const findings = [];
  for (const changed of bundle.changedFiles) {
    if (changed.lines <= LARGE_FILE_THRESHOLD) continue;
    findings.push({
      id: makeFindingId("arch", changed.path, 1, "large file SRP"),
      file: changed.path,
      lineRange: [1, changed.lines],
      domain: "architecture",
      severity: "suggestion",
      title: `Large file (${changed.lines} lines) may violate Single Responsibility`,
      rationale: `Files over ${LARGE_FILE_THRESHOLD} lines often contain multiple responsibilities. Consider splitting into focused modules.`,
      suggestion: "Identify distinct responsibilities and extract them into separate modules.",
      evidence: [`File has ${changed.lines} lines (threshold: ${LARGE_FILE_THRESHOLD})`],
      validatedBy: "heuristic"
    });
  }
  return findings;
}
7370
// Heuristically detects circular imports between a changed file and its
// context files: if a changed file imports module X (by relative path) and a
// context file that looks like X also imports something whose name matches a
// changed file, a circular-import finding is emitted.
// NOTE(review): matching is by file base name only, so false positives are
// possible when different directories contain same-named files.
function detectCircularImports(bundle) {
  const findings = [];
  const changedPaths = new Set(bundle.changedFiles.map((f) => f.path));
  for (const cf of bundle.changedFiles) {
    const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
    let match;
    // Relative import specifiers of this changed file, with ./ and ../ stripped.
    const imports = /* @__PURE__ */ new Set();
    while ((match = importRegex.exec(cf.content)) !== null) {
      const source = match[1];
      if (source.startsWith(".")) {
        imports.add(source.replace(/^\.\//, "").replace(/^\.\.\//, ""));
      }
    }
    for (const ctxFile of bundle.contextFiles) {
      // Only files pulled in as imports or graph dependencies can close a cycle.
      if (ctxFile.reason !== "import" && ctxFile.reason !== "graph-dependency") continue;
      const ctxImportRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
      let ctxMatch;
      while ((ctxMatch = ctxImportRegex.exec(ctxFile.content)) !== null) {
        const ctxSource = ctxMatch[1];
        if (ctxSource.startsWith(".")) {
          for (const changedPath of changedPaths) {
            // Compare by extension-less base name in both directions:
            // ctx file imports the changed file AND the changed file imports ctx.
            const baseName = changedPath.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, "");
            if (ctxSource.includes(baseName) && imports.has(ctxFile.path.replace(/.*\//, "").replace(/\.(ts|tsx|js|jsx)$/, ""))) {
              findings.push({
                id: makeFindingId("arch", cf.path, 1, `circular ${ctxFile.path}`),
                file: cf.path,
                lineRange: [1, 1],
                domain: "architecture",
                severity: "important",
                title: `Potential circular import between ${cf.path} and ${ctxFile.path}`,
                rationale: "Circular imports can cause runtime issues (undefined values at import time) and indicate tightly coupled modules that should be refactored.",
                suggestion: "Extract shared types/interfaces into a separate module that both files can import from.",
                evidence: [`${cf.path} imports from a module that also imports from ${cf.path}`],
                validatedBy: "heuristic"
              });
            }
          }
        }
      }
    }
  }
  return findings;
}
7413
// Entry point for the architecture agent: runs every architecture detector
// over the bundle and concatenates their findings in detector order.
function runArchitectureAgent(bundle) {
  const detectors = [detectLayerViolations, detectLargeFiles, detectCircularImports];
  return detectors.flatMap((detect) => detect(bundle));
}
7420
+
7421
+ // src/review/agents/index.ts
7422
// Registry mapping each review domain to its agent descriptor
// (display name, model tier, focus areas).
var AGENT_DESCRIPTORS = {
  compliance: COMPLIANCE_DESCRIPTOR,
  bug: BUG_DETECTION_DESCRIPTOR,
  security: SECURITY_DESCRIPTOR,
  architecture: ARCHITECTURE_DESCRIPTOR
};
7428
+
7429
+ // src/review/fan-out.ts
7430
// Registry mapping each review domain to the function that executes its
// agent over a context bundle. Keys must mirror AGENT_DESCRIPTORS.
var AGENT_RUNNERS = {
  compliance: runComplianceAgent,
  bug: runBugDetectionAgent,
  security: runSecurityAgent,
  architecture: runArchitectureAgent
};
7436
// Executes the review agent registered for the bundle's domain, timing the
// run. Returns { domain, findings, durationMs }.
// Throws an Error naming the domain when no runner is registered — previously
// an unknown domain crashed with an opaque "runner is not a function"
// TypeError deep inside Promise.all.
async function runAgent(bundle) {
  const start = Date.now();
  const runner = AGENT_RUNNERS[bundle.domain];
  if (typeof runner !== "function") {
    throw new Error(`No agent runner registered for domain "${bundle.domain}"`);
  }
  const findings = runner(bundle);
  const durationMs = Date.now() - start;
  return {
    domain: bundle.domain,
    findings,
    durationMs
  };
}
7447
// Runs all domain agents concurrently, one per context bundle, and resolves
// with their results in bundle order. Empty input short-circuits to [].
async function fanOutReview(options) {
  const { bundles } = options;
  if (bundles.length === 0) return [];
  return Promise.all(bundles.map((bundle) => runAgent(bundle)));
}
7453
+
7454
+ // src/review/validate-findings.ts
7455
+ import * as path8 from "path";
7456
// One-step severity downgrade applied to findings whose cross-file claims
// could not be corroborated by the graph or import-chain checks.
// "suggestion" is already the floor, so it maps to itself.
var DOWNGRADE_MAP = {
  critical: "important",
  important: "suggestion",
  suggestion: "suggestion"
};
7461
// Extracts "<file> affects <file>" claims from a finding's evidence lines.
// Returns one { from, to } pair per evidence line that matches (first match
// per line only).
function extractCrossFileRefs(finding) {
  const crossFilePattern = /([^\s]+\.(?:ts|tsx|js|jsx))\s+affects\s+([^\s]+\.(?:ts|tsx|js|jsx))/i;
  const refs = [];
  for (const evidenceLine of finding.evidence) {
    const hit = crossFilePattern.exec(evidenceLine);
    if (hit !== null) {
      refs.push({ from: hit[1], to: hit[2] });
    }
  }
  return refs;
}
7472
// Normalizes a finding path for comparison: makes it project-relative when it
// is absolute and inside projectRoot, strips a leading "./", and collapses
// separators via path.normalize.
function normalizePath(filePath, projectRoot) {
  let result = filePath;
  if (path8.isAbsolute(result)) {
    const rootWithSep = projectRoot.endsWith(path8.sep) ? projectRoot : `${projectRoot}${path8.sep}`;
    // Only strip the prefix when the path really lives under the project root.
    if (result.startsWith(rootWithSep)) {
      result = result.slice(rootWithSep.length);
    }
  }
  const withoutDotSlash = result.startsWith("./") ? result.slice(2) : result;
  return path8.normalize(withoutDotSlash);
}
7485
// Breadth-first walk of relative imports starting at fromFile, up to maxDepth
// hops, using the provided path -> content map. Extension-less specifiers are
// assumed to be ".ts". Returns the set of reachable files (start excluded).
function followImportChain(fromFile, fileContents, maxDepth = 2) {
  const seen = /* @__PURE__ */ new Set();
  const pending = [{ file: fromFile, depth: 0 }];
  while (pending.length > 0) {
    const { file, depth } = pending.shift();
    if (seen.has(file) || depth > maxDepth) continue;
    seen.add(file);
    const source = fileContents.get(file);
    if (!source) continue;
    const importRegex = /import\s+.*?from\s+['"]([^'"]+)['"]/g;
    for (const hit of source.matchAll(importRegex)) {
      const spec = hit[1];
      // Only relative specifiers are resolvable against the map.
      if (!spec.startsWith(".")) continue;
      let resolved = path8.join(path8.dirname(file), spec);
      if (!/\.(ts|tsx|js|jsx)$/.test(resolved)) {
        resolved += ".ts";
      }
      resolved = path8.normalize(resolved);
      if (!seen.has(resolved) && depth + 1 <= maxDepth) {
        pending.push({ file: resolved, depth: depth + 1 });
      }
    }
  }
  seen.delete(fromFile);
  return seen;
}
7513
// Filters and validates raw agent findings:
//  1. Drops findings whose file/lines are covered by the mechanical-check
//     exclusion set (checked relative, as-reported, and absolute).
//  2. Findings with no cross-file claims pass through unchanged.
//  3. Cross-file claims are verified against the dependency graph when
//     available; unverifiable-by-graph findings are DROPPED on that path.
//     If the graph itself throws, we fall through to the import-chain check.
//  4. Fallback: follow relative imports (depth 2) in fileContents; findings
//     whose claims can't be corroborated are kept but downgraded one severity
//     step via DOWNGRADE_MAP.
async function validateFindings(options) {
  const { findings, exclusionSet, graph, projectRoot, fileContents } = options;
  const validated = [];
  for (const finding of findings) {
    const normalizedFile = normalizePath(finding.file, projectRoot);
    if (exclusionSet.isExcluded(normalizedFile, finding.lineRange) || exclusionSet.isExcluded(finding.file, finding.lineRange)) {
      continue;
    }
    const absoluteFile = path8.isAbsolute(finding.file) ? finding.file : path8.join(projectRoot, finding.file);
    if (exclusionSet.isExcluded(absoluteFile, finding.lineRange)) {
      continue;
    }
    const crossFileRefs = extractCrossFileRefs(finding);
    if (crossFileRefs.length === 0) {
      validated.push({ ...finding });
      continue;
    }
    if (graph) {
      try {
        let allReachable = true;
        for (const ref of crossFileRefs) {
          const reachable = await graph.isReachable(ref.from, ref.to);
          if (!reachable) {
            allReachable = false;
            break;
          }
        }
        if (allReachable) {
          validated.push({ ...finding, validatedBy: "graph" });
        }
        // Note: when !allReachable the finding is intentionally dropped here.
        continue;
      } catch {
        // Graph query failed — fall through to the import-chain heuristic.
      }
    }
    {
      let chainValidated = false;
      if (fileContents) {
        for (const ref of crossFileRefs) {
          const normalizedFrom = normalizePath(ref.from, projectRoot);
          const reachable = followImportChain(normalizedFrom, fileContents, 2);
          const normalizedTo = normalizePath(ref.to, projectRoot);
          if (reachable.has(normalizedTo)) {
            chainValidated = true;
            break;
          }
        }
      }
      if (chainValidated) {
        validated.push({ ...finding, validatedBy: "heuristic" });
      } else {
        // Keep the finding but lower its severity one step.
        validated.push({
          ...finding,
          severity: DOWNGRADE_MAP[finding.severity],
          validatedBy: "heuristic"
        });
      }
    }
  }
  return validated;
}
7573
+
7574
+ // src/review/deduplicate-findings.ts
7575
// True when two inclusive [start, end] line ranges overlap or sit within
// `gap` lines of each other.
function rangesOverlap(a, b, gap) {
  const aEndsBeforeB = a[1] + gap < b[0];
  const bEndsBeforeA = b[1] + gap < a[0];
  return !(aEndsBeforeB || bEndsBeforeA);
}
7578
// Merges two overlapping findings on the same file into one, keeping the
// "best" of each field: highest severity, strongest validation source,
// longest rationale/suggestion/remediation, the union of evidence and
// references, and the covering line range. The higher-severity finding is
// "primary" and supplies id, domain, and title; the title is prefixed with
// the sorted list of contributing domains.
function mergeFindings(a, b) {
  const highestSeverity = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a.severity : b.severity;
  // Unknown validatedBy values rank 0 so a known source always wins.
  const highestValidatedBy = (VALIDATED_BY_RANK[a.validatedBy] ?? 0) >= (VALIDATED_BY_RANK[b.validatedBy] ?? 0) ? a.validatedBy : b.validatedBy;
  const longestRationale = a.rationale.length >= b.rationale.length ? a.rationale : b.rationale;
  const evidenceSet = /* @__PURE__ */ new Set([...a.evidence, ...b.evidence]);
  const lineRange = [
    Math.min(a.lineRange[0], b.lineRange[0]),
    Math.max(a.lineRange[1], b.lineRange[1])
  ];
  const domains = /* @__PURE__ */ new Set();
  domains.add(a.domain);
  domains.add(b.domain);
  const suggestion = a.suggestion && b.suggestion ? a.suggestion.length >= b.suggestion.length ? a.suggestion : b.suggestion : a.suggestion ?? b.suggestion;
  const primaryFinding = SEVERITY_RANK[a.severity] >= SEVERITY_RANK[b.severity] ? a : b;
  const domainList = [...domains].sort().join(", ");
  // Strip any "[...]" prefix from an earlier merge before re-prefixing.
  const cleanTitle = primaryFinding.title.replace(/^\[.*?\]\s*/, "");
  const title = `[${domainList}] ${cleanTitle}`;
  const merged = {
    id: primaryFinding.id,
    file: a.file,
    // same file for all merged findings
    lineRange,
    domain: primaryFinding.domain,
    severity: highestSeverity,
    title,
    rationale: longestRationale,
    evidence: [...evidenceSet],
    validatedBy: highestValidatedBy
  };
  if (suggestion !== void 0) {
    merged.suggestion = suggestion;
  }
  // Security metadata: prefer the primary finding's value, then either side.
  const cweId = primaryFinding.cweId ?? a.cweId ?? b.cweId;
  const owaspCategory = primaryFinding.owaspCategory ?? a.owaspCategory ?? b.owaspCategory;
  const confidence = primaryFinding.confidence ?? a.confidence ?? b.confidence;
  const remediation = a.remediation && b.remediation ? a.remediation.length >= b.remediation.length ? a.remediation : b.remediation : a.remediation ?? b.remediation;
  const mergedRefs = [.../* @__PURE__ */ new Set([...a.references ?? [], ...b.references ?? []])];
  if (cweId !== void 0) merged.cweId = cweId;
  if (owaspCategory !== void 0) merged.owaspCategory = owaspCategory;
  if (confidence !== void 0) merged.confidence = confidence;
  if (remediation !== void 0) merged.remediation = remediation;
  if (mergedRefs.length > 0) merged.references = mergedRefs;
  return merged;
}
7622
// Collapses near-duplicate findings: groups by file, sorts each group by
// start line, and greedily merges neighbors whose ranges overlap or sit
// within `lineGap` lines of each other (default 3). Cluster order within a
// file is preserved; cross-file order follows first-seen file order.
function deduplicateFindings(options) {
  const { findings, lineGap = 3 } = options;
  if (findings.length === 0) return [];
  const byFile = /* @__PURE__ */ new Map();
  for (const f of findings) {
    const existing = byFile.get(f.file);
    if (existing) {
      existing.push(f);
    } else {
      byFile.set(f.file, [f]);
    }
  }
  const result = [];
  for (const [, fileFindings] of byFile) {
    // Copy before sorting so caller-owned arrays are not mutated.
    const sorted = [...fileFindings].sort((a, b) => a.lineRange[0] - b.lineRange[0]);
    const clusters = [];
    let current = sorted[0];
    for (let i = 1; i < sorted.length; i++) {
      const next = sorted[i];
      if (rangesOverlap(current.lineRange, next.lineRange, lineGap)) {
        // Accumulate into the running merged finding.
        current = mergeFindings(current, next);
      } else {
        clusters.push(current);
        current = next;
      }
    }
    clusters.push(current);
    result.push(...clusters);
  }
  return result;
}
7653
+
7654
+ // src/review/eligibility-gate.ts
7655
// Decides whether a PR should be reviewed. Outside CI mode everything is
// eligible; in CI mode closed/merged/draft PRs, docs-only changes, and PRs
// already reviewed at the current head SHA are skipped with a reason.
function checkEligibility(pr, ciMode) {
  if (!ciMode) {
    return { eligible: true };
  }
  const ineligible = (reason) => ({ eligible: false, reason });
  if (pr.state === "closed") return ineligible("PR is closed");
  if (pr.state === "merged") return ineligible("PR is merged");
  if (pr.isDraft) return ineligible("PR is a draft");
  const docsOnly = pr.changedFiles.length > 0 && pr.changedFiles.every((f) => f.endsWith(".md"));
  if (docsOnly) return ineligible("Trivial change: documentation only");
  const alreadyReviewed = pr.priorReviews.find((r) => r.headSha === pr.headSha);
  if (alreadyReviewed) return ineligible(`Already reviewed at ${alreadyReviewed.headSha}`);
  return { eligible: true };
}
7677
+
7678
+ // src/review/model-tier-resolver.ts
7679
// Default model name per provider for each abstract tier (fast / standard /
// strong). Used by resolveModelTier when the user config has no override.
var DEFAULT_PROVIDER_TIERS = {
  claude: {
    fast: "haiku",
    standard: "sonnet",
    strong: "opus"
  },
  openai: {
    fast: "gpt-4o-mini",
    standard: "gpt-4o",
    strong: "o1"
  },
  gemini: {
    fast: "gemini-flash",
    standard: "gemini-pro",
    strong: "gemini-ultra"
  }
};
7696
// Resolves an abstract model tier to a concrete model name: an explicit
// config override wins, then the provider's default table, else undefined.
function resolveModelTier(tier, config, provider) {
  const fromConfig = config?.[tier];
  if (fromConfig !== undefined) {
    return fromConfig;
  }
  if (!provider) {
    return undefined;
  }
  // NOTE: an unknown provider key throws here, matching the original lookup.
  return DEFAULT_PROVIDER_TIERS[provider][tier];
}
7710
+
7711
+ // src/review/output/assessment.ts
7712
// Maps the worst severity among findings to an overall review assessment:
// critical -> request-changes, important -> comment, suggestion/none -> approve.
function determineAssessment(findings) {
  if (findings.length === 0) return "approve";
  let worst = "suggestion";
  for (const finding of findings) {
    if (SEVERITY_RANK[finding.severity] > SEVERITY_RANK[worst]) {
      worst = finding.severity;
    }
  }
  const assessmentBySeverity = {
    critical: "request-changes",
    important: "comment",
    suggestion: "approve"
  };
  return assessmentBySeverity[worst];
}
7729
// Process exit code for an assessment: 1 only when changes are requested.
function getExitCode(assessment) {
  if (assessment === "request-changes") {
    return 1;
  }
  return 0;
}
7732
+
7733
+ // src/review/output/format-terminal.ts
7734
// Renders one finding as an indented multi-line terminal block:
// domain/title, location, rationale, and (when present) the suggestion.
function formatFindingBlock(finding) {
  const location = `${finding.file}:L${finding.lineRange[0]}-${finding.lineRange[1]}`;
  const parts = [
    ` [${finding.domain}] ${finding.title}`,
    ` Location: ${location}`,
    ` Rationale: ${finding.rationale}`
  ];
  if (finding.suggestion) {
    parts.push(` Suggestion: ${finding.suggestion}`);
  }
  return parts.join("\n");
}
7745
// Builds the full terminal review report: a Strengths section, Issues grouped
// by severity (via formatFindingBlock), the overall Assessment heading, and a
// one-line summary with per-severity counts.
// Note: several template literals below intentionally contain embedded
// newlines to produce blank lines after headings.
function formatTerminalOutput(options) {
  const { findings, strengths } = options;
  const sections = [];
  sections.push("## Strengths\n");
  if (strengths.length === 0) {
    sections.push(" No specific strengths noted.\n");
  } else {
    for (const s of strengths) {
      const prefix = s.file ? `${s.file}: ` : "";
      sections.push(` + ${prefix}${s.description}`);
    }
    sections.push("");
  }
  sections.push("## Issues\n");
  let hasIssues = false;
  // SEVERITY_ORDER drives both grouping and display order (worst first).
  for (const severity of SEVERITY_ORDER) {
    const group = findings.filter((f) => f.severity === severity);
    if (group.length === 0) continue;
    hasIssues = true;
    sections.push(`### ${SEVERITY_LABELS[severity]} (${group.length})
`);
    for (const finding of group) {
      sections.push(formatFindingBlock(finding));
      sections.push("");
    }
  }
  if (!hasIssues) {
    sections.push(" No issues found.\n");
  }
  const assessment = determineAssessment(findings);
  const assessmentLabel = assessment === "approve" ? "Approve" : assessment === "comment" ? "Comment" : "Request Changes";
  sections.push(`## Assessment: ${assessmentLabel}
`);
  const issueCount = findings.length;
  const criticalCount = findings.filter((f) => f.severity === "critical").length;
  const importantCount = findings.filter((f) => f.severity === "important").length;
  const suggestionCount = findings.filter((f) => f.severity === "suggestion").length;
  if (issueCount === 0) {
    sections.push(" No issues found. The changes look good.");
  } else {
    const parts = [];
    if (criticalCount > 0) parts.push(`${criticalCount} critical`);
    if (importantCount > 0) parts.push(`${importantCount} important`);
    if (suggestionCount > 0) parts.push(`${suggestionCount} suggestion(s)`);
    sections.push(` Found ${issueCount} issue(s): ${parts.join(", ")}.`);
  }
  return sections.join("\n");
}
7793
+
7794
+ // src/review/output/format-github.ts
7795
// Suggestions shorter than this many lines render as inline GitHub
// ```suggestion blocks; longer ones fall back to prose (see formatGitHubComment).
var SMALL_SUGGESTION_LINE_LIMIT = 10;
7796
// Escapes angle brackets so finding text cannot inject raw HTML into the
// rendered GitHub markdown.
function sanitizeMarkdown(text) {
  const noOpenTags = text.replaceAll("<", "&lt;");
  return noOpenTags.replaceAll(">", "&gt;");
}
7799
// True when a suggestion is present and short enough to render as an inline
// GitHub suggestion block (fewer than SMALL_SUGGESTION_LINE_LIMIT lines).
function isSmallSuggestion(suggestion) {
  if (!suggestion) return false;
  let newlineCount = 0;
  for (const ch of suggestion) {
    if (ch === "\n") newlineCount += 1;
  }
  return newlineCount + 1 < SMALL_SUGGESTION_LINE_LIMIT;
}
7804
// Converts a finding into a GitHub review comment payload anchored on the end
// line of the finding's range. Short suggestions become a ```suggestion block
// (left unsanitized on purpose — it must be literal replacement code); longer
// ones are rendered as sanitized prose.
function formatGitHubComment(finding) {
  const severityBadge = `**${finding.severity.toUpperCase()}**`;
  const header = `${severityBadge} [${finding.domain}] ${sanitizeMarkdown(finding.title)}`;
  let body;
  if (isSmallSuggestion(finding.suggestion)) {
    body = [
      header,
      "",
      sanitizeMarkdown(finding.rationale),
      "",
      "```suggestion",
      finding.suggestion,
      "```"
    ].join("\n");
  } else {
    const parts = [header, "", `**Rationale:** ${sanitizeMarkdown(finding.rationale)}`];
    if (finding.suggestion) {
      parts.push("", `**Suggested approach:** ${sanitizeMarkdown(finding.suggestion)}`);
    }
    body = parts.join("\n");
  }
  return {
    path: finding.file,
    line: finding.lineRange[1],
    // Comment on end line of range
    side: "RIGHT",
    body
  };
}
7833
// Builds the markdown body for the top-level GitHub review summary:
// Strengths, Issues grouped by severity (bulleted, with file:line locations),
// and the final Assessment heading. All free text is sanitized.
function formatGitHubSummary(options) {
  const { findings, strengths } = options;
  const sections = [];
  sections.push("## Strengths\n");
  if (strengths.length === 0) {
    sections.push("No specific strengths noted.\n");
  } else {
    for (const s of strengths) {
      const prefix = s.file ? `**${s.file}:** ` : "";
      sections.push(`- ${prefix}${sanitizeMarkdown(s.description)}`);
    }
    sections.push("");
  }
  sections.push("## Issues\n");
  let hasIssues = false;
  for (const severity of SEVERITY_ORDER) {
    const group = findings.filter((f) => f.severity === severity);
    if (group.length === 0) continue;
    hasIssues = true;
    // Template below intentionally embeds a newline after the heading.
    sections.push(`### ${SEVERITY_LABELS[severity]} (${group.length})
`);
    for (const finding of group) {
      const location = `\`${finding.file}:L${finding.lineRange[0]}-${finding.lineRange[1]}\``;
      sections.push(`- **${sanitizeMarkdown(finding.title)}** at ${location}`);
      sections.push(` ${sanitizeMarkdown(finding.rationale)}`);
      sections.push("");
    }
  }
  if (!hasIssues) {
    sections.push("No issues found.\n");
  }
  const assessment = determineAssessment(findings);
  const assessmentLabel = assessment === "approve" ? "Approve" : assessment === "comment" ? "Comment" : "Request Changes";
  sections.push(`## Assessment: ${assessmentLabel}`);
  return sections.join("\n");
}
7869
+
7870
+ // src/review/pipeline-orchestrator.ts
7871
// Top-level review pipeline orchestrator. Stages, in order:
//  1. CI eligibility gate — skip closed/merged/draft/docs-only/already-reviewed PRs.
//  2. Mechanical checks — build an exclusion set from their findings; a
//     stopPipeline result short-circuits with "request-changes" (exit 1).
//     Any mechanical failure/throw degrades to an empty exclusion set.
//  3. Context scoping — per-domain bundles; on error, fall back to empty
//     bundles for all four domains.
//  4. Agent fan-out, validation (exclusions + cross-file checks),
//     deduplication, assessment, terminal/GitHub formatting.
// Returns the full pipeline result including exitCode.
async function runReviewPipeline(options) {
  const {
    projectRoot,
    diff,
    commitMessage,
    flags,
    graph,
    prMetadata,
    conventionFiles,
    checkDepsOutput,
    config = {},
    commitHistory
  } = options;
  if (flags.ci && prMetadata) {
    const eligibility = checkEligibility(prMetadata, true);
    if (!eligibility.eligible) {
      return {
        skipped: true,
        ...eligibility.reason != null ? { skipReason: eligibility.reason } : {},
        stoppedByMechanical: false,
        findings: [],
        strengths: [],
        terminalOutput: `Review skipped: ${eligibility.reason ?? "ineligible"}`,
        githubComments: [],
        exitCode: 0
      };
    }
  }
  let mechanicalResult;
  let exclusionSet;
  if (flags.noMechanical) {
    exclusionSet = buildExclusionSet([]);
  } else {
    try {
      const mechResult = await runMechanicalChecks({
        projectRoot,
        config,
        changedFiles: diff.changedFiles
      });
      if (mechResult.ok) {
        mechanicalResult = mechResult.value;
        exclusionSet = buildExclusionSet(mechResult.value.findings);
        if (mechResult.value.stopPipeline) {
          // Mechanical errors block AI review entirely: render them and exit 1.
          const mechFindings = mechResult.value.findings.filter((f) => f.severity === "error").map((f) => ` x ${f.tool}: ${f.file}${f.line ? `:${f.line}` : ""} - ${f.message}`).join("\n");
          const terminalOutput2 = [
            "## Strengths\n",
            " No AI review performed (mechanical checks failed).\n",
            "## Issues\n",
            "### Critical (mechanical)\n",
            mechFindings,
            "\n## Assessment: Request Changes\n",
            " Mechanical checks must pass before AI review."
          ].join("\n");
          return {
            skipped: false,
            stoppedByMechanical: true,
            assessment: "request-changes",
            findings: [],
            strengths: [],
            terminalOutput: terminalOutput2,
            githubComments: [],
            exitCode: 1,
            mechanicalResult
          };
        }
      } else {
        exclusionSet = buildExclusionSet([]);
      }
    } catch {
      // Best-effort: a crashing mechanical stage must not block the AI review.
      exclusionSet = buildExclusionSet([]);
    }
  }
  let contextBundles;
  try {
    contextBundles = await scopeContext({
      projectRoot,
      diff,
      commitMessage,
      ...graph != null ? { graph } : {},
      ...conventionFiles != null ? { conventionFiles } : {},
      ...checkDepsOutput != null ? { checkDepsOutput } : {},
      ...commitHistory != null ? { commitHistory } : {}
    });
  } catch {
    // Fallback: minimal empty bundles so every agent still runs.
    contextBundles = ["compliance", "bug", "security", "architecture"].map((domain) => ({
      domain,
      changeType: "feature",
      changedFiles: [],
      contextFiles: [],
      commitHistory: [],
      diffLines: diff.totalDiffLines,
      contextLines: 0
    }));
  }
  const agentResults = await fanOutReview({ bundles: contextBundles });
  const rawFindings = agentResults.flatMap((r) => r.findings);
  const fileContents = /* @__PURE__ */ new Map();
  for (const [file, content] of diff.fileDiffs) {
    fileContents.set(file, content);
  }
  const validatedFindings = await validateFindings({
    findings: rawFindings,
    exclusionSet,
    ...graph != null ? { graph } : {},
    projectRoot,
    fileContents
  });
  const dedupedFindings = deduplicateFindings({ findings: validatedFindings });
  const strengths = [];
  const assessment = determineAssessment(dedupedFindings);
  const exitCode = getExitCode(assessment);
  const terminalOutput = formatTerminalOutput({
    findings: dedupedFindings,
    strengths
  });
  let githubComments = [];
  if (flags.comment) {
    githubComments = dedupedFindings.map((f) => formatGitHubComment(f));
  }
  return {
    skipped: false,
    stoppedByMechanical: false,
    assessment,
    findings: dedupedFindings,
    strengths,
    terminalOutput,
    githubComments,
    exitCode,
    ...mechanicalResult !== void 0 ? { mechanicalResult } : {}
  };
}
8002
+
8003
+ // src/roadmap/parse.ts
8004
+ import { Ok as Ok2, Err as Err2 } from "@harness-engineering/types";
8005
// Roadmap feature statuses recognized by the parser; any other value in a
// "- **Status:**" field is rejected (see the status check in parseFeatureFields).
var VALID_STATUSES = /* @__PURE__ */ new Set([
  "backlog",
  "planned",
  "in-progress",
  "done",
  "blocked"
]);
// U+2014 em dash: the sentinel the roadmap markdown uses for "no value".
var EM_DASH = "\u2014";
8013
/**
 * Parse a roadmap markdown document into { frontmatter, milestones }.
 * Returns an Err result when the frontmatter fence is missing or any
 * section fails to parse; otherwise an Ok result with the parsed roadmap.
 */
function parseRoadmap(markdown) {
  // The document must open with a `---` ... `---` YAML frontmatter fence.
  const fence = markdown.match(/^---\n([\s\S]*?)\n---/);
  if (!fence) {
    return Err2(new Error("Missing or malformed YAML frontmatter"));
  }
  const frontmatter = parseFrontmatter(fence[1]);
  if (!frontmatter.ok) return frontmatter;
  // Everything after the closing fence is the milestone body.
  const milestones = parseMilestones(markdown.slice(fence[0].length));
  if (!milestones.ok) return milestones;
  return Ok2({
    frontmatter: frontmatter.value,
    milestones: milestones.value
  });
}
8028
/**
 * Parse the raw frontmatter text (between the `---` fences) into an object.
 * Minimal "key: value" parser — one entry per line, no nesting. All four
 * fields (project, version, last_synced, last_manual_edit) are required,
 * and version must parse as a base-10 integer.
 */
function parseFrontmatter(raw) {
  const fields = new Map();
  for (const line of raw.split("\n")) {
    const sep = line.indexOf(":");
    if (sep === -1) continue;
    fields.set(line.slice(0, sep).trim(), line.slice(sep + 1).trim());
  }
  const project = fields.get("project");
  const versionStr = fields.get("version");
  const lastSynced = fields.get("last_synced");
  const lastManualEdit = fields.get("last_manual_edit");
  const missingAny = !project || !versionStr || !lastSynced || !lastManualEdit;
  if (missingAny) {
    return Err2(
      new Error(
        "Frontmatter missing required fields: project, version, last_synced, last_manual_edit"
      )
    );
  }
  const version = parseInt(versionStr, 10);
  if (isNaN(version)) {
    return Err2(new Error("Frontmatter version must be a number"));
  }
  return Ok2({ project, version, lastSynced, lastManualEdit });
}
8055
/**
 * Split the roadmap body into milestone sections (one per H2 heading) and
 * parse each section's features. "## Backlog" is the special backlog
 * section; other headings have their "Milestone: " prefix stripped.
 */
function parseMilestones(body) {
  // Locate every H2 heading up front; each opens a milestone section.
  const headings = [];
  const h2 = /^## (.+)$/gm;
  for (let m = h2.exec(body); m !== null; m = h2.exec(body)) {
    headings.push({ heading: m[1], startIndex: m.index });
  }
  const milestones = [];
  for (let i = 0; i < headings.length; i++) {
    const { heading, startIndex } = headings[i];
    // A section runs until the next H2 heading (or the end of the body).
    const end = i + 1 < headings.length ? headings[i + 1].startIndex : body.length;
    // startIndex + heading.length + 4 skips "## ", the title, and the newline.
    const section = body.slice(startIndex + heading.length + 4, end);
    const isBacklog = heading === "Backlog";
    const name = isBacklog ? "Backlog" : heading.replace(/^Milestone:\s*/, "");
    const features = parseFeatures(section);
    if (!features.ok) return features;
    milestones.push({ name, isBacklog, features: features.value });
  }
  return Ok2(milestones);
}
8079
/**
 * Parse every "### Feature: <name>" entry inside one milestone section.
 * Returns Ok with the feature list, or the first field-parse Err.
 */
function parseFeatures(sectionBody) {
  const headings = [];
  const h3 = /^### Feature: (.+)$/gm;
  for (let m = h3.exec(sectionBody); m !== null; m = h3.exec(sectionBody)) {
    headings.push({ name: m[1], startIndex: m.index });
  }
  const features = [];
  for (let i = 0; i < headings.length; i++) {
    const { name, startIndex } = headings[i];
    // A feature's field list runs until the next feature heading (or EOF).
    const end = i + 1 < headings.length ? headings[i + 1].startIndex : sectionBody.length;
    const fieldsStart = startIndex + `### Feature: ${name}`.length;
    const parsed = parseFeatureFields(name, sectionBody.slice(fieldsStart, end));
    if (!parsed.ok) return parsed;
    features.push(parsed.value);
  }
  return Ok2(features);
}
8100
/**
 * Parse one feature's "- **Key:** value" bullet fields. Status is required
 * and must be a member of VALID_STATUSES; Spec/Plans/Blocked by treat an
 * em dash as "no value"; Summary defaults to the empty string.
 */
function parseFeatureFields(name, body) {
  const fields = new Map();
  const bullet = /^- \*\*(.+?):\*\* (.+)$/gm;
  for (let m = bullet.exec(body); m !== null; m = bullet.exec(body)) {
    fields.set(m[1], m[2]);
  }
  const statusRaw = fields.get("Status");
  if (!statusRaw || !VALID_STATUSES.has(statusRaw)) {
    return Err2(
      new Error(
        `Feature "${name}" has invalid status: "${statusRaw ?? "(missing)"}". Valid statuses: ${[...VALID_STATUSES].join(", ")}`
      )
    );
  }
  const status = statusRaw;
  // Optional fields fall back to the em dash sentinel, then to null.
  const optional = (key) => {
    const raw = fields.get(key) ?? EM_DASH;
    return raw === EM_DASH ? null : raw;
  };
  const spec = optional("Spec");
  const plansRaw = optional("Plans");
  const plans = plansRaw === null ? [] : plansRaw.split(",").map((p) => p.trim());
  const blockedRaw = optional("Blocked by");
  const blockedBy = blockedRaw === null ? [] : blockedRaw.split(",").map((b) => b.trim());
  const summary = fields.get("Summary") ?? "";
  return Ok2({ name, status, spec, plans, blockedBy, summary });
}
8125
+
8126
// src/roadmap/serialize.ts
var EM_DASH2 = "\u2014";
/**
 * Render a parsed roadmap back to its canonical markdown form:
 * YAML frontmatter, a "# Project Roadmap" title, then one section per
 * milestone with a blank line before each heading and each feature.
 * The output ends with a trailing newline.
 */
function serializeRoadmap(roadmap) {
  const { project, version, lastSynced, lastManualEdit } = roadmap.frontmatter;
  const out = [
    "---",
    `project: ${project}`,
    `version: ${version}`,
    `last_synced: ${lastSynced}`,
    `last_manual_edit: ${lastManualEdit}`,
    "---",
    "",
    "# Project Roadmap"
  ];
  for (const milestone of roadmap.milestones) {
    out.push("", serializeMilestoneHeading(milestone));
    for (const feature of milestone.features) {
      out.push("", ...serializeFeature(feature));
    }
  }
  out.push("");
  return out.join("\n");
}
// The backlog keeps a fixed heading; real milestones get a "Milestone:" prefix.
function serializeMilestoneHeading(milestone) {
  if (milestone.isBacklog) return "## Backlog";
  return `## Milestone: ${milestone.name}`;
}
// One bullet per field; empty optional collections collapse to an em dash.
function serializeFeature(feature) {
  const lines = [`### Feature: ${feature.name}`];
  lines.push(`- **Status:** ${feature.status}`);
  lines.push(`- **Spec:** ${feature.spec ?? EM_DASH2}`);
  lines.push(`- **Plans:** ${feature.plans.length > 0 ? feature.plans.join(", ") : EM_DASH2}`);
  lines.push(`- **Blocked by:** ${feature.blockedBy.length > 0 ? feature.blockedBy.join(", ") : EM_DASH2}`);
  lines.push(`- **Summary:** ${feature.summary}`);
  return lines;
}
8165
+
8166
+ // src/roadmap/sync.ts
8167
+ import * as fs7 from "fs";
8168
+ import * as path9 from "path";
8169
+ import { Ok as Ok3 } from "@harness-engineering/types";
8170
// Infer a roadmap status for `feature` from on-disk harness state.
// Returns "blocked" | "done" | "in-progress", or null when no inference
// can be made (the caller then keeps the feature's existing status).
function inferStatus(feature, projectPath, allFeatures) {
  if (feature.blockedBy.length > 0) {
    // "blocked" wins over everything else: any blocker that is missing
    // from the roadmap, or not yet "done", keeps this feature blocked.
    // Blocker names are matched case-insensitively.
    const blockerNotDone = feature.blockedBy.some((blockerName) => {
      const blocker = allFeatures.find((f) => f.name.toLowerCase() === blockerName.toLowerCase());
      return !blocker || blocker.status !== "done";
    });
    if (blockerNotDone) return "blocked";
  }
  // Without linked plans there is nothing to measure progress against.
  if (feature.plans.length === 0) return null;
  const allTaskStatuses = [];
  // Only consult the root state file when at most one feature has plans;
  // with several planned features the root progress map would be ambiguous.
  const featuresWithPlans = allFeatures.filter((f) => f.plans.length > 0);
  const useRootState = featuresWithPlans.length <= 1;
  if (useRootState) {
    const rootStatePath = path9.join(projectPath, ".harness", "state.json");
    if (fs7.existsSync(rootStatePath)) {
      try {
        const raw = fs7.readFileSync(rootStatePath, "utf-8");
        const state = JSON.parse(raw);
        if (state.progress) {
          // progress appears to map task ids -> status strings — the
          // values are folded in unvalidated (TODO confirm schema).
          for (const status of Object.values(state.progress)) {
            allTaskStatuses.push(status);
          }
        }
      } catch {
        // Best-effort: a missing/corrupt state file is silently ignored.
      }
    }
  }
  // Also fold in autopilot phase states from every session directory.
  const sessionsDir = path9.join(projectPath, ".harness", "sessions");
  if (fs7.existsSync(sessionsDir)) {
    try {
      const sessionDirs = fs7.readdirSync(sessionsDir, { withFileTypes: true });
      for (const entry of sessionDirs) {
        if (!entry.isDirectory()) continue;
        const autopilotPath = path9.join(sessionsDir, entry.name, "autopilot-state.json");
        if (!fs7.existsSync(autopilotPath)) continue;
        try {
          const raw = fs7.readFileSync(autopilotPath, "utf-8");
          const autopilot = JSON.parse(raw);
          if (!autopilot.phases) continue;
          // A phase belongs to this feature when its planPath matches one
          // of the feature's plan entries exactly or by path suffix.
          const linkedPhases = autopilot.phases.filter(
            (phase) => phase.planPath ? feature.plans.some((p) => p === phase.planPath || phase.planPath.endsWith(p)) : false
          );
          if (linkedPhases.length > 0) {
            for (const phase of linkedPhases) {
              if (phase.status === "complete") {
                allTaskStatuses.push("complete");
              } else if (phase.status === "pending") {
                allTaskStatuses.push("pending");
              } else {
                // Any other phase status counts as work having started.
                allTaskStatuses.push("in_progress");
              }
            }
          }
        } catch {
          // Skip sessions with unreadable autopilot state.
        }
      }
    } catch {
      // Sessions directory unreadable: ignore and fall through.
    }
  }
  if (allTaskStatuses.length === 0) return null;
  // All tasks complete -> done; any started -> in-progress; else no change.
  const allComplete = allTaskStatuses.every((s) => s === "complete");
  if (allComplete) return "done";
  const anyStarted = allTaskStatuses.some((s) => s === "in_progress" || s === "complete");
  if (anyStarted) return "in-progress";
  return null;
}
8236
/**
 * Compute status changes for every feature in the roadmap by comparing
 * each feature's current status against the status inferred from on-disk
 * state. When the roadmap was manually edited after the last sync,
 * inference is skipped entirely unless forceSync is set. Always Ok.
 */
function syncRoadmap(options) {
  const { projectPath, roadmap, forceSync } = options;
  const manualEditIsNewer = new Date(roadmap.frontmatter.lastManualEdit) > new Date(roadmap.frontmatter.lastSynced);
  const allFeatures = roadmap.milestones.flatMap((m) => m.features);
  const changes = [];
  if (!(manualEditIsNewer && !forceSync)) {
    for (const feature of allFeatures) {
      const inferred = inferStatus(feature, projectPath, allFeatures);
      if (inferred === null || inferred === feature.status) continue;
      changes.push({ feature: feature.name, from: feature.status, to: inferred });
    }
  }
  return Ok3(changes);
}
8255
+
8256
+ // src/interaction/types.ts
8257
+ import { z as z5 } from "zod";
8258
// Zod schemas for agent-emitted interaction events.
var InteractionTypeSchema = z5.enum(["question", "confirmation", "transition"]);
// A question for the user, optionally multiple-choice with a default answer.
var QuestionSchema = z5.object({
  text: z5.string(),
  options: z5.array(z5.string()).optional(),
  default: z5.string().optional()
});
// A yes/no confirmation prompt plus the context justifying it.
var ConfirmationSchema = z5.object({
  text: z5.string(),
  context: z5.string()
});
// A phase-transition announcement: what finished, what is suggested next,
// the produced artifacts, and whether the user must confirm the move.
var TransitionSchema = z5.object({
  completedPhase: z5.string(),
  suggestedNext: z5.string(),
  reason: z5.string(),
  artifacts: z5.array(z5.string()),
  requiresConfirmation: z5.boolean(),
  summary: z5.string()
});
// Input accepted by the emit-interaction API. All three payloads are
// optional; presumably the one matching `type` is expected, but that
// pairing is not enforced by this schema — TODO confirm with callers.
var EmitInteractionInputSchema = z5.object({
  path: z5.string(),
  type: InteractionTypeSchema,
  stream: z5.string().optional(),
  question: QuestionSchema.optional(),
  confirmation: ConfirmationSchema.optional(),
  transition: TransitionSchema.optional()
});
8284
+
8285
+ // src/update-checker.ts
8286
+ import * as fs8 from "fs";
8287
+ import * as path10 from "path";
8288
+ import * as os from "os";
8289
+ import { spawn } from "child_process";
8290
// Absolute path of the cached update-check state file:
// <home>/.harness/update-check.json
function getStatePath() {
  const home = os.homedir();
  return path10.join(home, ".harness", "update-check.json");
}
8293
// Update checks are enabled unless opted out via the
// HARNESS_NO_UPDATE_CHECK=1 env var or a configured interval of 0.
function isUpdateCheckEnabled(configInterval) {
  const envOptOut = process.env["HARNESS_NO_UPDATE_CHECK"] === "1";
  const configOptOut = configInterval === 0;
  return !envOptOut && !configOptOut;
}
8298
// True when no prior check state exists or the check interval has elapsed
// since the last recorded check time.
function shouldRunCheck(state, intervalMs) {
  if (state === null) return true;
  const nextDue = state.lastCheckTime + intervalMs;
  return nextDue <= Date.now();
}
8302
// Load and validate the persisted update-check state.
// Returns null on any failure (missing file, bad JSON, wrong shape) so
// callers treat it as "no check has ever run".
function readCheckState() {
  try {
    const parsed = JSON.parse(fs8.readFileSync(getStatePath(), "utf-8"));
    const hasShape = typeof parsed === "object" && parsed !== null && "lastCheckTime" in parsed && typeof parsed.lastCheckTime === "number" && "currentVersion" in parsed && typeof parsed.currentVersion === "string";
    if (!hasShape) return null;
    const state = parsed;
    return {
      lastCheckTime: state.lastCheckTime,
      // latestVersion is optional in the file; normalize non-strings to null.
      latestVersion: typeof state.latestVersion === "string" ? state.latestVersion : null,
      currentVersion: state.currentVersion
    };
  } catch {
    return null;
  }
}
8319
// Spawn a detached child process that queries npm for the latest
// @harness-engineering/cli version and writes the result to the state
// file via a temp-file + rename (atomic on the same filesystem).
// Fire-and-forget: the parent never waits, and all failures — in the
// child script and in spawn() itself — are swallowed.
function spawnBackgroundCheck(currentVersion) {
  const statePath = getStatePath();
  const stateDir = path10.dirname(statePath);
  // Inline script run under `node -e`; paths and the current version are
  // baked in with JSON.stringify so they arrive safely quoted.
  const script = `
  const { execSync } = require('child_process');
  const fs = require('fs');
  const path = require('path');
  const crypto = require('crypto');
  try {
    const latest = execSync('npm view @harness-engineering/cli dist-tags.latest', {
      encoding: 'utf-8',
      timeout: 15000,
      stdio: ['ignore', 'pipe', 'ignore'],
    }).trim();
    const stateDir = ${JSON.stringify(stateDir)};
    const statePath = ${JSON.stringify(statePath)};
    fs.mkdirSync(stateDir, { recursive: true });
    const tmpFile = path.join(stateDir, '.update-check-' + crypto.randomBytes(4).toString('hex') + '.tmp');
    fs.writeFileSync(tmpFile, JSON.stringify({
      lastCheckTime: Date.now(),
      latestVersion: latest || null,
      currentVersion: ${JSON.stringify(currentVersion)},
    }), { mode: 0o644 });
    fs.renameSync(tmpFile, statePath);
  } catch (_) {}
  `.trim();
  try {
    const child = spawn(process.execPath, ["-e", script], {
      detached: true,
      stdio: "ignore"
    });
    // unref() lets the parent process exit without waiting for the child.
    child.unref();
  } catch {
    // spawn failure is non-fatal; the update check is purely opportunistic.
  }
}
8354
// Compare two dotted version strings on major.minor.patch.
// Returns 1 if a > b, -1 if a < b, 0 otherwise. Missing components are
// treated as 0 (so "1.0" equals "1.0.0"); non-numeric components produce
// NaN and fall through as "equal", matching the original behavior.
function compareVersions(a, b) {
  const partsA = a.split(".").map(Number);
  const partsB = b.split(".").map(Number);
  for (const i of [0, 1, 2]) {
    const diff = (partsA[i] ?? 0) - (partsB[i] ?? 0);
    if (diff > 0) return 1;
    if (diff < 0) return -1;
  }
  return 0;
}
8365
// Human-readable upgrade hint, or null when the cached latest version is
// absent or not strictly newer than the running version.
function getUpdateNotification(currentVersion) {
  const state = readCheckState();
  if (!state || !state.latestVersion) return null;
  const isNewer = compareVersions(state.latestVersion, currentVersion) > 0;
  if (!isNewer) return null;
  return `Update available: v${currentVersion} -> v${state.latestVersion}
  Run "harness update" to upgrade.`;
}
8373
+
4342
8374
  // src/index.ts
4343
- var VERSION = "0.6.0";
8375
+ var VERSION = "0.8.0";
4344
8376
  export {
8377
+ AGENT_DESCRIPTORS,
8378
+ ARCHITECTURE_DESCRIPTOR,
4345
8379
  AgentActionEmitter,
8380
+ BUG_DETECTION_DESCRIPTOR,
8381
+ BaselineManager,
8382
+ BenchmarkRunner,
8383
+ COMPLIANCE_DESCRIPTOR,
4346
8384
  ChecklistBuilder,
8385
+ ConfirmationSchema,
4347
8386
  ConsoleSink,
8387
+ CriticalPathResolver,
8388
+ DEFAULT_PROVIDER_TIERS,
8389
+ DEFAULT_SECURITY_CONFIG,
4348
8390
  DEFAULT_STATE,
8391
+ DEFAULT_STREAM_INDEX,
8392
+ EmitInteractionInputSchema,
4349
8393
  EntropyAnalyzer,
4350
8394
  EntropyConfigSchema,
8395
+ ExclusionSet,
4351
8396
  FailureEntrySchema,
4352
8397
  FileSink,
4353
8398
  GateConfigSchema,
4354
8399
  GateResultSchema,
4355
8400
  HandoffSchema,
4356
8401
  HarnessStateSchema,
8402
+ InteractionTypeSchema,
4357
8403
  NoOpExecutor,
4358
8404
  NoOpSink,
4359
8405
  NoOpTelemetryAdapter,
4360
8406
  PatternConfigSchema,
8407
+ QuestionSchema,
4361
8408
  REQUIRED_SECTIONS,
8409
+ RegressionDetector,
8410
+ RuleRegistry,
8411
+ SECURITY_DESCRIPTOR,
8412
+ SecurityConfigSchema,
8413
+ SecurityScanner,
8414
+ StreamIndexSchema,
8415
+ StreamInfoSchema,
8416
+ TransitionSchema,
4362
8417
  TypeScriptParser,
4363
8418
  VERSION,
4364
8419
  analyzeDiff,
4365
8420
  appendFailure,
4366
8421
  appendLearning,
4367
8422
  applyFixes,
8423
+ applyHotspotDowngrade,
4368
8424
  archiveFailures,
8425
+ archiveStream,
4369
8426
  buildDependencyGraph,
8427
+ buildExclusionSet,
4370
8428
  buildSnapshot,
4371
8429
  checkDocCoverage,
8430
+ checkEligibility,
8431
+ classifyFinding,
4372
8432
  configureFeedback,
4373
8433
  contextBudget,
4374
8434
  contextFilter,
4375
8435
  createBoundaryValidator,
8436
+ createCommentedCodeFixes,
4376
8437
  createError,
4377
8438
  createFixes,
8439
+ createForbiddenImportFixes,
8440
+ createOrphanedDepFixes,
4378
8441
  createParseError,
4379
8442
  createSelfReview,
8443
+ createStream,
8444
+ cryptoRules,
8445
+ deduplicateCleanupFindings,
8446
+ deduplicateFindings,
4380
8447
  defineLayer,
8448
+ deserializationRules,
8449
+ detectChangeType,
4381
8450
  detectCircularDeps,
4382
8451
  detectCircularDepsInFiles,
8452
+ detectComplexityViolations,
8453
+ detectCouplingViolations,
4383
8454
  detectDeadCode,
4384
8455
  detectDocDrift,
4385
8456
  detectPatternViolations,
8457
+ detectSizeBudgetViolations,
8458
+ detectStack,
8459
+ determineAssessment,
4386
8460
  executeWorkflow,
8461
+ expressRules,
4387
8462
  extractMarkdownLinks,
4388
8463
  extractSections,
8464
+ fanOutReview,
8465
+ formatFindingBlock,
8466
+ formatGitHubComment,
8467
+ formatGitHubSummary,
8468
+ formatTerminalOutput,
4389
8469
  generateAgentsMap,
4390
8470
  generateSuggestions,
4391
8471
  getActionEmitter,
8472
+ getExitCode,
4392
8473
  getFeedbackConfig,
4393
8474
  getPhaseCategories,
8475
+ getStreamForBranch,
8476
+ getUpdateNotification,
8477
+ goRules,
8478
+ injectionRules,
8479
+ isSmallSuggestion,
8480
+ isUpdateCheckEnabled,
8481
+ listStreams,
4394
8482
  loadFailures,
4395
8483
  loadHandoff,
4396
8484
  loadRelevantLearnings,
4397
8485
  loadState,
8486
+ loadStreamIndex,
4398
8487
  logAgentAction,
8488
+ migrateToStreams,
8489
+ networkRules,
8490
+ nodeRules,
4399
8491
  parseDiff,
8492
+ parseRoadmap,
8493
+ parseSecurityConfig,
8494
+ parseSize,
8495
+ pathTraversalRules,
4400
8496
  previewFix,
8497
+ reactRules,
8498
+ readCheckState,
4401
8499
  requestMultiplePeerReviews,
4402
8500
  requestPeerReview,
4403
8501
  resetFeedbackConfig,
4404
8502
  resolveFileToLayer,
8503
+ resolveModelTier,
8504
+ resolveRuleSeverity,
8505
+ resolveStreamPath,
8506
+ runArchitectureAgent,
8507
+ runBugDetectionAgent,
4405
8508
  runCIChecks,
8509
+ runComplianceAgent,
8510
+ runMechanicalChecks,
4406
8511
  runMechanicalGate,
4407
8512
  runMultiTurnPipeline,
4408
8513
  runPipeline,
8514
+ runReviewPipeline,
8515
+ runSecurityAgent,
4409
8516
  saveHandoff,
4410
8517
  saveState,
8518
+ saveStreamIndex,
8519
+ scopeContext,
8520
+ secretRules,
8521
+ serializeRoadmap,
8522
+ setActiveStream,
8523
+ shouldRunCheck,
8524
+ spawnBackgroundCheck,
8525
+ syncRoadmap,
8526
+ touchStream,
4411
8527
  trackAction,
4412
8528
  validateAgentsMap,
4413
8529
  validateBoundaries,
@@ -4415,6 +8531,8 @@ export {
4415
8531
  validateConfig,
4416
8532
  validateDependencies,
4417
8533
  validateFileStructure,
8534
+ validateFindings,
4418
8535
  validateKnowledgeMap,
4419
- validatePatternConfig
8536
+ validatePatternConfig,
8537
+ xssRules
4420
8538
  };