@harness-engineering/cli 1.8.2 → 1.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. package/dist/agents/skills/claude-code/cleanup-dead-code/SKILL.md +3 -3
  2. package/dist/agents/skills/claude-code/harness-autopilot/SKILL.md +13 -1
  3. package/dist/agents/skills/claude-code/harness-brainstorming/SKILL.md +45 -4
  4. package/dist/agents/skills/claude-code/harness-code-review/SKILL.md +36 -15
  5. package/dist/agents/skills/claude-code/harness-codebase-cleanup/SKILL.md +1 -1
  6. package/dist/agents/skills/claude-code/harness-execution/SKILL.md +68 -11
  7. package/dist/agents/skills/claude-code/harness-planning/SKILL.md +41 -3
  8. package/dist/agents/skills/claude-code/harness-pre-commit-review/SKILL.md +28 -3
  9. package/dist/agents/skills/claude-code/harness-release-readiness/SKILL.md +14 -2
  10. package/dist/agents/skills/claude-code/harness-verification/SKILL.md +18 -2
  11. package/dist/agents/skills/gemini-cli/cleanup-dead-code/SKILL.md +3 -3
  12. package/dist/agents/skills/gemini-cli/harness-autopilot/SKILL.md +13 -1
  13. package/dist/agents/skills/gemini-cli/harness-brainstorming/SKILL.md +45 -4
  14. package/dist/agents/skills/gemini-cli/harness-code-review/SKILL.md +36 -15
  15. package/dist/agents/skills/gemini-cli/harness-codebase-cleanup/SKILL.md +1 -1
  16. package/dist/agents/skills/gemini-cli/harness-execution/SKILL.md +68 -11
  17. package/dist/agents/skills/gemini-cli/harness-planning/SKILL.md +41 -3
  18. package/dist/agents/skills/gemini-cli/harness-pre-commit-review/SKILL.md +28 -3
  19. package/dist/agents/skills/gemini-cli/harness-release-readiness/SKILL.md +14 -2
  20. package/dist/agents/skills/gemini-cli/harness-verification/SKILL.md +18 -2
  21. package/dist/bin/harness.js +3 -3
  22. package/dist/{chunk-LB4GRDDV.js → chunk-6JIT7CEM.js} +1 -1
  23. package/dist/{chunk-SAB3VXOW.js → chunk-CGSHUJES.js} +137 -85
  24. package/dist/{chunk-Y7U5AYAL.js → chunk-RTPHUDZS.js} +10 -10
  25. package/dist/{dist-ZODQVGC4.js → dist-C5PYIQPF.js} +1 -1
  26. package/dist/{dist-K6KTTN3I.js → dist-I7DB5VKB.js} +237 -0
  27. package/dist/index.js +3 -3
  28. package/dist/validate-cross-check-VG573VZO.js +7 -0
  29. package/package.json +4 -4
  30. package/dist/validate-cross-check-DLNK423G.js +0 -7
@@ -4969,6 +4969,15 @@ var FAILURES_FILE = "failures.md";
4969
4969
  var HANDOFF_FILE = "handoff.json";
4970
4970
  var GATE_CONFIG_FILE = "gate.json";
4971
4971
  var INDEX_FILE2 = "index.json";
4972
+ var MAX_CACHE_ENTRIES = 8;
4973
+ var learningsCacheMap = /* @__PURE__ */ new Map();
4974
+ var failuresCacheMap = /* @__PURE__ */ new Map();
4975
+ function evictIfNeeded(map) {
4976
+ if (map.size > MAX_CACHE_ENTRIES) {
4977
+ const oldest = map.keys().next().value;
4978
+ if (oldest !== void 0) map.delete(oldest);
4979
+ }
4980
+ }
4972
4981
  async function getStateDir(projectPath, stream) {
4973
4982
  const streamsIndexPath = path3.join(projectPath, HARNESS_DIR2, "streams", INDEX_FILE2);
4974
4983
  const hasStreams = fs4.existsSync(streamsIndexPath);
@@ -5066,25 +5075,35 @@ async function loadRelevantLearnings(projectPath, skillName, stream) {
5066
5075
  if (!fs4.existsSync(learningsPath)) {
5067
5076
  return Ok([]);
5068
5077
  }
5069
- const content = fs4.readFileSync(learningsPath, "utf-8");
5070
- const lines = content.split("\n");
5071
- const entries = [];
5072
- let currentBlock = [];
5073
- for (const line of lines) {
5074
- if (line.startsWith("# ")) continue;
5075
- const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
5076
- const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
5077
- if (isDatedBullet || isHeading) {
5078
- if (currentBlock.length > 0) {
5079
- entries.push(currentBlock.join("\n"));
5078
+ const stats = fs4.statSync(learningsPath);
5079
+ const cacheKey = learningsPath;
5080
+ const cached = learningsCacheMap.get(cacheKey);
5081
+ let entries;
5082
+ if (cached && cached.mtimeMs === stats.mtimeMs) {
5083
+ entries = cached.entries;
5084
+ } else {
5085
+ const content = fs4.readFileSync(learningsPath, "utf-8");
5086
+ const lines = content.split("\n");
5087
+ entries = [];
5088
+ let currentBlock = [];
5089
+ for (const line of lines) {
5090
+ if (line.startsWith("# ")) continue;
5091
+ const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
5092
+ const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
5093
+ if (isDatedBullet || isHeading) {
5094
+ if (currentBlock.length > 0) {
5095
+ entries.push(currentBlock.join("\n"));
5096
+ }
5097
+ currentBlock = [line];
5098
+ } else if (line.trim() !== "" && currentBlock.length > 0) {
5099
+ currentBlock.push(line);
5080
5100
  }
5081
- currentBlock = [line];
5082
- } else if (line.trim() !== "" && currentBlock.length > 0) {
5083
- currentBlock.push(line);
5084
5101
  }
5085
- }
5086
- if (currentBlock.length > 0) {
5087
- entries.push(currentBlock.join("\n"));
5102
+ if (currentBlock.length > 0) {
5103
+ entries.push(currentBlock.join("\n"));
5104
+ }
5105
+ learningsCacheMap.set(cacheKey, { mtimeMs: stats.mtimeMs, entries });
5106
+ evictIfNeeded(learningsCacheMap);
5088
5107
  }
5089
5108
  if (!skillName) {
5090
5109
  return Ok(entries);
@@ -5135,6 +5154,12 @@ async function loadFailures(projectPath, stream) {
5135
5154
  if (!fs4.existsSync(failuresPath)) {
5136
5155
  return Ok([]);
5137
5156
  }
5157
+ const stats = fs4.statSync(failuresPath);
5158
+ const cacheKey = failuresPath;
5159
+ const cached = failuresCacheMap.get(cacheKey);
5160
+ if (cached && cached.mtimeMs === stats.mtimeMs) {
5161
+ return Ok(cached.entries);
5162
+ }
5138
5163
  const content = fs4.readFileSync(failuresPath, "utf-8");
5139
5164
  const entries = [];
5140
5165
  for (const line of content.split("\n")) {
@@ -5148,6 +5173,8 @@ async function loadFailures(projectPath, stream) {
5148
5173
  });
5149
5174
  }
5150
5175
  }
5176
+ failuresCacheMap.set(cacheKey, { mtimeMs: stats.mtimeMs, entries });
5177
+ evictIfNeeded(failuresCacheMap);
5151
5178
  return Ok(entries);
5152
5179
  } catch (error) {
5153
5180
  return Err(
@@ -6199,14 +6226,22 @@ async function runCIChecks(input) {
6199
6226
  const { projectRoot, config, skip = [], failOn = "error" } = input;
6200
6227
  try {
6201
6228
  const checks = [];
6202
- for (const name of ALL_CHECKS) {
6203
- if (skip.includes(name)) {
6204
- checks.push({ name, status: "skip", issues: [], durationMs: 0 });
6205
- } else {
6206
- const result = await runSingleCheck(name, projectRoot, config);
6207
- checks.push(result);
6208
- }
6229
+ const skippedSet = new Set(skip);
6230
+ if (skippedSet.has("validate")) {
6231
+ checks.push({ name: "validate", status: "skip", issues: [], durationMs: 0 });
6232
+ } else {
6233
+ checks.push(await runSingleCheck("validate", projectRoot, config));
6209
6234
  }
6235
+ const remainingChecks = ALL_CHECKS.slice(1);
6236
+ const phase2Results = await Promise.all(
6237
+ remainingChecks.map(async (name) => {
6238
+ if (skippedSet.has(name)) {
6239
+ return { name, status: "skip", issues: [], durationMs: 0 };
6240
+ }
6241
+ return runSingleCheck(name, projectRoot, config);
6242
+ })
6243
+ );
6244
+ checks.push(...phase2Results);
6210
6245
  const summary = buildSummary(checks);
6211
6246
  const exitCode = determineExitCode(summary, failOn);
6212
6247
  const report = {
@@ -6328,76 +6363,93 @@ async function runMechanicalChecks(options) {
6328
6363
  });
6329
6364
  }
6330
6365
  }
6366
+ const parallelChecks = [];
6331
6367
  if (!skip.includes("check-docs")) {
6332
- try {
6333
- const docsDir = path6.join(projectRoot, config.docsDir ?? "docs");
6334
- const result = await checkDocCoverage("project", { docsDir });
6335
- if (!result.ok) {
6336
- statuses["check-docs"] = "warn";
6337
- findings.push({
6338
- tool: "check-docs",
6339
- file: docsDir,
6340
- message: result.error.message,
6341
- severity: "warning"
6342
- });
6343
- } else if (result.value.gaps && result.value.gaps.length > 0) {
6344
- statuses["check-docs"] = "warn";
6345
- for (const gap of result.value.gaps) {
6346
- findings.push({
6368
+ parallelChecks.push(
6369
+ (async () => {
6370
+ const localFindings = [];
6371
+ try {
6372
+ const docsDir = path6.join(projectRoot, config.docsDir ?? "docs");
6373
+ const result = await checkDocCoverage("project", { docsDir });
6374
+ if (!result.ok) {
6375
+ statuses["check-docs"] = "warn";
6376
+ localFindings.push({
6377
+ tool: "check-docs",
6378
+ file: docsDir,
6379
+ message: result.error.message,
6380
+ severity: "warning"
6381
+ });
6382
+ } else if (result.value.gaps && result.value.gaps.length > 0) {
6383
+ statuses["check-docs"] = "warn";
6384
+ for (const gap of result.value.gaps) {
6385
+ localFindings.push({
6386
+ tool: "check-docs",
6387
+ file: gap.file,
6388
+ message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
6389
+ severity: "warning"
6390
+ });
6391
+ }
6392
+ } else {
6393
+ statuses["check-docs"] = "pass";
6394
+ }
6395
+ } catch (err) {
6396
+ statuses["check-docs"] = "warn";
6397
+ localFindings.push({
6347
6398
  tool: "check-docs",
6348
- file: gap.file,
6349
- message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
6399
+ file: path6.join(projectRoot, "docs"),
6400
+ message: err instanceof Error ? err.message : String(err),
6350
6401
  severity: "warning"
6351
6402
  });
6352
6403
  }
6353
- } else {
6354
- statuses["check-docs"] = "pass";
6355
- }
6356
- } catch (err) {
6357
- statuses["check-docs"] = "warn";
6358
- findings.push({
6359
- tool: "check-docs",
6360
- file: path6.join(projectRoot, "docs"),
6361
- message: err instanceof Error ? err.message : String(err),
6362
- severity: "warning"
6363
- });
6364
- }
6404
+ return localFindings;
6405
+ })()
6406
+ );
6365
6407
  }
6366
6408
  if (!skip.includes("security-scan")) {
6367
- try {
6368
- const securityConfig = parseSecurityConfig(config.security);
6369
- if (!securityConfig.enabled) {
6370
- statuses["security-scan"] = "skip";
6371
- } else {
6372
- const scanner = new SecurityScanner(securityConfig);
6373
- scanner.configureForProject(projectRoot);
6374
- const filesToScan = changedFiles ?? [];
6375
- const scanResult = await scanner.scanFiles(filesToScan);
6376
- if (scanResult.findings.length > 0) {
6377
- statuses["security-scan"] = "warn";
6378
- for (const f of scanResult.findings) {
6379
- findings.push({
6380
- tool: "security-scan",
6381
- file: f.file,
6382
- line: f.line,
6383
- ruleId: f.ruleId,
6384
- message: f.message,
6385
- severity: f.severity === "info" ? "warning" : f.severity
6386
- });
6409
+ parallelChecks.push(
6410
+ (async () => {
6411
+ const localFindings = [];
6412
+ try {
6413
+ const securityConfig = parseSecurityConfig(config.security);
6414
+ if (!securityConfig.enabled) {
6415
+ statuses["security-scan"] = "skip";
6416
+ } else {
6417
+ const scanner = new SecurityScanner(securityConfig);
6418
+ scanner.configureForProject(projectRoot);
6419
+ const filesToScan = changedFiles ?? [];
6420
+ const scanResult = await scanner.scanFiles(filesToScan);
6421
+ if (scanResult.findings.length > 0) {
6422
+ statuses["security-scan"] = "warn";
6423
+ for (const f of scanResult.findings) {
6424
+ localFindings.push({
6425
+ tool: "security-scan",
6426
+ file: f.file,
6427
+ line: f.line,
6428
+ ruleId: f.ruleId,
6429
+ message: f.message,
6430
+ severity: f.severity === "info" ? "warning" : f.severity
6431
+ });
6432
+ }
6433
+ } else {
6434
+ statuses["security-scan"] = "pass";
6435
+ }
6387
6436
  }
6388
- } else {
6389
- statuses["security-scan"] = "pass";
6437
+ } catch (err) {
6438
+ statuses["security-scan"] = "warn";
6439
+ localFindings.push({
6440
+ tool: "security-scan",
6441
+ file: projectRoot,
6442
+ message: err instanceof Error ? err.message : String(err),
6443
+ severity: "warning"
6444
+ });
6390
6445
  }
6391
- }
6392
- } catch (err) {
6393
- statuses["security-scan"] = "warn";
6394
- findings.push({
6395
- tool: "security-scan",
6396
- file: projectRoot,
6397
- message: err instanceof Error ? err.message : String(err),
6398
- severity: "warning"
6399
- });
6400
- }
6446
+ return localFindings;
6447
+ })()
6448
+ );
6449
+ }
6450
+ const parallelResults = await Promise.all(parallelChecks);
6451
+ for (const result of parallelResults) {
6452
+ findings.push(...result);
6401
6453
  }
6402
6454
  const hasErrors = findings.some((f) => f.severity === "error");
6403
6455
  const stopPipeline = statuses.validate === "fail" || statuses["check-deps"] === "fail";
@@ -31,7 +31,7 @@ import {
31
31
  validateAgentsMap,
32
32
  validateDependencies,
33
33
  validateKnowledgeMap
34
- } from "./chunk-SAB3VXOW.js";
34
+ } from "./chunk-CGSHUJES.js";
35
35
  import {
36
36
  CLIError,
37
37
  ExitCode,
@@ -369,7 +369,7 @@ function createValidateCommand() {
369
369
  process.exit(result.error.exitCode);
370
370
  }
371
371
  if (opts.crossCheck) {
372
- const { runCrossCheck: runCrossCheck2 } = await import("./validate-cross-check-DLNK423G.js");
372
+ const { runCrossCheck: runCrossCheck2 } = await import("./validate-cross-check-VG573VZO.js");
373
373
  const cwd = process.cwd();
374
374
  const specsDir = path2.join(cwd, "docs", "specs");
375
375
  const plansDir = path2.join(cwd, "docs", "plans");
@@ -735,7 +735,7 @@ function createPerfCommand() {
735
735
  perf.command("bench [glob]").description("Run benchmarks via vitest bench").action(async (glob2, _opts, cmd) => {
736
736
  const globalOpts = cmd.optsWithGlobals();
737
737
  const cwd = process.cwd();
738
- const { BenchmarkRunner } = await import("./dist-ZODQVGC4.js");
738
+ const { BenchmarkRunner } = await import("./dist-C5PYIQPF.js");
739
739
  const runner = new BenchmarkRunner();
740
740
  const benchFiles = runner.discover(cwd, glob2);
741
741
  if (benchFiles.length === 0) {
@@ -804,7 +804,7 @@ Results (${result.results.length} benchmarks):`);
804
804
  baselines.command("update").description("Update baselines from latest benchmark run").action(async (_opts, cmd) => {
805
805
  const globalOpts = cmd.optsWithGlobals();
806
806
  const cwd = process.cwd();
807
- const { BenchmarkRunner } = await import("./dist-ZODQVGC4.js");
807
+ const { BenchmarkRunner } = await import("./dist-C5PYIQPF.js");
808
808
  const runner = new BenchmarkRunner();
809
809
  const manager = new BaselineManager(cwd);
810
810
  logger.info("Running benchmarks to update baselines...");
@@ -832,7 +832,7 @@ Results (${result.results.length} benchmarks):`);
832
832
  perf.command("report").description("Full performance report with metrics, trends, and hotspots").action(async (_opts, cmd) => {
833
833
  const globalOpts = cmd.optsWithGlobals();
834
834
  const cwd = process.cwd();
835
- const { EntropyAnalyzer: EntropyAnalyzer2 } = await import("./dist-ZODQVGC4.js");
835
+ const { EntropyAnalyzer: EntropyAnalyzer2 } = await import("./dist-C5PYIQPF.js");
836
836
  const analyzer = new EntropyAnalyzer2({
837
837
  rootDir: path6.resolve(cwd),
838
838
  analyze: { complexity: true, coupling: true }
@@ -5094,7 +5094,7 @@ function createGenerateCommand3() {
5094
5094
  import { Command as Command39 } from "commander";
5095
5095
  import * as path37 from "path";
5096
5096
  async function runScan(projectPath) {
5097
- const { GraphStore, CodeIngestor, TopologicalLinker, KnowledgeIngestor, GitIngestor } = await import("./dist-K6KTTN3I.js");
5097
+ const { GraphStore, CodeIngestor, TopologicalLinker, KnowledgeIngestor, GitIngestor } = await import("./dist-I7DB5VKB.js");
5098
5098
  const store = new GraphStore();
5099
5099
  const start = Date.now();
5100
5100
  await new CodeIngestor(store).ingest(projectPath);
@@ -5175,7 +5175,7 @@ async function runIngest(projectPath, source, opts) {
5175
5175
  SyncManager,
5176
5176
  JiraConnector,
5177
5177
  SlackConnector
5178
- } = await import("./dist-K6KTTN3I.js");
5178
+ } = await import("./dist-I7DB5VKB.js");
5179
5179
  const graphDir = path38.join(projectPath, ".harness", "graph");
5180
5180
  const store = new GraphStore();
5181
5181
  await store.load(graphDir);
@@ -5268,7 +5268,7 @@ function createIngestCommand() {
5268
5268
  import { Command as Command41 } from "commander";
5269
5269
  import * as path39 from "path";
5270
5270
  async function runQuery(projectPath, rootNodeId, opts) {
5271
- const { GraphStore, ContextQL } = await import("./dist-K6KTTN3I.js");
5271
+ const { GraphStore, ContextQL } = await import("./dist-I7DB5VKB.js");
5272
5272
  const store = new GraphStore();
5273
5273
  const graphDir = path39.join(projectPath, ".harness", "graph");
5274
5274
  const loaded = await store.load(graphDir);
@@ -5317,7 +5317,7 @@ import { Command as Command42 } from "commander";
5317
5317
  // src/commands/graph/status.ts
5318
5318
  import * as path40 from "path";
5319
5319
  async function runGraphStatus(projectPath) {
5320
- const { GraphStore } = await import("./dist-K6KTTN3I.js");
5320
+ const { GraphStore } = await import("./dist-I7DB5VKB.js");
5321
5321
  const graphDir = path40.join(projectPath, ".harness", "graph");
5322
5322
  const store = new GraphStore();
5323
5323
  const loaded = await store.load(graphDir);
@@ -5357,7 +5357,7 @@ async function runGraphStatus(projectPath) {
5357
5357
  // src/commands/graph/export.ts
5358
5358
  import * as path41 from "path";
5359
5359
  async function runGraphExport(projectPath, format) {
5360
- const { GraphStore } = await import("./dist-K6KTTN3I.js");
5360
+ const { GraphStore } = await import("./dist-I7DB5VKB.js");
5361
5361
  const graphDir = path41.join(projectPath, ".harness", "graph");
5362
5362
  const store = new GraphStore();
5363
5363
  const loaded = await store.load(graphDir);
@@ -165,7 +165,7 @@ import {
165
165
  validateKnowledgeMap,
166
166
  validatePatternConfig,
167
167
  xssRules
168
- } from "./chunk-SAB3VXOW.js";
168
+ } from "./chunk-CGSHUJES.js";
169
169
  export {
170
170
  AGENT_DESCRIPTORS,
171
171
  ARCHITECTURE_DESCRIPTOR,
@@ -2164,6 +2164,242 @@ var GraphCouplingAdapter = class {
2164
2164
  return maxDepth;
2165
2165
  }
2166
2166
  };
2167
+ var DEFAULT_THRESHOLD = 2;
2168
+ var DEFAULT_METRICS = [
2169
+ "cyclomaticComplexity",
2170
+ "fanIn",
2171
+ "fanOut",
2172
+ "hotspotScore",
2173
+ "transitiveDepth"
2174
+ ];
2175
+ var RECOGNIZED_METRICS = new Set(DEFAULT_METRICS);
2176
+ var GraphAnomalyAdapter = class {
2177
+ constructor(store) {
2178
+ this.store = store;
2179
+ }
2180
+ detect(options) {
2181
+ const threshold = options?.threshold != null && options.threshold > 0 ? options.threshold : DEFAULT_THRESHOLD;
2182
+ const requestedMetrics = options?.metrics ?? [...DEFAULT_METRICS];
2183
+ const warnings = [];
2184
+ const metricsToAnalyze = [];
2185
+ for (const m of requestedMetrics) {
2186
+ if (RECOGNIZED_METRICS.has(m)) {
2187
+ metricsToAnalyze.push(m);
2188
+ } else {
2189
+ warnings.push(m);
2190
+ }
2191
+ }
2192
+ const allOutliers = [];
2193
+ const analyzedNodeIds = /* @__PURE__ */ new Set();
2194
+ const couplingMetrics = ["fanIn", "fanOut", "transitiveDepth"];
2195
+ const needsCoupling = metricsToAnalyze.some((m) => couplingMetrics.includes(m));
2196
+ const needsComplexity = metricsToAnalyze.includes("hotspotScore");
2197
+ const cachedCouplingData = needsCoupling ? new GraphCouplingAdapter(this.store).computeCouplingData() : void 0;
2198
+ const cachedHotspotData = needsComplexity ? new GraphComplexityAdapter(this.store).computeComplexityHotspots() : void 0;
2199
+ for (const metric of metricsToAnalyze) {
2200
+ const entries = this.collectMetricValues(metric, cachedCouplingData, cachedHotspotData);
2201
+ for (const e of entries) {
2202
+ analyzedNodeIds.add(e.nodeId);
2203
+ }
2204
+ const outliers = this.computeZScoreOutliers(entries, metric, threshold);
2205
+ allOutliers.push(...outliers);
2206
+ }
2207
+ allOutliers.sort((a, b) => b.zScore - a.zScore);
2208
+ const articulationPoints = this.findArticulationPoints();
2209
+ const outlierNodeIds = new Set(allOutliers.map((o) => o.nodeId));
2210
+ const apNodeIds = new Set(articulationPoints.map((ap) => ap.nodeId));
2211
+ const overlapping = [...outlierNodeIds].filter((id) => apNodeIds.has(id));
2212
+ return {
2213
+ statisticalOutliers: allOutliers,
2214
+ articulationPoints,
2215
+ overlapping,
2216
+ summary: {
2217
+ totalNodesAnalyzed: analyzedNodeIds.size,
2218
+ outlierCount: allOutliers.length,
2219
+ articulationPointCount: articulationPoints.length,
2220
+ overlapCount: overlapping.length,
2221
+ metricsAnalyzed: metricsToAnalyze,
2222
+ warnings,
2223
+ threshold
2224
+ }
2225
+ };
2226
+ }
2227
+ collectMetricValues(metric, cachedCouplingData, cachedHotspotData) {
2228
+ const entries = [];
2229
+ if (metric === "cyclomaticComplexity") {
2230
+ const functionNodes = [
2231
+ ...this.store.findNodes({ type: "function" }),
2232
+ ...this.store.findNodes({ type: "method" })
2233
+ ];
2234
+ for (const node of functionNodes) {
2235
+ const cc = node.metadata?.cyclomaticComplexity;
2236
+ if (typeof cc === "number") {
2237
+ entries.push({
2238
+ nodeId: node.id,
2239
+ nodeName: node.name,
2240
+ nodePath: node.path,
2241
+ nodeType: node.type,
2242
+ value: cc
2243
+ });
2244
+ }
2245
+ }
2246
+ } else if (metric === "fanIn" || metric === "fanOut" || metric === "transitiveDepth") {
2247
+ const couplingData = cachedCouplingData ?? new GraphCouplingAdapter(this.store).computeCouplingData();
2248
+ const fileNodes = this.store.findNodes({ type: "file" });
2249
+ for (const fileData of couplingData.files) {
2250
+ const fileNode = fileNodes.find((n) => (n.path ?? n.name) === fileData.file);
2251
+ if (!fileNode) continue;
2252
+ entries.push({
2253
+ nodeId: fileNode.id,
2254
+ nodeName: fileNode.name,
2255
+ nodePath: fileNode.path,
2256
+ nodeType: "file",
2257
+ value: fileData[metric]
2258
+ });
2259
+ }
2260
+ } else if (metric === "hotspotScore") {
2261
+ const hotspots = cachedHotspotData ?? new GraphComplexityAdapter(this.store).computeComplexityHotspots();
2262
+ const functionNodes = [
2263
+ ...this.store.findNodes({ type: "function" }),
2264
+ ...this.store.findNodes({ type: "method" })
2265
+ ];
2266
+ for (const h of hotspots.hotspots) {
2267
+ const fnNode = functionNodes.find(
2268
+ (n) => n.name === h.function && (n.path ?? "") === (h.file ?? "")
2269
+ );
2270
+ if (!fnNode) continue;
2271
+ entries.push({
2272
+ nodeId: fnNode.id,
2273
+ nodeName: fnNode.name,
2274
+ nodePath: fnNode.path,
2275
+ nodeType: fnNode.type,
2276
+ value: h.hotspotScore
2277
+ });
2278
+ }
2279
+ }
2280
+ return entries;
2281
+ }
2282
+ computeZScoreOutliers(entries, metric, threshold) {
2283
+ if (entries.length === 0) return [];
2284
+ const values = entries.map((e) => e.value);
2285
+ const mean = values.reduce((sum, v) => sum + v, 0) / values.length;
2286
+ const variance = values.reduce((sum, v) => sum + (v - mean) ** 2, 0) / values.length;
2287
+ const stdDev = Math.sqrt(variance);
2288
+ if (stdDev === 0) return [];
2289
+ const outliers = [];
2290
+ for (const entry of entries) {
2291
+ const zScore = Math.abs(entry.value - mean) / stdDev;
2292
+ if (zScore > threshold) {
2293
+ outliers.push({
2294
+ nodeId: entry.nodeId,
2295
+ nodeName: entry.nodeName,
2296
+ nodePath: entry.nodePath,
2297
+ nodeType: entry.nodeType,
2298
+ metric,
2299
+ value: entry.value,
2300
+ zScore,
2301
+ mean,
2302
+ stdDev
2303
+ });
2304
+ }
2305
+ }
2306
+ return outliers;
2307
+ }
2308
+ findArticulationPoints() {
2309
+ const fileNodes = this.store.findNodes({ type: "file" });
2310
+ if (fileNodes.length === 0) return [];
2311
+ const nodeMap = /* @__PURE__ */ new Map();
2312
+ const adj = /* @__PURE__ */ new Map();
2313
+ for (const node of fileNodes) {
2314
+ nodeMap.set(node.id, { name: node.name, path: node.path });
2315
+ adj.set(node.id, /* @__PURE__ */ new Set());
2316
+ }
2317
+ const importEdges = this.store.getEdges({ type: "imports" });
2318
+ for (const edge of importEdges) {
2319
+ if (adj.has(edge.from) && adj.has(edge.to)) {
2320
+ adj.get(edge.from).add(edge.to);
2321
+ adj.get(edge.to).add(edge.from);
2322
+ }
2323
+ }
2324
+ const disc = /* @__PURE__ */ new Map();
2325
+ const low = /* @__PURE__ */ new Map();
2326
+ const parent = /* @__PURE__ */ new Map();
2327
+ const apSet = /* @__PURE__ */ new Set();
2328
+ let timer = 0;
2329
+ const dfs = (u) => {
2330
+ disc.set(u, timer);
2331
+ low.set(u, timer);
2332
+ timer++;
2333
+ let children = 0;
2334
+ for (const v of adj.get(u)) {
2335
+ if (!disc.has(v)) {
2336
+ children++;
2337
+ parent.set(v, u);
2338
+ dfs(v);
2339
+ low.set(u, Math.min(low.get(u), low.get(v)));
2340
+ if (parent.get(u) === null && children > 1) {
2341
+ apSet.add(u);
2342
+ }
2343
+ if (parent.get(u) !== null && low.get(v) >= disc.get(u)) {
2344
+ apSet.add(u);
2345
+ }
2346
+ } else if (v !== parent.get(u)) {
2347
+ low.set(u, Math.min(low.get(u), disc.get(v)));
2348
+ }
2349
+ }
2350
+ };
2351
+ for (const nodeId of adj.keys()) {
2352
+ if (!disc.has(nodeId)) {
2353
+ parent.set(nodeId, null);
2354
+ dfs(nodeId);
2355
+ }
2356
+ }
2357
+ const results = [];
2358
+ for (const apId of apSet) {
2359
+ const { components, dependentCount } = this.computeRemovalImpact(apId, adj);
2360
+ const info = nodeMap.get(apId);
2361
+ results.push({
2362
+ nodeId: apId,
2363
+ nodeName: info.name,
2364
+ nodePath: info.path,
2365
+ componentsIfRemoved: components,
2366
+ dependentCount
2367
+ });
2368
+ }
2369
+ results.sort((a, b) => b.dependentCount - a.dependentCount);
2370
+ return results;
2371
+ }
2372
+ computeRemovalImpact(removedId, adj) {
2373
+ const visited = /* @__PURE__ */ new Set();
2374
+ visited.add(removedId);
2375
+ const componentSizes = [];
2376
+ for (const nodeId of adj.keys()) {
2377
+ if (visited.has(nodeId)) continue;
2378
+ const queue = [nodeId];
2379
+ visited.add(nodeId);
2380
+ let size = 0;
2381
+ let head = 0;
2382
+ while (head < queue.length) {
2383
+ const current = queue[head++];
2384
+ size++;
2385
+ for (const neighbor of adj.get(current)) {
2386
+ if (!visited.has(neighbor)) {
2387
+ visited.add(neighbor);
2388
+ queue.push(neighbor);
2389
+ }
2390
+ }
2391
+ }
2392
+ componentSizes.push(size);
2393
+ }
2394
+ const components = componentSizes.length;
2395
+ if (componentSizes.length <= 1) {
2396
+ return { components, dependentCount: 0 };
2397
+ }
2398
+ const maxSize = Math.max(...componentSizes);
2399
+ const dependentCount = componentSizes.reduce((sum, s) => sum + s, 0) - maxSize;
2400
+ return { components, dependentCount };
2401
+ }
2402
+ };
2167
2403
  var PHASE_NODE_TYPES = {
2168
2404
  implement: ["file", "function", "class", "method", "interface", "variable"],
2169
2405
  review: ["adr", "document", "learning", "commit"],
@@ -2785,6 +3021,7 @@ export {
2785
3021
  EDGE_TYPES,
2786
3022
  FusionLayer,
2787
3023
  GitIngestor,
3024
+ GraphAnomalyAdapter,
2788
3025
  GraphComplexityAdapter,
2789
3026
  GraphConstraintAdapter,
2790
3027
  GraphCouplingAdapter,
package/dist/index.js CHANGED
@@ -30,11 +30,11 @@ import {
30
30
  runPersona,
31
31
  runQuery,
32
32
  runScan
33
- } from "./chunk-Y7U5AYAL.js";
33
+ } from "./chunk-RTPHUDZS.js";
34
34
  import {
35
35
  runCrossCheck
36
- } from "./chunk-LB4GRDDV.js";
37
- import "./chunk-SAB3VXOW.js";
36
+ } from "./chunk-6JIT7CEM.js";
37
+ import "./chunk-CGSHUJES.js";
38
38
  import {
39
39
  CLIError,
40
40
  ExitCode,
@@ -0,0 +1,7 @@
1
+ import {
2
+ runCrossCheck
3
+ } from "./chunk-6JIT7CEM.js";
4
+ import "./chunk-CGSHUJES.js";
5
+ export {
6
+ runCrossCheck
7
+ };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@harness-engineering/cli",
3
- "version": "1.8.2",
3
+ "version": "1.9.0",
4
4
  "description": "CLI for Harness Engineering toolkit",
5
5
  "type": "module",
6
6
  "bin": {
@@ -30,9 +30,9 @@
30
30
  "minimatch": "^10.2.4",
31
31
  "yaml": "^2.3.0",
32
32
  "zod": "^3.22.0",
33
- "@harness-engineering/core": "0.9.2",
34
- "@harness-engineering/linter-gen": "0.1.2",
35
- "@harness-engineering/graph": "0.2.3"
33
+ "@harness-engineering/core": "0.10.0",
34
+ "@harness-engineering/graph": "0.3.0",
35
+ "@harness-engineering/linter-gen": "0.1.2"
36
36
  },
37
37
  "devDependencies": {
38
38
  "@types/node": "^22.0.0",