@harness-engineering/cli 1.8.1 → 1.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/dist/agents/skills/claude-code/cleanup-dead-code/SKILL.md +3 -3
  2. package/dist/agents/skills/claude-code/harness-autopilot/SKILL.md +13 -1
  3. package/dist/agents/skills/claude-code/harness-brainstorming/SKILL.md +45 -4
  4. package/dist/agents/skills/claude-code/harness-code-review/SKILL.md +36 -15
  5. package/dist/agents/skills/claude-code/harness-codebase-cleanup/SKILL.md +1 -1
  6. package/dist/agents/skills/claude-code/harness-execution/SKILL.md +68 -11
  7. package/dist/agents/skills/claude-code/harness-planning/SKILL.md +41 -3
  8. package/dist/agents/skills/claude-code/harness-pre-commit-review/SKILL.md +28 -3
  9. package/dist/agents/skills/claude-code/harness-release-readiness/SKILL.md +14 -2
  10. package/dist/agents/skills/claude-code/harness-verification/SKILL.md +18 -2
  11. package/dist/agents/skills/gemini-cli/cleanup-dead-code/SKILL.md +3 -3
  12. package/dist/agents/skills/gemini-cli/harness-autopilot/SKILL.md +13 -1
  13. package/dist/agents/skills/gemini-cli/harness-brainstorming/SKILL.md +45 -4
  14. package/dist/agents/skills/gemini-cli/harness-code-review/SKILL.md +36 -15
  15. package/dist/agents/skills/gemini-cli/harness-codebase-cleanup/SKILL.md +1 -1
  16. package/dist/agents/skills/gemini-cli/harness-execution/SKILL.md +68 -11
  17. package/dist/agents/skills/gemini-cli/harness-planning/SKILL.md +41 -3
  18. package/dist/agents/skills/gemini-cli/harness-pre-commit-review/SKILL.md +28 -3
  19. package/dist/agents/skills/gemini-cli/harness-release-readiness/SKILL.md +14 -2
  20. package/dist/agents/skills/gemini-cli/harness-verification/SKILL.md +18 -2
  21. package/dist/bin/harness.js +7 -7
  22. package/dist/{chunk-RT2LYQHF.js → chunk-6JIT7CEM.js} +1 -1
  23. package/dist/{chunk-E2RTDBMG.js → chunk-CGSHUJES.js} +138 -86
  24. package/dist/{chunk-KJANDVVC.js → chunk-RTPHUDZS.js} +28 -17
  25. package/dist/{chunk-ACMDUQJG.js → chunk-ULSRSP53.js} +2 -1
  26. package/dist/{create-skill-NZDLMMR6.js → create-skill-UZOHMXRU.js} +1 -1
  27. package/dist/{dist-CCM3L3UE.js → dist-C5PYIQPF.js} +1 -1
  28. package/dist/{dist-K6KTTN3I.js → dist-I7DB5VKB.js} +237 -0
  29. package/dist/index.js +4 -4
  30. package/dist/validate-cross-check-VG573VZO.js +7 -0
  31. package/package.json +6 -5
  32. package/dist/validate-cross-check-ZGKFQY57.js +0 -7
@@ -2164,6 +2164,242 @@ var GraphCouplingAdapter = class {
2164
2164
  return maxDepth;
2165
2165
  }
2166
2166
  };
2167
+ var DEFAULT_THRESHOLD = 2;
2168
+ var DEFAULT_METRICS = [
2169
+ "cyclomaticComplexity",
2170
+ "fanIn",
2171
+ "fanOut",
2172
+ "hotspotScore",
2173
+ "transitiveDepth"
2174
+ ];
2175
+ var RECOGNIZED_METRICS = new Set(DEFAULT_METRICS);
2176
+ var GraphAnomalyAdapter = class {
2177
+ constructor(store) {
2178
+ this.store = store;
2179
+ }
2180
+ detect(options) {
2181
+ const threshold = options?.threshold != null && options.threshold > 0 ? options.threshold : DEFAULT_THRESHOLD;
2182
+ const requestedMetrics = options?.metrics ?? [...DEFAULT_METRICS];
2183
+ const warnings = [];
2184
+ const metricsToAnalyze = [];
2185
+ for (const m of requestedMetrics) {
2186
+ if (RECOGNIZED_METRICS.has(m)) {
2187
+ metricsToAnalyze.push(m);
2188
+ } else {
2189
+ warnings.push(m);
2190
+ }
2191
+ }
2192
+ const allOutliers = [];
2193
+ const analyzedNodeIds = /* @__PURE__ */ new Set();
2194
+ const couplingMetrics = ["fanIn", "fanOut", "transitiveDepth"];
2195
+ const needsCoupling = metricsToAnalyze.some((m) => couplingMetrics.includes(m));
2196
+ const needsComplexity = metricsToAnalyze.includes("hotspotScore");
2197
+ const cachedCouplingData = needsCoupling ? new GraphCouplingAdapter(this.store).computeCouplingData() : void 0;
2198
+ const cachedHotspotData = needsComplexity ? new GraphComplexityAdapter(this.store).computeComplexityHotspots() : void 0;
2199
+ for (const metric of metricsToAnalyze) {
2200
+ const entries = this.collectMetricValues(metric, cachedCouplingData, cachedHotspotData);
2201
+ for (const e of entries) {
2202
+ analyzedNodeIds.add(e.nodeId);
2203
+ }
2204
+ const outliers = this.computeZScoreOutliers(entries, metric, threshold);
2205
+ allOutliers.push(...outliers);
2206
+ }
2207
+ allOutliers.sort((a, b) => b.zScore - a.zScore);
2208
+ const articulationPoints = this.findArticulationPoints();
2209
+ const outlierNodeIds = new Set(allOutliers.map((o) => o.nodeId));
2210
+ const apNodeIds = new Set(articulationPoints.map((ap) => ap.nodeId));
2211
+ const overlapping = [...outlierNodeIds].filter((id) => apNodeIds.has(id));
2212
+ return {
2213
+ statisticalOutliers: allOutliers,
2214
+ articulationPoints,
2215
+ overlapping,
2216
+ summary: {
2217
+ totalNodesAnalyzed: analyzedNodeIds.size,
2218
+ outlierCount: allOutliers.length,
2219
+ articulationPointCount: articulationPoints.length,
2220
+ overlapCount: overlapping.length,
2221
+ metricsAnalyzed: metricsToAnalyze,
2222
+ warnings,
2223
+ threshold
2224
+ }
2225
+ };
2226
+ }
2227
+ collectMetricValues(metric, cachedCouplingData, cachedHotspotData) {
2228
+ const entries = [];
2229
+ if (metric === "cyclomaticComplexity") {
2230
+ const functionNodes = [
2231
+ ...this.store.findNodes({ type: "function" }),
2232
+ ...this.store.findNodes({ type: "method" })
2233
+ ];
2234
+ for (const node of functionNodes) {
2235
+ const cc = node.metadata?.cyclomaticComplexity;
2236
+ if (typeof cc === "number") {
2237
+ entries.push({
2238
+ nodeId: node.id,
2239
+ nodeName: node.name,
2240
+ nodePath: node.path,
2241
+ nodeType: node.type,
2242
+ value: cc
2243
+ });
2244
+ }
2245
+ }
2246
+ } else if (metric === "fanIn" || metric === "fanOut" || metric === "transitiveDepth") {
2247
+ const couplingData = cachedCouplingData ?? new GraphCouplingAdapter(this.store).computeCouplingData();
2248
+ const fileNodes = this.store.findNodes({ type: "file" });
2249
+ for (const fileData of couplingData.files) {
2250
+ const fileNode = fileNodes.find((n) => (n.path ?? n.name) === fileData.file);
2251
+ if (!fileNode) continue;
2252
+ entries.push({
2253
+ nodeId: fileNode.id,
2254
+ nodeName: fileNode.name,
2255
+ nodePath: fileNode.path,
2256
+ nodeType: "file",
2257
+ value: fileData[metric]
2258
+ });
2259
+ }
2260
+ } else if (metric === "hotspotScore") {
2261
+ const hotspots = cachedHotspotData ?? new GraphComplexityAdapter(this.store).computeComplexityHotspots();
2262
+ const functionNodes = [
2263
+ ...this.store.findNodes({ type: "function" }),
2264
+ ...this.store.findNodes({ type: "method" })
2265
+ ];
2266
+ for (const h of hotspots.hotspots) {
2267
+ const fnNode = functionNodes.find(
2268
+ (n) => n.name === h.function && (n.path ?? "") === (h.file ?? "")
2269
+ );
2270
+ if (!fnNode) continue;
2271
+ entries.push({
2272
+ nodeId: fnNode.id,
2273
+ nodeName: fnNode.name,
2274
+ nodePath: fnNode.path,
2275
+ nodeType: fnNode.type,
2276
+ value: h.hotspotScore
2277
+ });
2278
+ }
2279
+ }
2280
+ return entries;
2281
+ }
2282
+ computeZScoreOutliers(entries, metric, threshold) {
2283
+ if (entries.length === 0) return [];
2284
+ const values = entries.map((e) => e.value);
2285
+ const mean = values.reduce((sum, v) => sum + v, 0) / values.length;
2286
+ const variance = values.reduce((sum, v) => sum + (v - mean) ** 2, 0) / values.length;
2287
+ const stdDev = Math.sqrt(variance);
2288
+ if (stdDev === 0) return [];
2289
+ const outliers = [];
2290
+ for (const entry of entries) {
2291
+ const zScore = Math.abs(entry.value - mean) / stdDev;
2292
+ if (zScore > threshold) {
2293
+ outliers.push({
2294
+ nodeId: entry.nodeId,
2295
+ nodeName: entry.nodeName,
2296
+ nodePath: entry.nodePath,
2297
+ nodeType: entry.nodeType,
2298
+ metric,
2299
+ value: entry.value,
2300
+ zScore,
2301
+ mean,
2302
+ stdDev
2303
+ });
2304
+ }
2305
+ }
2306
+ return outliers;
2307
+ }
2308
+ findArticulationPoints() {
2309
+ const fileNodes = this.store.findNodes({ type: "file" });
2310
+ if (fileNodes.length === 0) return [];
2311
+ const nodeMap = /* @__PURE__ */ new Map();
2312
+ const adj = /* @__PURE__ */ new Map();
2313
+ for (const node of fileNodes) {
2314
+ nodeMap.set(node.id, { name: node.name, path: node.path });
2315
+ adj.set(node.id, /* @__PURE__ */ new Set());
2316
+ }
2317
+ const importEdges = this.store.getEdges({ type: "imports" });
2318
+ for (const edge of importEdges) {
2319
+ if (adj.has(edge.from) && adj.has(edge.to)) {
2320
+ adj.get(edge.from).add(edge.to);
2321
+ adj.get(edge.to).add(edge.from);
2322
+ }
2323
+ }
2324
+ const disc = /* @__PURE__ */ new Map();
2325
+ const low = /* @__PURE__ */ new Map();
2326
+ const parent = /* @__PURE__ */ new Map();
2327
+ const apSet = /* @__PURE__ */ new Set();
2328
+ let timer = 0;
2329
+ const dfs = (u) => {
2330
+ disc.set(u, timer);
2331
+ low.set(u, timer);
2332
+ timer++;
2333
+ let children = 0;
2334
+ for (const v of adj.get(u)) {
2335
+ if (!disc.has(v)) {
2336
+ children++;
2337
+ parent.set(v, u);
2338
+ dfs(v);
2339
+ low.set(u, Math.min(low.get(u), low.get(v)));
2340
+ if (parent.get(u) === null && children > 1) {
2341
+ apSet.add(u);
2342
+ }
2343
+ if (parent.get(u) !== null && low.get(v) >= disc.get(u)) {
2344
+ apSet.add(u);
2345
+ }
2346
+ } else if (v !== parent.get(u)) {
2347
+ low.set(u, Math.min(low.get(u), disc.get(v)));
2348
+ }
2349
+ }
2350
+ };
2351
+ for (const nodeId of adj.keys()) {
2352
+ if (!disc.has(nodeId)) {
2353
+ parent.set(nodeId, null);
2354
+ dfs(nodeId);
2355
+ }
2356
+ }
2357
+ const results = [];
2358
+ for (const apId of apSet) {
2359
+ const { components, dependentCount } = this.computeRemovalImpact(apId, adj);
2360
+ const info = nodeMap.get(apId);
2361
+ results.push({
2362
+ nodeId: apId,
2363
+ nodeName: info.name,
2364
+ nodePath: info.path,
2365
+ componentsIfRemoved: components,
2366
+ dependentCount
2367
+ });
2368
+ }
2369
+ results.sort((a, b) => b.dependentCount - a.dependentCount);
2370
+ return results;
2371
+ }
2372
+ computeRemovalImpact(removedId, adj) {
2373
+ const visited = /* @__PURE__ */ new Set();
2374
+ visited.add(removedId);
2375
+ const componentSizes = [];
2376
+ for (const nodeId of adj.keys()) {
2377
+ if (visited.has(nodeId)) continue;
2378
+ const queue = [nodeId];
2379
+ visited.add(nodeId);
2380
+ let size = 0;
2381
+ let head = 0;
2382
+ while (head < queue.length) {
2383
+ const current = queue[head++];
2384
+ size++;
2385
+ for (const neighbor of adj.get(current)) {
2386
+ if (!visited.has(neighbor)) {
2387
+ visited.add(neighbor);
2388
+ queue.push(neighbor);
2389
+ }
2390
+ }
2391
+ }
2392
+ componentSizes.push(size);
2393
+ }
2394
+ const components = componentSizes.length;
2395
+ if (componentSizes.length <= 1) {
2396
+ return { components, dependentCount: 0 };
2397
+ }
2398
+ const maxSize = Math.max(...componentSizes);
2399
+ const dependentCount = componentSizes.reduce((sum, s) => sum + s, 0) - maxSize;
2400
+ return { components, dependentCount };
2401
+ }
2402
+ };
2167
2403
  var PHASE_NODE_TYPES = {
2168
2404
  implement: ["file", "function", "class", "method", "interface", "variable"],
2169
2405
  review: ["adr", "document", "learning", "commit"],
@@ -2785,6 +3021,7 @@ export {
2785
3021
  EDGE_TYPES,
2786
3022
  FusionLayer,
2787
3023
  GitIngestor,
3024
+ GraphAnomalyAdapter,
2788
3025
  GraphComplexityAdapter,
2789
3026
  GraphConstraintAdapter,
2790
3027
  GraphCouplingAdapter,
package/dist/index.js CHANGED
@@ -30,18 +30,18 @@ import {
30
30
  runPersona,
31
31
  runQuery,
32
32
  runScan
33
- } from "./chunk-KJANDVVC.js";
33
+ } from "./chunk-RTPHUDZS.js";
34
34
  import {
35
35
  runCrossCheck
36
- } from "./chunk-RT2LYQHF.js";
37
- import "./chunk-E2RTDBMG.js";
36
+ } from "./chunk-6JIT7CEM.js";
37
+ import "./chunk-CGSHUJES.js";
38
38
  import {
39
39
  CLIError,
40
40
  ExitCode,
41
41
  generateSkillFiles,
42
42
  handleError,
43
43
  logger
44
- } from "./chunk-ACMDUQJG.js";
44
+ } from "./chunk-ULSRSP53.js";
45
45
  export {
46
46
  AGENT_DESCRIPTIONS,
47
47
  ALLOWED_PERSONA_COMMANDS,
@@ -0,0 +1,7 @@
1
+ import {
2
+ runCrossCheck
3
+ } from "./chunk-6JIT7CEM.js";
4
+ import "./chunk-CGSHUJES.js";
5
+ export {
6
+ runCrossCheck
7
+ };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@harness-engineering/cli",
3
- "version": "1.8.1",
3
+ "version": "1.9.0",
4
4
  "description": "CLI for Harness Engineering toolkit",
5
5
  "type": "module",
6
6
  "bin": {
@@ -12,7 +12,8 @@
12
12
  ".": {
13
13
  "types": "./dist/index.d.ts",
14
14
  "import": "./dist/index.js"
15
- }
15
+ },
16
+ "./package.json": "./package.json"
16
17
  },
17
18
  "files": [
18
19
  "dist",
@@ -29,9 +30,9 @@
29
30
  "minimatch": "^10.2.4",
30
31
  "yaml": "^2.3.0",
31
32
  "zod": "^3.22.0",
32
- "@harness-engineering/linter-gen": "0.1.2",
33
- "@harness-engineering/core": "0.9.1",
34
- "@harness-engineering/graph": "0.2.3"
33
+ "@harness-engineering/core": "0.10.0",
34
+ "@harness-engineering/graph": "0.3.0",
35
+ "@harness-engineering/linter-gen": "0.1.2"
35
36
  },
36
37
  "devDependencies": {
37
38
  "@types/node": "^22.0.0",
@@ -1,7 +0,0 @@
1
- import {
2
- runCrossCheck
3
- } from "./chunk-RT2LYQHF.js";
4
- import "./chunk-E2RTDBMG.js";
5
- export {
6
- runCrossCheck
7
- };