@harness-engineering/cli 1.8.2 → 1.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/dist/agents/skills/claude-code/cleanup-dead-code/SKILL.md +3 -3
  2. package/dist/agents/skills/claude-code/harness-autopilot/SKILL.md +20 -3
  3. package/dist/agents/skills/claude-code/harness-brainstorming/SKILL.md +55 -5
  4. package/dist/agents/skills/claude-code/harness-code-review/SKILL.md +36 -15
  5. package/dist/agents/skills/claude-code/harness-codebase-cleanup/SKILL.md +1 -1
  6. package/dist/agents/skills/claude-code/harness-execution/SKILL.md +70 -13
  7. package/dist/agents/skills/claude-code/harness-planning/SKILL.md +41 -3
  8. package/dist/agents/skills/claude-code/harness-pre-commit-review/SKILL.md +28 -3
  9. package/dist/agents/skills/claude-code/harness-release-readiness/SKILL.md +14 -2
  10. package/dist/agents/skills/claude-code/harness-verification/SKILL.md +18 -2
  11. package/dist/agents/skills/gemini-cli/cleanup-dead-code/SKILL.md +3 -3
  12. package/dist/agents/skills/gemini-cli/harness-autopilot/SKILL.md +20 -3
  13. package/dist/agents/skills/gemini-cli/harness-brainstorming/SKILL.md +55 -5
  14. package/dist/agents/skills/gemini-cli/harness-code-review/SKILL.md +36 -15
  15. package/dist/agents/skills/gemini-cli/harness-codebase-cleanup/SKILL.md +1 -1
  16. package/dist/agents/skills/gemini-cli/harness-execution/SKILL.md +70 -13
  17. package/dist/agents/skills/gemini-cli/harness-planning/SKILL.md +41 -3
  18. package/dist/agents/skills/gemini-cli/harness-pre-commit-review/SKILL.md +28 -3
  19. package/dist/agents/skills/gemini-cli/harness-release-readiness/SKILL.md +14 -2
  20. package/dist/agents/skills/gemini-cli/harness-verification/SKILL.md +18 -2
  21. package/dist/agents-md-EMRFLNBC.js +8 -0
  22. package/dist/architecture-5JNN5L3M.js +13 -0
  23. package/dist/bin/harness-mcp.d.ts +1 -0
  24. package/dist/bin/harness-mcp.js +28 -0
  25. package/dist/bin/harness.js +42 -8
  26. package/dist/check-phase-gate-WOKIYGAM.js +12 -0
  27. package/dist/chunk-46YA6FI3.js +293 -0
  28. package/dist/chunk-4PFMY3H7.js +248 -0
  29. package/dist/{chunk-LB4GRDDV.js → chunk-72GHBOL2.js} +1 -1
  30. package/dist/chunk-7X7ZAYMY.js +373 -0
  31. package/dist/chunk-B7HFEHWP.js +35 -0
  32. package/dist/chunk-BM3PWGXQ.js +14 -0
  33. package/dist/chunk-C2ERUR3L.js +255 -0
  34. package/dist/chunk-CWZ4Y2PO.js +189 -0
  35. package/dist/{chunk-ULSRSP53.js → chunk-ECUJQS3B.js} +11 -112
  36. package/dist/chunk-EOLRW32Q.js +72 -0
  37. package/dist/chunk-F3YDAJFQ.js +125 -0
  38. package/dist/chunk-F4PTVZWA.js +116 -0
  39. package/dist/chunk-FPIPT36X.js +187 -0
  40. package/dist/chunk-FX7SQHGD.js +103 -0
  41. package/dist/chunk-HIOXKZYF.js +15 -0
  42. package/dist/chunk-IDZNPTYD.js +16 -0
  43. package/dist/chunk-JSTQ3AWB.js +31 -0
  44. package/dist/chunk-K6XAPGML.js +27 -0
  45. package/dist/chunk-KET4QQZB.js +8 -0
  46. package/dist/chunk-LXU5M77O.js +4028 -0
  47. package/dist/chunk-MDUK2J2O.js +67 -0
  48. package/dist/chunk-MHBMTPW7.js +29 -0
  49. package/dist/chunk-MO4YQOMB.js +85 -0
  50. package/dist/chunk-NKDM3FMH.js +52 -0
  51. package/dist/{chunk-SAB3VXOW.js → chunk-NX6DSZSM.js} +144 -111
  52. package/dist/chunk-OPXH4CQN.js +62 -0
  53. package/dist/{chunk-Y7U5AYAL.js → chunk-PAHHT2IK.js} +471 -2719
  54. package/dist/chunk-PMTFPOCT.js +122 -0
  55. package/dist/chunk-PSXF277V.js +89 -0
  56. package/dist/chunk-Q6AB7W5Z.js +135 -0
  57. package/dist/chunk-QPEH2QPG.js +347 -0
  58. package/dist/chunk-TEFCFC4H.js +15 -0
  59. package/dist/chunk-TRAPF4IX.js +185 -0
  60. package/dist/chunk-VUCPTQ6G.js +67 -0
  61. package/dist/chunk-W6Y7ZW3Y.js +13 -0
  62. package/dist/chunk-ZOAWBDWU.js +72 -0
  63. package/dist/ci-workflow-ZBBUNTHQ.js +8 -0
  64. package/dist/constants-5JGUXPEK.js +6 -0
  65. package/dist/create-skill-LUWO46WF.js +11 -0
  66. package/dist/dist-D4RYGUZE.js +14 -0
  67. package/dist/{dist-K6KTTN3I.js → dist-I7DB5VKB.js} +237 -0
  68. package/dist/dist-L7LAAQAS.js +18 -0
  69. package/dist/{dist-ZODQVGC4.js → dist-PBTNVK6K.js} +8 -6
  70. package/dist/docs-PTJGD6XI.js +12 -0
  71. package/dist/engine-SCMZ3G3E.js +8 -0
  72. package/dist/entropy-YIUBGKY7.js +12 -0
  73. package/dist/feedback-WEVQSLAA.js +18 -0
  74. package/dist/generate-agent-definitions-BU5LOJTI.js +15 -0
  75. package/dist/glob-helper-5OHBUQAI.js +52 -0
  76. package/dist/graph-loader-RLO3KRIX.js +8 -0
  77. package/dist/index.d.ts +11 -1
  78. package/dist/index.js +84 -33
  79. package/dist/loader-6S6PVGSF.js +10 -0
  80. package/dist/mcp-BNLBTCXZ.js +34 -0
  81. package/dist/performance-5TVW6SA6.js +24 -0
  82. package/dist/review-pipeline-4JTQAWKW.js +9 -0
  83. package/dist/runner-VMYLHWOC.js +6 -0
  84. package/dist/runtime-PXIM7UV6.js +9 -0
  85. package/dist/security-URYTKLGK.js +9 -0
  86. package/dist/skill-executor-KVS47DAU.js +8 -0
  87. package/dist/validate-KSDUUK2M.js +12 -0
  88. package/dist/validate-cross-check-WZAX357V.js +8 -0
  89. package/dist/version-KFFPOQAX.js +6 -0
  90. package/package.json +7 -5
  91. package/dist/create-skill-UZOHMXRU.js +0 -8
  92. package/dist/validate-cross-check-DLNK423G.js +0 -7
@@ -0,0 +1,185 @@
1
+ // src/persona/trigger-detector.ts
2
+ import * as fs from "fs";
3
+ import * as path from "path";
4
+ function detectTrigger(projectPath) {
5
+ const handoffPath = path.join(projectPath, ".harness", "handoff.json");
6
+ if (!fs.existsSync(handoffPath)) {
7
+ return { trigger: "manual" };
8
+ }
9
+ try {
10
+ const raw = fs.readFileSync(handoffPath, "utf-8");
11
+ const handoff = JSON.parse(raw);
12
+ if (handoff.fromSkill === "harness-planning" && Array.isArray(handoff.pending) && handoff.pending.length > 0) {
13
+ return {
14
+ trigger: "on_plan_approved",
15
+ handoff: {
16
+ fromSkill: handoff.fromSkill,
17
+ summary: handoff.summary ?? "",
18
+ pending: handoff.pending,
19
+ planPath: handoff.planPath
20
+ }
21
+ };
22
+ }
23
+ return { trigger: "manual" };
24
+ } catch {
25
+ return { trigger: "manual" };
26
+ }
27
+ }
28
+
29
+ // src/persona/runner.ts
30
+ var TIMEOUT_ERROR_MESSAGE = "__PERSONA_RUNNER_TIMEOUT__";
31
+ function stepName(step) {
32
+ return "command" in step ? step.command : step.skill;
33
+ }
34
+ function stepType(step) {
35
+ return "command" in step ? "command" : "skill";
36
+ }
37
+ function matchesTrigger(step, trigger) {
38
+ const when = step.when ?? "always";
39
+ return when === "always" || when === trigger;
40
+ }
41
+ function skipRemaining(activeSteps, fromIndex, report) {
42
+ for (let j = fromIndex; j < activeSteps.length; j++) {
43
+ const remaining = activeSteps[j];
44
+ report.steps.push({
45
+ name: stepName(remaining),
46
+ type: stepType(remaining),
47
+ status: "skipped",
48
+ durationMs: 0
49
+ });
50
+ }
51
+ }
52
+ async function runPersona(persona, context) {
53
+ const startTime = Date.now();
54
+ const timeout = persona.config.timeout;
55
+ const report = {
56
+ persona: persona.name.toLowerCase().replace(/\s+/g, "-"),
57
+ status: "pass",
58
+ steps: [],
59
+ totalDurationMs: 0
60
+ };
61
+ let resolvedTrigger;
62
+ let handoff = context.handoff;
63
+ if (context.trigger === "auto") {
64
+ const detection = detectTrigger(context.projectPath);
65
+ resolvedTrigger = detection.trigger;
66
+ handoff = detection.handoff ?? handoff;
67
+ } else {
68
+ resolvedTrigger = context.trigger;
69
+ }
70
+ const activeSteps = persona.steps.filter((s) => matchesTrigger(s, resolvedTrigger));
71
+ for (let i = 0; i < activeSteps.length; i++) {
72
+ const step = activeSteps[i];
73
+ if (Date.now() - startTime >= timeout) {
74
+ skipRemaining(activeSteps, i, report);
75
+ report.status = "partial";
76
+ break;
77
+ }
78
+ const stepStart = Date.now();
79
+ const remainingTime = timeout - (Date.now() - startTime);
80
+ if ("command" in step) {
81
+ const result = await Promise.race([
82
+ context.commandExecutor(step.command),
83
+ new Promise(
84
+ (resolve) => setTimeout(
85
+ () => resolve({
86
+ ok: false,
87
+ error: new Error(TIMEOUT_ERROR_MESSAGE)
88
+ }),
89
+ remainingTime
90
+ )
91
+ )
92
+ ]);
93
+ const durationMs = Date.now() - stepStart;
94
+ if (result.ok) {
95
+ report.steps.push({
96
+ name: step.command,
97
+ type: "command",
98
+ status: "pass",
99
+ result: result.value,
100
+ durationMs
101
+ });
102
+ } else if (result.error.message === TIMEOUT_ERROR_MESSAGE) {
103
+ report.steps.push({
104
+ name: step.command,
105
+ type: "command",
106
+ status: "skipped",
107
+ error: "timed out",
108
+ durationMs
109
+ });
110
+ report.status = "partial";
111
+ skipRemaining(activeSteps, i + 1, report);
112
+ break;
113
+ } else {
114
+ report.steps.push({
115
+ name: step.command,
116
+ type: "command",
117
+ status: "fail",
118
+ error: result.error.message,
119
+ durationMs
120
+ });
121
+ report.status = "fail";
122
+ skipRemaining(activeSteps, i + 1, report);
123
+ break;
124
+ }
125
+ } else {
126
+ const skillContext = {
127
+ trigger: resolvedTrigger,
128
+ projectPath: context.projectPath,
129
+ outputMode: step.output ?? "auto",
130
+ ...handoff ? { handoff } : {}
131
+ };
132
+ const SKILL_TIMEOUT_RESULT = {
133
+ status: "fail",
134
+ output: "timed out",
135
+ durationMs: 0
136
+ };
137
+ const result = await Promise.race([
138
+ context.skillExecutor(step.skill, skillContext),
139
+ new Promise(
140
+ (resolve) => setTimeout(() => resolve(SKILL_TIMEOUT_RESULT), remainingTime)
141
+ )
142
+ ]);
143
+ const durationMs = Date.now() - stepStart;
144
+ if (result === SKILL_TIMEOUT_RESULT) {
145
+ report.steps.push({
146
+ name: step.skill,
147
+ type: "skill",
148
+ status: "skipped",
149
+ error: "timed out",
150
+ durationMs
151
+ });
152
+ report.status = "partial";
153
+ skipRemaining(activeSteps, i + 1, report);
154
+ break;
155
+ } else if (result.status === "pass") {
156
+ report.steps.push({
157
+ name: step.skill,
158
+ type: "skill",
159
+ status: "pass",
160
+ result: result.output,
161
+ ...result.artifactPath ? { artifactPath: result.artifactPath } : {},
162
+ durationMs
163
+ });
164
+ } else {
165
+ report.steps.push({
166
+ name: step.skill,
167
+ type: "skill",
168
+ status: "fail",
169
+ error: result.output,
170
+ durationMs
171
+ });
172
+ report.status = "fail";
173
+ skipRemaining(activeSteps, i + 1, report);
174
+ break;
175
+ }
176
+ }
177
+ }
178
+ report.totalDurationMs = Date.now() - startTime;
179
+ return report;
180
+ }
181
+
182
+ export {
183
+ detectTrigger,
184
+ runPersona
185
+ };
@@ -0,0 +1,67 @@
1
+ import {
2
+ Err,
3
+ Ok
4
+ } from "./chunk-MHBMTPW7.js";
5
+
6
+ // src/persona/generators/ci-workflow.ts
7
+ import YAML from "yaml";
8
+ function buildGitHubTriggers(triggers) {
9
+ const on = {};
10
+ for (const trigger of triggers) {
11
+ switch (trigger.event) {
12
+ case "on_pr": {
13
+ const prConfig = {};
14
+ if (trigger.conditions?.paths) prConfig.paths = trigger.conditions.paths;
15
+ on.pull_request = prConfig;
16
+ break;
17
+ }
18
+ case "on_commit": {
19
+ const pushConfig = {};
20
+ if (trigger.conditions?.branches) pushConfig.branches = trigger.conditions.branches;
21
+ on.push = pushConfig;
22
+ break;
23
+ }
24
+ case "scheduled":
25
+ on.schedule = [{ cron: trigger.cron }];
26
+ break;
27
+ }
28
+ }
29
+ return on;
30
+ }
31
+ function generateCIWorkflow(persona, platform) {
32
+ try {
33
+ if (platform === "gitlab") return Err(new Error("GitLab CI generation is not yet supported"));
34
+ const severity = persona.config.severity;
35
+ const steps = [
36
+ { uses: "actions/checkout@v4" },
37
+ { uses: "actions/setup-node@v4", with: { "node-version": "20" } },
38
+ { uses: "pnpm/action-setup@v4", with: { run_install: "frozen" } }
39
+ ];
40
+ const commandSteps = persona.steps.filter((s) => "command" in s);
41
+ for (const step of commandSteps) {
42
+ const severityFlag = severity ? ` --severity ${severity}` : "";
43
+ steps.push({ run: `npx harness ${step.command}${severityFlag}` });
44
+ }
45
+ const workflow = {
46
+ name: persona.name,
47
+ on: buildGitHubTriggers(persona.triggers),
48
+ jobs: {
49
+ enforce: {
50
+ "runs-on": "ubuntu-latest",
51
+ steps
52
+ }
53
+ }
54
+ };
55
+ return Ok(YAML.stringify(workflow, { lineWidth: 0 }));
56
+ } catch (error) {
57
+ return Err(
58
+ new Error(
59
+ `Failed to generate CI workflow: ${error instanceof Error ? error.message : String(error)}`
60
+ )
61
+ );
62
+ }
63
+ }
64
+
65
+ export {
66
+ generateCIWorkflow
67
+ };
@@ -0,0 +1,13 @@
1
+ // src/mcp/utils/sanitize-path.ts
2
+ import * as path from "path";
3
+ function sanitizePath(inputPath) {
4
+ const resolved = path.resolve(inputPath);
5
+ if (resolved === "/" || resolved === path.parse(resolved).root) {
6
+ throw new Error("Invalid project path: cannot use filesystem root");
7
+ }
8
+ return resolved;
9
+ }
10
+
11
+ export {
12
+ sanitizePath
13
+ };
@@ -0,0 +1,72 @@
1
+ // src/slash-commands/types.ts
2
+ var VALID_PLATFORMS = ["claude-code", "gemini-cli"];
3
+ var GENERATED_HEADER_CLAUDE = "<!-- Generated by harness generate-slash-commands. Do not edit. -->";
4
+ var GENERATED_HEADER_GEMINI = "# Generated by harness generate-slash-commands. Do not edit.";
5
+
6
+ // src/slash-commands/sync.ts
7
+ import fs from "fs";
8
+ import path from "path";
9
+
10
+ // src/agent-definitions/constants.ts
11
+ var GENERATED_HEADER_AGENT = "<!-- Generated by harness generate-agent-definitions. Do not edit. -->";
12
+
13
+ // src/slash-commands/sync.ts
14
+ function computeSyncPlan(outputDir, rendered) {
15
+ const added = [];
16
+ const updated = [];
17
+ const removed = [];
18
+ const unchanged = [];
19
+ for (const [filename, content] of rendered) {
20
+ const filePath = path.join(outputDir, filename);
21
+ if (!fs.existsSync(filePath)) {
22
+ added.push(filename);
23
+ } else {
24
+ const existing = fs.readFileSync(filePath, "utf-8");
25
+ if (existing === content) {
26
+ unchanged.push(filename);
27
+ } else {
28
+ updated.push(filename);
29
+ }
30
+ }
31
+ }
32
+ if (fs.existsSync(outputDir)) {
33
+ const existing = fs.readdirSync(outputDir).filter((f) => {
34
+ const stat = fs.statSync(path.join(outputDir, f));
35
+ return stat.isFile();
36
+ });
37
+ for (const filename of existing) {
38
+ if (rendered.has(filename)) continue;
39
+ const content = fs.readFileSync(path.join(outputDir, filename), "utf-8");
40
+ if (content.includes(GENERATED_HEADER_CLAUDE) || content.includes(GENERATED_HEADER_GEMINI) || content.includes(GENERATED_HEADER_AGENT)) {
41
+ removed.push(filename);
42
+ }
43
+ }
44
+ }
45
+ return { added, updated, removed, unchanged };
46
+ }
47
+ function applySyncPlan(outputDir, rendered, plan, deleteOrphans) {
48
+ fs.mkdirSync(outputDir, { recursive: true });
49
+ for (const filename of [...plan.added, ...plan.updated]) {
50
+ const content = rendered.get(filename);
51
+ if (content !== void 0) {
52
+ fs.writeFileSync(path.join(outputDir, filename), content);
53
+ }
54
+ }
55
+ if (deleteOrphans) {
56
+ for (const filename of plan.removed) {
57
+ const filePath = path.join(outputDir, filename);
58
+ if (fs.existsSync(filePath)) {
59
+ fs.unlinkSync(filePath);
60
+ }
61
+ }
62
+ }
63
+ }
64
+
65
+ export {
66
+ VALID_PLATFORMS,
67
+ GENERATED_HEADER_CLAUDE,
68
+ GENERATED_HEADER_GEMINI,
69
+ GENERATED_HEADER_AGENT,
70
+ computeSyncPlan,
71
+ applySyncPlan
72
+ };
@@ -0,0 +1,8 @@
1
+ import {
2
+ generateCIWorkflow
3
+ } from "./chunk-VUCPTQ6G.js";
4
+ import "./chunk-NX6DSZSM.js";
5
+ import "./chunk-MHBMTPW7.js";
6
+ export {
7
+ generateCIWorkflow
8
+ };
@@ -0,0 +1,6 @@
1
+ import {
2
+ ALLOWED_PERSONA_COMMANDS
3
+ } from "./chunk-TEFCFC4H.js";
4
+ export {
5
+ ALLOWED_PERSONA_COMMANDS
6
+ };
@@ -0,0 +1,11 @@
1
+ import {
2
+ createCreateSkillCommand,
3
+ generateSkillFiles
4
+ } from "./chunk-ECUJQS3B.js";
5
+ import "./chunk-HIOXKZYF.js";
6
+ import "./chunk-B7HFEHWP.js";
7
+ import "./chunk-MDUK2J2O.js";
8
+ export {
9
+ createCreateSkillCommand,
10
+ generateSkillFiles
11
+ };
@@ -0,0 +1,14 @@
1
+ import {
2
+ Err,
3
+ Ok,
4
+ STANDARD_COGNITIVE_MODES,
5
+ isErr,
6
+ isOk
7
+ } from "./chunk-MHBMTPW7.js";
8
+ export {
9
+ Err,
10
+ Ok,
11
+ STANDARD_COGNITIVE_MODES,
12
+ isErr,
13
+ isOk
14
+ };
@@ -2164,6 +2164,242 @@ var GraphCouplingAdapter = class {
2164
2164
  return maxDepth;
2165
2165
  }
2166
2166
  };
2167
+ var DEFAULT_THRESHOLD = 2;
2168
+ var DEFAULT_METRICS = [
2169
+ "cyclomaticComplexity",
2170
+ "fanIn",
2171
+ "fanOut",
2172
+ "hotspotScore",
2173
+ "transitiveDepth"
2174
+ ];
2175
+ var RECOGNIZED_METRICS = new Set(DEFAULT_METRICS);
2176
+ var GraphAnomalyAdapter = class {
2177
+ constructor(store) {
2178
+ this.store = store;
2179
+ }
2180
+ detect(options) {
2181
+ const threshold = options?.threshold != null && options.threshold > 0 ? options.threshold : DEFAULT_THRESHOLD;
2182
+ const requestedMetrics = options?.metrics ?? [...DEFAULT_METRICS];
2183
+ const warnings = [];
2184
+ const metricsToAnalyze = [];
2185
+ for (const m of requestedMetrics) {
2186
+ if (RECOGNIZED_METRICS.has(m)) {
2187
+ metricsToAnalyze.push(m);
2188
+ } else {
2189
+ warnings.push(m);
2190
+ }
2191
+ }
2192
+ const allOutliers = [];
2193
+ const analyzedNodeIds = /* @__PURE__ */ new Set();
2194
+ const couplingMetrics = ["fanIn", "fanOut", "transitiveDepth"];
2195
+ const needsCoupling = metricsToAnalyze.some((m) => couplingMetrics.includes(m));
2196
+ const needsComplexity = metricsToAnalyze.includes("hotspotScore");
2197
+ const cachedCouplingData = needsCoupling ? new GraphCouplingAdapter(this.store).computeCouplingData() : void 0;
2198
+ const cachedHotspotData = needsComplexity ? new GraphComplexityAdapter(this.store).computeComplexityHotspots() : void 0;
2199
+ for (const metric of metricsToAnalyze) {
2200
+ const entries = this.collectMetricValues(metric, cachedCouplingData, cachedHotspotData);
2201
+ for (const e of entries) {
2202
+ analyzedNodeIds.add(e.nodeId);
2203
+ }
2204
+ const outliers = this.computeZScoreOutliers(entries, metric, threshold);
2205
+ allOutliers.push(...outliers);
2206
+ }
2207
+ allOutliers.sort((a, b) => b.zScore - a.zScore);
2208
+ const articulationPoints = this.findArticulationPoints();
2209
+ const outlierNodeIds = new Set(allOutliers.map((o) => o.nodeId));
2210
+ const apNodeIds = new Set(articulationPoints.map((ap) => ap.nodeId));
2211
+ const overlapping = [...outlierNodeIds].filter((id) => apNodeIds.has(id));
2212
+ return {
2213
+ statisticalOutliers: allOutliers,
2214
+ articulationPoints,
2215
+ overlapping,
2216
+ summary: {
2217
+ totalNodesAnalyzed: analyzedNodeIds.size,
2218
+ outlierCount: allOutliers.length,
2219
+ articulationPointCount: articulationPoints.length,
2220
+ overlapCount: overlapping.length,
2221
+ metricsAnalyzed: metricsToAnalyze,
2222
+ warnings,
2223
+ threshold
2224
+ }
2225
+ };
2226
+ }
2227
+ collectMetricValues(metric, cachedCouplingData, cachedHotspotData) {
2228
+ const entries = [];
2229
+ if (metric === "cyclomaticComplexity") {
2230
+ const functionNodes = [
2231
+ ...this.store.findNodes({ type: "function" }),
2232
+ ...this.store.findNodes({ type: "method" })
2233
+ ];
2234
+ for (const node of functionNodes) {
2235
+ const cc = node.metadata?.cyclomaticComplexity;
2236
+ if (typeof cc === "number") {
2237
+ entries.push({
2238
+ nodeId: node.id,
2239
+ nodeName: node.name,
2240
+ nodePath: node.path,
2241
+ nodeType: node.type,
2242
+ value: cc
2243
+ });
2244
+ }
2245
+ }
2246
+ } else if (metric === "fanIn" || metric === "fanOut" || metric === "transitiveDepth") {
2247
+ const couplingData = cachedCouplingData ?? new GraphCouplingAdapter(this.store).computeCouplingData();
2248
+ const fileNodes = this.store.findNodes({ type: "file" });
2249
+ for (const fileData of couplingData.files) {
2250
+ const fileNode = fileNodes.find((n) => (n.path ?? n.name) === fileData.file);
2251
+ if (!fileNode) continue;
2252
+ entries.push({
2253
+ nodeId: fileNode.id,
2254
+ nodeName: fileNode.name,
2255
+ nodePath: fileNode.path,
2256
+ nodeType: "file",
2257
+ value: fileData[metric]
2258
+ });
2259
+ }
2260
+ } else if (metric === "hotspotScore") {
2261
+ const hotspots = cachedHotspotData ?? new GraphComplexityAdapter(this.store).computeComplexityHotspots();
2262
+ const functionNodes = [
2263
+ ...this.store.findNodes({ type: "function" }),
2264
+ ...this.store.findNodes({ type: "method" })
2265
+ ];
2266
+ for (const h of hotspots.hotspots) {
2267
+ const fnNode = functionNodes.find(
2268
+ (n) => n.name === h.function && (n.path ?? "") === (h.file ?? "")
2269
+ );
2270
+ if (!fnNode) continue;
2271
+ entries.push({
2272
+ nodeId: fnNode.id,
2273
+ nodeName: fnNode.name,
2274
+ nodePath: fnNode.path,
2275
+ nodeType: fnNode.type,
2276
+ value: h.hotspotScore
2277
+ });
2278
+ }
2279
+ }
2280
+ return entries;
2281
+ }
2282
+ computeZScoreOutliers(entries, metric, threshold) {
2283
+ if (entries.length === 0) return [];
2284
+ const values = entries.map((e) => e.value);
2285
+ const mean = values.reduce((sum, v) => sum + v, 0) / values.length;
2286
+ const variance = values.reduce((sum, v) => sum + (v - mean) ** 2, 0) / values.length;
2287
+ const stdDev = Math.sqrt(variance);
2288
+ if (stdDev === 0) return [];
2289
+ const outliers = [];
2290
+ for (const entry of entries) {
2291
+ const zScore = Math.abs(entry.value - mean) / stdDev;
2292
+ if (zScore > threshold) {
2293
+ outliers.push({
2294
+ nodeId: entry.nodeId,
2295
+ nodeName: entry.nodeName,
2296
+ nodePath: entry.nodePath,
2297
+ nodeType: entry.nodeType,
2298
+ metric,
2299
+ value: entry.value,
2300
+ zScore,
2301
+ mean,
2302
+ stdDev
2303
+ });
2304
+ }
2305
+ }
2306
+ return outliers;
2307
+ }
2308
+ findArticulationPoints() {
2309
+ const fileNodes = this.store.findNodes({ type: "file" });
2310
+ if (fileNodes.length === 0) return [];
2311
+ const nodeMap = /* @__PURE__ */ new Map();
2312
+ const adj = /* @__PURE__ */ new Map();
2313
+ for (const node of fileNodes) {
2314
+ nodeMap.set(node.id, { name: node.name, path: node.path });
2315
+ adj.set(node.id, /* @__PURE__ */ new Set());
2316
+ }
2317
+ const importEdges = this.store.getEdges({ type: "imports" });
2318
+ for (const edge of importEdges) {
2319
+ if (adj.has(edge.from) && adj.has(edge.to)) {
2320
+ adj.get(edge.from).add(edge.to);
2321
+ adj.get(edge.to).add(edge.from);
2322
+ }
2323
+ }
2324
+ const disc = /* @__PURE__ */ new Map();
2325
+ const low = /* @__PURE__ */ new Map();
2326
+ const parent = /* @__PURE__ */ new Map();
2327
+ const apSet = /* @__PURE__ */ new Set();
2328
+ let timer = 0;
2329
+ const dfs = (u) => {
2330
+ disc.set(u, timer);
2331
+ low.set(u, timer);
2332
+ timer++;
2333
+ let children = 0;
2334
+ for (const v of adj.get(u)) {
2335
+ if (!disc.has(v)) {
2336
+ children++;
2337
+ parent.set(v, u);
2338
+ dfs(v);
2339
+ low.set(u, Math.min(low.get(u), low.get(v)));
2340
+ if (parent.get(u) === null && children > 1) {
2341
+ apSet.add(u);
2342
+ }
2343
+ if (parent.get(u) !== null && low.get(v) >= disc.get(u)) {
2344
+ apSet.add(u);
2345
+ }
2346
+ } else if (v !== parent.get(u)) {
2347
+ low.set(u, Math.min(low.get(u), disc.get(v)));
2348
+ }
2349
+ }
2350
+ };
2351
+ for (const nodeId of adj.keys()) {
2352
+ if (!disc.has(nodeId)) {
2353
+ parent.set(nodeId, null);
2354
+ dfs(nodeId);
2355
+ }
2356
+ }
2357
+ const results = [];
2358
+ for (const apId of apSet) {
2359
+ const { components, dependentCount } = this.computeRemovalImpact(apId, adj);
2360
+ const info = nodeMap.get(apId);
2361
+ results.push({
2362
+ nodeId: apId,
2363
+ nodeName: info.name,
2364
+ nodePath: info.path,
2365
+ componentsIfRemoved: components,
2366
+ dependentCount
2367
+ });
2368
+ }
2369
+ results.sort((a, b) => b.dependentCount - a.dependentCount);
2370
+ return results;
2371
+ }
2372
+ computeRemovalImpact(removedId, adj) {
2373
+ const visited = /* @__PURE__ */ new Set();
2374
+ visited.add(removedId);
2375
+ const componentSizes = [];
2376
+ for (const nodeId of adj.keys()) {
2377
+ if (visited.has(nodeId)) continue;
2378
+ const queue = [nodeId];
2379
+ visited.add(nodeId);
2380
+ let size = 0;
2381
+ let head = 0;
2382
+ while (head < queue.length) {
2383
+ const current = queue[head++];
2384
+ size++;
2385
+ for (const neighbor of adj.get(current)) {
2386
+ if (!visited.has(neighbor)) {
2387
+ visited.add(neighbor);
2388
+ queue.push(neighbor);
2389
+ }
2390
+ }
2391
+ }
2392
+ componentSizes.push(size);
2393
+ }
2394
+ const components = componentSizes.length;
2395
+ if (componentSizes.length <= 1) {
2396
+ return { components, dependentCount: 0 };
2397
+ }
2398
+ const maxSize = Math.max(...componentSizes);
2399
+ const dependentCount = componentSizes.reduce((sum, s) => sum + s, 0) - maxSize;
2400
+ return { components, dependentCount };
2401
+ }
2402
+ };
2167
2403
  var PHASE_NODE_TYPES = {
2168
2404
  implement: ["file", "function", "class", "method", "interface", "variable"],
2169
2405
  review: ["adr", "document", "learning", "commit"],
@@ -2785,6 +3021,7 @@ export {
2785
3021
  EDGE_TYPES,
2786
3022
  FusionLayer,
2787
3023
  GitIngestor,
3024
+ GraphAnomalyAdapter,
2788
3025
  GraphComplexityAdapter,
2789
3026
  GraphConstraintAdapter,
2790
3027
  GraphCouplingAdapter,
@@ -0,0 +1,18 @@
1
+ import {
2
+ LinterConfigSchema,
3
+ ParseError,
4
+ RuleConfigSchema,
5
+ TemplateError,
6
+ TemplateLoadError,
7
+ generate,
8
+ validate
9
+ } from "./chunk-QPEH2QPG.js";
10
+ export {
11
+ LinterConfigSchema,
12
+ ParseError,
13
+ RuleConfigSchema,
14
+ TemplateError,
15
+ TemplateLoadError,
16
+ generate,
17
+ validate
18
+ };
@@ -17,7 +17,6 @@ import {
17
17
  EmitInteractionInputSchema,
18
18
  EntropyAnalyzer,
19
19
  EntropyConfigSchema,
20
- Err,
21
20
  ExclusionSet,
22
21
  FailureEntrySchema,
23
22
  FileSink,
@@ -29,14 +28,12 @@ import {
29
28
  NoOpExecutor,
30
29
  NoOpSink,
31
30
  NoOpTelemetryAdapter,
32
- Ok,
33
31
  PatternConfigSchema,
34
32
  QuestionSchema,
35
33
  REQUIRED_SECTIONS,
36
34
  RegressionDetector,
37
35
  RuleRegistry,
38
36
  SECURITY_DESCRIPTOR,
39
- STANDARD_COGNITIVE_MODES,
40
37
  SecurityConfigSchema,
41
38
  SecurityScanner,
42
39
  StreamIndexSchema,
@@ -104,8 +101,6 @@ import {
104
101
  getUpdateNotification,
105
102
  goRules,
106
103
  injectionRules,
107
- isErr,
108
- isOk,
109
104
  isSmallSuggestion,
110
105
  isUpdateCheckEnabled,
111
106
  listStreams,
@@ -165,7 +160,14 @@ import {
165
160
  validateKnowledgeMap,
166
161
  validatePatternConfig,
167
162
  xssRules
168
- } from "./chunk-SAB3VXOW.js";
163
+ } from "./chunk-NX6DSZSM.js";
164
+ import {
165
+ Err,
166
+ Ok,
167
+ STANDARD_COGNITIVE_MODES,
168
+ isErr,
169
+ isOk
170
+ } from "./chunk-MHBMTPW7.js";
169
171
  export {
170
172
  AGENT_DESCRIPTORS,
171
173
  ARCHITECTURE_DESCRIPTOR,