@harness-engineering/cli 1.8.2 → 1.10.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. package/dist/agents/skills/claude-code/cleanup-dead-code/SKILL.md +3 -3
  2. package/dist/agents/skills/claude-code/harness-autopilot/SKILL.md +20 -3
  3. package/dist/agents/skills/claude-code/harness-brainstorming/SKILL.md +55 -5
  4. package/dist/agents/skills/claude-code/harness-code-review/SKILL.md +36 -15
  5. package/dist/agents/skills/claude-code/harness-codebase-cleanup/SKILL.md +1 -1
  6. package/dist/agents/skills/claude-code/harness-execution/SKILL.md +70 -13
  7. package/dist/agents/skills/claude-code/harness-planning/SKILL.md +41 -3
  8. package/dist/agents/skills/claude-code/harness-pre-commit-review/SKILL.md +28 -3
  9. package/dist/agents/skills/claude-code/harness-release-readiness/SKILL.md +14 -2
  10. package/dist/agents/skills/claude-code/harness-verification/SKILL.md +18 -2
  11. package/dist/agents/skills/gemini-cli/cleanup-dead-code/SKILL.md +3 -3
  12. package/dist/agents/skills/gemini-cli/harness-autopilot/SKILL.md +20 -3
  13. package/dist/agents/skills/gemini-cli/harness-brainstorming/SKILL.md +55 -5
  14. package/dist/agents/skills/gemini-cli/harness-code-review/SKILL.md +36 -15
  15. package/dist/agents/skills/gemini-cli/harness-codebase-cleanup/SKILL.md +1 -1
  16. package/dist/agents/skills/gemini-cli/harness-execution/SKILL.md +70 -13
  17. package/dist/agents/skills/gemini-cli/harness-planning/SKILL.md +41 -3
  18. package/dist/agents/skills/gemini-cli/harness-pre-commit-review/SKILL.md +28 -3
  19. package/dist/agents/skills/gemini-cli/harness-release-readiness/SKILL.md +14 -2
  20. package/dist/agents/skills/gemini-cli/harness-verification/SKILL.md +18 -2
  21. package/dist/agents-md-EMRFLNBC.js +8 -0
  22. package/dist/architecture-5JNN5L3M.js +13 -0
  23. package/dist/bin/harness-mcp.d.ts +1 -0
  24. package/dist/bin/harness-mcp.js +28 -0
  25. package/dist/bin/harness.js +42 -8
  26. package/dist/check-phase-gate-WOKIYGAM.js +12 -0
  27. package/dist/chunk-46YA6FI3.js +293 -0
  28. package/dist/chunk-4PFMY3H7.js +248 -0
  29. package/dist/{chunk-LB4GRDDV.js → chunk-72GHBOL2.js} +1 -1
  30. package/dist/chunk-7X7ZAYMY.js +373 -0
  31. package/dist/chunk-B7HFEHWP.js +35 -0
  32. package/dist/chunk-BM3PWGXQ.js +14 -0
  33. package/dist/chunk-C2ERUR3L.js +255 -0
  34. package/dist/chunk-CWZ4Y2PO.js +189 -0
  35. package/dist/{chunk-ULSRSP53.js → chunk-ECUJQS3B.js} +11 -112
  36. package/dist/chunk-EOLRW32Q.js +72 -0
  37. package/dist/chunk-F3YDAJFQ.js +125 -0
  38. package/dist/chunk-F4PTVZWA.js +116 -0
  39. package/dist/chunk-FPIPT36X.js +187 -0
  40. package/dist/chunk-FX7SQHGD.js +103 -0
  41. package/dist/chunk-HIOXKZYF.js +15 -0
  42. package/dist/chunk-IDZNPTYD.js +16 -0
  43. package/dist/chunk-JSTQ3AWB.js +31 -0
  44. package/dist/chunk-K6XAPGML.js +27 -0
  45. package/dist/chunk-KET4QQZB.js +8 -0
  46. package/dist/chunk-LXU5M77O.js +4028 -0
  47. package/dist/chunk-MDUK2J2O.js +67 -0
  48. package/dist/chunk-MHBMTPW7.js +29 -0
  49. package/dist/chunk-MO4YQOMB.js +85 -0
  50. package/dist/chunk-NKDM3FMH.js +52 -0
  51. package/dist/{chunk-SAB3VXOW.js → chunk-NX6DSZSM.js} +144 -111
  52. package/dist/chunk-OPXH4CQN.js +62 -0
  53. package/dist/{chunk-Y7U5AYAL.js → chunk-PAHHT2IK.js} +471 -2719
  54. package/dist/chunk-PMTFPOCT.js +122 -0
  55. package/dist/chunk-PSXF277V.js +89 -0
  56. package/dist/chunk-Q6AB7W5Z.js +135 -0
  57. package/dist/chunk-QPEH2QPG.js +347 -0
  58. package/dist/chunk-TEFCFC4H.js +15 -0
  59. package/dist/chunk-TRAPF4IX.js +185 -0
  60. package/dist/chunk-VUCPTQ6G.js +67 -0
  61. package/dist/chunk-W6Y7ZW3Y.js +13 -0
  62. package/dist/chunk-ZOAWBDWU.js +72 -0
  63. package/dist/ci-workflow-ZBBUNTHQ.js +8 -0
  64. package/dist/constants-5JGUXPEK.js +6 -0
  65. package/dist/create-skill-LUWO46WF.js +11 -0
  66. package/dist/dist-D4RYGUZE.js +14 -0
  67. package/dist/{dist-K6KTTN3I.js → dist-I7DB5VKB.js} +237 -0
  68. package/dist/dist-L7LAAQAS.js +18 -0
  69. package/dist/{dist-ZODQVGC4.js → dist-PBTNVK6K.js} +8 -6
  70. package/dist/docs-PTJGD6XI.js +12 -0
  71. package/dist/engine-SCMZ3G3E.js +8 -0
  72. package/dist/entropy-YIUBGKY7.js +12 -0
  73. package/dist/feedback-WEVQSLAA.js +18 -0
  74. package/dist/generate-agent-definitions-BU5LOJTI.js +15 -0
  75. package/dist/glob-helper-5OHBUQAI.js +52 -0
  76. package/dist/graph-loader-RLO3KRIX.js +8 -0
  77. package/dist/index.d.ts +11 -1
  78. package/dist/index.js +84 -33
  79. package/dist/loader-6S6PVGSF.js +10 -0
  80. package/dist/mcp-BNLBTCXZ.js +34 -0
  81. package/dist/performance-5TVW6SA6.js +24 -0
  82. package/dist/review-pipeline-4JTQAWKW.js +9 -0
  83. package/dist/runner-VMYLHWOC.js +6 -0
  84. package/dist/runtime-PXIM7UV6.js +9 -0
  85. package/dist/security-URYTKLGK.js +9 -0
  86. package/dist/skill-executor-KVS47DAU.js +8 -0
  87. package/dist/validate-KSDUUK2M.js +12 -0
  88. package/dist/validate-cross-check-WZAX357V.js +8 -0
  89. package/dist/version-KFFPOQAX.js +6 -0
  90. package/package.json +7 -5
  91. package/dist/create-skill-UZOHMXRU.js +0 -8
  92. package/dist/validate-cross-check-DLNK423G.js +0 -7
@@ -0,0 +1,67 @@
1
+ // src/skill/schema.ts
2
+ import { z } from "zod";
3
+ var SkillPhaseSchema = z.object({
4
+ name: z.string(),
5
+ description: z.string(),
6
+ required: z.boolean().default(true)
7
+ });
8
+ var SkillCliSchema = z.object({
9
+ command: z.string(),
10
+ args: z.array(
11
+ z.object({
12
+ name: z.string(),
13
+ description: z.string(),
14
+ required: z.boolean().default(false)
15
+ })
16
+ ).default([])
17
+ });
18
+ var SkillMcpSchema = z.object({
19
+ tool: z.string(),
20
+ input: z.record(z.string())
21
+ });
22
+ var SkillStateSchema = z.object({
23
+ persistent: z.boolean().default(false),
24
+ files: z.array(z.string()).default([])
25
+ });
26
+ var ALLOWED_TRIGGERS = [
27
+ "manual",
28
+ "on_pr",
29
+ "on_commit",
30
+ "on_new_feature",
31
+ "on_bug_fix",
32
+ "on_refactor",
33
+ "on_project_init",
34
+ "on_review",
35
+ "on_milestone",
36
+ "on_task_complete",
37
+ "on_doc_check"
38
+ ];
39
+ var ALLOWED_PLATFORMS = ["claude-code", "gemini-cli"];
40
+ var ALLOWED_COGNITIVE_MODES = [
41
+ "adversarial-reviewer",
42
+ "constructive-architect",
43
+ "meticulous-implementer",
44
+ "diagnostic-investigator",
45
+ "advisory-guide",
46
+ "meticulous-verifier"
47
+ ];
48
+ var SkillMetadataSchema = z.object({
49
+ name: z.string().regex(/^[a-z][a-z0-9-]*$/, "Name must be lowercase with hyphens"),
50
+ version: z.string().regex(/^\d+\.\d+\.\d+$/, "Version must be semver format"),
51
+ description: z.string(),
52
+ cognitive_mode: z.string().regex(/^[a-z][a-z0-9]*(-[a-z0-9]+)*$/, "Cognitive mode must be kebab-case").optional(),
53
+ triggers: z.array(z.enum(ALLOWED_TRIGGERS)),
54
+ platforms: z.array(z.enum(ALLOWED_PLATFORMS)),
55
+ tools: z.array(z.string()),
56
+ cli: SkillCliSchema.optional(),
57
+ mcp: SkillMcpSchema.optional(),
58
+ type: z.enum(["rigid", "flexible"]),
59
+ phases: z.array(SkillPhaseSchema).optional(),
60
+ state: SkillStateSchema.default({}),
61
+ depends_on: z.array(z.string()).default([])
62
+ });
63
+
64
+ export {
65
+ ALLOWED_COGNITIVE_MODES,
66
+ SkillMetadataSchema
67
+ };
@@ -0,0 +1,29 @@
1
+ // ../types/dist/index.mjs
2
+ function Ok(value) {
3
+ return { ok: true, value };
4
+ }
5
+ function Err(error) {
6
+ return { ok: false, error };
7
+ }
8
+ function isOk(result) {
9
+ return result.ok === true;
10
+ }
11
+ function isErr(result) {
12
+ return result.ok === false;
13
+ }
14
+ var STANDARD_COGNITIVE_MODES = [
15
+ "adversarial-reviewer",
16
+ "constructive-architect",
17
+ "meticulous-implementer",
18
+ "diagnostic-investigator",
19
+ "advisory-guide",
20
+ "meticulous-verifier"
21
+ ];
22
+
23
+ export {
24
+ Ok,
25
+ Err,
26
+ isOk,
27
+ isErr,
28
+ STANDARD_COGNITIVE_MODES
29
+ };
@@ -0,0 +1,85 @@
1
+ import {
2
+ resolveProjectConfig
3
+ } from "./chunk-K6XAPGML.js";
4
+ import {
5
+ resultToMcpResponse
6
+ } from "./chunk-IDZNPTYD.js";
7
+ import {
8
+ sanitizePath
9
+ } from "./chunk-W6Y7ZW3Y.js";
10
+
11
+ // src/mcp/tools/architecture.ts
12
+ var checkDependenciesDefinition = {
13
+ name: "check_dependencies",
14
+ description: "Validate layer boundaries and detect circular dependencies",
15
+ inputSchema: {
16
+ type: "object",
17
+ properties: {
18
+ path: { type: "string", description: "Path to project root" }
19
+ },
20
+ required: ["path"]
21
+ }
22
+ };
23
+ async function handleCheckDependencies(input) {
24
+ let projectPath;
25
+ try {
26
+ projectPath = sanitizePath(input.path);
27
+ } catch (error) {
28
+ return {
29
+ content: [
30
+ {
31
+ type: "text",
32
+ text: `Error: ${error instanceof Error ? error.message : String(error)}`
33
+ }
34
+ ],
35
+ isError: true
36
+ };
37
+ }
38
+ const configResult = resolveProjectConfig(projectPath);
39
+ if (!configResult.ok) return resultToMcpResponse(configResult);
40
+ try {
41
+ const { validateDependencies, TypeScriptParser } = await import("./dist-PBTNVK6K.js");
42
+ const config = configResult.value;
43
+ const rawLayers = Array.isArray(config.layers) ? config.layers : [];
44
+ const layers = rawLayers.map((l) => ({
45
+ name: l.name,
46
+ patterns: [l.pattern],
47
+ allowedDependencies: l.allowedDependencies
48
+ }));
49
+ const parser = new TypeScriptParser();
50
+ const { loadGraphStore } = await import("./graph-loader-RLO3KRIX.js");
51
+ const store = await loadGraphStore(projectPath);
52
+ let graphDependencyData;
53
+ if (store) {
54
+ const { GraphConstraintAdapter } = await import("./dist-I7DB5VKB.js");
55
+ const adapter = new GraphConstraintAdapter(store);
56
+ const graphData = adapter.computeDependencyGraph();
57
+ graphDependencyData = {
58
+ nodes: [...graphData.nodes],
59
+ edges: graphData.edges.map((e) => ({ ...e }))
60
+ };
61
+ }
62
+ const result = await validateDependencies({
63
+ layers,
64
+ rootDir: projectPath,
65
+ parser,
66
+ ...graphDependencyData !== void 0 && { graphDependencyData }
67
+ });
68
+ return resultToMcpResponse(result);
69
+ } catch (error) {
70
+ return {
71
+ content: [
72
+ {
73
+ type: "text",
74
+ text: `Error: ${error instanceof Error ? error.message : String(error)}`
75
+ }
76
+ ],
77
+ isError: true
78
+ };
79
+ }
80
+ }
81
+
82
+ export {
83
+ checkDependenciesDefinition,
84
+ handleCheckDependencies
85
+ };
@@ -0,0 +1,52 @@
1
+ import {
2
+ Err,
3
+ Ok
4
+ } from "./chunk-MHBMTPW7.js";
5
+
6
+ // src/persona/generators/agents-md.ts
7
+ function formatTrigger(trigger) {
8
+ switch (trigger.event) {
9
+ case "on_pr": {
10
+ const paths = trigger.conditions?.paths?.join(", ") ?? "all files";
11
+ return `On PR (${paths})`;
12
+ }
13
+ case "on_commit": {
14
+ const branches = trigger.conditions?.branches?.join(", ") ?? "all branches";
15
+ return `On commit (${branches})`;
16
+ }
17
+ case "scheduled":
18
+ return `Scheduled (cron: ${trigger.cron})`;
19
+ case "manual":
20
+ return "Manual";
21
+ }
22
+ }
23
+ function generateAgentsMd(persona) {
24
+ try {
25
+ const triggers = persona.triggers.map(formatTrigger).join(", ");
26
+ const skills = persona.skills.join(", ");
27
+ const commands = persona.steps.filter((s) => "command" in s).map((s) => `\`harness ${s.command}\``).join(", ");
28
+ const stepSkills = persona.steps.filter((s) => "skill" in s).map((s) => `\`harness skill run ${s.skill}\``).join(", ");
29
+ const allCommands = [commands, stepSkills].filter(Boolean).join(", ");
30
+ const fragment = `## ${persona.name} Agent
31
+
32
+ **Role:** ${persona.role}
33
+
34
+ **Triggers:** ${triggers}
35
+
36
+ **Skills:** ${skills}
37
+
38
+ **When this agent flags an issue:** Fix violations before merging. Run ${allCommands} locally to validate.
39
+ `;
40
+ return Ok(fragment);
41
+ } catch (error) {
42
+ return Err(
43
+ new Error(
44
+ `Failed to generate AGENTS.md fragment: ${error instanceof Error ? error.message : String(error)}`
45
+ )
46
+ );
47
+ }
48
+ }
49
+
50
+ export {
51
+ generateAgentsMd
52
+ };
@@ -1,24 +1,7 @@
1
- // ../types/dist/index.mjs
2
- function Ok(value) {
3
- return { ok: true, value };
4
- }
5
- function Err(error) {
6
- return { ok: false, error };
7
- }
8
- function isOk(result) {
9
- return result.ok === true;
10
- }
11
- function isErr(result) {
12
- return result.ok === false;
13
- }
14
- var STANDARD_COGNITIVE_MODES = [
15
- "adversarial-reviewer",
16
- "constructive-architect",
17
- "meticulous-implementer",
18
- "diagnostic-investigator",
19
- "advisory-guide",
20
- "meticulous-verifier"
21
- ];
1
+ import {
2
+ Err,
3
+ Ok
4
+ } from "./chunk-MHBMTPW7.js";
22
5
 
23
6
  // ../core/dist/index.mjs
24
7
  import { access, constants, readFile } from "fs";
@@ -4969,6 +4952,15 @@ var FAILURES_FILE = "failures.md";
4969
4952
  var HANDOFF_FILE = "handoff.json";
4970
4953
  var GATE_CONFIG_FILE = "gate.json";
4971
4954
  var INDEX_FILE2 = "index.json";
4955
+ var MAX_CACHE_ENTRIES = 8;
4956
+ var learningsCacheMap = /* @__PURE__ */ new Map();
4957
+ var failuresCacheMap = /* @__PURE__ */ new Map();
4958
+ function evictIfNeeded(map) {
4959
+ if (map.size > MAX_CACHE_ENTRIES) {
4960
+ const oldest = map.keys().next().value;
4961
+ if (oldest !== void 0) map.delete(oldest);
4962
+ }
4963
+ }
4972
4964
  async function getStateDir(projectPath, stream) {
4973
4965
  const streamsIndexPath = path3.join(projectPath, HARNESS_DIR2, "streams", INDEX_FILE2);
4974
4966
  const hasStreams = fs4.existsSync(streamsIndexPath);
@@ -5048,6 +5040,7 @@ ${entry}`);
5048
5040
  } else {
5049
5041
  fs4.appendFileSync(learningsPath, entry);
5050
5042
  }
5043
+ learningsCacheMap.delete(learningsPath);
5051
5044
  return Ok(void 0);
5052
5045
  } catch (error) {
5053
5046
  return Err(
@@ -5066,25 +5059,35 @@ async function loadRelevantLearnings(projectPath, skillName, stream) {
5066
5059
  if (!fs4.existsSync(learningsPath)) {
5067
5060
  return Ok([]);
5068
5061
  }
5069
- const content = fs4.readFileSync(learningsPath, "utf-8");
5070
- const lines = content.split("\n");
5071
- const entries = [];
5072
- let currentBlock = [];
5073
- for (const line of lines) {
5074
- if (line.startsWith("# ")) continue;
5075
- const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
5076
- const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
5077
- if (isDatedBullet || isHeading) {
5078
- if (currentBlock.length > 0) {
5079
- entries.push(currentBlock.join("\n"));
5062
+ const stats = fs4.statSync(learningsPath);
5063
+ const cacheKey = learningsPath;
5064
+ const cached = learningsCacheMap.get(cacheKey);
5065
+ let entries;
5066
+ if (cached && cached.mtimeMs === stats.mtimeMs) {
5067
+ entries = cached.entries;
5068
+ } else {
5069
+ const content = fs4.readFileSync(learningsPath, "utf-8");
5070
+ const lines = content.split("\n");
5071
+ entries = [];
5072
+ let currentBlock = [];
5073
+ for (const line of lines) {
5074
+ if (line.startsWith("# ")) continue;
5075
+ const isDatedBullet = /^- \*\*\d{4}-\d{2}-\d{2}/.test(line);
5076
+ const isHeading = /^## \d{4}-\d{2}-\d{2}/.test(line);
5077
+ if (isDatedBullet || isHeading) {
5078
+ if (currentBlock.length > 0) {
5079
+ entries.push(currentBlock.join("\n"));
5080
+ }
5081
+ currentBlock = [line];
5082
+ } else if (line.trim() !== "" && currentBlock.length > 0) {
5083
+ currentBlock.push(line);
5080
5084
  }
5081
- currentBlock = [line];
5082
- } else if (line.trim() !== "" && currentBlock.length > 0) {
5083
- currentBlock.push(line);
5084
5085
  }
5085
- }
5086
- if (currentBlock.length > 0) {
5087
- entries.push(currentBlock.join("\n"));
5086
+ if (currentBlock.length > 0) {
5087
+ entries.push(currentBlock.join("\n"));
5088
+ }
5089
+ learningsCacheMap.set(cacheKey, { mtimeMs: stats.mtimeMs, entries });
5090
+ evictIfNeeded(learningsCacheMap);
5088
5091
  }
5089
5092
  if (!skillName) {
5090
5093
  return Ok(entries);
@@ -5117,6 +5120,7 @@ ${entry}`);
5117
5120
  } else {
5118
5121
  fs4.appendFileSync(failuresPath, entry);
5119
5122
  }
5123
+ failuresCacheMap.delete(failuresPath);
5120
5124
  return Ok(void 0);
5121
5125
  } catch (error) {
5122
5126
  return Err(
@@ -5135,6 +5139,12 @@ async function loadFailures(projectPath, stream) {
5135
5139
  if (!fs4.existsSync(failuresPath)) {
5136
5140
  return Ok([]);
5137
5141
  }
5142
+ const stats = fs4.statSync(failuresPath);
5143
+ const cacheKey = failuresPath;
5144
+ const cached = failuresCacheMap.get(cacheKey);
5145
+ if (cached && cached.mtimeMs === stats.mtimeMs) {
5146
+ return Ok(cached.entries);
5147
+ }
5138
5148
  const content = fs4.readFileSync(failuresPath, "utf-8");
5139
5149
  const entries = [];
5140
5150
  for (const line of content.split("\n")) {
@@ -5148,6 +5158,8 @@ async function loadFailures(projectPath, stream) {
5148
5158
  });
5149
5159
  }
5150
5160
  }
5161
+ failuresCacheMap.set(cacheKey, { mtimeMs: stats.mtimeMs, entries });
5162
+ evictIfNeeded(failuresCacheMap);
5151
5163
  return Ok(entries);
5152
5164
  } catch (error) {
5153
5165
  return Err(
@@ -5176,6 +5188,7 @@ async function archiveFailures(projectPath, stream) {
5176
5188
  counter++;
5177
5189
  }
5178
5190
  fs4.renameSync(failuresPath, path3.join(archiveDir, archiveName));
5191
+ failuresCacheMap.delete(failuresPath);
5179
5192
  return Ok(void 0);
5180
5193
  } catch (error) {
5181
5194
  return Err(
@@ -6199,14 +6212,22 @@ async function runCIChecks(input) {
6199
6212
  const { projectRoot, config, skip = [], failOn = "error" } = input;
6200
6213
  try {
6201
6214
  const checks = [];
6202
- for (const name of ALL_CHECKS) {
6203
- if (skip.includes(name)) {
6204
- checks.push({ name, status: "skip", issues: [], durationMs: 0 });
6205
- } else {
6206
- const result = await runSingleCheck(name, projectRoot, config);
6207
- checks.push(result);
6208
- }
6215
+ const skippedSet = new Set(skip);
6216
+ if (skippedSet.has("validate")) {
6217
+ checks.push({ name: "validate", status: "skip", issues: [], durationMs: 0 });
6218
+ } else {
6219
+ checks.push(await runSingleCheck("validate", projectRoot, config));
6209
6220
  }
6221
+ const remainingChecks = ALL_CHECKS.slice(1);
6222
+ const phase2Results = await Promise.all(
6223
+ remainingChecks.map(async (name) => {
6224
+ if (skippedSet.has(name)) {
6225
+ return { name, status: "skip", issues: [], durationMs: 0 };
6226
+ }
6227
+ return runSingleCheck(name, projectRoot, config);
6228
+ })
6229
+ );
6230
+ checks.push(...phase2Results);
6210
6231
  const summary = buildSummary(checks);
6211
6232
  const exitCode = determineExitCode(summary, failOn);
6212
6233
  const report = {
@@ -6328,76 +6349,93 @@ async function runMechanicalChecks(options) {
6328
6349
  });
6329
6350
  }
6330
6351
  }
6352
+ const parallelChecks = [];
6331
6353
  if (!skip.includes("check-docs")) {
6332
- try {
6333
- const docsDir = path6.join(projectRoot, config.docsDir ?? "docs");
6334
- const result = await checkDocCoverage("project", { docsDir });
6335
- if (!result.ok) {
6336
- statuses["check-docs"] = "warn";
6337
- findings.push({
6338
- tool: "check-docs",
6339
- file: docsDir,
6340
- message: result.error.message,
6341
- severity: "warning"
6342
- });
6343
- } else if (result.value.gaps && result.value.gaps.length > 0) {
6344
- statuses["check-docs"] = "warn";
6345
- for (const gap of result.value.gaps) {
6346
- findings.push({
6354
+ parallelChecks.push(
6355
+ (async () => {
6356
+ const localFindings = [];
6357
+ try {
6358
+ const docsDir = path6.join(projectRoot, config.docsDir ?? "docs");
6359
+ const result = await checkDocCoverage("project", { docsDir });
6360
+ if (!result.ok) {
6361
+ statuses["check-docs"] = "warn";
6362
+ localFindings.push({
6363
+ tool: "check-docs",
6364
+ file: docsDir,
6365
+ message: result.error.message,
6366
+ severity: "warning"
6367
+ });
6368
+ } else if (result.value.gaps && result.value.gaps.length > 0) {
6369
+ statuses["check-docs"] = "warn";
6370
+ for (const gap of result.value.gaps) {
6371
+ localFindings.push({
6372
+ tool: "check-docs",
6373
+ file: gap.file,
6374
+ message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
6375
+ severity: "warning"
6376
+ });
6377
+ }
6378
+ } else {
6379
+ statuses["check-docs"] = "pass";
6380
+ }
6381
+ } catch (err) {
6382
+ statuses["check-docs"] = "warn";
6383
+ localFindings.push({
6347
6384
  tool: "check-docs",
6348
- file: gap.file,
6349
- message: `Undocumented: ${gap.file} (suggested: ${gap.suggestedSection})`,
6385
+ file: path6.join(projectRoot, "docs"),
6386
+ message: err instanceof Error ? err.message : String(err),
6350
6387
  severity: "warning"
6351
6388
  });
6352
6389
  }
6353
- } else {
6354
- statuses["check-docs"] = "pass";
6355
- }
6356
- } catch (err) {
6357
- statuses["check-docs"] = "warn";
6358
- findings.push({
6359
- tool: "check-docs",
6360
- file: path6.join(projectRoot, "docs"),
6361
- message: err instanceof Error ? err.message : String(err),
6362
- severity: "warning"
6363
- });
6364
- }
6390
+ return localFindings;
6391
+ })()
6392
+ );
6365
6393
  }
6366
6394
  if (!skip.includes("security-scan")) {
6367
- try {
6368
- const securityConfig = parseSecurityConfig(config.security);
6369
- if (!securityConfig.enabled) {
6370
- statuses["security-scan"] = "skip";
6371
- } else {
6372
- const scanner = new SecurityScanner(securityConfig);
6373
- scanner.configureForProject(projectRoot);
6374
- const filesToScan = changedFiles ?? [];
6375
- const scanResult = await scanner.scanFiles(filesToScan);
6376
- if (scanResult.findings.length > 0) {
6377
- statuses["security-scan"] = "warn";
6378
- for (const f of scanResult.findings) {
6379
- findings.push({
6380
- tool: "security-scan",
6381
- file: f.file,
6382
- line: f.line,
6383
- ruleId: f.ruleId,
6384
- message: f.message,
6385
- severity: f.severity === "info" ? "warning" : f.severity
6386
- });
6395
+ parallelChecks.push(
6396
+ (async () => {
6397
+ const localFindings = [];
6398
+ try {
6399
+ const securityConfig = parseSecurityConfig(config.security);
6400
+ if (!securityConfig.enabled) {
6401
+ statuses["security-scan"] = "skip";
6402
+ } else {
6403
+ const scanner = new SecurityScanner(securityConfig);
6404
+ scanner.configureForProject(projectRoot);
6405
+ const filesToScan = changedFiles ?? [];
6406
+ const scanResult = await scanner.scanFiles(filesToScan);
6407
+ if (scanResult.findings.length > 0) {
6408
+ statuses["security-scan"] = "warn";
6409
+ for (const f of scanResult.findings) {
6410
+ localFindings.push({
6411
+ tool: "security-scan",
6412
+ file: f.file,
6413
+ line: f.line,
6414
+ ruleId: f.ruleId,
6415
+ message: f.message,
6416
+ severity: f.severity === "info" ? "warning" : f.severity
6417
+ });
6418
+ }
6419
+ } else {
6420
+ statuses["security-scan"] = "pass";
6421
+ }
6387
6422
  }
6388
- } else {
6389
- statuses["security-scan"] = "pass";
6423
+ } catch (err) {
6424
+ statuses["security-scan"] = "warn";
6425
+ localFindings.push({
6426
+ tool: "security-scan",
6427
+ file: projectRoot,
6428
+ message: err instanceof Error ? err.message : String(err),
6429
+ severity: "warning"
6430
+ });
6390
6431
  }
6391
- }
6392
- } catch (err) {
6393
- statuses["security-scan"] = "warn";
6394
- findings.push({
6395
- tool: "security-scan",
6396
- file: projectRoot,
6397
- message: err instanceof Error ? err.message : String(err),
6398
- severity: "warning"
6399
- });
6400
- }
6432
+ return localFindings;
6433
+ })()
6434
+ );
6435
+ }
6436
+ const parallelResults = await Promise.all(parallelChecks);
6437
+ for (const result of parallelResults) {
6438
+ findings.push(...result);
6401
6439
  }
6402
6440
  const hasErrors = findings.some((f) => f.severity === "error");
6403
6441
  const stopPipeline = statuses.validate === "fail" || statuses["check-deps"] === "fail";
@@ -8203,11 +8241,6 @@ Run "harness update" to upgrade.`;
8203
8241
  var VERSION = "1.8.2";
8204
8242
 
8205
8243
  export {
8206
- Ok,
8207
- Err,
8208
- isOk,
8209
- isErr,
8210
- STANDARD_COGNITIVE_MODES,
8211
8244
  createError,
8212
8245
  validateFileStructure,
8213
8246
  validateConfig,
@@ -0,0 +1,62 @@
1
+ // src/mcp/utils/graph-loader.ts
2
+ import * as path from "path";
3
+ import { stat } from "fs/promises";
4
+ var MAX_CACHE_ENTRIES = 8;
5
+ var cache = /* @__PURE__ */ new Map();
6
+ var pending = /* @__PURE__ */ new Map();
7
+ function clearGraphStoreCache() {
8
+ cache.clear();
9
+ pending.clear();
10
+ }
11
+ function evictIfNeeded() {
12
+ if (cache.size > MAX_CACHE_ENTRIES) {
13
+ const oldest = cache.keys().next().value;
14
+ if (oldest !== void 0) cache.delete(oldest);
15
+ }
16
+ }
17
+ async function doLoadGraphStore(projectRoot) {
18
+ const { GraphStore } = await import("./dist-I7DB5VKB.js");
19
+ const graphDir = path.join(projectRoot, ".harness", "graph");
20
+ const store = new GraphStore();
21
+ const loaded = await store.load(graphDir);
22
+ if (!loaded) return null;
23
+ return store;
24
+ }
25
+ async function loadGraphStore(projectRoot) {
26
+ const graphDir = path.join(projectRoot, ".harness", "graph");
27
+ const graphPath = path.join(graphDir, "graph.json");
28
+ let mtimeMs;
29
+ try {
30
+ const stats = await stat(graphPath);
31
+ mtimeMs = stats.mtimeMs;
32
+ } catch {
33
+ return null;
34
+ }
35
+ const cached = cache.get(projectRoot);
36
+ if (cached && cached.mtimeMs === mtimeMs) {
37
+ return cached.store;
38
+ }
39
+ const pendingLoad = pending.get(projectRoot);
40
+ let promise;
41
+ if (pendingLoad && pendingLoad.mtimeMs === mtimeMs) {
42
+ promise = pendingLoad.promise;
43
+ } else {
44
+ promise = doLoadGraphStore(projectRoot);
45
+ pending.set(projectRoot, { promise, mtimeMs });
46
+ }
47
+ const store = await promise;
48
+ const currentPending = pending.get(projectRoot);
49
+ if (currentPending && currentPending.promise === promise) {
50
+ pending.delete(projectRoot);
51
+ }
52
+ if (store !== null) {
53
+ cache.set(projectRoot, { store, mtimeMs });
54
+ evictIfNeeded();
55
+ }
56
+ return store;
57
+ }
58
+
59
+ export {
60
+ clearGraphStoreCache,
61
+ loadGraphStore
62
+ };