@kaiohenricunha/harness 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +130 -0
  3. package/package.json +68 -0
  4. package/plugins/harness/.claude-plugin/plugin.json +8 -0
  5. package/plugins/harness/README.md +74 -0
  6. package/plugins/harness/bin/harness-check-instruction-drift.mjs +77 -0
  7. package/plugins/harness/bin/harness-check-spec-coverage.mjs +81 -0
  8. package/plugins/harness/bin/harness-detect-drift.mjs +53 -0
  9. package/plugins/harness/bin/harness-doctor.mjs +145 -0
  10. package/plugins/harness/bin/harness-init.mjs +89 -0
  11. package/plugins/harness/bin/harness-validate-skills.mjs +92 -0
  12. package/plugins/harness/bin/harness-validate-specs.mjs +70 -0
  13. package/plugins/harness/bin/harness.mjs +93 -0
  14. package/plugins/harness/hooks/guard-destructive-git.sh +58 -0
  15. package/plugins/harness/scripts/auto-update-manifest.mjs +20 -0
  16. package/plugins/harness/scripts/detect-branch-drift.mjs +81 -0
  17. package/plugins/harness/scripts/lib/output.sh +105 -0
  18. package/plugins/harness/scripts/refresh-worktrees.sh +35 -0
  19. package/plugins/harness/scripts/validate-settings.sh +202 -0
  20. package/plugins/harness/src/check-instruction-drift.mjs +127 -0
  21. package/plugins/harness/src/check-spec-coverage.mjs +95 -0
  22. package/plugins/harness/src/index.mjs +57 -0
  23. package/plugins/harness/src/init-harness-scaffold.mjs +121 -0
  24. package/plugins/harness/src/lib/argv.mjs +108 -0
  25. package/plugins/harness/src/lib/debug.mjs +37 -0
  26. package/plugins/harness/src/lib/errors.mjs +147 -0
  27. package/plugins/harness/src/lib/exit-codes.mjs +18 -0
  28. package/plugins/harness/src/lib/output.mjs +90 -0
  29. package/plugins/harness/src/spec-harness-lib.mjs +359 -0
  30. package/plugins/harness/src/validate-skills-inventory.mjs +148 -0
  31. package/plugins/harness/src/validate-specs.mjs +217 -0
  32. package/plugins/harness/templates/claude/hooks/guard-destructive-git.sh +50 -0
  33. package/plugins/harness/templates/claude/settings.headless.json +24 -0
  34. package/plugins/harness/templates/claude/settings.json +16 -0
  35. package/plugins/harness/templates/claude/skills-manifest.json +6 -0
  36. package/plugins/harness/templates/docs/repo-facts.json +17 -0
  37. package/plugins/harness/templates/docs/specs/README.md +36 -0
  38. package/plugins/harness/templates/githooks/pre-commit +9 -0
  39. package/plugins/harness/templates/workflows/ai-review.yml +28 -0
  40. package/plugins/harness/templates/workflows/detect-drift.yml +15 -0
  41. package/plugins/harness/templates/workflows/validate-skills.yml +36 -0
@@ -0,0 +1,359 @@
1
+ import { execFileSync } from "child_process";
2
+ import { existsSync, readFileSync, readdirSync } from "fs";
3
+ import path from "path";
4
+ import { debug } from "./lib/debug.mjs";
5
+
6
/**
 * Execution context threaded through every validator.
 *
 * @typedef {object} HarnessContext
 * @property {string} repoRoot Absolute path to the repository root.
 * @property {string} specsRoot Absolute path to `<repoRoot>/docs/specs`.
 * @property {string} manifestPath Absolute path to `<repoRoot>/.claude/skills-manifest.json`.
 * @property {string} factsPath Absolute path to `<repoRoot>/docs/repo-facts.json`.
 */

/**
 * Uniform shape returned by every validator.
 *
 * @typedef {object} ValidationResult
 * @property {boolean} ok True when `errors.length === 0`.
 * @property {Array<import('./lib/errors.mjs').ValidationError>} errors
 */

/**
 * Build a {@link HarnessContext}. The repository root is resolved in order
 * of precedence:
 *
 * 1. the `repoRoot` option,
 * 2. the `HARNESS_REPO_ROOT` environment variable,
 * 3. `git rev-parse --show-toplevel` in the current working directory.
 *
 * @param {{ repoRoot?: string }} [opts]
 * @returns {HarnessContext}
 * @throws {Error} When none of the three strategies yields a root
 *   (typically: outside a git repo with no explicit override).
 */
export function createHarnessContext({ repoRoot } = {}) {
  const resolvedRoot =
    repoRoot ?? process.env.HARNESS_REPO_ROOT ?? resolveRepoRootFromGit();
  if (!resolvedRoot) {
    throw new Error(
      "harness: repoRoot not provided; pass { repoRoot } or set HARNESS_REPO_ROOT, or run inside a git repo",
    );
  }
  const under = (...segments) => path.join(resolvedRoot, ...segments);
  return {
    repoRoot: resolvedRoot,
    specsRoot: under("docs", "specs"),
    manifestPath: under(".claude", "skills-manifest.json"),
    factsPath: under("docs", "repo-facts.json"),
  };
}
55
+
56
/**
 * Best-effort lookup of the repository root via
 * `git rev-parse --show-toplevel`. Returns `null` (after emitting a debug
 * trace) when git fails, e.g. when run outside any repository.
 *
 * @returns {string | null}
 */
function resolveRepoRootFromGit() {
  try {
    const stdout = execFileSync("git", ["rev-parse", "--show-toplevel"], {
      encoding: "utf8",
    });
    return stdout.trim();
  } catch (err) {
    debug("git:rev-parse", err.message);
    return null;
  }
}
66
+
67
/**
 * Normalize a platform-native path (which may use `\` on Windows) to POSIX
 * separators so glob and prefix comparisons behave identically on every OS.
 *
 * @param {string} p
 * @returns {string}
 */
export function toPosix(p) {
  // On POSIX `path.sep` is already "/", so this is the identity there.
  return path.sep === "/" ? p : p.replaceAll(path.sep, "/");
}
77
+
78
/**
 * Read and parse the JSON file at `<repoRoot>/<relativePath>`.
 * Lets the raw `SyntaxError` bubble when the file is not valid JSON.
 *
 * @param {HarnessContext} ctx
 * @param {string} relativePath
 * @returns {any}
 */
export function readJson(ctx, relativePath) {
  const absolutePath = path.join(ctx.repoRoot, relativePath);
  const raw = readFileSync(absolutePath, "utf8");
  return JSON.parse(raw);
}
89
+
90
/**
 * Read the file at `<repoRoot>/<relativePath>` as UTF-8 text.
 *
 * @param {HarnessContext} ctx
 * @param {string} relativePath
 * @returns {string}
 */
export function readText(ctx, relativePath) {
  const absolutePath = path.join(ctx.repoRoot, relativePath);
  return readFileSync(absolutePath, "utf8");
}
100
+
101
/**
 * Report whether `<repoRoot>/<relativePath>` exists on disk.
 *
 * @param {HarnessContext} ctx
 * @param {string} relativePath
 * @returns {boolean}
 */
export function pathExists(ctx, relativePath) {
  const candidate = path.join(ctx.repoRoot, relativePath);
  return existsSync(candidate);
}
111
+
112
/**
 * Run `git <args>` with `cwd = ctx.repoRoot` and return trimmed stdout.
 * A non-zero exit bubbles as the underlying `execFileSync` error.
 *
 * @param {HarnessContext} ctx
 * @param {string[]} args
 * @returns {string}
 */
export function git(ctx, args) {
  const stdout = execFileSync("git", args, {
    cwd: ctx.repoRoot,
    encoding: "utf8",
  });
  return stdout.trim();
}
123
+
124
// Single source of truth for repo metadata consumed by the validators.
const REPO_FACTS_RELATIVE_PATH = "docs/repo-facts.json";

/**
 * Load the repository's authoritative facts file at `docs/repo-facts.json`.
 *
 * @param {HarnessContext} ctx
 * @returns {any} Parsed contents; the shape is repo-specific.
 */
export function loadFacts(ctx) {
  return readJson(ctx, REPO_FACTS_RELATIVE_PATH);
}
133
+
134
/**
 * List every sub-directory name under `docs/specs/` (the spec ids).
 *
 * @param {HarnessContext} ctx
 * @returns {string[]} Spec ids sorted alphabetically.
 */
export function listSpecDirs(ctx) {
  const ids = [];
  for (const entry of readdirSync(ctx.specsRoot, { withFileTypes: true })) {
    if (entry.isDirectory()) ids.push(entry.name);
  }
  return ids.sort();
}
146
+
147
// Top-level directories never worth walking: VCS internals and build output.
const DEFAULT_IGNORED_TOP_LEVEL = new Set([
  ".git",
  "node_modules",
  "dist",
  "coverage",
]);
// Repo-relative POSIX paths skipped during the walk (compared against the
// full relative path, so e.g. "bin" matches only the top-level "bin").
const DEFAULT_IGNORED_DIRS = new Set([
  ".claude/worktrees",
  "bin",
  "api/tmp",
  "test-results",
]);
159
+
160
/**
 * Recursively list every file under `ctx.repoRoot` as repo-relative POSIX
 * paths, sorted alphabetically. Top-level entries named in `ignoredTopLevel`
 * (default: `.git`, `node_modules`, `dist`, `coverage`) are never entered,
 * and any entry whose repo-relative path is in `ignoredDirectories`
 * (default: `.claude/worktrees`, `bin`, `api/tmp`, `test-results`) is
 * skipped.
 *
 * @param {HarnessContext} ctx
 * @param {{ ignoredTopLevel?: Set<string>, ignoredDirectories?: Set<string> }} [opts]
 * @returns {string[]} Repo-relative POSIX paths sorted alphabetically.
 */
export function listRepoPaths(ctx, { ignoredTopLevel, ignoredDirectories } = {}) {
  const topLevelSkip = ignoredTopLevel ?? DEFAULT_IGNORED_TOP_LEVEL;
  const skipByRelPath = ignoredDirectories ?? DEFAULT_IGNORED_DIRS;
  const files = [];
  // Iterative DFS; visit order is irrelevant because the result is sorted.
  const pending = [""];
  while (pending.length > 0) {
    const relativeDir = pending.pop();
    const absoluteDir = path.join(ctx.repoRoot, relativeDir);
    for (const entry of readdirSync(absoluteDir, { withFileTypes: true })) {
      if (relativeDir === "" && topLevelSkip.has(entry.name)) continue;
      const rel = path.join(relativeDir, entry.name).split(path.sep).join("/");
      // NOTE: this check matches files as well as directories (a top-level
      // file named "bin" is skipped too) — intentional, mirrors the skip set.
      if (skipByRelPath.has(rel)) continue;
      if (entry.isDirectory()) {
        pending.push(rel);
      } else {
        files.push(rel);
      }
    }
  }
  return files.sort();
}
192
+
193
/**
 * Escape every regex metacharacter in `v` so the result matches `v`
 * literally when embedded in a `new RegExp(...)` pattern.
 *
 * @param {string} v
 * @returns {string}
 */
export function escapeRegex(v) {
  return v.replace(/[|\\{}()[\]^$+*?.]/g, (ch) => `\\${ch}`);
}
203
+
204
/**
 * Compile a glob pattern into a `RegExp` anchored `^…$`. Supports `**`
 * (any characters, including `/`), `*` (any characters except `/`) and
 * `?` (exactly one character). POSIX semantics — no brace expansion, no
 * character classes.
 *
 * @param {string} glob
 * @returns {RegExp}
 */
export function globToRegExp(glob) {
  const pieces = ["^"];
  let i = 0;
  while (i < glob.length) {
    if (glob.startsWith("**", i)) {
      pieces.push(".*");
      i += 2;
    } else if (glob[i] === "*") {
      pieces.push("[^/]*");
      i += 1;
    } else if (glob[i] === "?") {
      pieces.push(".");
      i += 1;
    } else {
      // Literal character: escape regex metacharacters.
      pieces.push(glob[i].replace(/[|\\{}()[\]^$+*?.]/g, "\\$&"));
      i += 1;
    }
  }
  pieces.push("$");
  return new RegExp(pieces.join(""));
}
233
+
234
/**
 * True when `value` matches the glob `pattern` (see {@link globToRegExp}
 * for the supported syntax).
 *
 * @param {string} pattern
 * @param {string} value
 * @returns {boolean}
 */
export function matchesGlob(pattern, value) {
  const compiled = globToRegExp(pattern);
  return compiled.test(value);
}
244
+
245
/**
 * Resolve `pattern` against an array of candidate paths. A bare (glob-free)
 * pattern is treated as an exact path or a directory prefix, so
 * `docs/specs/foo` covers `docs/specs/foo/spec.json` etc.
 *
 * @param {string} pattern
 * @param {string[]} paths
 * @returns {boolean}
 */
export function anyPathMatches(pattern, paths) {
  const normalized = toPosix(pattern);
  const hasGlobChars = normalized.includes("*") || normalized.includes("?");
  if (hasGlobChars) {
    const rx = globToRegExp(normalized);
    return paths.some((candidate) => rx.test(candidate));
  }
  const directoryPrefix = `${normalized}/`;
  return paths.some(
    (candidate) =>
      candidate === normalized || candidate.startsWith(directoryPrefix),
  );
}
265
+
266
// ---- PR context helpers (unchanged from squadranks) ----

/**
 * Extract the body of a markdown H2 section named `heading` (e.g.
 * `## Spec ID`) from a PR body, case-insensitively. Returns `""` when the
 * section is absent.
 *
 * @param {string} body
 * @param {string} heading
 * @returns {string}
 */
export function extractTemplateSection(body, heading) {
  if (!body) return "";
  // Escape the heading so it is matched literally inside the pattern.
  const literalHeading = heading.replace(/[|\\{}()[\]^$+*?.]/g, "\\$&");
  const sectionRx = new RegExp(
    `##\\s*${literalHeading}\\s*\\n([\\s\\S]*?)(?=\\n##\\s|$)`,
    "i",
  );
  const match = body.match(sectionRx);
  return match === null ? "" : match[1].trim();
}
286
+
287
/**
 * A section is "meaningful" when it contains at least one non-comment,
 * non-whitespace character. `<!-- ... -->` HTML comments are stripped
 * before the check.
 *
 * @param {string} section
 * @returns {boolean}
 */
export function isMeaningfulSection(section) {
  if (!section) return false;
  const withoutComments = section.replace(/<!--[\s\S]*?-->/g, "");
  return withoutComments.trim() !== "";
}
299
+
300
/**
 * Pull-request execution context from the GitHub Actions environment.
 *
 * @typedef {object} PullRequestContext
 * @property {boolean} isPullRequest Derived from `GITHUB_EVENT_NAME === "pull_request"`.
 * @property {string} body `PR_BODY` env — populated by workflows that pipe PR text in.
 * @property {string} actor `GITHUB_ACTOR` env.
 */

/**
 * Read pull-request metadata from the standard GitHub Actions env vars,
 * defaulting each field to `""` when unset.
 *
 * @returns {PullRequestContext}
 */
export function getPullRequestContext() {
  const { GITHUB_EVENT_NAME, PR_BODY, GITHUB_ACTOR } = process.env;
  return {
    isPullRequest: (GITHUB_EVENT_NAME ?? "") === "pull_request",
    body: PR_BODY ?? "",
    actor: GITHUB_ACTOR ?? "",
  };
}
321
+
322
// Actors exempt from the PR-body spec/rationale contract.
const KNOWN_BOT_ACTORS = new Set(["dependabot[bot]", "github-actions[bot]"]);

/**
 * Report whether `actor` is one of the recognized bot authors that bypasses
 * the PR-body spec/rationale contract.
 *
 * @param {string} actor
 * @returns {boolean}
 */
export function isBotActor(actor) {
  return KNOWN_BOT_ACTORS.has(actor);
}
334
+
335
/**
 * Resolve the list of files changed in the current PR.
 *
 * Prefers `HARNESS_CHANGED_FILES` (comma-separated; entries are trimmed and
 * blanks dropped) when set; otherwise falls back to
 * `git diff --name-only origin/<base>...HEAD`, defaulting `base` to
 * `GITHUB_BASE_REF || "main"`. Returns `[]` on git failure — the failure is
 * surfaced via `debug("git:diff", …)` when `HARNESS_DEBUG=1`.
 *
 * @returns {string[]}
 */
export function getChangedFiles() {
  const csv = process.env.HARNESS_CHANGED_FILES;
  if (csv) {
    // Trim each entry so "a.js, b.js" behaves the same as "a.js,b.js".
    return csv
      .split(",")
      .map((entry) => entry.trim())
      .filter(Boolean);
  }
  const base = process.env.GITHUB_BASE_REF || "main";
  try {
    const out = execFileSync(
      "git",
      ["diff", "--name-only", `origin/${base}...HEAD`],
      { encoding: "utf8" },
    );
    // Split on \r?\n so CRLF-emitting git configurations don't leave a
    // trailing \r on every path.
    return out.split(/\r?\n/).filter(Boolean);
  } catch (err) {
    debug("git:diff", err.message);
    return [];
  }
}
@@ -0,0 +1,148 @@
1
+ import { readFileSync, writeFileSync, readdirSync, existsSync, statSync } from "fs";
2
+ import { createHash } from "crypto";
3
+ import path from "path";
4
+ import { ValidationError, ERROR_CODES } from "./lib/errors.mjs";
5
+
6
/**
 * Hash `content` with SHA-256 and return it in the manifest's
 * `sha256:<hex>` checksum format.
 *
 * @param {string} content
 * @returns {string}
 */
function sha256(content) {
  const digest = createHash("sha256").update(content).digest("hex");
  return `sha256:${digest}`;
}
9
+
10
/**
 * Read and parse the skills manifest at `ctx.manifestPath`.
 *
 * @param {import('./spec-harness-lib.mjs').HarnessContext} ctx
 * @returns {any} Parsed manifest object.
 * @throws {Error} When the manifest file does not exist.
 */
function loadManifest(ctx) {
  const { manifestPath } = ctx;
  if (!existsSync(manifestPath)) {
    throw new Error(`Manifest not found: ${manifestPath}`);
  }
  const raw = readFileSync(manifestPath, "utf8");
  return JSON.parse(raw);
}
16
+
17
/**
 * List markdown command files under `.claude/commands/` as repo-relative
 * POSIX paths. Returns `[]` when the directory does not exist.
 *
 * @param {import('./spec-harness-lib.mjs').HarnessContext} ctx
 * @returns {string[]}
 */
function listCommandFiles(ctx) {
  const commandsDir = path.join(ctx.repoRoot, ".claude", "commands");
  if (!existsSync(commandsDir)) return [];
  const files = [];
  for (const name of readdirSync(commandsDir)) {
    if (name.endsWith(".md")) files.push(`.claude/commands/${name}`);
  }
  return files;
}
24
+
25
/**
 * List skill files under `.claude/skills/`: top-level `*.md` files plus
 * directory-form skills (Anthropic convention) that carry a `SKILL.md`
 * inside. Returns `[]` when the skills directory does not exist.
 *
 * @param {import('./spec-harness-lib.mjs').HarnessContext} ctx
 * @returns {string[]}
 */
function listSkillFilesRecursive(ctx) {
  const skillsDir = path.join(ctx.repoRoot, ".claude", "skills");
  if (!existsSync(skillsDir)) return [];
  const found = [];
  for (const entry of readdirSync(skillsDir, { withFileTypes: true })) {
    if (entry.isFile()) {
      if (entry.name.endsWith(".md")) {
        found.push(`.claude/skills/${entry.name}`);
      }
      continue;
    }
    if (!entry.isDirectory()) continue;
    // Directory-form skill: only counts when a SKILL.md exists inside it.
    if (existsSync(path.join(skillsDir, entry.name, "SKILL.md"))) {
      found.push(`.claude/skills/${entry.name}/SKILL.md`);
    }
  }
  return found;
}
41
+
42
/**
 * Validate `.claude/skills-manifest.json`:
 * - every indexed file exists on disk
 * - recorded sha256 checksum matches the current file contents
 * - no file on disk under `.claude/commands/` or `.claude/skills/` is
 *   orphaned (i.e. missing from the manifest)
 * - the `dependencies[]` DAG has no cycles
 *
 * Errors are accumulated (not thrown), so a single run reports every
 * problem at once.
 *
 * @param {import('./spec-harness-lib.mjs').HarnessContext} ctx
 * @returns {{
 *   ok: boolean,
 *   errors: import('./lib/errors.mjs').ValidationError[],
 *   manifest: any
 * }}
 */
export function validateManifest(ctx) {
  const errors = [];
  const manifest = loadManifest(ctx);
  // Paths indexed by the manifest; consulted below for the orphan check.
  const entryPaths = new Set(manifest.skills.map((s) => s.path));

  // Pass 1: each manifest entry must exist on disk and match its checksum.
  for (const skill of manifest.skills) {
    const abs = path.join(ctx.repoRoot, skill.path);
    if (!existsSync(abs)) {
      errors.push(new ValidationError({
        code: ERROR_CODES.MANIFEST_ENTRY_MISSING,
        category: "manifest",
        file: skill.path,
        message: `File not found: ${skill.path}`,
        hint: "remove the manifest entry or restore the file on disk",
      }));
      // No file on disk — nothing to checksum.
      continue;
    }
    const actual = sha256(readFileSync(abs, "utf8"));
    if (actual !== skill.checksum) {
      errors.push(new ValidationError({
        code: ERROR_CODES.MANIFEST_CHECKSUM_MISMATCH,
        category: "manifest",
        file: skill.path,
        expected: skill.checksum,
        got: actual,
        message: `Checksum mismatch for ${skill.name}: expected ${skill.checksum}, got ${actual}`,
        hint: "run `node plugins/harness/scripts/auto-update-manifest.mjs` to refresh checksums",
      }));
    }
  }

  // Pass 2: every command/skill file on disk must be indexed by the manifest.
  const onDisk = [...listCommandFiles(ctx), ...listSkillFilesRecursive(ctx)];
  for (const p of onDisk) {
    if (!entryPaths.has(p)) {
      errors.push(new ValidationError({
        code: ERROR_CODES.MANIFEST_ORPHAN_FILE,
        category: "manifest",
        file: p,
        message: `Orphan on disk (not in manifest): ${p}`,
        hint: "add the file to .claude/skills-manifest.json or delete it",
      }));
    }
  }

  // Pass 3: DAG check — no cycles in dependencies[]. Recursive DFS with
  // tri-color marking: WHITE = unvisited, GRAY = on the current DFS path,
  // BLACK = fully explored.
  const graph = new Map(manifest.skills.map((s) => [s.name, s.dependencies ?? []]));
  const WHITE = 0, GRAY = 1, BLACK = 2;
  const color = new Map();
  for (const name of graph.keys()) color.set(name, WHITE);
  function visit(name, stack) {
    // Reaching a GRAY node means we walked back onto the current DFS path:
    // that is a cycle. `stack` carries the path taken to get here.
    if (color.get(name) === GRAY) {
      errors.push(new ValidationError({
        code: ERROR_CODES.MANIFEST_DEPENDENCY_CYCLE,
        category: "manifest",
        file: ".claude/skills-manifest.json",
        got: stack.concat(name).join(" -> "),
        message: `Dependency cycle: ${stack.concat(name).join(" -> ")}`,
      }));
      return;
    }
    // BLACK: this subgraph was already fully explored without a new cycle.
    if (color.get(name) === BLACK) return;
    color.set(name, GRAY);
    for (const dep of graph.get(name) ?? []) {
      // Dependencies that name no manifest skill are ignored here —
      // detecting unknown names is not this check's job.
      if (graph.has(dep)) visit(dep, stack.concat(name));
    }
    color.set(name, BLACK);
  }
  for (const name of graph.keys()) visit(name, []);

  return { ok: errors.length === 0, errors, manifest };
}
128
+
129
/**
 * Rewrite `.claude/skills-manifest.json` in place with freshly computed
 * sha256 checksums for every skill file currently on disk; entries whose
 * file is missing are left untouched. Performs no validation — pair with
 * {@link validateManifest} to confirm the result.
 *
 * @param {import('./spec-harness-lib.mjs').HarnessContext} ctx
 * @returns {any} The in-memory manifest object just written to disk.
 */
export function refreshChecksums(ctx) {
  const manifest = loadManifest(ctx);
  for (const skill of manifest.skills) {
    const absolutePath = path.join(ctx.repoRoot, skill.path);
    if (!existsSync(absolutePath)) continue;
    skill.checksum = sha256(readFileSync(absolutePath, "utf8"));
    // Date-only stamp (YYYY-MM-DD) for human-facing freshness tracking.
    skill.lastValidated = new Date().toISOString().slice(0, 10);
  }
  manifest.generatedAt = new Date().toISOString();
  writeFileSync(ctx.manifestPath, `${JSON.stringify(manifest, null, 2)}\n`);
  return manifest;
}