@kaiohenricunha/harness 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. package/LICENSE +21 -0
  2. package/README.md +130 -0
  3. package/package.json +68 -0
  4. package/plugins/harness/.claude-plugin/plugin.json +8 -0
  5. package/plugins/harness/README.md +74 -0
  6. package/plugins/harness/bin/harness-check-instruction-drift.mjs +77 -0
  7. package/plugins/harness/bin/harness-check-spec-coverage.mjs +81 -0
  8. package/plugins/harness/bin/harness-detect-drift.mjs +53 -0
  9. package/plugins/harness/bin/harness-doctor.mjs +145 -0
  10. package/plugins/harness/bin/harness-init.mjs +89 -0
  11. package/plugins/harness/bin/harness-validate-skills.mjs +92 -0
  12. package/plugins/harness/bin/harness-validate-specs.mjs +70 -0
  13. package/plugins/harness/bin/harness.mjs +93 -0
  14. package/plugins/harness/hooks/guard-destructive-git.sh +58 -0
  15. package/plugins/harness/scripts/auto-update-manifest.mjs +20 -0
  16. package/plugins/harness/scripts/detect-branch-drift.mjs +81 -0
  17. package/plugins/harness/scripts/lib/output.sh +105 -0
  18. package/plugins/harness/scripts/refresh-worktrees.sh +35 -0
  19. package/plugins/harness/scripts/validate-settings.sh +202 -0
  20. package/plugins/harness/src/check-instruction-drift.mjs +127 -0
  21. package/plugins/harness/src/check-spec-coverage.mjs +95 -0
  22. package/plugins/harness/src/index.mjs +57 -0
  23. package/plugins/harness/src/init-harness-scaffold.mjs +121 -0
  24. package/plugins/harness/src/lib/argv.mjs +108 -0
  25. package/plugins/harness/src/lib/debug.mjs +37 -0
  26. package/plugins/harness/src/lib/errors.mjs +147 -0
  27. package/plugins/harness/src/lib/exit-codes.mjs +18 -0
  28. package/plugins/harness/src/lib/output.mjs +90 -0
  29. package/plugins/harness/src/spec-harness-lib.mjs +359 -0
  30. package/plugins/harness/src/validate-skills-inventory.mjs +148 -0
  31. package/plugins/harness/src/validate-specs.mjs +217 -0
  32. package/plugins/harness/templates/claude/hooks/guard-destructive-git.sh +50 -0
  33. package/plugins/harness/templates/claude/settings.headless.json +24 -0
  34. package/plugins/harness/templates/claude/settings.json +16 -0
  35. package/plugins/harness/templates/claude/skills-manifest.json +6 -0
  36. package/plugins/harness/templates/docs/repo-facts.json +17 -0
  37. package/plugins/harness/templates/docs/specs/README.md +36 -0
  38. package/plugins/harness/templates/githooks/pre-commit +9 -0
  39. package/plugins/harness/templates/workflows/ai-review.yml +28 -0
  40. package/plugins/harness/templates/workflows/detect-drift.yml +15 -0
  41. package/plugins/harness/templates/workflows/validate-skills.yml +36 -0
@@ -0,0 +1,217 @@
1
+ import path from "path";
2
+ import {
3
+ anyPathMatches,
4
+ listRepoPaths,
5
+ listSpecDirs,
6
+ readJson,
7
+ pathExists,
8
+ } from "./spec-harness-lib.mjs";
9
+ import { ValidationError, ERROR_CODES } from "./lib/errors.mjs";
10
+
11
// Allowed lifecycle states for a spec's `status` field, in workflow order:
// draft -> approved -> implementing -> done.
const STATUS_VALUES = ["draft", "approved", "implementing", "done"];
const VALID_STATUSES = new Set(STATUS_VALUES);
17
+
18
/**
 * Validate that `value` is a non-empty array of non-empty strings, pushing
 * one ValidationError onto `errors` per violation.
 *
 * @param {ValidationError[]} errors Accumulator mutated in place.
 * @param {string} file Spec directory path used for error context.
 * @param {unknown} value The field value under validation.
 * @param {{ code: string, pointer: string, arrayMessage: string, entryMessage: string }} opts
 */
function validateStringListField(errors, file, value, { code, pointer, arrayMessage, entryMessage }) {
  if (!Array.isArray(value) || value.length === 0) {
    errors.push(new ValidationError({
      code,
      category: "spec",
      file,
      pointer,
      message: arrayMessage,
    }));
    return;
  }
  for (const entry of value) {
    if (typeof entry !== "string" || !entry.trim()) {
      errors.push(new ValidationError({
        code,
        category: "spec",
        file,
        pointer: `${pointer}[]`,
        got: JSON.stringify(entry),
        message: entryMessage,
      }));
    }
  }
}

/**
 * Validate every spec.json under docs/specs/.
 *
 * Checks performed per spec:
 * - spec.json exists and parses as JSON
 * - id matches the directory name
 * - title is a non-empty string
 * - status is one of the allowed enum values
 * - owners is a non-empty array
 * - linked_paths is a non-empty array of non-empty strings
 * - acceptance_commands is a non-empty array of non-empty strings
 * - depends_on_specs and active_prs are arrays (may be empty)
 *
 * Cross-spec checks:
 * - depends_on_specs references resolve to known spec ids
 *
 * @param {object} ctx Harness context from createHarnessContext().
 * @returns {{ ok: boolean, errors: ValidationError[] }}
 */
export function validateSpecs(ctx) {
  const errors = [];
  const specDirs = listSpecDirs(ctx);

  // Known spec ids, for cross-reference resolution. A spec id is valid as a
  // dependency target as long as its directory exists, even if its own
  // spec.json has problems.
  const specIds = new Set(specDirs);

  // Parsed spec.json per directory, reused by the cross-spec pass below so
  // each file is read and parsed exactly once. Specs whose spec.json is
  // missing or unparseable are simply absent from this map.
  const metadataByDir = new Map();

  for (const specDir of specDirs) {
    const specJsonRelative = `docs/specs/${specDir}/spec.json`;
    const prefix = `docs/specs/${specDir}`;

    if (!pathExists(ctx, specJsonRelative)) {
      errors.push(new ValidationError({
        code: ERROR_CODES.SPEC_JSON_INVALID,
        category: "spec",
        file: prefix,
        message: "missing spec.json",
        hint: "create spec.json with required fields (id, title, status, owners, linked_paths, acceptance_commands)",
      }));
      continue;
    }

    let metadata;
    try {
      metadata = readJson(ctx, specJsonRelative);
    } catch (err) {
      errors.push(new ValidationError({
        code: ERROR_CODES.SPEC_JSON_INVALID,
        category: "spec",
        file: prefix,
        message: `spec.json is not valid JSON — ${err.message}`,
        hint: "run `node -e \"JSON.parse(require('fs').readFileSync('docs/specs/<id>/spec.json','utf8'))\"` to locate the parse error",
      }));
      continue;
    }
    metadataByDir.set(specDir, metadata);

    // id must match directory name.
    if (metadata.id !== specDir) {
      errors.push(new ValidationError({
        code: ERROR_CODES.SPEC_ID_MISMATCH,
        category: "spec",
        file: prefix,
        pointer: "id",
        expected: specDir,
        got: String(metadata.id),
        message: `spec.json id "${metadata.id}" must equal directory name "${specDir}"`,
      }));
    }

    // title: required, non-empty string.
    if (typeof metadata.title !== "string" || !metadata.title.trim()) {
      errors.push(new ValidationError({
        code: ERROR_CODES.SPEC_MISSING_REQUIRED_FIELD,
        category: "spec",
        file: prefix,
        pointer: "title",
        message: "spec.json title must be a non-empty string",
      }));
    }

    // status: required, must be in enum.
    if (!VALID_STATUSES.has(metadata.status)) {
      errors.push(new ValidationError({
        code: ERROR_CODES.SPEC_STATUS_INVALID,
        category: "spec",
        file: prefix,
        pointer: "status",
        expected: [...VALID_STATUSES].join(", "),
        got: String(metadata.status),
        message: `invalid status "${metadata.status}" (allowed: ${[...VALID_STATUSES].join(", ")})`,
      }));
    }

    // owners: required, non-empty array (entry types are not constrained).
    if (!Array.isArray(metadata.owners) || metadata.owners.length === 0) {
      errors.push(new ValidationError({
        code: ERROR_CODES.SPEC_MISSING_REQUIRED_FIELD,
        category: "spec",
        file: prefix,
        pointer: "owners",
        message: "owners must be a non-empty array",
      }));
    }

    // linked_paths: required, non-empty array of non-empty strings.
    validateStringListField(errors, prefix, metadata.linked_paths, {
      code: ERROR_CODES.SPEC_LINKED_PATH_MISSING,
      pointer: "linked_paths",
      arrayMessage: "linked_paths must be a non-empty array",
      entryMessage: "linked_paths entries must be non-empty strings",
    });

    // acceptance_commands: required, non-empty array of non-empty strings.
    validateStringListField(errors, prefix, metadata.acceptance_commands, {
      code: ERROR_CODES.SPEC_ACCEPTANCE_EMPTY,
      pointer: "acceptance_commands",
      arrayMessage: "acceptance_commands must be a non-empty array",
      entryMessage: "acceptance_commands entries must be non-empty strings",
    });

    // depends_on_specs: must be an array (can be empty).
    if (!Array.isArray(metadata.depends_on_specs)) {
      errors.push(new ValidationError({
        code: ERROR_CODES.SPEC_MISSING_REQUIRED_FIELD,
        category: "spec",
        file: prefix,
        pointer: "depends_on_specs",
        message: "depends_on_specs must be an array",
      }));
    }

    // active_prs: must be an array (can be empty).
    if (!Array.isArray(metadata.active_prs)) {
      errors.push(new ValidationError({
        code: ERROR_CODES.SPEC_MISSING_REQUIRED_FIELD,
        category: "spec",
        file: prefix,
        pointer: "active_prs",
        message: "active_prs must be an array",
      }));
    }
  }

  // Cross-spec: depends_on_specs references must resolve to a known spec id.
  // Uses the metadata cached above instead of re-reading each spec.json.
  for (const [specDir, metadata] of metadataByDir) {
    for (const dependency of metadata.depends_on_specs ?? []) {
      // Malformed entries are skipped here; the array-type check above
      // already reports non-array depends_on_specs per spec.
      if (typeof dependency !== "string" || !dependency.trim()) continue;
      if (!specIds.has(dependency)) {
        errors.push(new ValidationError({
          code: ERROR_CODES.SPEC_DEPENDENCY_UNKNOWN,
          category: "spec",
          file: `docs/specs/${specDir}`,
          pointer: "depends_on_specs",
          got: dependency,
          message: `depends_on_specs references unknown spec "${dependency}"`,
        }));
      }
    }
  }

  return { ok: errors.length === 0, errors };
}
@@ -0,0 +1,50 @@
1
#!/usr/bin/env bash
# PreToolUse hook: block destructive git operations.
# Reads JSON from stdin (Claude Code hook protocol).
# Exit 2 = block the tool call (Claude Code hook protocol — NOT the harness
# validator exit convention). Exit 0 = allow.
#
# Bypass: set BYPASS_DESTRUCTIVE_GIT=1 in the command's environment when you
# genuinely need to run a destructive git invocation. Use sparingly — the
# block exists because these operations are silently destructive.

# Honor the explicit escape hatch first.
if [ "${BYPASS_DESTRUCTIVE_GIT:-0}" = "1" ]; then
  exit 0
fi

# Fail open if jq is not installed (don't break all Bash tool calls).
if ! command -v jq >/dev/null 2>&1; then
  exit 0
fi

payload=$(cat)

# Only Bash tool invocations are inspected; everything else passes through.
tool_name=$(printf '%s' "$payload" | jq -r '.tool_name // empty')
if [ "$tool_name" != "Bash" ]; then
  exit 0
fi

# Normalize whitespace (tabs -> spaces, runs collapsed) before matching.
raw_command=$(printf '%s' "$payload" | jq -r '.tool_input.command // empty')
normalized=$(printf '%s' "$raw_command" | tr '\t' ' ' | tr -s ' ')

# A command boundary: start of string, whitespace, or a shell separator.
BOUNDARY='(^|[[:space:];&|])'
G='git[[:space:]]+'

destructive_patterns=(
  "${BOUNDARY}${G}reset[[:space:]]+--hard(\b|[[:space:]]|$)"
  "${BOUNDARY}${G}push[[:space:]][^&;|]*(-f|--force|--force-with-lease)(\b|=|[[:space:]]|$)"
  "${BOUNDARY}${G}clean[[:space:]][^&;|]*(-[a-zA-Z]*f[a-zA-Z]*|--force)(\b|=|[[:space:]]|$)"
  "${BOUNDARY}${G}checkout[[:space:]]+\.(\b|$)"
  "${BOUNDARY}${G}restore[[:space:]]+\.(\b|$)"
  "${BOUNDARY}${G}branch[[:space:]]+-D\b"
  "${BOUNDARY}${G}worktree[[:space:]]+remove[[:space:]]+--force\b"
)

for pattern in "${destructive_patterns[@]}"; do
  if printf '%s' "$normalized" | grep -qE "$pattern"; then
    {
      echo "BLOCKED: Destructive git operation detected. Get explicit user confirmation first."
      echo " Bypass (only with user confirmation): BYPASS_DESTRUCTIVE_GIT=1 <your command>"
    } >&2
    exit 2
  fi
done

exit 0
@@ -0,0 +1,24 @@
1
+ {
2
+ "_note": "Conservative deny-list for unattended runs. Extend per-repo as needed.",
3
+ "permissions": {
4
+ "allow": [
5
+ "Read",
6
+ "Grep",
7
+ "Glob",
8
+ "Bash(git status)",
9
+ "Bash(git status:*)",
10
+ "Bash(git diff:*)",
11
+ "Bash(git log:*)"
12
+ ],
13
+ "deny": [
14
+ "Bash(git push:*)",
15
+ "Bash(git push)",
16
+ "Bash(git reset --hard:*)",
17
+ "Bash(git reset --hard)",
18
+ "Bash(git clean -f:*)",
19
+ "Bash(git clean -fd:*)",
20
+ "Bash(npm publish:*)",
21
+ "Bash(npm publish)"
22
+ ]
23
+ }
24
+ }
@@ -0,0 +1,16 @@
1
+ {
2
+ "hooks": {
3
+ "PreToolUse": [
4
+ {
5
+ "matcher": "Bash",
6
+ "hooks": [
7
+ {
8
+ "type": "command",
9
+ "command": "bash \"$CLAUDE_PROJECT_DIR/.claude/hooks/guard-destructive-git.sh\"",
10
+ "timeout": 5
11
+ }
12
+ ]
13
+ }
14
+ ]
15
+ }
16
+ }
@@ -0,0 +1,6 @@
1
+ {
2
+ "$schema": "https://json-schema.org/draft/2020-12/schema",
3
+ "version": "1.0.0",
4
+ "generatedAt": "{{today}}",
5
+ "skills": []
6
+ }
@@ -0,0 +1,17 @@
1
+ {
2
+ "project_name": "{{project_name}}",
3
+ "project_type": "{{project_type}}",
4
+ "protected_paths": [
5
+ "CLAUDE.md",
6
+ "README.md",
7
+ ".github/workflows/**",
8
+ ".claude/commands/**",
9
+ ".claude/hooks/**",
10
+ ".claude/settings.json",
11
+ ".claude/settings.headless.json",
12
+ ".claude/skills-manifest.json",
13
+ "docs/repo-facts.json",
14
+ "docs/specs/**/spec.json"
15
+ ],
16
+ "verification_commands": []
17
+ }
@@ -0,0 +1,36 @@
1
+ # Specs
2
+
3
+ Each spec lives in its own directory with a `spec.json` metadata file and supporting markdown.
4
+
5
+ ## Layout
6
+
7
+ ```
8
+ docs/specs/
9
+ ├─ <slug>/
10
+ │ ├─ spec.json Required metadata
11
+ │ ├─ spec.md Human-readable spec
12
+ │ └─ (requirements.md, design.md, tasks.md — optional phase docs)
13
+ └─ README.md This file
14
+ ```
15
+
16
+ ## spec.json schema
17
+
18
+ ```json
19
+ {
20
+ "id": "unique-slug-matching-dir-name",
21
+ "title": "Human title",
22
+ "status": "draft | approved | implementing | done",
23
+ "owners": ["Person Name"],
24
+ "linked_paths": ["glob", "patterns", "of/files/this/spec/covers/**"],
25
+ "acceptance_commands": ["npm test", "go test ./..."],
26
+ "depends_on_specs": [],
27
+ "active_prs": []
28
+ }
29
+ ```
30
+
31
+ ## Workflow
32
+
33
+ 1. Draft → `status: draft`; no CI enforcement yet.
34
+ 2. Approve → `status: approved`; files in `linked_paths` now require this spec (or a `No-spec rationale`) in any PR that touches them.
35
+ 3. Implement → `status: implementing`; work in progress, same gating.
36
+ 4. Done → `status: done`; spec remains as governance over linked_paths (Böckeler's "spec-anchored" mode).
@@ -0,0 +1,9 @@
1
#!/usr/bin/env bash
set -euo pipefail

# Regenerate the skills manifest whenever the staged changes touch
# .claude/commands/ or .claude/skills/, then stage the refreshed file so it
# lands in the same commit.
staged_files=$(git diff --cached --name-only --diff-filter=ACMR)
if printf '%s\n' "$staged_files" | grep -qE '^\.claude/(commands|skills)/'; then
  echo "harness: auto-updating skills-manifest.json"
  npx harness-validate-skills --update
  git add .claude/skills-manifest.json
fi
@@ -0,0 +1,28 @@
1
+ # Always-on AI PR review via Claude-headless.
2
+ #
3
+ # Phase 5 of the SDD & Harness Hardening plan chose this path over Vercel Agent
4
+ # because it works in any repo with an ANTHROPIC_API_KEY secret — no per-repo
5
+ # Vercel project linking required. Swap to vercel:vercel-agent per-project if
6
+ # the host project is Vercel-deployed and prefers that integration.
7
+ #
8
+ # Requires: ANTHROPIC_API_KEY set as a GitHub Actions secret.
9
+ # Scope: runs only on same-repo PRs (not forks) to prevent secret exfiltration
10
+ # via malicious PR (SEC-6 in the spec).
11
+ name: AI PR review
12
+ on:
13
+ pull_request:
14
+ types: [opened, synchronize, ready_for_review]
15
+
16
+ jobs:
17
+ review:
18
+ runs-on: ubuntu-latest
19
+ if: github.event.pull_request.draft == false && github.event.pull_request.head.repo.full_name == github.repository
20
+ steps:
21
+ - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd
22
+ with: { fetch-depth: 0 }
23
+ - name: Run /review-prs headless
24
+ env:
25
+ ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
26
+ run: |
27
+ npx @anthropic-ai/claude-code -p "Run /review-prs for PR #${{ github.event.pull_request.number }}" \
28
+ --settings .claude/settings.headless.json --max-turns 40
@@ -0,0 +1,15 @@
1
+ name: Detect harness drift
2
+ on:
3
+ schedule:
4
+ - cron: "0 12 * * 1"
5
+ workflow_dispatch:
6
+
7
+ jobs:
8
+ detect:
9
+ runs-on: ubuntu-latest
10
+ steps:
11
+ - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd
12
+ with: { fetch-depth: 0 }
13
+ - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f
14
+ with: { node-version: "22" }
15
+ - run: npx --yes @kaiohenricunha/harness harness-detect-drift
@@ -0,0 +1,36 @@
1
+ name: Validate Skills Inventory
2
+
3
+ on:
4
+ schedule:
5
+ - cron: "0 9 * * 1"
6
+ workflow_dispatch:
7
+ pull_request:
8
+ paths:
9
+ - ".claude/commands/**"
10
+ - ".claude/skills/**"
11
+ - ".claude/skills-manifest.json"
12
+ - "docs/specs/**/spec.json"
13
+ - "docs/repo-facts.json"
14
+
15
+ jobs:
16
+ validate:
17
+ runs-on: ubuntu-latest
18
+ steps:
19
+ - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
20
+ - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
21
+ with:
22
+ node-version: "22"
23
+ cache: "npm"
24
+ - run: npm ci
25
+ - run: npx harness-validate-skills
26
+ - run: npx harness-validate-specs
27
+ - run: npx harness-check-instruction-drift
28
+ - name: Check for stale skills (30-day window)
29
+ run: |
30
+ node -e "
31
+ const m = require('./.claude/skills-manifest.json');
32
+ const cut = new Date(); cut.setDate(cut.getDate() - 30);
33
+ const stale = m.skills.filter(s => new Date(s.lastValidated) < cut);
34
+ if (stale.length) { console.log('Stale:'); stale.forEach(s => console.log(' -', s.name)); process.exit(1); }
35
+ console.log('✅ all skills within 30 days');
36
+ "