@kaiohenricunha/harness 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/LICENSE +21 -0
  2. package/README.md +130 -0
  3. package/package.json +68 -0
  4. package/plugins/harness/.claude-plugin/plugin.json +8 -0
  5. package/plugins/harness/README.md +74 -0
  6. package/plugins/harness/bin/harness-check-instruction-drift.mjs +77 -0
  7. package/plugins/harness/bin/harness-check-spec-coverage.mjs +81 -0
  8. package/plugins/harness/bin/harness-detect-drift.mjs +53 -0
  9. package/plugins/harness/bin/harness-doctor.mjs +145 -0
  10. package/plugins/harness/bin/harness-init.mjs +89 -0
  11. package/plugins/harness/bin/harness-validate-skills.mjs +92 -0
  12. package/plugins/harness/bin/harness-validate-specs.mjs +70 -0
  13. package/plugins/harness/bin/harness.mjs +93 -0
  14. package/plugins/harness/hooks/guard-destructive-git.sh +58 -0
  15. package/plugins/harness/scripts/auto-update-manifest.mjs +20 -0
  16. package/plugins/harness/scripts/detect-branch-drift.mjs +81 -0
  17. package/plugins/harness/scripts/lib/output.sh +105 -0
  18. package/plugins/harness/scripts/refresh-worktrees.sh +35 -0
  19. package/plugins/harness/scripts/validate-settings.sh +202 -0
  20. package/plugins/harness/src/check-instruction-drift.mjs +127 -0
  21. package/plugins/harness/src/check-spec-coverage.mjs +95 -0
  22. package/plugins/harness/src/index.mjs +57 -0
  23. package/plugins/harness/src/init-harness-scaffold.mjs +121 -0
  24. package/plugins/harness/src/lib/argv.mjs +108 -0
  25. package/plugins/harness/src/lib/debug.mjs +37 -0
  26. package/plugins/harness/src/lib/errors.mjs +147 -0
  27. package/plugins/harness/src/lib/exit-codes.mjs +18 -0
  28. package/plugins/harness/src/lib/output.mjs +90 -0
  29. package/plugins/harness/src/spec-harness-lib.mjs +359 -0
  30. package/plugins/harness/src/validate-skills-inventory.mjs +148 -0
  31. package/plugins/harness/src/validate-specs.mjs +217 -0
  32. package/plugins/harness/templates/claude/hooks/guard-destructive-git.sh +50 -0
  33. package/plugins/harness/templates/claude/settings.headless.json +24 -0
  34. package/plugins/harness/templates/claude/settings.json +16 -0
  35. package/plugins/harness/templates/claude/skills-manifest.json +6 -0
  36. package/plugins/harness/templates/docs/repo-facts.json +17 -0
  37. package/plugins/harness/templates/docs/specs/README.md +36 -0
  38. package/plugins/harness/templates/githooks/pre-commit +9 -0
  39. package/plugins/harness/templates/workflows/ai-review.yml +28 -0
  40. package/plugins/harness/templates/workflows/detect-drift.yml +15 -0
  41. package/plugins/harness/templates/workflows/validate-skills.yml +36 -0
@@ -0,0 +1,202 @@
1
#!/usr/bin/env bash
# validate-settings.sh — enforce the contract in
# docs/specs/claude-hardening/spec/7-non-functional-requirements.md.
#
# Usage:
#   validate-settings.sh                       # validates ~/.claude/settings.json
#   validate-settings.sh <path-to-settings>    # validates an alternative file
#   validate-settings.sh --json [<path>]       # emit JSON events on stdout
#
# Exit codes:
#   0 — all hard checks pass
#   1 — at least one hard check failed
#
# Hard checks:
#   SEC-1 no secret literals in *_KEY/*_TOKEN/*_SECRET fields (unless ${VAR})
#   SEC-2 skipDangerousModePermissionPrompt must not be present
#   SEC-3 no @latest in mcpServers[*].args
#   SEC-4 .credentials.json mode == 600
#   OPS-1 JSON well-formed
#         every mcpServers[*].command resolves on PATH or as existing absolute path
#         every hooks[*].command + statusLine.command path exists
#         every enabledPlugins key exists in installed_plugins.json
#
# Soft checks (warn, exit 0):
#   OPS-2 ~/.claude/projects/ ≤ 1.5 GB, ~/.claude/file-history/ ≤ 100 MB

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# shellcheck source=plugins/harness/scripts/lib/output.sh
source "$SCRIPT_DIR/lib/output.sh"

# Argv: the --json flag must precede the optional positional settings path.
HARNESS_JSON=0
while [ $# -gt 0 ]; do
  case "$1" in
    --json) HARNESS_JSON=1; shift ;;
    --help|-h)
      # Print this file's header comment as help text. The `^#( |$)` pattern
      # keeps bare "#" separator lines (so blank lines survive in the output)
      # while skipping the shebang line.
      grep -E '^#( |$)' "${BASH_SOURCE[0]}" | sed 's/^# \{0,1\}//'
      exit 0
      ;;
    *) break ;;
  esac
done
export HARNESS_JSON   # consumed by the lib/output.sh event emitters

SETTINGS="${1:-$HOME/.claude/settings.json}"
PLUGINS_REG="$HOME/.claude/plugins/installed_plugins.json"
CREDS="$HOME/.claude/.credentials.json"
PROJECTS_DIR="$HOME/.claude/projects"
FILE_HISTORY_DIR="$HOME/.claude/file-history"

out_init

if [ "$HARNESS_JSON" != "1" ]; then
  echo "Validating $SETTINGS"
  echo
fi

# --- settings file must exist (blocking) ---
# Check this explicitly so a missing file is not misreported as malformed JSON.
CATEGORY=OPS-1
if [ ! -f "$SETTINGS" ]; then
  fail "settings file not found: $SETTINGS"
  out_summary
  exit 1
fi

# --- JSON validity (blocking) ---
if jq -e . < "$SETTINGS" > /dev/null 2>&1; then
  pass "JSON well-formed"
else
  fail "JSON malformed"
  out_summary
  exit 1
fi

# --- SEC-2: no skipDangerousModePermissionPrompt ---
CATEGORY=SEC-2
if jq -e 'has("skipDangerousModePermissionPrompt")' < "$SETTINGS" > /dev/null 2>&1; then
  fail "SEC-2 skipDangerousModePermissionPrompt is set"
else
  pass "SEC-2 skipDangerousModePermissionPrompt absent"
fi

# --- SEC-1: no secret literals ---
# A "secret literal" is a string of 20+ [A-Za-z0-9_-] chars under a key ending
# in _KEY/_TOKEN/_SECRET. A ${VAR} reference contains "$"/"{" and never matches.
CATEGORY=SEC-1
SECRET_LEAKS=$(jq -r '
  . as $root
  | [paths(scalars)] as $ps
  | $ps[]
  | select((last | tostring) | test("(_KEY|_TOKEN|_SECRET)$"; "i"))
  | . as $p
  | ($root | getpath($p)) as $v
  | select(($v | type) == "string")
  | select($v | test("^[A-Za-z0-9_-]{20,}$"))
  | ($p | map(tostring) | join("."))
' < "$SETTINGS" || true)

if [ -z "$SECRET_LEAKS" ]; then
  pass "SEC-1 no secret literals in *_KEY/*_TOKEN/*_SECRET fields"
else
  while IFS= read -r p; do fail "SEC-1 secret literal at: $p"; done <<< "$SECRET_LEAKS"
fi

# --- SEC-3: no @latest in MCP args ---
CATEGORY=SEC-3
LATEST_REFS=$(jq -r '
  .mcpServers // {} | to_entries[]
  | . as $s
  | ($s.value.args // [])[]
  | select(. | test("@latest$"))
  | $s.key + " -> " + .
' < "$SETTINGS" || true)

if [ -z "$LATEST_REFS" ]; then
  pass "SEC-3 no @latest in MCP args"
else
  # Note: @latest means the version is UNpinned — report it as such.
  while IFS= read -r l; do fail "SEC-3 unpinned @latest reference in: $l"; done <<< "$LATEST_REFS"
fi

# --- MCP command resolvable ---
CATEGORY=OPS-1
while IFS= read -r cmd; do
  [ -z "$cmd" ] && continue
  if [[ "$cmd" == /* ]]; then
    # Absolute path: must exist and be executable.
    if [ -x "$cmd" ]; then
      pass "MCP command executable: $cmd"
    else
      fail "MCP command missing or not executable: $cmd"
    fi
  else
    # Bare name: must resolve on PATH.
    if command -v "$cmd" > /dev/null 2>&1; then
      pass "MCP command on PATH: $cmd"
    else
      fail "MCP command not on PATH: $cmd"
    fi
  fi
done < <(jq -r '.mcpServers // {} | to_entries[] | .value.command' < "$SETTINGS")

# --- hooks + statusLine target paths ---
CATEGORY=OPS-1
while IFS= read -r cmd; do
  [ -z "$cmd" ] && continue
  # Extract the first absolute-path token from the command line (interpreter
  # prefixes like "bash /path/to/hook.sh" are common); fall back to the whole
  # command if no absolute path is present.
  script=$(echo "$cmd" | awk '{for(i=1;i<=NF;i++) if($i ~ /^\//) {print $i; exit}}')
  [ -z "$script" ] && script="$cmd"
  if [ -f "$script" ]; then
    pass "hook/statusLine target exists: $script"
  else
    fail "hook/statusLine target missing: $script"
  fi
done < <(jq -r '
  [
    (.hooks // {} | to_entries[] | .value[] | .hooks[] | .command),
    (.statusLine.command // empty)
  ][] // empty
' < "$SETTINGS")

# --- enabledPlugins installed? ---
CATEGORY=OPS-1
if [ -f "$PLUGINS_REG" ]; then
  while IFS= read -r plugin; do
    [ -z "$plugin" ] && continue
    if jq -e --arg p "$plugin" '.plugins | has($p)' < "$PLUGINS_REG" > /dev/null 2>&1; then
      pass "enabled plugin installed: $plugin"
    else
      fail "enabled plugin NOT installed: $plugin"
    fi
  done < <(jq -r '.enabledPlugins // {} | to_entries[] | select(.value == true) | .key' < "$SETTINGS")
else
  warn "plugin registry not found at $PLUGINS_REG"
fi

# --- SEC-4: .credentials.json mode 600 ---
CATEGORY=SEC-4
if [ -f "$CREDS" ]; then
  # `stat -c` is GNU coreutils; fall back to BSD/macOS `stat -f` before
  # giving up, so the check works on both platforms.
  MODE=$(stat -c '%a' "$CREDS" 2>/dev/null || stat -f '%Lp' "$CREDS" 2>/dev/null || echo "?")
  if [ "$MODE" = "600" ]; then
    pass "SEC-4 .credentials.json mode 600"
  else
    fail "SEC-4 .credentials.json mode is $MODE (expected 600)"
  fi
else
  warn ".credentials.json not found (may not be logged in)"
fi

# --- OPS-2 disk budgets (soft) ---
CATEGORY=OPS-2
if [ -d "$PROJECTS_DIR" ]; then
  # `|| true` guards pipefail if du fails; default to 0 when output is empty
  # so the numeric comparison below cannot become a syntax error.
  PROJECTS_MB=$(du -sm "$PROJECTS_DIR" 2>/dev/null | awk '{print $1}' || true)
  PROJECTS_MB="${PROJECTS_MB:-0}"
  if [ "$PROJECTS_MB" -gt 1536 ]; then
    # shellcheck disable=SC2088 # literal ~ is user-readable text, not a filesystem path
    warn "~/.claude/projects/ is ${PROJECTS_MB} MB (budget: 1536 MB). Prune: find ~/.claude/projects -mindepth 2 -maxdepth 2 -type f -mtime +60 -delete"
  else
    pass "projects/ size OK (${PROJECTS_MB} MB / 1536)"
  fi
fi

if [ -d "$FILE_HISTORY_DIR" ]; then
  FH_MB=$(du -sm "$FILE_HISTORY_DIR" 2>/dev/null | awk '{print $1}' || true)
  FH_MB="${FH_MB:-0}"
  if [ "$FH_MB" -gt 100 ]; then
    # shellcheck disable=SC2088 # literal ~ is user-readable text, not a filesystem path
    warn "~/.claude/file-history/ is ${FH_MB} MB (budget: 100 MB)"
  else
    pass "file-history/ size OK (${FH_MB} MB / 100)"
  fi
fi

out_summary
# $FAIL is maintained by lib/output.sh's fail() counter.
[ "$FAIL" -eq 0 ] && exit 0 || exit 1
@@ -0,0 +1,127 @@
1
+ import path from "path";
2
+ import {
3
+ loadFacts,
4
+ pathExists,
5
+ readText,
6
+ } from "./spec-harness-lib.mjs";
7
+ import { ValidationError, ERROR_CODES } from "./lib/errors.mjs";
8
+
9
/**
 * Detect drift between docs/repo-facts.json and the instruction files it
 * declares (CLAUDE.md, README.md, etc.).
 *
 * Validations:
 * - `instruction_files` is a non-empty array in repo-facts.json
 * - every listed instruction file exists on disk
 * - every "N team(s)" phrase in an instruction file agrees with `team_count`
 *   (stale-number detection)
 * - every `protected_paths` entry is a non-empty string
 * - every protected path appears verbatim in CLAUDE.md, so the canonical
 *   instruction doc cannot silently drift from the facts
 *
 * This port drops the loadSourceFacts() cross-check from squadranks (it read
 * src/data.js and src/i18n.js, which were specific to wc-squad-rankings).
 * Here repo-facts.json is the authoritative source, and instruction files are
 * checked for agreement with it.
 *
 * @param {object} ctx Harness context from createHarnessContext().
 * @returns {{ ok: boolean, errors: ValidationError[] }}
 */
export function checkInstructionDrift(ctx) {
  const facts = loadFacts(ctx);
  const found = [];
  const report = (fields) => found.push(new ValidationError(fields));

  // Without a usable instruction_files list there is nothing left to check.
  const instructionFiles = facts.instruction_files;
  if (!Array.isArray(instructionFiles) || instructionFiles.length === 0) {
    report({
      code: ERROR_CODES.DRIFT_INSTRUCTION_FILES,
      category: "drift",
      file: "docs/repo-facts.json",
      pointer: "instruction_files",
      message: "instruction_files must be a non-empty array of file paths",
    });
    return { ok: false, errors: found };
  }

  // Shape check: every protected_paths entry must be a non-empty string.
  for (const entry of facts.protected_paths ?? []) {
    if (typeof entry === "string" && entry.trim()) continue;
    report({
      code: ERROR_CODES.DRIFT_PROTECTED_PATH,
      category: "drift",
      file: "docs/repo-facts.json",
      pointer: "protected_paths[]",
      got: JSON.stringify(entry),
      message: `protected_paths entries must be non-empty strings (got ${JSON.stringify(entry)})`,
    });
  }

  const expectedTeamCount = facts.team_count;

  // Per-file checks: entry shape, existence, then stale team_count phrases.
  for (const filePath of instructionFiles) {
    if (typeof filePath !== "string" || !filePath.trim()) {
      report({
        code: ERROR_CODES.DRIFT_INSTRUCTION_FILES,
        category: "drift",
        file: "docs/repo-facts.json",
        pointer: "instruction_files[]",
        message: "instruction_files entries must be non-empty strings",
      });
      continue;
    }

    if (!pathExists(ctx, filePath)) {
      report({
        code: ERROR_CODES.DRIFT_INSTRUCTION_FILE_MISSING,
        category: "drift",
        file: "docs/repo-facts.json",
        pointer: "instruction_files[]",
        got: filePath,
        message: `instruction file does not exist -> ${filePath}`,
        hint: "create the file on disk or remove it from repo-facts.json",
      });
      continue;
    }

    const content = readText(ctx, filePath);

    // Any "N team(s)" phrase whose N differs from team_count is stale.
    if (typeof expectedTeamCount === "number") {
      for (const hit of content.matchAll(/(\d+)\s+teams?/gi)) {
        const claimed = parseInt(hit[1], 10);
        if (claimed === expectedTeamCount) continue;
        report({
          code: ERROR_CODES.DRIFT_TEAM_COUNT,
          category: "drift",
          file: filePath,
          expected: String(expectedTeamCount),
          got: hit[0],
          message: `stale team_count claim — file mentions "${hit[0]}" but docs/repo-facts.json has team_count=${expectedTeamCount}`,
        });
      }
    }
  }

  // CLAUDE.md must mention each protected path verbatim so the canonical
  // instruction doc is updated whenever the facts change.
  if (pathExists(ctx, "CLAUDE.md") && Array.isArray(facts.protected_paths)) {
    const claudeText = readText(ctx, "CLAUDE.md");
    for (const entry of facts.protected_paths) {
      if (typeof entry !== "string" || !entry.trim()) continue;
      if (claudeText.includes(entry)) continue;
      report({
        code: ERROR_CODES.DRIFT_PROTECTED_PATH,
        category: "drift",
        file: "CLAUDE.md",
        expected: entry,
        message: `protected path "${entry}" from docs/repo-facts.json is not documented`,
        hint: "add the protected path entry to CLAUDE.md or remove it from repo-facts.json",
      });
    }
  }

  return { ok: found.length === 0, errors: found };
}
@@ -0,0 +1,95 @@
1
+ import {
2
+ anyPathMatches,
3
+ extractTemplateSection,
4
+ isBotActor,
5
+ isMeaningfulSection,
6
+ listSpecDirs,
7
+ loadFacts,
8
+ readJson,
9
+ } from "./spec-harness-lib.mjs";
10
+ import { ValidationError, ERROR_CODES } from "./lib/errors.mjs";
11
+
12
// Spec lifecycle statuses that grant coverage for protected-path changes.
const COVERAGE_STATUSES = new Set(["approved", "implementing", "done"]);

/**
 * Normalize a Spec ID pulled from a PR body: strip surrounding markdown noise
 * (backticks, quotes, and a leading "#") and trim whitespace.
 */
function normalizeSpecId(v) {
  const unquoted = v.replace(/^[`'"#]+|[`'"]+$/g, "");
  return unquoted.trim();
}
17
+
18
/**
 * Enforce the spec-coverage contract for a pull request: any change touching
 * a protected path needs a covering spec whose status is approved,
 * implementing, or done — or the PR body must carry a meaningful
 * `## No-spec rationale` section. Known bot actors bypass the body contract.
 *
 * @param {import('./spec-harness-lib.mjs').HarnessContext} ctx
 * @param {{ changedFiles: string[], isPullRequest: boolean, body: string, actor: string }} input
 * @returns {{
 *   ok: boolean,
 *   errors: import('./lib/errors.mjs').ValidationError[],
 *   protectedFiles: string[],
 *   uncovered: string[],
 *   note?: string
 * }}
 */
export function checkSpecCoverage(ctx, input) {
  const { changedFiles, isPullRequest, body, actor } = input;
  const errors = [];
  const facts = loadFacts(ctx);
  const protectedPatterns = facts.protected_paths ?? [];

  const protectedFiles = changedFiles.filter((file) =>
    protectedPatterns.some((pattern) => anyPathMatches(pattern, [file])),
  );

  // Only specs in a coverage-granting lifecycle status participate.
  const specs = [];
  for (const dir of listSpecDirs(ctx)) {
    const metadata = readJson(ctx, `docs/specs/${dir}/spec.json`);
    if (COVERAGE_STATUSES.has(metadata.status)) specs.push({ dir, metadata });
  }

  const coveredBySpec = (file) =>
    specs.some(({ metadata }) =>
      (metadata.linked_paths ?? []).some((pattern) => anyPathMatches(pattern, [file])),
    );
  const uncovered = protectedFiles.filter((file) => !coveredBySpec(file));

  const specSection = extractTemplateSection(body, "Spec ID");
  const rationaleSection = extractTemplateSection(body, "No-spec rationale");

  // Bots don't fill PR templates — exempt them, but still report coverage data.
  if (isBotActor(actor)) {
    return { ok: true, errors: [], protectedFiles, uncovered, note: "bot bypass" };
  }

  const hasSpecSection = isMeaningfulSection(specSection);
  const hasRationale = isMeaningfulSection(rationaleSection);

  // A PR that touches protected files must declare a spec or a rationale.
  if (isPullRequest && protectedFiles.length > 0 && !hasSpecSection && !hasRationale) {
    errors.push(new ValidationError({
      code: ERROR_CODES.COVERAGE_NO_SPEC_RATIONALE,
      category: "coverage",
      message: "pull request body must include either a Spec ID or a No-spec rationale section",
      hint: "add `## Spec ID\\n<id>` or `## No-spec rationale\\n<reason>` to the PR body",
    }));
  }

  // Every Spec ID the body references must exist among coverage-granting specs.
  if (hasSpecSection) {
    const knownIds = new Set(specs.map(({ metadata }) => metadata.id));
    const requestedIds = specSection.split(/[\s,]+/).map(normalizeSpecId).filter(Boolean);
    for (const id of requestedIds) {
      if (knownIds.has(id)) continue;
      errors.push(new ValidationError({
        code: ERROR_CODES.COVERAGE_UNKNOWN_SPEC_ID,
        category: "coverage",
        got: id,
        message: `pull request body references unknown Spec ID "${id}"`,
        hint: "check the spec directory under docs/specs/ or create the spec first",
      }));
    }
  }

  // Uncovered protected files are acceptable only with an explicit rationale.
  if (uncovered.length > 0 && !hasRationale) {
    errors.push(new ValidationError({
      code: ERROR_CODES.COVERAGE_UNCOVERED,
      category: "coverage",
      got: uncovered.join(", "),
      message: "protected files changed without an approved, implementing, or done spec",
      hint: "add a covering spec (status: approved/implementing/done) or a `## No-spec rationale` section",
    }));
  }

  return { ok: errors.length === 0, errors, protectedFiles, uncovered };
}
@@ -0,0 +1,57 @@
1
/**
 * Public barrel for `@kaiohenricunha/harness`.
 *
 * Consumer contract:
 *   import { createHarnessContext, validateSpecs, EXIT_CODES, ValidationError } from "@kaiohenricunha/harness";
 *
 * Only the names re-exported here are supported API — deep imports into
 * internal modules are NOT a contract. If an internal helper you need is
 * missing from this surface, open an issue instead of reaching past it.
 */

import { readFileSync } from "node:fs";
import { fileURLToPath } from "node:url";
import { dirname, resolve } from "node:path";

// Shared filesystem / git / glob / PR helpers (spec-harness-lib, 18 exports).
export {
  createHarnessContext,
  toPosix,
  readJson,
  readText,
  pathExists,
  git,
  loadFacts,
  listSpecDirs,
  listRepoPaths,
  escapeRegex,
  globToRegExp,
  matchesGlob,
  anyPathMatches,
  extractTemplateSection,
  isMeaningfulSection,
  getPullRequestContext,
  isBotActor,
  getChangedFiles,
} from "./spec-harness-lib.mjs";

// Validator entry points (6).
export { validateSpecs } from "./validate-specs.mjs";
export {
  validateManifest,
  refreshChecksums,
} from "./validate-skills-inventory.mjs";
export { checkInstructionDrift } from "./check-instruction-drift.mjs";
export { checkSpecCoverage } from "./check-spec-coverage.mjs";
export { scaffoldHarness } from "./init-harness-scaffold.mjs";

// Error taxonomy and process exit codes.
export { ValidationError, ERROR_CODES, formatError } from "./lib/errors.mjs";
export { EXIT_CODES } from "./lib/exit-codes.mjs";

// Package version, read from the root package.json three levels above src/.
const moduleDir = dirname(fileURLToPath(import.meta.url));
const rootPackageJson = resolve(moduleDir, "..", "..", "..", "package.json");
const packageManifest = JSON.parse(readFileSync(rootPackageJson, "utf8"));

/** The `@kaiohenricunha/harness` package version at import time. */
export const version = packageManifest.version;
@@ -0,0 +1,121 @@
1
+ import fs from "fs";
2
+ import path from "path";
3
+ import { ValidationError, ERROR_CODES } from "./lib/errors.mjs";
4
+
5
// Maps a template subtree prefix to where that subtree lands in the target repo.
const PREFIX_MAP = [
  { from: "claude/", to: ".claude/" },
  { from: "docs/", to: "docs/" },
  { from: "workflows/", to: ".github/workflows/" },
];

/**
 * Rewrite a template-relative path to its target-repository location using
 * PREFIX_MAP. First matching prefix wins; unmatched paths pass through
 * unchanged (which shouldn't happen with a well-formed templates/ tree).
 */
function applyPrefixMap(relFromTemplates) {
  const mapping = PREFIX_MAP.find(({ from }) => relFromTemplates.startsWith(from));
  if (mapping === undefined) {
    return relFromTemplates;
  }
  return mapping.to + relFromTemplates.slice(mapping.from.length);
}
21
+
22
/**
 * Expand `{{key}}` markers in template content using the `placeholders` map.
 * Only the map's own properties are substituted; unknown keys are left as-is.
 */
function substitutePlaceholders(content, placeholders) {
  const expand = (token, key) =>
    Object.prototype.hasOwnProperty.call(placeholders, key) ? placeholders[key] : token;
  return content.replace(/\{\{(\w+)\}\}/g, expand);
}
29
+
30
/**
 * Collect every regular file under `dir`, recursing into subdirectories but
 * skipping `.git` and `node_modules`.
 * @param {string} dir - Absolute directory path to walk
 * @returns {string[]} Sorted list of absolute file paths
 */
function walkFiles(dir) {
  const SKIP_DIRS = new Set([".git", "node_modules"]);
  const found = [];
  const pending = [dir];

  // Iterative traversal; visit order doesn't matter since we sort at the end.
  while (pending.length > 0) {
    const current = pending.pop();
    for (const entry of fs.readdirSync(current, { withFileTypes: true })) {
      const full = path.join(current, entry.name);
      if (entry.isDirectory()) {
        if (!SKIP_DIRS.has(entry.name)) pending.push(full);
      } else if (entry.isFile()) {
        found.push(full);
      }
    }
  }

  return found.sort();
}
55
+
56
/**
 * Scaffold the harness template tree into a target repository.
 *
 * Copies every file under `templatesDir` into `targetDir` (remapping subtree
 * prefixes via applyPrefixMap), substitutes `{{key}}` placeholders, and
 * preserves the executable bit of each source file. Unless `force` is set,
 * refuses to run when the target already looks initialized.
 *
 * @param {{ templatesDir: string, targetDir: string, placeholders: object }} opts
 * @param {{ force?: boolean }} [options]
 * @returns {{ filesWritten: string[] }} Target-relative paths, sorted.
 * @throws {ValidationError} SCAFFOLD_CONFLICT when the repo is already initialized.
 */
export function scaffoldHarness(
  { templatesDir, targetDir, placeholders },
  { force = false } = {}
) {
  // Guard: refuse to clobber an already-initialized repo unless forced.
  // Checked in order: the skills manifest first, then the specs directory.
  if (!force) {
    const sentinels = [
      {
        location: path.join(targetDir, ".claude", "skills-manifest.json"),
        hint: "pass `{ force: true }` or remove .claude/skills-manifest.json",
      },
      {
        location: path.join(targetDir, "docs", "specs"),
        hint: "pass `{ force: true }` or remove docs/specs/",
      },
    ];
    for (const { location, hint } of sentinels) {
      if (fs.existsSync(location)) {
        throw new ValidationError({
          code: ERROR_CODES.SCAFFOLD_CONFLICT,
          category: "scaffold",
          file: location,
          message: `Repo already initialized: ${location} already exists. Use --force to overwrite.`,
          hint,
        });
      }
    }
  }

  const written = [];

  for (const sourcePath of walkFiles(templatesDir)) {
    const templateRel = path.relative(templatesDir, sourcePath);
    const destRel = applyPrefixMap(templateRel);
    const destPath = path.join(targetDir, destRel);

    // Ensure the destination directory tree exists before writing.
    fs.mkdirSync(path.dirname(destPath), { recursive: true });

    // Render placeholders and write the file.
    const rendered = substitutePlaceholders(fs.readFileSync(sourcePath, "utf8"), placeholders);
    fs.writeFileSync(destPath, rendered, "utf8");

    // Carry over the executable bit so hook/script templates stay runnable.
    const sourceMode = fs.statSync(sourcePath).mode;
    if (sourceMode & 0o111) {
      fs.chmodSync(destPath, sourceMode & 0o777);
    }

    written.push(destRel);
  }

  return { filesWritten: written.sort() };
}