@datafog/fogclaw 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/harness-docs.yml +30 -0
- package/AGENTS.md +28 -0
- package/LICENSE +21 -0
- package/README.md +208 -0
- package/dist/config.d.ts +4 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +30 -0
- package/dist/config.js.map +1 -0
- package/dist/engines/gliner.d.ts +14 -0
- package/dist/engines/gliner.d.ts.map +1 -0
- package/dist/engines/gliner.js +75 -0
- package/dist/engines/gliner.js.map +1 -0
- package/dist/engines/regex.d.ts +5 -0
- package/dist/engines/regex.d.ts.map +1 -0
- package/dist/engines/regex.js +54 -0
- package/dist/engines/regex.js.map +1 -0
- package/dist/index.d.ts +19 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +157 -0
- package/dist/index.js.map +1 -0
- package/dist/redactor.d.ts +3 -0
- package/dist/redactor.d.ts.map +1 -0
- package/dist/redactor.js +37 -0
- package/dist/redactor.js.map +1 -0
- package/dist/scanner.d.ts +11 -0
- package/dist/scanner.d.ts.map +1 -0
- package/dist/scanner.js +77 -0
- package/dist/scanner.js.map +1 -0
- package/dist/types.d.ts +31 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +18 -0
- package/dist/types.js.map +1 -0
- package/docs/DATA.md +28 -0
- package/docs/DESIGN.md +17 -0
- package/docs/DOMAIN_DOCS.md +30 -0
- package/docs/FRONTEND.md +24 -0
- package/docs/OBSERVABILITY.md +25 -0
- package/docs/PLANS.md +171 -0
- package/docs/PRODUCT_SENSE.md +20 -0
- package/docs/RELIABILITY.md +60 -0
- package/docs/SECURITY.md +50 -0
- package/docs/design-docs/core-beliefs.md +17 -0
- package/docs/design-docs/index.md +8 -0
- package/docs/generated/README.md +36 -0
- package/docs/generated/memory.md +1 -0
- package/docs/plans/2026-02-16-fogclaw-design.md +172 -0
- package/docs/plans/2026-02-16-fogclaw-implementation.md +1606 -0
- package/docs/plans/README.md +15 -0
- package/docs/plans/active/2026-02-16-feat-openclaw-official-submission-plan.md +386 -0
- package/docs/plans/active/2026-02-17-feat-release-fogclaw-via-datafog-package-plan.md +318 -0
- package/docs/plans/active/2026-02-17-feat-submit-fogclaw-to-openclaw-plan.md +244 -0
- package/docs/plans/tech-debt-tracker.md +42 -0
- package/docs/plugins/fogclaw.md +95 -0
- package/docs/runbooks/address-review-findings.md +30 -0
- package/docs/runbooks/ci-failures.md +46 -0
- package/docs/runbooks/code-review.md +34 -0
- package/docs/runbooks/merge-change.md +28 -0
- package/docs/runbooks/pull-request.md +45 -0
- package/docs/runbooks/record-evidence.md +43 -0
- package/docs/runbooks/reproduce-bug.md +42 -0
- package/docs/runbooks/respond-to-feedback.md +42 -0
- package/docs/runbooks/review-findings.md +31 -0
- package/docs/runbooks/submit-openclaw-plugin.md +68 -0
- package/docs/runbooks/update-agents-md.md +59 -0
- package/docs/runbooks/update-domain-docs.md +42 -0
- package/docs/runbooks/validate-current-state.md +41 -0
- package/docs/runbooks/verify-release.md +69 -0
- package/docs/specs/2026-02-16-feat-openclaw-official-submission-spec.md +115 -0
- package/docs/specs/2026-02-17-feat-submit-fogclaw-to-openclaw.md +125 -0
- package/docs/specs/README.md +5 -0
- package/docs/specs/index.md +8 -0
- package/docs/spikes/README.md +8 -0
- package/fogclaw.config.example.json +15 -0
- package/openclaw.plugin.json +45 -0
- package/package.json +37 -0
- package/scripts/ci/he-docs-config.json +123 -0
- package/scripts/ci/he-docs-drift.sh +112 -0
- package/scripts/ci/he-docs-lint.sh +234 -0
- package/scripts/ci/he-plans-lint.sh +354 -0
- package/scripts/ci/he-runbooks-lint.sh +445 -0
- package/scripts/ci/he-specs-lint.sh +258 -0
- package/scripts/ci/he-spikes-lint.sh +249 -0
- package/scripts/runbooks/select-runbooks.sh +154 -0
- package/src/config.ts +46 -0
- package/src/engines/gliner.ts +88 -0
- package/src/engines/regex.ts +71 -0
- package/src/index.ts +223 -0
- package/src/redactor.ts +51 -0
- package/src/scanner.ts +90 -0
- package/src/types.ts +52 -0
- package/tests/config.test.ts +104 -0
- package/tests/gliner.test.ts +184 -0
- package/tests/plugin-smoke.test.ts +114 -0
- package/tests/redactor.test.ts +320 -0
- package/tests/regex.test.ts +345 -0
- package/tests/scanner.test.ts +199 -0
- package/tsconfig.json +20 -0
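
To inspect these files directly rather than reading the rendered diff, the published tarball can be fetched and listed with standard npm tooling. A minimal sketch; `npm pack` prints the exact filename it writes (typically datafog-fogclaw-0.1.0.tgz for this scoped package):

# Download the published tarball for this exact version, then list its contents.
npm pack @datafog/fogclaw@0.1.0
tar -tzf datafog-fogclaw-0.1.0.tgz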

package/scripts/ci/he-runbooks-lint.sh
@@ -0,0 +1,445 @@
+#!/bin/bash
+set -euo pipefail
+
+# ---------------------------------------------------------------------------
+# he-runbooks-lint.sh -- Lint runbook frontmatter & content
+#
+# Exit codes: 0=OK, 1=FAIL, 2=config error
+# ---------------------------------------------------------------------------
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
+
+DEFAULT_CONFIG_PATH="scripts/ci/he-docs-config.json"
+
+ERRORS=0
+WARNINGS=0
+
+# ── env helpers ────────────────────────────────────────────────────────────
+
+env_flag() {
+  local name="$1"
+  local default="${2:-0}"
+  local val="${!name:-$default}"
+  [[ "$val" == "1" ]]
+}
+
+# ── emit / annotate ───────────────────────────────────────────────────────
+
+gh_annotate() {
+  local level="$1" file="$2" title="$3" msg="$4"
+  if [[ -n "$file" ]]; then
+    echo "::${level} file=${file},title=${title}::${msg}"
+  else
+    echo "::${level} title=${title}::${msg}"
+  fi
+}
+
+emit() {
+  local level="$1" file="$2" title="$3" msg="$4"
+  gh_annotate "$level" "$file" "$title" "$msg"
+  local upper
+  upper="$(echo "$level" | tr '[:lower:]' '[:upper:]')"
+  echo "${upper}: ${msg}" >&2
+}
+
+emit_and_count() {
+  local level="$1" file="$2" title="$3" msg="$4"
+  if [[ "$level" == "error" ]]; then
+    (( ERRORS++ )) || true
+  else
+    (( WARNINGS++ )) || true
+  fi
+  emit "$level" "$file" "$title" "$msg"
+}
+
+# ── config ────────────────────────────────────────────────────────────────
+
+load_config() {
+  local config_rel="${HARNESS_DOCS_CONFIG:-$DEFAULT_CONFIG_PATH}"
+  local config_path="$REPO_ROOT/$config_rel"
+  if [[ ! -f "$config_path" ]]; then
+    echo "Error: he-runbooks-lint missing/invalid config: Missing config '${config_rel}'. Fix: create it (bootstrap should do this) or set HARNESS_DOCS_CONFIG." >&2
+    return 1
+  fi
+  # Validate it is a JSON object
+  if ! jq -e 'type == "object"' "$config_path" >/dev/null 2>&1; then
+    echo "Error: he-runbooks-lint missing/invalid config: Config must be a JSON object." >&2
+    return 1
+  fi
+  CONFIG_PATH="$config_path"
+}
+
+# ── frontmatter extraction ────────────────────────────────────────────────
+
+# Reads file, outputs the frontmatter block (lines between first --- and
+# second ---) to stdout. Returns 1 if no frontmatter found.
+extract_frontmatter() {
+  local file="$1"
+  local in_fm=0
+  local first_line=1
+  local block=""
+
+  while IFS= read -r line || [[ -n "$line" ]]; do
+    local trimmed
+    trimmed="$(echo "$line" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+    if (( first_line )); then
+      first_line=0
+      if [[ "$trimmed" == "---" ]]; then
+        in_fm=1
+        continue
+      else
+        return 1
+      fi
+    fi
+    if (( in_fm )); then
+      if [[ "$trimmed" == "---" ]]; then
+        printf '%s' "$block"
+        return 0
+      fi
+      if [[ -n "$block" ]]; then
+        block="${block}"$'\n'"${line}"
+      else
+        block="${line}"
+      fi
+    fi
+  done < "$file"
+
+  # Reached EOF without closing ---
+  return 1
+}
+
+# ── frontmatter parsing ──────────────────────────────────────────────────
+
+# Sets global variables: FM_TITLE, FM_USE_WHEN, FM_CALLED_FROM (newline-
+# separated list), FM_KEYS (newline-separated list), FM_HAS_CALLED_FROM.
+parse_frontmatter() {
+  local block="$1"
+
+  FM_TITLE=""
+  FM_USE_WHEN=""
+  FM_CALLED_FROM=""
+  FM_KEYS=""
+  FM_HAS_CALLED_FROM=0
+
+  local lines=()
+  while IFS= read -r line; do
+    lines+=("$line")
+  done <<< "$block"
+
+  local i=0
+  local count=${#lines[@]}
+
+  while (( i < count )); do
+    local raw="${lines[$i]}"
+    local trimmed
+    trimmed="$(echo "$raw" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+
+    # skip blanks and comments
+    if [[ -z "$trimmed" || "$trimmed" == \#* ]]; then
+      (( i++ )) || true
+      continue
+    fi
+
+    # must contain a colon to be a key
+    if [[ "$trimmed" != *:* ]]; then
+      (( i++ )) || true
+      continue
+    fi
+
+    local key val
+    key="$(echo "$trimmed" | sed 's/:.*//' | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+    val="$(echo "$trimmed" | sed 's/^[^:]*://' | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+
+    if [[ -n "$key" ]]; then
+      if [[ -n "$FM_KEYS" ]]; then
+        FM_KEYS="${FM_KEYS}"$'\n'"${key}"
+      else
+        FM_KEYS="$key"
+      fi
+    fi
+
+    if [[ "$key" == "title" ]]; then
+      FM_TITLE="$(echo "$val" | sed "s/^[[:space:]]*//;s/[[:space:]]*$//;s/^[\"']//;s/[\"']$//")"
+      (( i++ )) || true
+      continue
+    fi
+
+    if [[ "$key" == "use_when" ]]; then
+      FM_USE_WHEN="$(echo "$val" | sed "s/^[[:space:]]*//;s/[[:space:]]*$//;s/^[\"']//;s/[\"']$//")"
+      (( i++ )) || true
+      continue
+    fi
+
+    if [[ "$key" == "called_from" ]]; then
+      FM_HAS_CALLED_FROM=1
+      # Inline array form: [a, b, c]
+      if [[ "$val" == \[* ]]; then
+        parse_called_from_inline "$val"
+        (( i++ )) || true
+        continue
+      fi
+      # YAML list form
+      local items=""
+      (( i++ )) || true
+      while (( i < count )); do
+        local sub="${lines[$i]}"
+        local sub_trimmed
+        sub_trimmed="$(echo "$sub" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+        if [[ -z "$sub_trimmed" ]]; then
+          (( i++ )) || true
+          continue
+        fi
+        # If it looks like a new key (has colon, doesn't start with -)
+        if [[ "$sub_trimmed" == *:* && "$sub_trimmed" != -* ]]; then
+          break
+        fi
+        if [[ "$sub_trimmed" == -* ]]; then
+          local item
+          item="$(echo "$sub_trimmed" | sed 's/^-[[:space:]]*//' | sed "s/^[[:space:]]*//;s/[[:space:]]*$//;s/^[\"']//;s/[\"']$//")"
+          if [[ -n "$item" ]]; then
+            if [[ -n "$items" ]]; then
+              items="${items}"$'\n'"${item}"
+            else
+              items="$item"
+            fi
+          fi
+        fi
+        (( i++ )) || true
+      done
+      FM_CALLED_FROM="$items"
+      continue
+    fi
+
+    (( i++ )) || true
+  done
+}
+
+# Parses inline [a, b, c] into FM_CALLED_FROM (newline-separated).
+parse_called_from_inline() {
+  local val="$1"
+  FM_CALLED_FROM=""
+  # Strip outer brackets
+  local inner
+  inner="$(echo "$val" | sed 's/^[[:space:]]*\[//;s/\][[:space:]]*$//')"
+  inner="$(echo "$inner" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+  if [[ -z "$inner" ]]; then
+    return
+  fi
+  local IFS=','
+  local parts
+  read -ra parts <<< "$inner"
+  for p in "${parts[@]}"; do
+    local trimmed
+    trimmed="$(echo "$p" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+    if [[ -n "$trimmed" ]]; then
+      if [[ -n "$FM_CALLED_FROM" ]]; then
+        FM_CALLED_FROM="${FM_CALLED_FROM}"$'\n'"${trimmed}"
+      else
+        FM_CALLED_FROM="$trimmed"
+      fi
+    fi
+  done
+}
+
+# ── suspicious gate-waiver check ─────────────────────────────────────────
+
+# Checks file text for patterns that suggest waiving skill gates.
+# Sets SUSPICIOUS_MATCH to the matched snippet, or empty string.
+check_suspicious_gate_waiver() {
+  local file="$1"
+
+  SUSPICIOUS_MATCH=""
+
+  local patterns=(
+    '\b(skip|waive|override|ignore)\b.{0,80}\b(gate|review|verify|verify-release|security|data|tests?)\b'
+    '\b(disable|turn off)\b.{0,80}\b(tests?|checks?|ci)\b'
+    '\b(force merge|merge anyway|ignore failing)\b'
+  )
+
+  for pat in "${patterns[@]}"; do
+    local match=""
+    # Use grep -ioP for PCRE; fall back to grep -ioE
+    match="$(grep -ioP "$pat" "$file" 2>/dev/null | head -1)" || true
+    if [[ -z "$match" ]]; then
+      match="$(grep -ioE "$pat" "$file" 2>/dev/null | head -1)" || true
+    fi
+    if [[ -z "$match" ]]; then
+      continue
+    fi
+
+    # Find byte offset to check prefix for negation
+    local byte_offset=""
+    byte_offset="$(grep -iobP "$pat" "$file" 2>/dev/null | head -1 | cut -d: -f1)" || true
+    if [[ -z "$byte_offset" ]]; then
+      byte_offset="$(grep -iobE "$pat" "$file" 2>/dev/null | head -1 | cut -d: -f1)" || true
+    fi
+
+    if [[ -n "$byte_offset" ]] && (( byte_offset > 0 )); then
+      local prefix_start=$(( byte_offset > 40 ? byte_offset - 40 : 0 ))
+      local prefix_len=$(( byte_offset - prefix_start ))
+      local prefix
+      prefix="$(dd if="$file" bs=1 skip="$prefix_start" count="$prefix_len" 2>/dev/null | tr '[:upper:]' '[:lower:]')"
+      # Check negation prefixes
+      local negated=0
+      for neg in "do not" "don't" "must not" "never" "cannot" "can't" "should not"; do
+        if [[ "$prefix" == *"$neg"* ]]; then
+          negated=1
+          break
+        fi
+      done
+      if (( negated )); then
+        continue
+      fi
+    fi
+
+    # Clean up the snippet
+    local snippet
+    snippet="$(echo "$match" | tr '\n' ' ' | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+    SUSPICIOUS_MATCH="$snippet"
+    return 0
+  done
+
+  return 1
+}
+
+# ── lint a single runbook ─────────────────────────────────────────────────
+
+lint_runbook() {
+  local path="$1"
+  local fail_missing_called_from="$2"
+  local fail_extra_keys="$3"
+
+  local rel="${path#"$REPO_ROOT/"}"
+  local strict=0
+  env_flag "HARNESS_STRICT_RUNBOOKS" "0" && strict=1 || true
+
+  # --- frontmatter presence ---
+  local block=""
+  if ! block="$(extract_frontmatter "$path")"; then
+    local level="warning"
+    (( strict )) && level="error"
+    emit_and_count "$level" "$rel" "Runbook frontmatter" \
+      "Runbook '${rel}' must start with YAML frontmatter ('---')."
+    return
+  fi
+
+  # --- parse ---
+  parse_frontmatter "$block"
+
+  # --- required fields ---
+  if [[ -z "$FM_TITLE" ]]; then
+    local level="warning"
+    (( strict )) && level="error"
+    emit_and_count "$level" "$rel" "Runbook frontmatter" \
+      "Runbook '${rel}' frontmatter must include a 'title:' field."
+  fi
+
+  if [[ -z "$FM_USE_WHEN" ]]; then
+    local level="warning"
+    (( strict )) && level="error"
+    emit_and_count "$level" "$rel" "Runbook frontmatter" \
+      "Runbook '${rel}' frontmatter must include a 'use_when:' field."
+  fi
+
+  # --- called_from ---
+  if (( ! FM_HAS_CALLED_FROM )) || [[ -z "$FM_CALLED_FROM" ]]; then
+    local level="warning"
+    if (( strict )) || [[ "$fail_missing_called_from" == "1" ]]; then
+      level="error"
+    fi
+    emit_and_count "$level" "$rel" "Runbook frontmatter" \
+      "Runbook '${rel}' frontmatter should include non-empty 'called_from:' (list of skills/steps where this runbook is applied)."
+  fi
+
+  # --- extra keys ---
+  local extras=""
+  if [[ -n "$FM_KEYS" ]]; then
+    while IFS= read -r k; do
+      if [[ "$k" != "title" && "$k" != "use_when" && "$k" != "called_from" ]]; then
+        if [[ -n "$extras" ]]; then
+          extras="${extras}, ${k}"
+        else
+          extras="$k"
+        fi
+      fi
+    done <<< "$FM_KEYS"
+  fi
+
+  if [[ -n "$extras" ]]; then
+    local level="warning"
+    if (( strict )) || [[ "$fail_extra_keys" == "1" ]]; then
+      level="error"
+    fi
+    emit_and_count "$level" "$rel" "Runbook frontmatter" \
+      "Runbook '${rel}' has extra frontmatter key(s): ${extras}. Prefer keeping runbooks to {title,use_when,called_from} unless you have a strong reason."
+  fi
+
+  # --- suspicious gate-waiver language ---
+  if check_suspicious_gate_waiver "$path"; then
+    local level="warning"
+    (( strict )) && level="error"
+    emit_and_count "$level" "$rel" "Potential gate waiver" \
+      "Runbook '${rel}' appears to suggest waiving skill-enforced gates: '${SUSPICIOUS_MATCH}'. Runbooks are additive only; skill gates win."
+  fi
+}
+
+# ── iter_runbooks ─────────────────────────────────────────────────────────
+
+iter_runbooks() {
+  local dir="$1"
+  if [[ ! -d "$dir" ]]; then
+    return
+  fi
+  find "$dir" -name '*.md' -type f | sort
+}
+
+# ── main ──────────────────────────────────────────────────────────────────
+
+main() {
+  if ! load_config; then
+    return 2
+  fi
+
+  local fail_missing_called_from=0
+  env_flag "HARNESS_FAIL_ON_MISSING_RUNBOOK_CALLED_FROM" "0" && fail_missing_called_from=1 || true
+
+  local fail_extra_keys=0
+  env_flag "HARNESS_FAIL_ON_EXTRA_RUNBOOK_FRONTMATTER" "0" && fail_extra_keys=1 || true
+
+  local runbooks_dir="$REPO_ROOT/docs/runbooks"
+
+  echo "he-runbooks-lint: starting"
+  echo "Repro: bash scripts/ci/he-runbooks-lint.sh"
+
+  # --- expected runbooks from config ---
+  local expected_runbooks
+  expected_runbooks="$(jq -r '(.expected_runbooks // .required_runbooks // []) | if type == "array" then .[] else empty end' "$CONFIG_PATH" 2>/dev/null)" || true
+
+  if [[ -n "$expected_runbooks" ]]; then
+    while IFS= read -r rb; do
+      [[ -z "$rb" ]] && continue
+      if [[ ! -f "$REPO_ROOT/$rb" ]]; then
+        emit_and_count "warning" "$rb" "Expected runbook missing" \
+          "Missing runbook: '${rb}'. Policy: runbooks are additive and should not block forward progress. Fix: create it (run he-bootstrap) or remove it from expected_runbooks in config."
+      fi
+    done <<< "$expected_runbooks"
+  fi
+
+  # --- lint each runbook ---
+  while IFS= read -r path; do
+    [[ -z "$path" ]] && continue
+    lint_runbook "$path" "$fail_missing_called_from" "$fail_extra_keys"
+  done < <(iter_runbooks "$runbooks_dir")
+
+  # --- summary ---
+  if (( ERRORS > 0 )); then
+    echo "he-runbooks-lint: FAIL (${ERRORS} error(s), ${WARNINGS} warning(s))" >&2
+    return 1
+  fi
+
+  echo "he-runbooks-lint: OK (${WARNINGS} warning(s))"
+  return 0
+}
+
+main "$@"
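
For context on what this linter accepts, here is a minimal sketch of a runbook that should pass the checks above: frontmatter limited to the three expected keys (title, use_when, called_from) and no gate-waiver language in the body. The filename and the called_from values are illustrative only, not part of the package.

# Hypothetical runbook written with only the expected frontmatter keys.
cat > docs/runbooks/example-task.md <<'EOF'
---
title: Example task
use_when: "An agent needs a worked example of runbook frontmatter."
called_from:
  - code-review
  - merge-change
---

1. Do the task.
2. Record evidence.
EOF

# Re-run the linter; this file should produce no errors or warnings.
bash scripts/ci/he-runbooks-lint.sh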

package/scripts/ci/he-specs-lint.sh
@@ -0,0 +1,258 @@
+#!/bin/bash
+set -euo pipefail
+
+# ── Repo root relative to script location (scripts/ci/he-specs-lint.sh) ──
+REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
+DEFAULT_CONFIG_PATH="scripts/ci/he-docs-config.json"
+
+# ── Default required headings ──
+DEFAULT_REQUIRED_HEADINGS=(
+  "## Purpose / Big Picture"
+  "## Scope"
+  "## Non-Goals"
+  "## Risks"
+  "## Rollout"
+  "## Validation and Acceptance Signals"
+  "## Requirements"
+  "## Success Criteria"
+  "## Priority"
+  "## Initial Milestone Candidates"
+  "## Revision Notes"
+)
+
+DEFAULT_TRIVIAL_REQUIRED_HEADINGS=(
+  "## Purpose / Big Picture"
+  "## Requirements"
+  "## Success Criteria"
+)
+
+# ── Counters ──
+errors=0
+warnings=0
+
+# ── Helpers ──
+
+gh_annotate() {
+  local level="$1" file="$2" title="$3" msg="$4"
+  if [[ -n "$file" ]]; then
+    echo "::${level} file=${file},title=${title}::${msg}"
+  else
+    echo "::${level} title=${title}::${msg}"
+  fi
+}
+
+emit() {
+  local level="$1" file="$2" title="$3" msg="$4"
+  gh_annotate "$level" "$file" "$title" "$msg"
+  local upper
+  upper="$(echo "$level" | tr '[:lower:]' '[:upper:]')"
+  echo "${upper}: ${msg}" >&2
+  if [[ "$level" == "error" ]]; then
+    (( errors++ )) || true
+  else
+    (( warnings++ )) || true
+  fi
+}
+
+# Extract frontmatter block (content between first --- and second ---).
+# Returns via stdout; returns 1 if no valid frontmatter found.
+extract_frontmatter() {
+  local file="$1"
+  local first_line
+  first_line="$(head -n1 "$file")"
+  # Trim whitespace
+  first_line="$(echo "$first_line" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+  if [[ "$first_line" != "---" ]]; then
+    return 1
+  fi
+  # Print lines between first --- and second ---, exclusive
+  awk 'NR==1 && /^[[:space:]]*---[[:space:]]*$/ { found=1; next }
+       found && /^[[:space:]]*---[[:space:]]*$/ { exit }
+       found { print }' "$file"
+  # Verify we actually found a closing ---
+  local count
+  count="$(awk '/^[[:space:]]*---[[:space:]]*$/ { c++ } c==2 { print c; exit }' "$file")"
+  if [[ "$count" != "2" ]]; then
+    return 1
+  fi
+  return 0
+}
+
+# Parse frontmatter key-value pairs into an associative array.
+# Usage: parse_frontmatter "$frontmatter_text"
+# Sets global associative array FM_KV.
+parse_frontmatter() {
+  local fm_text="$1"
+  FM_KV=()
+  while IFS= read -r raw_line; do
+    # Trim
+    local line
+    line="$(echo "$raw_line" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+    # Skip empty lines and comments
+    [[ -z "$line" || "$line" == \#* ]] && continue
+    # Must contain a colon
+    [[ "$line" != *:* ]] && continue
+    local key val
+    key="$(echo "$line" | cut -d: -f1 | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+    val="$(echo "$line" | cut -d: -f2- | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+    FM_KV["$key"]="$val"
+  done <<< "$fm_text"
+}
+
+# Check if file contains an exact line match.
+has_exact_line() {
+  local file="$1" needle="$2"
+  grep -qFx "$needle" "$file"
+}
+
+# Check for placeholder tokens in file text.
+check_placeholders() {
+  local file_rel="$1" file_path="$2" fail_ph="$3"
+  shift 3
+  local patterns=("$@")
+  for p in "${patterns[@]}"; do
+    [[ -z "$p" ]] && continue
+    if grep -qF "$p" "$file_path"; then
+      local msg="Spec '${file_rel}' contains placeholder token '${p}'."
+      if [[ "$fail_ph" == "1" ]]; then
+        emit "error" "$file_rel" "Placeholder token" "$msg"
+      else
+        emit "warning" "$file_rel" "Placeholder token" "${msg} (Set HARNESS_FAIL_ON_ARTIFACT_PLACEHOLDERS=1 to enforce.)"
+      fi
+      break
+    fi
+  done
+}
+
+# ── Load config ──
+load_config() {
+  local config_rel="${HARNESS_DOCS_CONFIG:-$DEFAULT_CONFIG_PATH}"
+  local config_path="${REPO_ROOT}/${config_rel}"
+  if [[ ! -f "$config_path" ]]; then
+    echo "Error: he-specs-lint missing/invalid config: Missing config '${config_rel}'. Fix: create it (bootstrap should do this) or set HARNESS_DOCS_CONFIG." >&2
+    exit 2
+  fi
+  # Validate it's a JSON object
+  if ! jq -e 'type == "object"' "$config_path" > /dev/null 2>&1; then
+    echo "Error: he-specs-lint missing/invalid config: Config must be a JSON object." >&2
+    exit 2
+  fi
+  CONFIG_PATH="$config_path"
+}
+
+# ── Check a single spec file ──
+check_spec() {
+  local file_path="$1"
+  local rel="${file_path#"${REPO_ROOT}"/}"
+
+  # Extract frontmatter
+  local fm_text
+  if ! fm_text="$(extract_frontmatter "$file_path")"; then
+    emit "error" "$rel" "Missing YAML frontmatter" \
+      "Spec '${rel}' must start with YAML frontmatter delimited by '---' lines."
+    return
+  fi
+
+  # Parse frontmatter key-value pairs
+  declare -A FM_KV
+  parse_frontmatter "$fm_text"
+
+  # Required frontmatter keys from config
+  local required_keys_json
+  required_keys_json="$(jq -r '(.required_spec_frontmatter_keys // []) | if type == "array" then .[] else empty end' "$CONFIG_PATH" 2>/dev/null)" || true
+  if [[ -n "$required_keys_json" ]]; then
+    while IFS= read -r k; do
+      [[ -z "$k" ]] && continue
+      if [[ -z "${FM_KV[$k]+x}" ]]; then
+        emit "error" "$rel" "Missing frontmatter key" \
+          "Spec '${rel}' missing YAML frontmatter key '${k}:'."
+      fi
+    done <<< "$required_keys_json"
+  fi
+
+  # Validate plan_mode
+  local plan_mode="${FM_KV[plan_mode]:-}"
+  plan_mode="$(echo "$plan_mode" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+  if [[ -n "$plan_mode" && "$plan_mode" != "trivial" && "$plan_mode" != "lightweight" && "$plan_mode" != "execution" ]]; then
+    emit "error" "$rel" "Invalid plan_mode" \
+      "Spec '${rel}' has invalid plan_mode '${plan_mode}' (must be 'trivial', 'lightweight', or 'execution')."
+  fi
+
+  # Validate spike_recommended
+  local spike_rec="${FM_KV[spike_recommended]:-}"
+  spike_rec="$(echo "$spike_rec" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
+  if [[ -n "$spike_rec" && "$spike_rec" != "yes" && "$spike_rec" != "no" ]]; then
+    emit "error" "$rel" "Invalid spike_recommended" \
+      "Spec '${rel}' has invalid spike_recommended '${spike_rec}' (must be 'yes' or 'no')."
+  fi
+
+  # Required headings
+  local -a required_headings
+  if [[ "$plan_mode" == "trivial" ]]; then
+    required_headings=("${DEFAULT_TRIVIAL_REQUIRED_HEADINGS[@]}")
+  else
+    required_headings=("${DEFAULT_REQUIRED_HEADINGS[@]}")
+  fi
+  for h in "${required_headings[@]}"; do
+    if ! has_exact_line "$file_path" "$h"; then
+      emit "error" "$rel" "Missing heading" \
+        "Spec '${rel}' missing required heading line '${h}'."
+    fi
+  done
+
+  # Placeholder patterns
+  local -a placeholder_patterns=()
+  local patterns_json
+  patterns_json="$(jq -r '(.artifact_placeholder_patterns // []) | if type == "array" then .[] else empty end' "$CONFIG_PATH" 2>/dev/null)" || true
+  if [[ -n "$patterns_json" ]]; then
+    while IFS= read -r p; do
+      [[ -n "$p" ]] && placeholder_patterns+=("$p")
+    done <<< "$patterns_json"
+  fi
+
+  local fail_ph="${HARNESS_FAIL_ON_ARTIFACT_PLACEHOLDERS:-0}"
+  if [[ ${#placeholder_patterns[@]} -gt 0 ]]; then
+    check_placeholders "$rel" "$file_path" "$fail_ph" "${placeholder_patterns[@]}"
+  fi
+}
+
+# ── Main ──
+main() {
+  load_config
+
+  echo "he-specs-lint: starting"
+  echo "Repro: bash scripts/ci/he-specs-lint.sh"
+
+  local specs_dir="${REPO_ROOT}/docs/specs"
+  if [[ ! -d "$specs_dir" ]]; then
+    echo "he-specs-lint: OK (docs/specs not present)"
+    exit 0
+  fi
+
+  # Collect spec files (*.md excluding README.md and index.md), sorted
+  local -a files=()
+  while IFS= read -r -d '' f; do
+    local basename
+    basename="$(basename "$f")"
+    [[ "$basename" == "README.md" || "$basename" == "index.md" ]] && continue
+    files+=("$f")
+  done < <(find "$specs_dir" -maxdepth 1 -name '*.md' -print0 | sort -z)
+
+  if [[ ${#files[@]} -eq 0 ]]; then
+    echo "he-specs-lint: OK (no spec files)"
+    exit 0
+  fi
+
+  for f in "${files[@]}"; do
+    check_spec "$f"
+  done
+
+  if [[ $errors -gt 0 ]]; then
+    echo "he-specs-lint: FAIL (${errors} error(s), ${warnings} warning(s))" >&2
+    exit 1
+  fi
+  echo "he-specs-lint: OK (${warnings} warning(s))"
+  exit 0
+}
+
+main "$@"
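
Similarly, a minimal sketch of a trivial-mode spec that satisfies the heading checks above. Which frontmatter keys are actually required comes from required_spec_frontmatter_keys in scripts/ci/he-docs-config.json, so the keys, filename, and body text shown here are assumptions for illustration only.

# Hypothetical trivial-mode spec: plan_mode 'trivial' needs only the three
# headings in DEFAULT_TRIVIAL_REQUIRED_HEADINGS, matched as exact lines.
cat > docs/specs/2026-02-18-example-spec.md <<'EOF'
---
title: Example spec
plan_mode: trivial
spike_recommended: no
---

## Purpose / Big Picture
One sentence on why this change exists.

## Requirements
- The single requirement.

## Success Criteria
- The observable signal that the change worked.
EOF

# Re-run the linter; additional keys required by the config would still be flagged.
bash scripts/ci/he-specs-lint.sh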