@samahlstrom/forge-cli 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +175 -0
- package/bin/forge.js +2 -0
- package/dist/addons/index.d.ts +25 -0
- package/dist/addons/index.js +139 -0
- package/dist/addons/index.js.map +1 -0
- package/dist/commands/add.d.ts +1 -0
- package/dist/commands/add.js +61 -0
- package/dist/commands/add.js.map +1 -0
- package/dist/commands/doctor.d.ts +1 -0
- package/dist/commands/doctor.js +177 -0
- package/dist/commands/doctor.js.map +1 -0
- package/dist/commands/ingest.d.ts +24 -0
- package/dist/commands/ingest.js +316 -0
- package/dist/commands/ingest.js.map +1 -0
- package/dist/commands/init.d.ts +8 -0
- package/dist/commands/init.js +557 -0
- package/dist/commands/init.js.map +1 -0
- package/dist/commands/remove.d.ts +1 -0
- package/dist/commands/remove.js +42 -0
- package/dist/commands/remove.js.map +1 -0
- package/dist/commands/status.d.ts +1 -0
- package/dist/commands/status.js +48 -0
- package/dist/commands/status.js.map +1 -0
- package/dist/commands/upgrade.d.ts +5 -0
- package/dist/commands/upgrade.js +190 -0
- package/dist/commands/upgrade.js.map +1 -0
- package/dist/detect/features.d.ts +10 -0
- package/dist/detect/features.js +33 -0
- package/dist/detect/features.js.map +1 -0
- package/dist/detect/go.d.ts +3 -0
- package/dist/detect/go.js +38 -0
- package/dist/detect/go.js.map +1 -0
- package/dist/detect/index.d.ts +25 -0
- package/dist/detect/index.js +32 -0
- package/dist/detect/index.js.map +1 -0
- package/dist/detect/node.d.ts +3 -0
- package/dist/detect/node.js +99 -0
- package/dist/detect/node.js.map +1 -0
- package/dist/detect/python.d.ts +3 -0
- package/dist/detect/python.js +86 -0
- package/dist/detect/python.js.map +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +51 -0
- package/dist/index.js.map +1 -0
- package/dist/render/engine.d.ts +8 -0
- package/dist/render/engine.js +71 -0
- package/dist/render/engine.js.map +1 -0
- package/dist/render/merge.d.ts +5 -0
- package/dist/render/merge.js +33 -0
- package/dist/render/merge.js.map +1 -0
- package/dist/utils/fs.d.ts +8 -0
- package/dist/utils/fs.js +42 -0
- package/dist/utils/fs.js.map +1 -0
- package/dist/utils/git.d.ts +3 -0
- package/dist/utils/git.js +31 -0
- package/dist/utils/git.js.map +1 -0
- package/dist/utils/hash.d.ts +8 -0
- package/dist/utils/hash.js +22 -0
- package/dist/utils/hash.js.map +1 -0
- package/dist/utils/yaml.d.ts +3 -0
- package/dist/utils/yaml.js +12 -0
- package/dist/utils/yaml.js.map +1 -0
- package/package.json +53 -0
- package/templates/addons/beads-dolt-backend/files/dolt-setup.sh +267 -0
- package/templates/addons/beads-dolt-backend/manifest.yaml +13 -0
- package/templates/addons/browser-testing/files/browser-smoke.sh +196 -0
- package/templates/addons/browser-testing/files/visual-qa.md +103 -0
- package/templates/addons/browser-testing/manifest.yaml +20 -0
- package/templates/addons/compliance-hipaa/files/hipaa-checks.sh +184 -0
- package/templates/addons/compliance-hipaa/files/hipaa-context.md +91 -0
- package/templates/addons/compliance-hipaa/manifest.yaml +15 -0
- package/templates/addons/compliance-soc2/files/soc2-checks.sh +232 -0
- package/templates/addons/compliance-soc2/files/soc2-context.md +147 -0
- package/templates/addons/compliance-soc2/manifest.yaml +15 -0
- package/templates/core/CLAUDE.md.hbs +70 -0
- package/templates/core/agents/architect.md.hbs +68 -0
- package/templates/core/agents/backend.md.hbs +27 -0
- package/templates/core/agents/frontend.md.hbs +25 -0
- package/templates/core/agents/quality.md.hbs +40 -0
- package/templates/core/agents/security.md.hbs +53 -0
- package/templates/core/context/project.md.hbs +60 -0
- package/templates/core/forge.yaml.hbs +69 -0
- package/templates/core/hooks/post-edit.sh.hbs +8 -0
- package/templates/core/hooks/pre-edit.sh.hbs +41 -0
- package/templates/core/hooks/session-start.sh.hbs +34 -0
- package/templates/core/pipeline/classify.sh.hbs +159 -0
- package/templates/core/pipeline/decompose.md.hbs +100 -0
- package/templates/core/pipeline/deliver.sh.hbs +171 -0
- package/templates/core/pipeline/execute.md.hbs +138 -0
- package/templates/core/pipeline/intake.sh.hbs +152 -0
- package/templates/core/pipeline/orchestrator.sh.hbs +361 -0
- package/templates/core/pipeline/verify.sh.hbs +160 -0
- package/templates/core/settings.json.hbs +55 -0
- package/templates/core/skill-creator.md.hbs +151 -0
- package/templates/core/skill-deliver.md.hbs +46 -0
- package/templates/core/skill-ingest.md.hbs +245 -0
- package/templates/presets/go/stack.md.hbs +133 -0
- package/templates/presets/python-fastapi/stack.md.hbs +101 -0
- package/templates/presets/react-next-ts/stack.md.hbs +77 -0
- package/templates/presets/sveltekit-ts/stack.md.hbs +116 -0
|
@@ -0,0 +1,361 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# forge pipeline orchestrator — main state machine
# Generated by: forge init
# Uses bd (steveyegge/beads) for task tracking
set -euo pipefail

# Root of all forge pipeline state, scripts, and config.
FORGE_DIR=".forge"
PIPELINE_DIR="${FORGE_DIR}/pipeline"
CONFIG_FILE="${FORGE_DIR}/forge.yaml"
# Stage scripts invoked by this orchestrator.
INTAKE="${PIPELINE_DIR}/intake.sh"
CLASSIFY="${PIPELINE_DIR}/classify.sh"
VERIFY="${PIPELINE_DIR}/verify.sh"
DELIVER="${PIPELINE_DIR}/deliver.sh"
# Directory holding the stage prompt templates (decompose.md, execute.md).
PROMPTS_DIR="${PIPELINE_DIR}"

# --- Helpers ---
|
|
17
|
+
|
|
18
|
+
# Emit a pre-built JSON payload to stdout.
# printf instead of echo: echo mangles payloads that start with "-n"/"-e"
# or contain backslash sequences, which is plausible for arbitrary JSON.
emit_json() {
  printf '%s\n' "$1"
}
|
|
21
|
+
|
|
22
|
+
# Emit a PAUSE control message: the driver should run the given prompt,
# write its result to output_file, then invoke the printed resume command.
#   $1 stage label, $2 prompt file, $3 output file, $4 stage to resume at,
#   $5 bd task id; remaining args are context file paths for the prompt.
emit_pause() {
  local task="$1" prompt_file="$2" output_file="$3" resume_stage="$4" task_id="$5"
  shift 5
  local ctx_files=("$@")
  local ctx_json
  # Convert the path list to a JSON string array.
  # NOTE(review): with zero context files this emits [""] rather than [] —
  # visible callers always pass two paths; confirm before relying on empty.
  ctx_json=$(printf '%s\n' "${ctx_files[@]}" | jq -R . | jq -s .)
  jq -n \
    --arg status "PAUSE" \
    --arg task "$task" \
    --arg prompt_file "$prompt_file" \
    --arg output_file "$output_file" \
    --argjson context "$ctx_json" \
    --arg resume "bash ${PIPELINE_DIR}/orchestrator.sh --resume ${task_id} --stage ${resume_stage}" \
    '{status:$status, task:$task, prompt_file:$prompt_file, output_file:$output_file, context:$context, resume:$resume}'
}
|
|
37
|
+
|
|
38
|
+
# Emit a HUMAN_INPUT control message: ask the user a question with a fixed
# option list. The resume command keeps a literal ${idx} placeholder that
# the driver substitutes with the chosen option index.
#   $1 question text, $2 bd task id; remaining args are the option strings.
emit_human_input() {
  local question="$1" task_id="$2"
  shift 2
  local options=("$@")
  local opts_json
  # Convert the options to a JSON string array.
  opts_json=$(printf '%s\n' "${options[@]}" | jq -R . | jq -s .)
  jq -n \
    --arg status "HUMAN_INPUT" \
    --arg question "$question" \
    --argjson options "$opts_json" \
    --arg resume "bash ${PIPELINE_DIR}/orchestrator.sh --resume ${task_id} --answer \"\${idx}\"" \
    '{status:$status, question:$question, options:$options, resume:$resume}'
}
|
|
51
|
+
|
|
52
|
+
# Emit the terminal DONE message with delivery details.
#   $1 PR URL, $2 branch name, $3 human-readable summary.
emit_done() {
  local url="$1"
  local br="$2"
  local msg="$3"
  jq -n \
    --arg status "DONE" \
    --arg pr_url "$url" \
    --arg branch "$br" \
    --arg summary "$msg" \
    '{status:$status, pr_url:$pr_url, branch:$branch, summary:$summary}'
}
|
|
61
|
+
|
|
62
|
+
# Emit an ERROR message for a failed stage.
#   $1 stage name, $2 error text, $3 optional debug-file path,
#   $4 optional suggested follow-up command. $3/$4 default to "".
emit_error() {
  local where="$1"
  local why="$2"
  local dbg="${3:-}"
  local hint="${4:-}"
  jq -n \
    --arg status "ERROR" \
    --arg stage "$where" \
    --arg error "$why" \
    --arg debug_file "$dbg" \
    --arg action "$hint" \
    '{status:$status, stage:$stage, error:$error, debug_file:$debug_file, action:$action}'
}
|
|
72
|
+
|
|
73
|
+
# Read a scalar value from the flat YAML config.
#   $1 key — either "toplevel" or "section.subkey" (one level deep).
# Prints the value (quotes stripped) or nothing when absent; always
# returns 0.
read_config() {
  local key="$1"
  local file="$CONFIG_FILE"
  if [[ ! -f "$file" ]]; then
    echo ""
    return 0
  fi
  if [[ "$key" == *.* ]]; then
    local section="${key%%.*}"
    local subkey="${key#*.}"
    # Match any indentation depth for the subkey (the original required an
    # exact indent and broke on re-indented YAML). `|| true`: under the
    # script's `set -o pipefail`, a missing key made grep fail the pipeline,
    # which aborted callers doing `var=$(read_config …)` under `set -e`.
    sed -n "/^${section}:/,/^[^ ]/p" "$file" | grep -E "^[[:space:]]+${subkey}:" | head -1 | sed 's/^[^:]*: *//' | sed 's/^ *"//' | sed 's/" *$//' || true
  else
    grep "^${key}:" "$file" | head -1 | sed 's/^[^:]*: *//' | sed 's/^ *"//' | sed 's/" *$//' || true
  fi
  return 0
}
|
|
88
|
+
|
|
89
|
+
# --- bd helpers ---
|
|
90
|
+
|
|
91
|
+
# Print a single top-level field of a bd task, or nothing on any failure.
#   $1 task id, $2 jq field name.
bd_get_field() {
  local task_id="$1" field="$2"
  # `|| true`: under `set -euo pipefail`, a missing bd binary or unknown
  # task id made this pipeline fail, aborting callers that capture the
  # output via `var=$(bd_get_field …)` instead of getting "".
  bd show "$task_id" --json 2>/dev/null | jq -r ".${field} // empty" || true
}
|
|
95
|
+
|
|
96
|
+
# Print the value part of the first "prefix:value" label on a bd task,
# or nothing when absent.
#   $1 task id, $2 label prefix (e.g. "tier" for "tier:T2").
bd_get_label() {
  local task_id="$1" label_prefix="$2"
  # `|| true`: grep exits non-zero when no label matches, which under the
  # script's `set -euo pipefail` aborted callers like
  # `tier=$(bd_get_label …)` before their `${tier:-T1}` default could apply.
  bd show "$task_id" --json 2>/dev/null | jq -r ".labels[]? // empty" | grep "^${label_prefix}:" | head -1 | sed "s/^${label_prefix}://" || true
}
|
|
100
|
+
|
|
101
|
+
# --- Stage Functions ---
|
|
102
|
+
|
|
103
|
+
# Stage: intake. Parse the raw work description via intake.sh, create the
# task in bd, persist the intake JSON, and print "task_id|score|mode".
stage_intake() {
  local args=("$@")
  local intake_result
  # On failure, surface a replayable command so the user can debug intake.
  intake_result=$(bash "$INTAKE" "${args[@]}") || {
    emit_error "intake" "Intake parsing failed" "" "bash ${INTAKE} ${args[*]}"
    exit 1
  }

  local title description source mode quality_score
  title=$(echo "$intake_result" | jq -r '.title')
  description=$(echo "$intake_result" | jq -r '.description')
  source=$(echo "$intake_result" | jq -r '.source')
  mode=$(echo "$intake_result" | jq -r '.mode')
  quality_score=$(echo "$intake_result" | jq -r '.quality_score')

  # Create task in bd; description goes in via stdin to survive newlines.
  local task_id
  task_id=$(echo "$description" | bd create "$title" --stdin -l "mode:${mode},source:${source}" --json 2>/dev/null | jq -r '.id') || {
    emit_error "intake" "Failed to create task in bd" "" ""
    exit 1
  }
  # jq prints "null" with exit 0 when .id is absent, so the guard above
  # does not catch a malformed bd response — check the value explicitly.
  if [[ -z "$task_id" || "$task_id" == "null" ]]; then
    emit_error "intake" "Failed to create task in bd" "" ""
    exit 1
  fi

  # Store intake result alongside the task (PIPELINE_DIR, for consistency
  # with the rest of this script, instead of re-deriving the path).
  local task_dir="${PIPELINE_DIR}/runs/${task_id}"
  mkdir -p "$task_dir"
  echo "$intake_result" > "${task_dir}/intake.json"

  # Pipe-delimited result consumed by the main flow via cut.
  echo "$task_id|$quality_score|$mode"
}
|
|
132
|
+
|
|
133
|
+
# Stage: quality gate. If the intake quality score is below the configured
# threshold, pause with a HUMAN_INPUT question (option indices are handled
# by the --answer branch in main) and exit 0.
stage_quality_gate() {
  local task_id="$1" quality_score="$2"
  local threshold
  threshold=$(read_config "quality_threshold")
  threshold="${threshold:-0.4}"

  # Float comparison via awk (guaranteed by POSIX) instead of bc, which is
  # frequently absent on minimal systems — a missing bc made the original
  # arithmetic expansion blow up. awk exits 0 when score < threshold.
  if awk -v s="$quality_score" -v t="$threshold" 'BEGIN { exit !(s < t) }'; then
    emit_human_input \
      "Work description scored ${quality_score} (threshold: ${threshold}). The description may be too vague. Would you like to:" \
      "$task_id" \
      "Continue anyway" \
      "Provide more detail" \
      "Cancel"
    exit 0
  fi
}
|
|
149
|
+
|
|
150
|
+
# Stage: classify. Run classify.sh on the task description, record the
# resulting tier as a bd label, and print the tier (captured by callers).
stage_classify() {
  local task_id="$1"
  local description
  description=$(bd_get_field "$task_id" "description")

  local classify_result
  classify_result=$(bash "$CLASSIFY" "$description") || {
    emit_error "classify" "Classification failed" "" ""
    exit 1
  }

  local tier
  tier=$(echo "$classify_result" | jq -r '.tier')
  # (.reason is also present in classify output but was never used here.)

  # Add tier as a label. `|| true`: bd's stderr is already discarded, but
  # under `set -e` a non-zero bd exit still silently killed the whole run.
  bd update "$task_id" -l "tier:${tier}" 2>/dev/null || true

  # Stdout is this function's return value — keep it to the tier only.
  echo "$tier"
}
|
|
170
|
+
|
|
171
|
+
# Decide whether this task needs a decomposition pass.
# Prints "skip" or "decompose" on stdout.
#   $1 task id (unused today), $2 tier (T1/T2/T3), $3 mode.
stage_decompose_check() {
  local task_id="$1" tier="$2" mode="$3"
  local threshold_cfg
  threshold_cfg=$(read_config "decompose_threshold")
  threshold_cfg="${threshold_cfg:-T2}"

  # Fast-path modes never decompose.
  case "$mode" in
    quick|hotfix)
      echo "skip"
      return
      ;;
  esac

  # Trivial-tier work skips decomposition unless the configured threshold
  # has been lowered all the way to T1.
  if [[ "$tier" == "T1" && "$threshold_cfg" != "T1" ]]; then
    echo "skip"
    return
  fi

  echo "decompose"
}
|
|
189
|
+
|
|
190
|
+
# Stage: decompose. Claim the task and pause the pipeline so the agent can
# run the decompose prompt; resume continues at the execute stage.
# ({{stackFile}}/{{projectFile}} are Handlebars placeholders filled by
# `forge init` when this template is rendered.)
stage_decompose() {
  local task_id="$1"
  local task_dir="${FORGE_DIR}/pipeline/runs/${task_id}"
  local output_file="${task_dir}/decomposition.json"
  mkdir -p "$task_dir"

  # Claim the task. `|| true`: the claim is best-effort (stderr already
  # discarded), but under `set -e` a bd failure aborted the run silently.
  bd update "$task_id" --claim 2>/dev/null || true

  emit_pause \
    "decompose" \
    "${PROMPTS_DIR}/decompose.md" \
    "$output_file" \
    "execute" \
    "$task_id" \
    "{{stackFile}}" "{{projectFile}}"
  exit 0
}
|
|
208
|
+
|
|
209
|
+
# Stage: execute. Ensure the task is claimed and pause so the agent can run
# the execute prompt; resume continues at the verify stage.
stage_execute() {
  local task_id="$1"
  local task_dir="${FORGE_DIR}/pipeline/runs/${task_id}"
  local output_file="${task_dir}/execution.json"
  mkdir -p "$task_dir"

  # Ensure task is claimed/in-progress. `|| true`: best-effort claim —
  # under `set -e` a bd failure here silently aborted the whole run.
  bd update "$task_id" --claim 2>/dev/null || true

  emit_pause \
    "execute" \
    "${PROMPTS_DIR}/execute.md" \
    "$output_file" \
    "verify" \
    "$task_id" \
    "{{stackFile}}" "{{projectFile}}"
  exit 0
}
|
|
227
|
+
|
|
228
|
+
# Stage: verify. Run verify.sh against the task; on any failed check,
# persist the verification report and emit an ERROR whose action field is
# the command to re-run verification, then exit 1.
stage_verify() {
  local task_id="$1" tier="$2"

  local verify_result
  # verify.sh exits non-zero when checks fail; `|| true` keeps set -e from
  # aborting so the JSON result can be inspected below.
  verify_result=$(bash "$VERIFY" "$task_id" "$tier") || true

  local passed
  passed=$(echo "$verify_result" | jq -r '.passed // false')

  if [[ "$passed" != "true" ]]; then
    local failed_check
    failed_check=$(echo "$verify_result" | jq -r '.failed_check // "unknown"')

    # Save the full report for debugging before surfacing the failure.
    local task_dir="${FORGE_DIR}/pipeline/runs/${task_id}"
    local debug_file="${task_dir}/verify-failure.json"
    mkdir -p "$task_dir"
    echo "$verify_result" > "$debug_file"

    emit_error "verify" "Check failed: ${failed_check}" "$debug_file" \
      "bash ${PIPELINE_DIR}/orchestrator.sh --resume ${task_id} --stage verify"
    exit 1
  fi
}
|
|
251
|
+
|
|
252
|
+
# Stage: deliver. Hand off to deliver.sh; on success with a PR, close the
# bd task and emit DONE. When deliver.sh produced no PR URL, its own status
# JSON is forwarded verbatim.
stage_deliver() {
  local task_id="$1"

  local deliver_result
  deliver_result=$(bash "$DELIVER" "$task_id") || {
    emit_error "deliver" "Delivery failed" "" ""
    exit 1
  }

  local pr_url branch
  pr_url=$(echo "$deliver_result" | jq -r '.pr_url // ""')
  branch=$(echo "$deliver_result" | jq -r '.branch // ""')

  # No PR URL: deliver.sh emitted its own terminal message — pass it through.
  if [[ -z "$pr_url" ]]; then
    echo "$deliver_result"
    exit 0
  fi

  local title
  title=$(bd_get_field "$task_id" "title")

  # Close the task in bd. `|| true`: under `set -e` a bd failure here
  # aborted the script after the PR already existed, losing the DONE output.
  bd close "$task_id" --reason "Delivered: PR ${pr_url}" 2>/dev/null || true
  emit_done "$pr_url" "$branch" "Delivered: ${title}"
}
|
|
277
|
+
|
|
278
|
+
# --- Main ---

RESUME_ID=""
RESUME_STAGE=""
ANSWER=""
INPUT_ARGS=()

# Parse flags; anything unrecognized becomes part of the work description.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --resume)
      RESUME_ID="$2"; shift 2 ;;
    --stage)
      RESUME_STAGE="$2"; shift 2 ;;
    --answer)
      ANSWER="$2"; shift 2 ;;
    *)
      INPUT_ARGS+=("$1"); shift ;;
  esac
done

# Resume mode: re-enter the state machine at a named stage for an existing
# task, recovering tier/mode from bd labels (with defaults when missing).
if [[ -n "$RESUME_ID" && -n "$RESUME_STAGE" ]]; then
  task_id="$RESUME_ID"
  tier=$(bd_get_label "$task_id" "tier")
  tier="${tier:-T1}"
  mode=$(bd_get_label "$task_id" "mode")
  mode="${mode:-normal}"

  case "$RESUME_STAGE" in
    execute) stage_execute "$task_id" ;;
    verify) stage_verify "$task_id" "$tier" ;;
    deliver) stage_deliver "$task_id" ;;
    *) emit_error "resume" "Unknown stage: ${RESUME_STAGE}" "" ""; exit 1 ;;
  esac
  exit 0
fi

# Handle quality gate answer — indices 0/1/2 must match the option order
# emitted by stage_quality_gate ("Continue anyway" / "Provide more detail"
# / "Cancel").
if [[ -n "$RESUME_ID" && -n "$ANSWER" ]]; then
  task_id="$RESUME_ID"
  case "$ANSWER" in
    0) ;; # Continue anyway
    1) emit_error "quality-gate" "User chose to provide more detail" "" ""; exit 0 ;;
    2) bd close "$task_id" --reason "Cancelled by user" 2>/dev/null; emit_error "quality-gate" "Cancelled by user" "" ""; exit 0 ;;
  esac
  # Answer 0: continue the pipeline from classification onward.
  tier=$(stage_classify "$task_id")
  mode=$(bd_get_label "$task_id" "mode")
  mode="${mode:-normal}"
  decompose_decision=$(stage_decompose_check "$task_id" "$tier" "$mode")
  if [[ "$decompose_decision" == "decompose" ]]; then
    stage_decompose "$task_id"
  fi
  stage_execute "$task_id"
  exit 0
fi

# Fresh run
if [[ ${#INPUT_ARGS[@]} -eq 0 ]]; then
  emit_error "intake" "No work description provided" "" "forge work \"<description>\""
  exit 1
fi

# Stage: intake
# NOTE(review): "${INPUT_ARGS[@]}" on an empty array errors under set -u in
# bash < 4.4; the length guard above makes it non-empty here.
intake_output=$(stage_intake "${INPUT_ARGS[@]}")
task_id=$(echo "$intake_output" | cut -d'|' -f1)
quality_score=$(echo "$intake_output" | cut -d'|' -f2)
mode=$(echo "$intake_output" | cut -d'|' -f3)

# Stage: quality-gate (may exit 0 with a HUMAN_INPUT message)
stage_quality_gate "$task_id" "$quality_score"

# Stage: classify
tier=$(stage_classify "$task_id")

# Stage: decompose-check
decompose_decision=$(stage_decompose_check "$task_id" "$tier" "$mode")

# Stage: decompose (if needed; pauses and exits)
if [[ "$decompose_decision" == "decompose" ]]; then
  stage_decompose "$task_id"
fi

# Stage: execute (pauses and exits; verify/deliver run via --resume)
stage_execute "$task_id"
|
@@ -0,0 +1,160 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# forge pipeline — verify: run verification checks against task
# Generated by: forge init
set -euo pipefail

FORGE_DIR=".forge"
CONFIG_FILE="${FORGE_DIR}/forge.yaml"

# Positional args: task id (required), tier (defaults to T1).
TASK_ID="${1:-}"
TIER="${2:-T1}"

if [[ -z "$TASK_ID" ]]; then
  echo '{"error":"Usage: verify.sh <task-id> <tier>"}' >&2
  exit 1
fi

# --- Config reading ---
|
|
18
|
+
|
|
19
|
+
# Read a scalar value from the flat YAML config.
#   $1 key — either "toplevel" or "section.subkey" (one level deep).
# Prints the value (quotes stripped) or nothing when absent; always
# returns 0.
read_config() {
  local key="$1"
  local file="$CONFIG_FILE"
  if [[ ! -f "$file" ]]; then
    echo ""
    return 0
  fi
  if [[ "$key" == *.* ]]; then
    local section="${key%%.*}"
    local subkey="${key#*.}"
    # Match any indentation depth for the subkey (the original required an
    # exact indent and broke on re-indented YAML). `|| true`: under this
    # script's `set -o pipefail`, a missing key made grep fail the pipeline,
    # aborting the top-level `CMD_*=$(read_config …)` assignments under
    # `set -e` before any check could run.
    sed -n "/^${section}:/,/^[^ ]/p" "$file" | grep -E "^[[:space:]]+${subkey}:" | head -1 | sed 's/^[^:]*: *//' | sed 's/^ *"//' | sed 's/" *$//' || true
  else
    grep "^${key}:" "$file" | head -1 | sed 's/^[^:]*: *//' | sed 's/^ *"//' | sed 's/" *$//' || true
  fi
  return 0
}
|
|
34
|
+
|
|
35
|
+
# True (status 0) when the named config key holds a truthy value
# ("true", "yes", or "1"); false for anything else, including unset.
read_config_bool() {
  local raw
  raw=$(read_config "$1")
  case "$raw" in
    true|yes|1) return 0 ;;
    *) return 1 ;;
  esac
}
|
|
40
|
+
|
|
41
|
+
# --- Check definitions ---

# Handlebars placeholders, substituted by `forge init` when this template
# is rendered into the target project.
CMD_TYPECHECK="{{commands.typecheck}}"
CMD_LINT="{{commands.lint}}"
CMD_TEST="{{commands.test}}"

# Fallbacks if template vars are empty
CMD_TYPECHECK="${CMD_TYPECHECK:-npm run check}"
CMD_LINT="${CMD_LINT:-npm run lint}"
CMD_TEST="${CMD_TEST:-npx vitest run}"

# Optional checks from config
CMD_COVERAGE=$(read_config "commands.coverage")
CMD_SECURITY=$(read_config "commands.security")
CMD_ANTIPATTERN=$(read_config "commands.antipatterns")

# Feature toggles for the optional checks (compared against "true" below).
ENABLE_COVERAGE=$(read_config "verify.coverage")
ENABLE_SECURITY=$(read_config "verify.security")
ENABLE_ANTIPATTERN=$(read_config "verify.antipatterns")

# --- Run a single check ---

# Mutable state shared with run_check: per-check JSON fragments, the
# overall verdict, and details of the first gating failure.
RESULTS=()
PASSED=true
FAILED_CHECK=""
FAILED_STDERR=""
|
|
67
|
+
|
|
68
|
+
# Run one named verification command and append a JSON result object to
# RESULTS. A failing *required* check flips PASSED to false and records
# FAILED_CHECK/FAILED_STDERR, returning 1; optional failures return 0.
#   $1 check name, $2 command string (eval'd), $3 "true" if failure gates.
run_check() {
  local name="$1"
  local cmd="$2"
  local required="${3:-true}"

  # Unconfigured checks are recorded as skipped, never failed.
  if [[ -z "$cmd" ]]; then
    RESULTS+=("{\"check\":\"${name}\",\"status\":\"skipped\",\"reason\":\"no command configured\"}")
    return 0
  fi

  local start_time
  start_time=$(date +%s)

  local stderr_file
  stderr_file=$(mktemp)
  local exit_code=0
  # NOTE(review): cmd comes from forge.yaml / rendered templates, so eval
  # is trusted config here — never route untrusted input through this path.
  eval "$cmd" 2>"$stderr_file" >/dev/null || exit_code=$?

  local end_time
  end_time=$(date +%s)
  local duration=$((end_time - start_time))

  if [[ $exit_code -eq 0 ]]; then
    RESULTS+=("{\"check\":\"${name}\",\"status\":\"passed\",\"duration_s\":${duration}}")
    rm -f "$stderr_file"
    return 0
  fi

  # Failure: capture the first 50 stderr lines as a JSON-encoded string
  # (head reads the file directly — the original piped through cat).
  local stderr_content
  stderr_content=$(head -50 "$stderr_file" | jq -Rs .)
  rm -f "$stderr_file"

  RESULTS+=("{\"check\":\"${name}\",\"status\":\"failed\",\"exit_code\":${exit_code},\"duration_s\":${duration},\"stderr\":${stderr_content}}")

  if [[ "$required" == "true" ]]; then
    PASSED=false
    FAILED_CHECK="$name"
    # Decode the JSON string back to plain text for the failure summary;
    # printf instead of echo guards against backslash mangling.
    FAILED_STDERR=$(printf '%s' "$stderr_content" | jq -r .)
    return 1
  fi
  return 0
}
|
|
110
|
+
|
|
111
|
+
# --- Execute checks sequentially ---

# Required checks run in order; once PASSED flips false, the remaining
# checks are skipped entirely (fail-fast). The `|| true` on each call keeps
# a required failure (run_check returns 1) from aborting under set -e.

# 1. Typecheck (always required)
run_check "typecheck" "$CMD_TYPECHECK" "true" || true

# 2. Lint (always required)
if [[ "$PASSED" == "true" ]]; then
  run_check "lint" "$CMD_LINT" "true" || true
fi

# 3. Tests (always required)
if [[ "$PASSED" == "true" ]]; then
  run_check "test" "$CMD_TEST" "true" || true
fi

# 4. Coverage (optional, T2+ only)
if [[ "$PASSED" == "true" && "$ENABLE_COVERAGE" == "true" && ("$TIER" == "T2" || "$TIER" == "T3") ]]; then
  run_check "coverage" "$CMD_COVERAGE" "false" || true
fi

# 5. Security scan (optional, T3 only; required=true, so a failure gates)
if [[ "$PASSED" == "true" && "$ENABLE_SECURITY" == "true" && "$TIER" == "T3" ]]; then
  run_check "security" "$CMD_SECURITY" "true" || true
fi

# 6. Anti-pattern check (optional)
if [[ "$PASSED" == "true" && "$ENABLE_ANTIPATTERN" == "true" ]]; then
  run_check "antipatterns" "$CMD_ANTIPATTERN" "false" || true
fi

# --- Build results JSON ---

# RESULTS always has at least the typecheck entry, so jq -s never sees
# empty input here.
RESULTS_JSON=$(printf '%s\n' "${RESULTS[@]}" | jq -s '.')

if [[ "$PASSED" == "true" ]]; then
  jq -n \
    --argjson passed true \
    --argjson checks "$RESULTS_JSON" \
    --arg tier "$TIER" \
    '{passed:$passed, tier:$tier, checks:$checks}'
else
  # Failure payload includes which check gated and its captured stderr.
  jq -n \
    --argjson passed false \
    --arg failed_check "$FAILED_CHECK" \
    --arg stderr "$FAILED_STDERR" \
    --argjson checks "$RESULTS_JSON" \
    --arg tier "$TIER" \
    '{passed:$passed, failed_check:$failed_check, stderr:$stderr, tier:$tier, checks:$checks}'
fi
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
{
|
|
2
|
+
"permissions": {
|
|
3
|
+
"allow": [
|
|
4
|
+
"Read",
|
|
5
|
+
"Edit",
|
|
6
|
+
"Write",
|
|
7
|
+
"Glob",
|
|
8
|
+
"Grep",
|
|
9
|
+
"WebFetch",
|
|
10
|
+
"Bash(npm run *)",
|
|
11
|
+
"Bash(npx *)",
|
|
12
|
+
"Bash(git *)",
|
|
13
|
+
"Bash(gh *)",
|
|
14
|
+
"Bash(bash .forge/*)",
|
|
15
|
+
"Bash(ls *)",
|
|
16
|
+
"Bash(mkdir *)",
|
|
17
|
+
"Bash(cat *)",
|
|
18
|
+
"Bash(head *)",
|
|
19
|
+
"Bash(tail *)",
|
|
20
|
+
"Bash(wc *)",
|
|
21
|
+
"Bash(find *)",
|
|
22
|
+
"Bash(which *)",
|
|
23
|
+
"Bash(echo *)",
|
|
24
|
+
"Bash(sort *)",
|
|
25
|
+
"Bash(diff *)",
|
|
26
|
+
"Bash(cp *)",
|
|
27
|
+
"Bash(mv *)",
|
|
28
|
+
"Bash(touch *)",
|
|
29
|
+
"Bash(chmod *)",
|
|
30
|
+
"Bash(jq *)"
|
|
31
|
+
]
|
|
32
|
+
},
|
|
33
|
+
"hooks": {
|
|
34
|
+
"SessionStart": [
|
|
35
|
+
{
|
|
36
|
+
"command": "bash .forge/hooks/session-start.sh",
|
|
37
|
+
"timeout": 10000
|
|
38
|
+
}
|
|
39
|
+
],
|
|
40
|
+
"PreToolUse": [
|
|
41
|
+
{
|
|
42
|
+
"matcher": "Edit|Write",
|
|
43
|
+
"command": "bash .forge/hooks/pre-edit.sh",
|
|
44
|
+
"timeout": 5000
|
|
45
|
+
}
|
|
46
|
+
],
|
|
47
|
+
"PostToolUse": [
|
|
48
|
+
{
|
|
49
|
+
"matcher": "Edit|Write",
|
|
50
|
+
"command": "bash .forge/hooks/post-edit.sh",
|
|
51
|
+
"timeout": 5000
|
|
52
|
+
}
|
|
53
|
+
]
|
|
54
|
+
}
|
|
55
|
+
}
|