@sandrinio/vbounce 1.5.0 → 1.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +108 -18
- package/bin/vbounce.mjs +291 -146
- package/brains/AGENTS.md +12 -10
- package/brains/CHANGELOG.md +99 -1
- package/brains/CLAUDE.md +29 -22
- package/brains/GEMINI.md +47 -9
- package/brains/SETUP.md +11 -5
- package/brains/claude-agents/architect.md +22 -6
- package/brains/claude-agents/developer.md +2 -2
- package/brains/claude-agents/devops.md +3 -0
- package/brains/claude-agents/qa.md +25 -9
- package/brains/copilot/copilot-instructions.md +49 -0
- package/brains/cursor-rules/vbounce-process.mdc +9 -7
- package/brains/windsurf/.windsurfrules +30 -0
- package/package.json +2 -4
- package/scripts/close_sprint.mjs +94 -0
- package/scripts/complete_story.mjs +113 -0
- package/scripts/doctor.mjs +144 -0
- package/scripts/init_gate_config.sh +151 -0
- package/scripts/init_sprint.mjs +121 -0
- package/scripts/pre_gate_common.sh +576 -0
- package/scripts/pre_gate_runner.sh +176 -0
- package/scripts/prep_arch_context.mjs +178 -0
- package/scripts/prep_qa_context.mjs +134 -0
- package/scripts/prep_sprint_context.mjs +118 -0
- package/scripts/prep_sprint_summary.mjs +154 -0
- package/scripts/sprint_trends.mjs +160 -0
- package/scripts/suggest_improvements.mjs +200 -0
- package/scripts/update_state.mjs +132 -0
- package/scripts/validate_bounce_readiness.mjs +125 -0
- package/scripts/validate_report.mjs +39 -2
- package/scripts/validate_sprint_plan.mjs +117 -0
- package/scripts/validate_state.mjs +99 -0
- package/skills/agent-team/SKILL.md +56 -21
- package/skills/agent-team/references/cleanup.md +42 -0
- package/skills/agent-team/references/delivery-sync.md +43 -0
- package/skills/agent-team/references/git-strategy.md +52 -0
- package/skills/agent-team/references/mid-sprint-triage.md +71 -0
- package/skills/agent-team/references/report-naming.md +34 -0
- package/skills/doc-manager/SKILL.md +5 -4
- package/skills/improve/SKILL.md +27 -1
- package/skills/lesson/SKILL.md +23 -0
- package/templates/delivery_plan.md +1 -1
- package/templates/hotfix.md +1 -1
- package/templates/sprint.md +65 -13
- package/templates/sprint_report.md +8 -1
- package/templates/story.md +1 -1
- package/scripts/pre_bounce_sync.sh +0 -37
- package/scripts/vbounce_ask.mjs +0 -98
- package/scripts/vbounce_index.mjs +0 -184
|
@@ -0,0 +1,176 @@
|
|
|
1
|
+
#!/usr/bin/env bash
# pre_gate_runner.sh — Runs pre-gate checks before QA or Architect agents
# Usage: ./scripts/pre_gate_runner.sh <qa|arch> [worktree-path] [base-branch]
#
# Reads .bounce/gate-checks.json for check configuration.
# If no config exists, runs universal defaults with auto-detected stack.

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Provides color vars (CYAN/GREEN/YELLOW/RED/NC), record_result*, the
# check_* functions, get_modified_files, print_summary and write_report.
source "${SCRIPT_DIR}/pre_gate_common.sh"

# ── Arguments ────────────────────────────────────────────────────────

GATE_TYPE="${1:-}"
WORKTREE_PATH="${2:-.}"
BASE_BRANCH="${3:-}"
PLAIN_RESULTS=""

if [[ -z "$GATE_TYPE" ]] || [[ "$GATE_TYPE" != "qa" && "$GATE_TYPE" != "arch" ]]; then
  echo "Usage: ./scripts/pre_gate_runner.sh <qa|arch> [worktree-path] [base-branch]"
  echo ""
  echo " qa — Run QA pre-gate checks (before QA agent)"
  echo " arch — Run Architect pre-gate checks (before Architect agent)"
  echo ""
  echo " worktree-path — Path to story worktree (default: current dir)"
  echo " base-branch — Branch to diff against (default: auto-detect)"
  exit 1
fi

# Resolve to absolute path
WORKTREE_PATH="$(cd "$WORKTREE_PATH" && pwd)"

echo -e "${CYAN}V-Bounce OS Pre-Gate Scanner${NC}"
echo -e "Gate: ${YELLOW}${GATE_TYPE}${NC}"
echo -e "Target: ${WORKTREE_PATH}"
echo ""

# ── Auto-detect base branch if not provided ──────────────────────────

if [[ -z "$BASE_BRANCH" ]]; then
  # FIX: use `git -C` instead of a bare `cd "$WORKTREE_PATH"` — the old
  # cd permanently changed the script's working directory, but only on
  # the auto-detect path, making later behavior depend on whether arg 3
  # was supplied. (`--oneline` was also dropped: it is overridden by
  # the explicit --format=%H.)
  # Try to find the sprint branch this story branched from.
  # NOTE(review): this yields the latest merge COMMIT hash, not a branch
  # name — `git diff` accepts either, but confirm that is the intent.
  BASE_BRANCH=$(git -C "$WORKTREE_PATH" log --merges -1 --format=%H 2>/dev/null || echo "")
  if [[ -z "$BASE_BRANCH" ]]; then
    # Fall back to parent branch detection
    BASE_BRANCH=$(git -C "$WORKTREE_PATH" rev-parse --abbrev-ref HEAD@{upstream} 2>/dev/null || echo "")
  fi
fi

# ── Load config or use defaults ──────────────────────────────────────

CONFIG_PATH="${WORKTREE_PATH}/.bounce/gate-checks.json"
HAS_CONFIG=false

if [[ -f "$CONFIG_PATH" ]]; then
  HAS_CONFIG=true
  echo -e "Config: ${GREEN}${CONFIG_PATH}${NC}"
else
  # Check parent repo too (worktree might not have it)
  REPO_ROOT=$(cd "$WORKTREE_PATH" && git rev-parse --show-toplevel 2>/dev/null || echo "$WORKTREE_PATH")
  CONFIG_PATH="${REPO_ROOT}/.bounce/gate-checks.json"
  if [[ -f "$CONFIG_PATH" ]]; then
    HAS_CONFIG=true
    echo -e "Config: ${GREEN}${CONFIG_PATH}${NC}"
  else
    echo -e "Config: ${YELLOW}None found — using universal defaults${NC}"
  fi
fi

echo ""

# ── Get modified files ───────────────────────────────────────────────

MODIFIED_FILES=$(get_modified_files "$WORKTREE_PATH" "$BASE_BRANCH")
|
|
76
|
+
|
|
77
|
+
# ── Run checks ───────────────────────────────────────────────────────
|
|
78
|
+
|
|
79
|
+
# Runs the checks configured under "<gate>_checks" in gate-checks.json.
# Each enabled check object is emitted as one JSON line by node, then
# dispatched to the matching built-in check function by its "id".
run_checks_from_config() {
  local gate="$1"
  local checks_key="${gate}_checks"

  # Parse config with node (available since V-Bounce requires it).
  # FIX: the config path and checks key are passed via the environment
  # rather than interpolated into the -e script, so a path containing
  # quotes or other JS-significant characters cannot break out of (or
  # inject into) the embedded JavaScript.
  local check_ids
  check_ids=$(CONFIG_PATH="$CONFIG_PATH" CHECKS_KEY="$checks_key" node -e "
    const fs = require('fs');
    const cfg = JSON.parse(fs.readFileSync(process.env.CONFIG_PATH, 'utf8'));
    const checks = cfg[process.env.CHECKS_KEY] || [];
    checks.filter(c => c.enabled !== false).forEach(c => {
      console.log(JSON.stringify(c));
    });
  " 2>/dev/null)

  while IFS= read -r check_json; do
    [[ -z "$check_json" ]] && continue

    # Extract the fields this runner understands from the check object.
    local id cmd pattern glob should_find max_lines description
    id=$(echo "$check_json" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(d.id||'')" 2>/dev/null)
    cmd=$(echo "$check_json" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(d.cmd||'')" 2>/dev/null)
    pattern=$(echo "$check_json" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(d.pattern||'')" 2>/dev/null)
    glob=$(echo "$check_json" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(d.glob||'')" 2>/dev/null)
    should_find=$(echo "$check_json" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(d.should_find||'false')" 2>/dev/null)
    max_lines=$(echo "$check_json" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(d.max_lines||'500')" 2>/dev/null)
    description=$(echo "$check_json" | node -e "const d=JSON.parse(require('fs').readFileSync('/dev/stdin','utf8'));console.log(d.description||d.id||'')" 2>/dev/null)

    case "$id" in
      tests_exist) check_tests_exist "$WORKTREE_PATH" "$MODIFIED_FILES" ;;
      tests_pass) check_tests_pass "$WORKTREE_PATH" ;;
      build) check_build "$WORKTREE_PATH" ;;
      lint) check_lint "$WORKTREE_PATH" ;;
      no_debug_output) check_no_debug_output "$WORKTREE_PATH" "$MODIFIED_FILES" ;;
      no_todo_fixme) check_no_todo_fixme "$WORKTREE_PATH" "$MODIFIED_FILES" ;;
      exports_have_docs) check_exports_have_docs "$WORKTREE_PATH" "$MODIFIED_FILES" ;;
      no_new_deps) check_no_new_dependencies "$WORKTREE_PATH" "$BASE_BRANCH" ;;
      file_size) check_file_size_limit "$WORKTREE_PATH" "$MODIFIED_FILES" "$max_lines" ;;
      # FIX: pass the stable "$id" as the check name (previously
      # "$description" was passed twice, so the result was recorded under
      # the free-text description — inconsistent with the unknown-id
      # fallback below, which records under "$id").
      custom_cmd) run_custom_check "$WORKTREE_PATH" "$id" "$cmd" "$description" ;;
      custom_grep) run_custom_grep_check "$WORKTREE_PATH" "$description" "$pattern" "$glob" "$should_find" ;;
      *)
        # Unknown built-in — try as custom command if cmd is provided
        if [[ -n "$cmd" ]]; then
          run_custom_check "$WORKTREE_PATH" "$id" "$cmd" "$description"
        else
          record_result "$id" "SKIP" "Unknown check type"
          record_result_plain "$id" "SKIP" "Unknown check type"
        fi
        ;;
    esac
  done <<< "$check_ids"
}
|
|
130
|
+
|
|
131
|
+
# Fallback check suite used when no gate-checks.json config exists.
# The QA-level checks run for every gate; the stricter architect-level
# checks are appended only when auditing for the "arch" gate.
run_universal_defaults() {
  local gate="$1"

  # QA-level checks (always run)
  check_tests_exist       "$WORKTREE_PATH" "$MODIFIED_FILES"
  check_tests_pass        "$WORKTREE_PATH"
  check_build             "$WORKTREE_PATH"
  check_lint              "$WORKTREE_PATH"
  check_no_debug_output   "$WORKTREE_PATH" "$MODIFIED_FILES"
  check_no_todo_fixme     "$WORKTREE_PATH" "$MODIFIED_FILES"
  check_exports_have_docs "$WORKTREE_PATH" "$MODIFIED_FILES"

  # Architect-level checks (only for arch gate) — bail out early otherwise.
  if [[ "$gate" != "arch" ]]; then
    return 0
  fi
  check_no_new_dependencies "$WORKTREE_PATH" "$BASE_BRANCH"
  check_file_size_limit     "$WORKTREE_PATH" "$MODIFIED_FILES" 500
}
|
|
149
|
+
|
|
150
|
+
# ── Execute ──────────────────────────────────────────────────────────

if [[ "$HAS_CONFIG" == "true" ]]; then
  run_checks_from_config "$GATE_TYPE"
else
  run_universal_defaults "$GATE_TYPE"
fi

# ── Output ───────────────────────────────────────────────────────────

print_summary

# Write report
REPORT_DIR="${WORKTREE_PATH}/.bounce/reports"
REPORT_FILE="${REPORT_DIR}/pre-${GATE_TYPE}-scan.txt"
# FIX: ensure the reports directory exists before writing — a fresh
# worktree may not have .bounce/reports yet, and under `set -e` a failed
# write would abort the script before the summary/exit-code logic runs.
mkdir -p "$REPORT_DIR"
write_report "$REPORT_FILE"
echo ""
echo -e "Report: ${CYAN}${REPORT_FILE}${NC}"

# Exit code — non-zero when any check failed so callers/CI can gate on it.
# FAIL_COUNT is presumably maintained by the sourced pre_gate_common.sh.
if [[ $FAIL_COUNT -gt 0 ]]; then
  echo -e "\n${RED}Gate check failed with ${FAIL_COUNT} failure(s).${NC}"
  exit 1
else
  echo -e "\n${GREEN}All checks passed.${NC}"
  exit 0
fi
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
#!/usr/bin/env node

/**
 * prep_arch_context.mjs
 * Generates an Architect context pack for a story.
 *
 * Usage:
 *   ./scripts/prep_arch_context.mjs STORY-005-02
 *
 * Output: .bounce/arch-context-STORY-005-02.md
 */

import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { execSync } from 'child_process';
import yaml from 'js-yaml';

// Repo root is assumed to be one level above this script (scripts/ sits
// directly under the project root).
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const ROOT = path.resolve(__dirname, '..');

// The story id is the sole CLI argument (e.g. "STORY-005-02").
const storyId = process.argv[2];
if (!storyId) {
  console.error('Usage: prep_arch_context.mjs STORY-ID');
  process.exit(1);
}

// Load config — a missing or unparsable vbounce.config.json silently
// falls back to the defaults below (best-effort by design).
let config = { maxDiffLines: 500 };
const configFile = path.join(ROOT, 'vbounce.config.json');
if (fs.existsSync(configFile)) {
  try { config = { ...config, ...JSON.parse(fs.readFileSync(configFile, 'utf8')) }; } catch {}
}
// NOTE(review): `||` means an explicit maxDiffLines of 0 silently
// becomes 500 — presumably intentional (0 would be useless); confirm.
const MAX_DIFF_LINES = config.maxDiffLines || 500;
|
|
35
|
+
|
|
36
|
+
/**
 * Recursively collects every file under `dir` whose basename matches
 * `pattern`, preserving readdir (depth-first, pre-order) traversal order.
 *
 * @param {string} dir - directory to search; a non-existent dir yields [].
 * @param {RegExp} pattern - tested against each entry's basename only.
 * @returns {string[]} full paths of matching files.
 */
function findFilesMatching(dir, pattern) {
  if (!fs.existsSync(dir)) return [];
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  return entries.flatMap((entry) => {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) return findFilesMatching(fullPath, pattern);
    return pattern.test(entry.name) ? [fullPath] : [];
  });
}
|
|
47
|
+
|
|
48
|
+
// Candidate report locations: the story's worktree first, then the
// shared reports dir, then the archive — first directory with a hit wins.
const searchDirs = [
  path.join(ROOT, '.worktrees', storyId, '.bounce', 'reports'),
  path.join(ROOT, '.bounce', 'reports'),
  path.join(ROOT, '.bounce', 'archive'),
];

// 1. Find dev report (required)
// NOTE(review): replace(/[-]/g, '[-]') only wraps hyphens in character
// classes; other regex metacharacters in a story id are NOT escaped —
// assumes ids are alphanumeric-plus-hyphen (e.g. STORY-005-02). Confirm.
const devPattern = new RegExp(`${storyId.replace(/[-]/g, '[-]')}.*-dev\\.md$`);
let devReport = null;
for (const dir of searchDirs) {
  const m = findFilesMatching(dir, devPattern);
  if (m.length > 0) { devReport = m[0]; break; }
}
if (!devReport) {
  console.error(`ERROR: Dev report not found for ${storyId}`);
  process.exit(1);
}

// 2. Find QA report (optional but warn)
const qaPattern = new RegExp(`${storyId.replace(/[-]/g, '[-]')}.*-qa.*\\.md$`);
let qaReport = null;
for (const dir of searchDirs) {
  const m = findFilesMatching(dir, qaPattern);
  // NOTE(review): "latest" relies on readdir order, which is
  // platform-dependent — holds only if names sort chronologically; verify.
  if (m.length > 0) { qaReport = m[m.length - 1]; break; } // latest QA report
}
if (!qaReport) console.warn(`⚠ QA report not found for ${storyId} — proceeding without it`);

// 3. Find story spec (required)
const storyPattern = new RegExp(`${storyId.replace(/[-]/g, '[-]')}.*\\.md$`);
const storyMatches = findFilesMatching(path.join(ROOT, 'product_plans'), storyPattern);
if (storyMatches.length === 0) {
  console.error(`ERROR: Story spec not found for ${storyId} in product_plans/`);
  process.exit(1);
}
const storySpecFile = storyMatches[0];

// Parse frontmatters (YAML between the leading '---' fences). Failures
// are swallowed on purpose: the pack degrades to '—' placeholder fields.
let devFm = {}, qaFm = {};
try {
  const dc = fs.readFileSync(devReport, 'utf8');
  const dm = dc.match(/^---\s*\n([\s\S]*?)\n---/);
  if (dm) devFm = yaml.load(dm[1]) || {};
} catch {}
if (qaReport) {
  try {
    const qc = fs.readFileSync(qaReport, 'utf8');
    const qm = qc.match(/^---\s*\n([\s\S]*?)\n---/);
    if (qm) qaFm = yaml.load(qm[1]) || {};
  } catch {}
}

// 4. Get git diff — preferred base is the sprint branch named in
// state.json; falls back to the last 5 commits when it does not exist.
let diffContent = '';
let diffTruncated = false;
const stateFile = path.join(ROOT, '.bounce', 'state.json');
try {
  let diffCmd = 'git diff HEAD~5';
  if (fs.existsSync(stateFile)) {
    const state = JSON.parse(fs.readFileSync(stateFile, 'utf8'));
    // NOTE(review): sprint_id is interpolated into a shell command
    // string — safe only while state.json is trusted local data; confirm.
    const sprintBranch = `sprint/${state.sprint_id}`;
    try {
      // rev-parse succeeds only if the branch exists; otherwise keep the fallback.
      execSync(`git rev-parse ${sprintBranch}`, { cwd: ROOT, stdio: 'pipe' });
      diffCmd = `git diff ${sprintBranch}...HEAD`;
    } catch {}
  }
  diffContent = execSync(diffCmd, { cwd: ROOT }).toString();

  if (!diffContent.trim()) {
    console.warn(`⚠ Git diff is empty — proceeding without diff`);
  } else {
    const diffLines = diffContent.split('\n');
    if (diffLines.length > MAX_DIFF_LINES) {
      // Keep the pack small: truncate inline, save the full diff beside it.
      diffTruncated = true;
      const fullDiffPath = path.join(ROOT, '.bounce', `arch-full-diff-${storyId}.txt`);
      fs.writeFileSync(fullDiffPath, diffContent);
      console.warn(`⚠ Diff truncated at ${MAX_DIFF_LINES} lines (was ${diffLines.length}). Full diff saved to .bounce/arch-full-diff-${storyId}.txt`);
      diffContent = diffLines.slice(0, MAX_DIFF_LINES).join('\n');
    }
  }
} catch (e) {
  console.warn(`⚠ Could not get git diff: ${e.message}`);
}

// 5. Read LESSONS.md (first 20 lines only, to keep the pack small)
const lessonsFile = path.join(ROOT, 'LESSONS.md');
let lessonsExcerpt = '_No LESSONS.md found_';
if (fs.existsSync(lessonsFile)) {
  const lines = fs.readFileSync(lessonsFile, 'utf8').split('\n');
  lessonsExcerpt = lines.slice(0, 20).join('\n');
  if (lines.length > 20) lessonsExcerpt += `\n_(+${lines.length - 20} more lines)_`;
}

// 6. Assemble context pack (markdown; missing frontmatter fields render as —)
const lines = [
  `# Architect Context: ${storyId}`,
  `> Generated: ${new Date().toISOString().split('T')[0]}`,
  '',
  `## Dev Report Summary`,
  `| Field | Value |`,
  `|-------|-------|`,
  `| Status | ${devFm.status || '—'} |`,
  `| Correction Tax | ${devFm.correction_tax || '—'} |`,
  `| Tests Written | ${devFm.tests_written ?? '—'} |`,
  `| Files Modified | ${Array.isArray(devFm.files_modified) ? devFm.files_modified.length : '—'} |`,
  '',
  `## QA Report Summary`,
  qaReport
    ? [`| Field | Value |`, `|-------|-------|`,
       `| Status | ${qaFm.status || '—'} |`,
       `| Bounce Count | ${qaFm.bounce_count ?? '—'} |`,
       `| Bugs Found | ${qaFm.bugs_found ?? '—'} |`].join('\n')
    : '_QA report not found_',
  '',
  `## Story Spec`,
  `- File: \`${path.relative(ROOT, storySpecFile)}\``,
  `- Read §3 Implementation Guide and §3.1 ADR References before auditing`,
  '',
  `## Git Diff${diffTruncated ? ` (TRUNCATED at ${MAX_DIFF_LINES} lines — full diff in .bounce/arch-full-diff-${storyId}.txt)` : ''}`,
  '```diff',
  diffContent || '(no diff available)',
  '```',
  '',
  `## Relevant Lessons`,
  lessonsExcerpt,
];

const output = lines.join('\n');
const outputFile = path.join(ROOT, '.bounce', `arch-context-${storyId}.md`);
fs.writeFileSync(outputFile, output);
console.log(`✓ Architect context pack written to .bounce/arch-context-${storyId}.md`);
if (diffTruncated) console.log(` ⚠ Diff truncated — full diff at .bounce/arch-full-diff-${storyId}.txt`);
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
#!/usr/bin/env node

/**
 * prep_qa_context.mjs
 * Generates a QA context pack for a story.
 *
 * Usage:
 *   ./scripts/prep_qa_context.mjs STORY-005-02
 *
 * Output: .bounce/qa-context-STORY-005-02.md
 */

import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import yaml from 'js-yaml';

// Repo root is assumed to be one level above scripts/.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const ROOT = path.resolve(__dirname, '..');

// The story id is the sole CLI argument (e.g. "STORY-005-02").
const storyId = process.argv[2];
if (!storyId) {
  console.error('Usage: prep_qa_context.mjs STORY-ID');
  process.exit(1);
}

// Hard cap on the generated pack so it never blows an agent's context budget.
const MAX_CONTEXT_LINES = 300;
|
|
28
|
+
|
|
29
|
+
/**
 * Walks `dir` depth-first and returns the full paths of all files whose
 * basename matches `pattern`, in readdir traversal order.
 *
 * @param {string} dir - root of the search; missing directories yield [].
 * @param {RegExp} pattern - matched against basenames, not full paths.
 * @returns {string[]} matching file paths.
 */
function findFilesMatching(dir, pattern) {
  if (!fs.existsSync(dir)) return [];
  return fs.readdirSync(dir, { withFileTypes: true }).reduce((acc, entry) => {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      acc.push(...findFilesMatching(fullPath, pattern));
    } else if (pattern.test(entry.name)) {
      acc.push(fullPath);
    }
    return acc;
  }, []);
}
|
|
40
|
+
|
|
41
|
+
// 1. Find dev report (required)
// NOTE(review): replace(/[-]/g, '[-]') only wraps hyphens; other regex
// metacharacters in a story id are not escaped — assumes ids are
// alphanumeric-plus-hyphen. Confirm.
const devReportPattern = new RegExp(`${storyId.replace(/[-]/g, '[-]')}.*-dev\\.md$`);
// Candidate report locations: worktree first, then shared reports, then
// the archive — first directory with a match wins.
const searchDirs = [
  path.join(ROOT, '.worktrees', storyId, '.bounce', 'reports'),
  path.join(ROOT, '.bounce', 'reports'),
  path.join(ROOT, '.bounce', 'archive'),
];
let devReport = null;
for (const dir of searchDirs) {
  const matches = findFilesMatching(dir, devReportPattern);
  if (matches.length > 0) { devReport = matches[0]; break; }
}

if (!devReport) {
  console.error(`ERROR: Dev report not found for ${storyId}. Searched in:`);
  searchDirs.forEach(d => console.error(` - ${d}`));
  process.exit(1);
}

// Parse dev report frontmatter (YAML between the leading '---' fences).
// Unlike the optional excerpts below, a malformed frontmatter is fatal:
// the QA pack would be useless without the dev summary fields.
let devFm = {};
try {
  const devContent = fs.readFileSync(devReport, 'utf8');
  const fmMatch = devContent.match(/^---\s*\n([\s\S]*?)\n---/);
  if (fmMatch) devFm = yaml.load(fmMatch[1]) || {};
} catch (e) {
  console.error(`ERROR: Dev report has invalid YAML frontmatter — ${e.message}`);
  process.exit(1);
}

// 2. Find story spec (required)
const storySpecPattern = new RegExp(`${storyId.replace(/[-]/g, '[-]')}.*\\.md$`);
const storySpecMatches = findFilesMatching(path.join(ROOT, 'product_plans'), storySpecPattern);
if (storySpecMatches.length === 0) {
  console.error(`ERROR: Story spec not found for ${storyId} in product_plans/`);
  process.exit(1);
}
const storySpecFile = storySpecMatches[0];
const storyContent = fs.readFileSync(storySpecFile, 'utf8');

// Extract §2 acceptance criteria (capped at 30 lines).
// FIX: the original lookahead ended with `\Z`, a Python/Perl
// end-of-string anchor that JavaScript regexes treat as a LITERAL "Z"
// (matching "z" too under the i flag) — so a criteria section at the
// end of the file, with no following heading or '---', never matched.
// Without the `m` flag, `$` anchors at end-of-string, which is the
// intended behavior.
const criteriaMatch = storyContent.match(/##\s*(2\.|§2|The Truth|Acceptance)[^\n]*\n([\s\S]*?)(?=\n##|\n---|$)/i);
const criteriaSection = criteriaMatch ? criteriaMatch[2].trim().split('\n').slice(0, 30).join('\n') : '_Could not extract §2 — read story spec directly_';

// 3. Read LESSONS.md (first 30 lines only, to keep the pack small)
const lessonsFile = path.join(ROOT, 'LESSONS.md');
let lessonsExcerpt = '_No LESSONS.md found_';
if (fs.existsSync(lessonsFile)) {
  const lines = fs.readFileSync(lessonsFile, 'utf8').split('\n');
  lessonsExcerpt = lines.slice(0, 30).join('\n');
  if (lines.length > 30) lessonsExcerpt += `\n_(+${lines.length - 30} more lines)_`;
}

// 4. Format files modified list from the dev report frontmatter
const filesModified = Array.isArray(devFm.files_modified)
  ? devFm.files_modified.map(f => `- ${f}`).join('\n')
  : '_Not specified in dev report_';

// 5. Assemble context pack (markdown; missing fields render as —)
const lines = [
  `# QA Context: ${storyId}`,
  `> Generated: ${new Date().toISOString().split('T')[0]}`,
  `> Dev report: ${path.relative(ROOT, devReport)}`,
  `> Story spec: ${path.relative(ROOT, storySpecFile)}`,
  '',
  `## Dev Report Summary`,
  `| Field | Value |`,
  `|-------|-------|`,
  `| Status | ${devFm.status || '—'} |`,
  `| Correction Tax | ${devFm.correction_tax || '—'} |`,
  `| Tests Written | ${devFm.tests_written ?? '—'} |`,
  `| Lessons Flagged | ${devFm.lessons_flagged || 'none'} |`,
  '',
  `## Story Acceptance Criteria (§2)`,
  criteriaSection,
  '',
  `## Files Modified`,
  filesModified,
  '',
  `## Relevant Lessons`,
  lessonsExcerpt,
];

const output = lines.join('\n');
const outputLines = output.split('\n');
// Enforce the hard context cap defined at the top of the script.
let finalOutput = output;
if (outputLines.length > MAX_CONTEXT_LINES) {
  finalOutput = outputLines.slice(0, MAX_CONTEXT_LINES).join('\n');
  finalOutput += `\n\n> ⚠ Truncated at ${MAX_CONTEXT_LINES} lines. Read source files for complete content.`;
}

const outputFile = path.join(ROOT, '.bounce', `qa-context-${storyId}.md`);
fs.writeFileSync(outputFile, finalOutput);
console.log(`✓ QA context pack written to .bounce/qa-context-${storyId}.md`);
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
#!/usr/bin/env node

/**
 * prep_sprint_context.mjs
 * Generates a sprint context pack — single file replacing 6+ separate reads.
 *
 * Usage:
 *   ./scripts/prep_sprint_context.mjs S-05
 *
 * Output: .bounce/sprint-context-S-05.md
 */

import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';

// Repo root is assumed to be one level above scripts/.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const ROOT = path.resolve(__dirname, '..');

// The sprint id is the sole CLI argument (e.g. "S-05").
const sprintId = process.argv[2];
if (!sprintId) {
  console.error('Usage: prep_sprint_context.mjs S-XX');
  process.exit(1);
}

// Hard cap on the generated pack so it never blows an agent's context budget.
const MAX_CONTEXT_LINES = 200;

// 1. Read state.json (required)
// NOTE(review): JSON.parse throws uncaught on malformed state.json —
// the script dies with a stack trace rather than a friendly message.
const stateFile = path.join(ROOT, '.bounce', 'state.json');
if (!fs.existsSync(stateFile)) {
  console.error('ERROR: .bounce/state.json not found. Run: vbounce sprint init');
  process.exit(1);
}
const state = JSON.parse(fs.readFileSync(stateFile, 'utf8'));

// 2. Find sprint plan at the conventional product_plans/sprints location
const sprintNum = sprintId.replace('S-', '');
const sprintPlanPath = path.join(ROOT, 'product_plans', 'sprints', `sprint-${sprintNum}`, `sprint-${sprintNum}.md`);
if (!fs.existsSync(sprintPlanPath)) {
  console.error(`ERROR: Sprint plan not found at ${sprintPlanPath}`);
  process.exit(1);
}
const sprintPlan = fs.readFileSync(sprintPlanPath, 'utf8');

// Extract sprint goal from frontmatter
// NOTE(review): only matches a double-quoted sprint_goal; an unquoted
// or single-quoted YAML value silently falls back to 'TBD' — confirm.
const goalMatch = sprintPlan.match(/sprint_goal:\s*"([^"]+)"/);
const sprintGoal = goalMatch ? goalMatch[1] : 'TBD';

// 3. Read LESSONS.md (first 50 lines)
const lessonsFile = path.join(ROOT, 'LESSONS.md');
let lessonsExcerpt = '_No LESSONS.md found_';
if (fs.existsSync(lessonsFile)) {
  const lines = fs.readFileSync(lessonsFile, 'utf8').split('\n');
  lessonsExcerpt = lines.slice(0, 50).join('\n');
  if (lines.length > 50) lessonsExcerpt += `\n\n_(${lines.length - 50} more lines — read LESSONS.md for full content)_`;
}

// 4. Find RISK_REGISTRY — first existing candidate path wins
let riskExcerpt = '_No RISK_REGISTRY.md found_';
const riskPaths = [
  path.join(ROOT, 'product_plans', 'strategy', 'RISK_REGISTRY.md'),
  path.join(ROOT, 'RISK_REGISTRY.md'),
];
for (const rp of riskPaths) {
  if (fs.existsSync(rp)) {
    const lines = fs.readFileSync(rp, 'utf8').split('\n');
    riskExcerpt = lines.slice(0, 20).join('\n');
    if (lines.length > 20) riskExcerpt += `\n\n_(${lines.length - 20} more lines — read RISK_REGISTRY.md for full content)_`;
    break;
  }
}
|
|
72
|
+
|
|
73
|
+
// 5. Build story state table from state.json
// One markdown row per story. FIX: state, qa_bounces and arch_bounces
// previously interpolated raw, rendering the literal string "undefined"
// when a field was missing — inconsistent with every other field in
// these context scripts, which fall back to an em-dash.
const storyRows = Object.entries(state.stories || {})
  .map(([id, s]) => `| ${id} | ${s.state ?? '—'} | ${s.qa_bounces ?? '—'} | ${s.arch_bounces ?? '—'} | ${s.worktree || '—'} |`)
  .join('\n');
|
|
77
|
+
|
|
78
|
+
// 6. Assemble context pack (markdown)
const lines = [
  `# Sprint Context: ${sprintId}`,
  // NOTE(review): state.delivery_id is interpolated unchecked — renders
  // "undefined" if the field is absent from state.json; confirm schema.
  `> Generated: ${new Date().toISOString().split('T')[0]} | Sprint: ${sprintId} | Delivery: ${state.delivery_id}`,
  '',
  `## Sprint Plan Summary`,
  `- **Goal**: ${sprintGoal}`,
  `- **Phase**: ${state.phase || 'N/A'}`,
  `- **Last action**: ${state.last_action || 'N/A'}`,
  `- **Stories**: ${Object.keys(state.stories || {}).length}`,
  '',
  `## Current State`,
  `| Story | State | QA Bounces | Arch Bounces | Worktree |`,
  `|-------|-------|------------|--------------|----------|`,
  storyRows || '| (no stories) | — | — | — | — |',
  '',
  `## Relevant Lessons`,
  lessonsExcerpt,
  '',
  `## Risk Summary`,
  riskExcerpt,
];

const output = lines.join('\n');
const outputLines = output.split('\n');

// Enforce the hard context cap defined at the top of the script.
let finalOutput = output;
let truncated = false;
if (outputLines.length > MAX_CONTEXT_LINES) {
  finalOutput = outputLines.slice(0, MAX_CONTEXT_LINES).join('\n');
  finalOutput += `\n\n> ⚠ Context pack truncated at ${MAX_CONTEXT_LINES} lines (was ${outputLines.length}). Read source files for complete content.`;
  truncated = true;
}

// 7. Write output
const outputFile = path.join(ROOT, '.bounce', `sprint-context-${sprintId}.md`);
fs.writeFileSync(outputFile, finalOutput);

console.log(`✓ Sprint context pack written to .bounce/sprint-context-${sprintId}.md`);
if (truncated) console.warn(` ⚠ Content was truncated (exceeded ${MAX_CONTEXT_LINES} lines)`);
console.log(` Stories: ${Object.keys(state.stories || {}).length} | Phase: ${state.phase || 'N/A'}`);
|