@firatcand/forge 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ETHOS.md +81 -0
- package/LICENSE +21 -0
- package/README.md +134 -0
- package/agents/backend-dev.md +36 -0
- package/agents/code-reviewer.md +37 -0
- package/agents/db-architect.md +36 -0
- package/agents/design-reviewer.md +31 -0
- package/agents/devops-engineer.md +34 -0
- package/agents/frontend-dev.md +36 -0
- package/agents/learning-curator.md +35 -0
- package/agents/linear-syncer.md +36 -0
- package/agents/phase-gatekeeper.md +23 -0
- package/agents/product-decomposer.md +39 -0
- package/agents/qa-engineer.md +31 -0
- package/agents/security-auditor.md +34 -0
- package/bin/forge.js +368 -0
- package/lib/companions.js +67 -0
- package/lib/github-helpers.sh +148 -0
- package/lib/linear-helpers.sh +188 -0
- package/lib/paths.js +13 -0
- package/lib/tools.js +68 -0
- package/lib/validators.sh +284 -0
- package/lib/worktree-helpers.sh +136 -0
- package/package.json +53 -0
- package/skills/codex/SKILL.md +50 -0
- package/skills/decompose/SKILL.md +47 -0
- package/skills/draft-design/SKILL.md +55 -0
- package/skills/draft-prd/SKILL.md +47 -0
- package/skills/draft-spec/SKILL.md +42 -0
- package/skills/fix/SKILL.md +23 -0
- package/skills/forge/SKILL.md +87 -0
- package/skills/implement/SKILL.md +24 -0
- package/skills/ingest-spec/SKILL.md +46 -0
- package/skills/investigate/SKILL.md +26 -0
- package/skills/learn/SKILL.md +53 -0
- package/skills/phase-gate/SKILL.md +37 -0
- package/skills/pickup-task/SKILL.md +53 -0
- package/skills/plan-task/SKILL.md +22 -0
- package/skills/push-to-linear/SKILL.md +42 -0
- package/skills/qa/SKILL.md +22 -0
- package/skills/retro/SKILL.md +27 -0
- package/skills/review/SKILL.md +20 -0
- package/skills/setup-repo/SKILL.md +63 -0
- package/skills/ship/SKILL.md +34 -0
- package/skills/sync-status/SKILL.md +14 -0
- package/templates/BRIEF.template.md +34 -0
- package/templates/CLAUDE.project.template.md +37 -0
- package/templates/CRITICAL.template.md +11 -0
- package/templates/DESIGN.template.md +37 -0
- package/templates/PRD.template.md +30 -0
- package/templates/SPEC.template.md +49 -0
- package/templates/github-workflows/claude-issue.yml +27 -0
- package/templates/github-workflows/claude-pr-review.yml +22 -0
- package/templates/github-workflows/claude-scheduled.yml +23 -0
- package/templates/github-workflows/test.yml +18 -0
- package/templates/learning.template.md +14 -0
- package/templates/phases.template.yaml +45 -0
- package/templates/retro.template.md +27 -0
|
@@ -0,0 +1,188 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
# linear-helpers.sh — Linear MCP wrappers used by /push-to-linear, /sync-status.
|
|
3
|
+
#
|
|
4
|
+
# These functions assume the Linear MCP server is configured for Claude Code
|
|
5
|
+
# and that tool calls happen through the agent. Bash callers (e.g. setup
|
|
6
|
+
# scripts) can use the JSON-printing variants which read API_KEY from
|
|
7
|
+
# $LINEAR_API_KEY as a fallback.
|
|
8
|
+
#
|
|
9
|
+
# Source this file: `source "${FORGE_DIR}/lib/linear-helpers.sh"`
|
|
10
|
+
|
|
11
|
+
set -euo pipefail
|
|
12
|
+
|
|
13
|
+
# ──────────────────────────────────────────────
|
|
14
|
+
# Logging
|
|
15
|
+
# ──────────────────────────────────────────────
|
|
16
|
+
# Colored log helpers for the [linear] prefix; all diagnostics go to stderr.
# $1 = ANSI color code, $2 = message.
_linear_msg() { printf '\033[1;%sm[linear]\033[0m %s\n' "$1" "$2" >&2; }
_log()  { _linear_msg '36' "$*"; }
_warn() { _linear_msg '33' "$*"; }
_err()  { _linear_msg '31' "$*"; }
|
|
19
|
+
|
|
20
|
+
# ──────────────────────────────────────────────
|
|
21
|
+
# linear_check_mcp
|
|
22
|
+
# Verifies that the Linear MCP server is registered with Claude Code.
|
|
23
|
+
# Returns 0 if available, 1 otherwise.
|
|
24
|
+
# ──────────────────────────────────────────────
|
|
25
|
+
linear_check_mcp() {
  # The claude CLI must exist before we can query its MCP registry.
  command -v claude >/dev/null 2>&1 || {
    _err "claude CLI not found on PATH"
    return 1
  }

  # Happy path first: any server whose listing mentions "linear" counts.
  if claude mcp list 2>/dev/null | grep -qi 'linear'; then
    _log "Linear MCP server registered"
    return 0
  fi

  _warn "Linear MCP server not registered. Run: claude mcp add linear ..."
  return 1
}
|
|
39
|
+
|
|
40
|
+
# ──────────────────────────────────────────────
|
|
41
|
+
# linear_api_call ENDPOINT_PATH METHOD [BODY_JSON]
|
|
42
|
+
# Low-level wrapper for direct Linear API access (fallback when MCP isn't
|
|
43
|
+
# configured). Requires LINEAR_API_KEY in env.
|
|
44
|
+
# ──────────────────────────────────────────────
|
|
45
|
+
linear_api_call() {
  local api_path="${1:?endpoint required}"
  local verb="${2:-GET}"
  local request_body="${3:-}"

  if [[ -z "${LINEAR_API_KEY:-}" ]]; then
    _err "LINEAR_API_KEY not set"
    return 1
  fi

  # Linear personal API keys go in the Authorization header verbatim
  # (no "Bearer" prefix).
  local -a curl_args=(
    -sS
    -X "${verb}"
    -H "Authorization: ${LINEAR_API_KEY}"
    -H "Content-Type: application/json"
    "https://api.linear.app${api_path}"
  )

  if [[ -n "${request_body}" ]]; then
    curl_args+=(-d "${request_body}")
  fi

  curl "${curl_args[@]}"
}
|
|
66
|
+
|
|
67
|
+
# ──────────────────────────────────────────────
|
|
68
|
+
# linear_create_project NAME [DESCRIPTION]
|
|
69
|
+
# Echoes the new project's id to stdout.
|
|
70
|
+
# ──────────────────────────────────────────────
|
|
71
|
+
linear_create_project() {
  local proj_name="${1:?name required}"
  local proj_desc="${2:-}"

  # GraphQL mutation kept in a named variable so the jq filter stays short.
  local mutation='mutation($name: String!, $description: String!) { projectCreate(input: {name: $name, description: $description}) { project { id name } } }'

  local request
  request=$(jq -n \
    --arg name "${proj_name}" \
    --arg description "${proj_desc}" \
    --arg q "${mutation}" \
    '{query: $q, variables: {name: $name, description: $description}}')

  # Print only the new project's id for callers to capture.
  linear_api_call /graphql POST "${request}" | jq -r '.data.projectCreate.project.id'
}
|
|
82
|
+
|
|
83
|
+
# ──────────────────────────────────────────────
|
|
84
|
+
# linear_create_cycle TEAM_ID NAME [STARTS_AT] [ENDS_AT]
|
|
85
|
+
# Echoes the new cycle's id.
|
|
86
|
+
# ──────────────────────────────────────────────
|
|
87
|
+
# Create a Linear cycle on a team and echo the new cycle's id.
# Arguments: TEAM_ID NAME [STARTS_AT] [ENDS_AT]
linear_create_cycle() {
  local team_id="${1:?team_id required}"
  local name="${2:?name required}"
  local starts_at="${3:-}"
  local ends_at="${4:-}"

  # Fix: omit startsAt/endsAt from the input when not provided. The previous
  # version always sent them as empty strings, which DateTime-typed GraphQL
  # fields reject — TODO confirm against Linear's CycleCreateInput schema.
  local payload
  payload=$(jq -n \
    --arg team "${team_id}" \
    --arg name "${name}" \
    --arg startsAt "${starts_at}" \
    --arg endsAt "${ends_at}" \
    '{query: "mutation($input: CycleCreateInput!) { cycleCreate(input: $input) { cycle { id name } } }",
      variables: {input: ({teamId: $team, name: $name}
        + (if $startsAt != "" then {startsAt: $startsAt} else {} end)
        + (if $endsAt != "" then {endsAt: $endsAt} else {} end))}}')

  linear_api_call /graphql POST "${payload}" | jq -r '.data.cycleCreate.cycle.id'
}
|
|
104
|
+
|
|
105
|
+
# ──────────────────────────────────────────────
|
|
106
|
+
# linear_create_issue PROJECT_ID TITLE BODY PRIORITY ESTIMATE [CYCLE_ID]
|
|
107
|
+
# Priority: 1 (urgent), 2 (high), 3 (medium), 4 (low)
|
|
108
|
+
# Estimate: numeric (e.g. 1=S, 3=M, 5=L)
|
|
109
|
+
# Echoes the new issue's id.
|
|
110
|
+
# ──────────────────────────────────────────────
|
|
111
|
+
# Create a Linear issue and echo the new issue's id.
# Arguments: PROJECT_ID TITLE [BODY] [PRIORITY=3] [ESTIMATE=3] [CYCLE_ID]
# Priority: 1 (urgent), 2 (high), 3 (medium), 4 (low). Estimate: numeric.
linear_create_issue() {
  local project_id="${1:?project_id required}"
  local title="${2:?title required}"
  local body="${3:-}"
  local priority="${4:-3}"
  local estimate="${5:-3}"
  local cycle_id="${6:-}"

  # Bug fix: the old filter used `cycleId: ($cycle | select(. != ""))`.
  # When cycle_id was empty (the common case), `select` yields zero outputs,
  # which makes the ENTIRE object construction produce nothing — jq emitted
  # an empty payload and the API call was broken. Merge cycleId in
  # conditionally instead.
  local payload
  payload=$(jq -n \
    --arg project "${project_id}" \
    --arg title "${title}" \
    --arg body "${body}" \
    --argjson priority "${priority}" \
    --argjson estimate "${estimate}" \
    --arg cycle "${cycle_id}" \
    '{query: "mutation($input: IssueCreateInput!) { issueCreate(input: $input) { issue { id identifier } } }",
      variables: {input: ({projectId: $project, title: $title, description: $body, priority: $priority, estimate: $estimate}
        + (if $cycle != "" then {cycleId: $cycle} else {} end))}}')

  linear_api_call /graphql POST "${payload}" | jq -r '.data.issueCreate.issue.id'
}
|
|
132
|
+
|
|
133
|
+
# ──────────────────────────────────────────────
|
|
134
|
+
# linear_set_blocks ISSUE_ID BLOCKER_IDS...
|
|
135
|
+
# Sets "blocked by" relations from BLOCKER_IDS to ISSUE_ID.
|
|
136
|
+
# ──────────────────────────────────────────────
|
|
137
|
+
linear_set_blocks() {
  local issue_id="${1:?issue_id required}"
  shift

  # One issueRelationCreate call per blocker; relation direction is
  # blocker → issue ("blocks").
  local mutation='mutation($issueId: String!, $relatedIssueId: String!) { issueRelationCreate(input: {issueId: $issueId, relatedIssueId: $relatedIssueId, type: blocks}) { success } }'

  local blocker_id request
  for blocker_id in "$@"; do
    request=$(jq -n \
      --arg issue "${issue_id}" \
      --arg blocker "${blocker_id}" \
      --arg q "${mutation}" \
      '{query: $q, variables: {issueId: $blocker, relatedIssueId: $issue}}')

    linear_api_call /graphql POST "${request}" >/dev/null
  done
}
|
|
153
|
+
|
|
154
|
+
# ──────────────────────────────────────────────
|
|
155
|
+
# linear_get_issue_status ISSUE_ID
|
|
156
|
+
# Echoes the current status name (e.g. "Todo", "In Progress").
|
|
157
|
+
# ──────────────────────────────────────────────
|
|
158
|
+
linear_get_issue_status() {
  local issue_id="${1:?issue_id required}"

  # Query only the workflow state name; echoed for callers to capture.
  local request
  request=$(jq -n --arg id "${issue_id}" \
    '{query: "query($id: String!) { issue(id: $id) { state { name } } }", variables: {id: $id}}')

  linear_api_call /graphql POST "${request}" | jq -r '.data.issue.state.name'
}
|
|
167
|
+
|
|
168
|
+
# ──────────────────────────────────────────────
|
|
169
|
+
# linear_link_github PROJECT_ID GITHUB_REPO_SLUG
|
|
170
|
+
# Connects a Linear project to a GitHub repo for native sync.
|
|
171
|
+
# Note: Linear's GraphQL API for this is limited. In practice this is
|
|
172
|
+
# configured via the Linear UI; this function prints a manual instruction.
|
|
173
|
+
# ──────────────────────────────────────────────
|
|
174
|
+
linear_link_github() {
  local proj_id="${1:?project_id required}"
  local repo_slug="${2:?repo (owner/name) required}"

  # Linear's GraphQL API does not expose this linkage, so we only print the
  # manual steps (to stderr, like all other diagnostics in this file).
  cat >&2 <<EOF
[linear] Manual step required:
1. Open Linear → Settings → Integrations → GitHub
2. Connect repo: ${repo_slug}
3. Map to project: ${proj_id}
4. Enable: branch-name auto-link, PR-status sync

Once linked, branches like 'feat/{ID}-slug' auto-attach to the issue,
and PRs move issues through Todo → In Progress → In Review → Done.
EOF
}
|
package/lib/paths.js
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import path from 'path';
|
|
2
|
+
import { fileURLToPath } from 'url';
|
|
3
|
+
import fs from 'fs-extra';
|
|
4
|
+
|
|
5
|
+
// Directory containing this module (ESM has no built-in __dirname).
const here = path.dirname(fileURLToPath(import.meta.url));

// Absolute path to the installed package root (one level above lib/).
export const FORGE_ROOT = path.resolve(here, '..');

// Returns the "version" field of this package's own package.json,
// read fresh on every call.
export function getPackageVersion() {
  return fs.readJsonSync(path.join(FORGE_ROOT, 'package.json')).version;
}
|
package/lib/tools.js
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import path from 'path';
|
|
2
|
+
import os from 'os';
|
|
3
|
+
import fs from 'fs-extra';
|
|
4
|
+
|
|
5
|
+
// Catalog of AI coding tools Forge can install into. Each entry:
//   key       — short internal identifier
//   name      — human-readable display name
//   dir       — the tool's per-user config directory under $HOME
//   skillsDir — subdirectory of `dir` that receives skill folders
//   agentsDir — subdirectory of `dir` that receives agent files, or null
//               when the tool takes no agents (see `gemini`)
// NOTE(review): the per-tool subdirectory names (e.g. codex "subagents",
// gemini "extensions") are assumed to match each tool's layout — confirm
// against the tools' own docs before changing.
const TOOL_DEFINITIONS = [
  {
    key: 'claude',
    name: 'Claude Code',
    dir: path.join(os.homedir(), '.claude'),
    skillsDir: 'skills',
    agentsDir: 'agents',
  },
  {
    key: 'codex',
    name: 'Codex CLI',
    dir: path.join(os.homedir(), '.codex'),
    skillsDir: 'skills',
    agentsDir: 'subagents',
  },
  {
    key: 'cursor',
    name: 'Cursor',
    dir: path.join(os.homedir(), '.cursor'),
    skillsDir: 'skills',
    agentsDir: 'agents',
  },
  {
    key: 'gemini',
    name: 'Gemini CLI',
    dir: path.join(os.homedir(), '.gemini'),
    skillsDir: 'extensions',
    agentsDir: null,
  },
];
|
|
35
|
+
|
|
36
|
+
// Returns the subset of TOOL_DEFINITIONS whose config directory exists
// on this machine (i.e. tools the user appears to have installed).
export function detectTools() {
  const present = [];
  for (const tool of TOOL_DEFINITIONS) {
    if (fs.pathExistsSync(tool.dir)) {
      present.push(tool);
    }
  }
  return present;
}
|
|
39
|
+
|
|
40
|
+
// Replace every entry of srcDir inside dstDir. Remove-then-copy so files
// deleted from a newer package version do not linger in the target.
async function replaceEntries(srcDir, dstDir) {
  for (const entry of await fs.readdir(srcDir)) {
    const src = path.join(srcDir, entry);
    const dst = path.join(dstDir, entry);
    await fs.remove(dst);
    await fs.copy(src, dst, { overwrite: true, errorOnExist: false });
  }
}

/**
 * Install Forge's skills (and agents, when the tool supports them) into a
 * tool's config directory.
 *
 * The two previously-duplicated remove-then-copy loops are factored into
 * replaceEntries(); behavior is unchanged.
 *
 * @param {object} tool      entry from TOOL_DEFINITIONS (dir/skillsDir/agentsDir)
 * @param {string} forgeRoot root of the installed forge package
 */
export async function installToTool(tool, forgeRoot) {
  const skillsTarget = path.join(tool.dir, tool.skillsDir);
  await fs.ensureDir(skillsTarget);
  await replaceEntries(path.join(forgeRoot, 'skills'), skillsTarget);

  // Some tools (e.g. gemini) have no agents directory at all.
  if (tool.agentsDir) {
    const agentsTarget = path.join(tool.dir, tool.agentsDir);
    await fs.ensureDir(agentsTarget);
    const agentsSource = path.join(forgeRoot, 'agents');
    if (await fs.pathExists(agentsSource)) {
      await replaceEntries(agentsSource, agentsTarget);
    }
  }
}
|
|
@@ -0,0 +1,284 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
# validators.sh — spec + phases.yaml validation used by /ingest-spec, /decompose.
|
|
3
|
+
#
|
|
4
|
+
# Source: `source "${FORGE_DIR}/lib/validators.sh"`
|
|
5
|
+
|
|
6
|
+
set -euo pipefail
|
|
7
|
+
|
|
8
|
+
# ──────────────────────────────────────────────
|
|
9
|
+
# Logging
|
|
10
|
+
# ──────────────────────────────────────────────
|
|
11
|
+
# Colored log helpers for the [validate] prefix; diagnostics go to stderr.
# $1 = ANSI color code, $2 = message.
_validate_msg() { printf '\033[1;%sm[validate]\033[0m %s\n' "$1" "$2" >&2; }
_log()  { _validate_msg '34' "$*"; }
_warn() { _validate_msg '33' "$*"; }
_err()  { _validate_msg '31' "$*"; }
|
|
14
|
+
|
|
15
|
+
# ──────────────────────────────────────────────
|
|
16
|
+
# validate_spec_section FILE SECTION_HEADER
|
|
17
|
+
# Verifies the file contains a non-empty section under the given header.
|
|
18
|
+
# Returns 0 if section exists with content, 1 otherwise.
|
|
19
|
+
# ──────────────────────────────────────────────
|
|
20
|
+
# Verify FILE contains a non-empty markdown section under "## HEADER".
# A section counts as non-empty when at least one non-blank line that is
# not an HTML comment appears before the next "## " heading.
# Returns 0 on success, 1 on missing file / missing or empty section.
validate_spec_section() {
  local file="${1:?file required}"
  local header="${2:?header required}"

  if [[ ! -f "${file}" ]]; then
    _err "${file}: file does not exist"
    return 1
  fi

  # Fix: run awk inside `if` so its non-zero exit cannot trip `set -e`.
  # The old `awk ...; local rc=$?` pattern aborted the whole script under
  # errexit (before the error was ever reported) whenever the function was
  # called outside a condition.
  if awk -v hdr="## ${header}" '
    BEGIN { found=0; content=0 }
    $0 == hdr { found=1; next }
    found && /^## / { exit }
    found && /[^[:space:]]/ && !/^<!--/ { content=1 }
    END { exit (found && content) ? 0 : 1 }
  ' "${file}"; then
    return 0
  fi

  _err "${file}: missing or empty section '${header}'"
  return 1
}
|
|
44
|
+
|
|
45
|
+
# ──────────────────────────────────────────────
|
|
46
|
+
# validate_brief PATH
|
|
47
|
+
# Validates spec/BRIEF.md against required sections.
|
|
48
|
+
# ──────────────────────────────────────────────
|
|
49
|
+
validate_brief() {
  local file="${1:?path required}"

  # Every "## ..." section a BRIEF.md must contain.
  local -a required=(
    "The pain"
    "The user"
    "The unfair advantage"
    "The smallest valuable thing"
    "Non-goals"
    "North-star metric"
    "Kill criteria"
  )

  local missing=0
  local section
  for section in "${required[@]}"; do
    if ! validate_spec_section "${file}" "${section}"; then
      missing=$((missing + 1))
    fi
  done

  if [[ ${missing} -eq 0 ]]; then
    _log "✓ BRIEF.md complete"
  fi
  # Exit status doubles as the number of failed sections.
  return ${missing}
}
|
|
72
|
+
|
|
73
|
+
# ──────────────────────────────────────────────
|
|
74
|
+
# validate_prd PATH
|
|
75
|
+
# ──────────────────────────────────────────────
|
|
76
|
+
validate_prd() {
  local file="${1:?path required}"

  local errors=0
  local hdr
  # Required PRD.md sections, checked in order.
  for hdr in \
    "Problem" \
    "Target user" \
    "Acceptance Criteria (the MVP)" \
    "Explicit non-goals" \
    "Success metrics" \
    "Constraints"
  do
    validate_spec_section "${file}" "${hdr}" || errors=$((errors + 1))
  done

  if [[ ${errors} -eq 0 ]]; then
    _log "✓ PRD.md complete"
  fi
  # Exit status doubles as the number of failed sections.
  return ${errors}
}
|
|
98
|
+
|
|
99
|
+
# ──────────────────────────────────────────────
|
|
100
|
+
# validate_spec PATH
|
|
101
|
+
# ──────────────────────────────────────────────
|
|
102
|
+
validate_spec() {
  local file="${1:?path required}"

  local bad=0
  local title
  # Required SPEC.md sections.
  for title in "Stack" "Data model" "Key flows" "Security model" "Environment variables"; do
    validate_spec_section "${file}" "${title}" || bad=$((bad + 1))
  done

  if (( bad == 0 )); then
    _log "✓ SPEC.md complete"
  fi
  # Exit status doubles as the number of failed sections.
  return ${bad}
}
|
|
123
|
+
|
|
124
|
+
# ──────────────────────────────────────────────
|
|
125
|
+
# validate_phases_yaml PATH
|
|
126
|
+
# Validates structure + DAG. Requires `yq`.
|
|
127
|
+
#
|
|
128
|
+
# Checks:
|
|
129
|
+
# - top-level fields: project, phases
|
|
130
|
+
# - each phase has id, name, goal, gate_criteria, tasks
|
|
131
|
+
# - each task has id, title, type, priority, depends_on, estimate, owner_type, acceptance
|
|
132
|
+
# - no XL estimates
|
|
133
|
+
# - dependency graph is a DAG (no cycles)
|
|
134
|
+
# ──────────────────────────────────────────────
|
|
135
|
+
# Validate phases.yaml structure and the task dependency DAG.
# Requires mikefarah yq (the error message links that implementation).
# Checks, in order:
#   - top-level fields: project, phases
#   - each phase has id, name, goal, gate_criteria, tasks
#   - each task has id, title, type, priority, depends_on, estimate,
#     owner_type, acceptance
#   - no task has an XL estimate
#   - the depends_on graph is acyclic (delegated to _check_dag)
# Exit status is the number of failed checks (0 = valid).
validate_phases_yaml() {
  local file="${1:?path required}"

  if [[ ! -f "${file}" ]]; then
    _err "${file}: does not exist"
    return 1
  fi

  if ! command -v yq >/dev/null 2>&1; then
    _err "yq required (https://github.com/mikefarah/yq)"
    return 1
  fi

  local failed=0

  # Top-level fields.
  # NOTE(review): `field` is not declared local here or below, so it leaks
  # into the caller's scope after this function returns.
  for field in project phases; do
    if [[ "$(yq ".${field}" "${file}")" == "null" ]]; then
      _err "missing top-level field: ${field}"
      failed=$((failed + 1))
    fi
  done

  # Per-phase required fields.
  local phase_count
  phase_count=$(yq '.phases | length' "${file}")
  local i
  for ((i = 0; i < phase_count; i++)); do
    for field in id name goal gate_criteria tasks; do
      if [[ "$(yq ".phases[${i}].${field}" "${file}")" == "null" ]]; then
        _err "phase ${i}: missing field '${field}'"
        failed=$((failed + 1))
      fi
    done
  done

  # Per-task required fields + XL check; also collect ids and
  # "task:dependency" pairs for the DAG check below.
  local all_ids=()
  local all_deps=()

  for ((i = 0; i < phase_count; i++)); do
    local task_count
    task_count=$(yq ".phases[${i}].tasks | length" "${file}")
    local j
    for ((j = 0; j < task_count; j++)); do
      # yq path prefix for this task, reused by every lookup below.
      local prefix=".phases[${i}].tasks[${j}]"

      for field in id title type priority depends_on estimate owner_type acceptance; do
        if [[ "$(yq "${prefix}.${field}" "${file}")" == "null" ]]; then
          _err "task ${prefix}: missing field '${field}'"
          failed=$((failed + 1))
        fi
      done

      # XL estimates are banned by convention: the task must be split.
      local estimate
      estimate=$(yq "${prefix}.estimate" "${file}")
      if [[ "${estimate}" == "XL" ]]; then
        _err "task $(yq "${prefix}.id" "${file}"): XL estimates are not allowed — split it"
        failed=$((failed + 1))
      fi

      local task_id
      task_id=$(yq "${prefix}.id" "${file}")
      all_ids+=("${task_id}")

      # `length // 0` treats a missing/null depends_on as an empty list.
      local dep_count
      dep_count=$(yq "${prefix}.depends_on | length // 0" "${file}")
      local k
      for ((k = 0; k < dep_count; k++)); do
        local dep
        dep=$(yq "${prefix}.depends_on[${k}]" "${file}")
        # Edge encoding consumed by _check_dag: "from:to" = from depends on to.
        all_deps+=("${task_id}:${dep}")
      done
    done
  done

  # Cycle check (Kahn's algorithm in _check_dag). Only invoked when edges
  # exist; this also keeps the "${all_ids[@]}" expansion non-empty, which
  # matters for `set -u` on bash < 4.4.
  if [[ ${#all_deps[@]} -gt 0 ]]; then
    if ! _check_dag "${all_ids[@]}" "--" "${all_deps[@]}"; then
      _err "dependency graph contains a cycle"
      failed=$((failed + 1))
    fi
  fi

  if [[ ${failed} -eq 0 ]]; then
    _log "✓ phases.yaml valid (DAG, all required fields present, no XL)"
  fi
  return ${failed}
}
|
|
224
|
+
|
|
225
|
+
# ──────────────────────────────────────────────
|
|
226
|
+
# _check_dag NODES... -- EDGES (each as "from:to" meaning from depends on to)
|
|
227
|
+
# Returns 0 if DAG, 1 if cycle detected.
|
|
228
|
+
# ──────────────────────────────────────────────
|
|
229
|
+
# _check_dag NODES... -- EDGES... (each edge "from:to" = from depends on to)
# Returns 0 if the graph is a DAG, 1 if a cycle is detected.
#
# Fix: forward "$@" to Python verbatim and split at the "--" sentinel there.
# The previous version re-packed nodes/edges through an argument-count
# heuristic ("a digit followed by an arg containing ':'") that was fragile
# for numeric node ids and carried a dead `n = int(...)` computation.
_check_dag() {
  python3 - "$@" <<'PY'
import sys
from collections import defaultdict

argv = sys.argv[1:]
# Nodes come before the "--" sentinel, edges after it.
sep = argv.index('--') if '--' in argv else len(argv)
nodes = argv[:sep]
edges = argv[sep + 1:]

graph = defaultdict(list)
in_degree = {n: 0 for n in nodes}
for e in edges:
    if ':' not in e:
        continue
    src, dst = e.split(':', 1)
    if src in in_degree and dst in in_degree:
        graph[dst].append(src)  # src depends on dst, so dst → src
        in_degree[src] += 1

# Kahn's algorithm: if a topological order covers every node, it's a DAG.
queue = [n for n, d in in_degree.items() if d == 0]
visited = 0
while queue:
    node = queue.pop()
    visited += 1
    for nxt in graph[node]:
        in_degree[nxt] -= 1
        if in_degree[nxt] == 0:
            queue.append(nxt)

sys.exit(0 if visited == len(nodes) else 1)
PY
}
|
|
@@ -0,0 +1,136 @@
|
|
|
1
|
+
#!/usr/bin/env bash
|
|
2
|
+
# worktree-helpers.sh — git worktree wrappers used by /pickup-task.
|
|
3
|
+
#
|
|
4
|
+
# Layout:
|
|
5
|
+
# ~/repos/{project}/ ← main checkout (dev branch)
|
|
6
|
+
# ~/repos/{project}-worktrees/{TICKET}/ ← per-task worktrees
|
|
7
|
+
#
|
|
8
|
+
# Source this file: `source "${FORGE_DIR}/lib/worktree-helpers.sh"`
|
|
9
|
+
|
|
10
|
+
set -euo pipefail
|
|
11
|
+
|
|
12
|
+
# ──────────────────────────────────────────────
|
|
13
|
+
# Logging
|
|
14
|
+
# ──────────────────────────────────────────────
|
|
15
|
+
# Colored log helpers for the [worktree] prefix; diagnostics go to stderr.
# $1 = ANSI color code, $2 = message.
_wt_msg() { printf '\033[1;%sm[worktree]\033[0m %s\n' "$1" "$2" >&2; }
_log()  { _wt_msg '32' "$*"; }
_warn() { _wt_msg '33' "$*"; }
_err()  { _wt_msg '31' "$*"; }
|
|
18
|
+
|
|
19
|
+
# ──────────────────────────────────────────────
|
|
20
|
+
# worktree_path PROJECT TICKET_ID
|
|
21
|
+
# Echoes the canonical worktree path for a ticket.
|
|
22
|
+
# ──────────────────────────────────────────────
|
|
23
|
+
worktree_path() {
  local repo_name="${1:?project required}"
  local ticket_id="${2:?ticket required}"
  # Canonical layout: sibling "<project>-worktrees" directory, one
  # subdirectory per ticket.
  printf '%s\n' "../${repo_name}-worktrees/${ticket_id}"
}
|
|
28
|
+
|
|
29
|
+
# ──────────────────────────────────────────────
|
|
30
|
+
# worktree_create TICKET_ID BRANCH [BASE_BRANCH=dev]
|
|
31
|
+
# Creates a new worktree at ../${project}-worktrees/${TICKET_ID} on
|
|
32
|
+
# branch BRANCH, branched from BASE_BRANCH. Idempotent.
|
|
33
|
+
# ──────────────────────────────────────────────
|
|
34
|
+
worktree_create() {
  local ticket="${1:?ticket required}"
  local branch="${2:?branch required}"
  local base="${3:-dev}"

  git rev-parse --is-inside-work-tree >/dev/null 2>&1 || {
    _err "not inside a git repo"
    return 1
  }

  local repo_name target
  repo_name=$(basename "$(git rev-parse --show-toplevel)")
  target=$(worktree_path "${repo_name}" "${ticket}")

  # Idempotent: an existing worktree directory counts as success.
  if [[ -d "${target}" ]]; then
    _warn "worktree already exists at ${target}"
    return 0
  fi

  # The base branch must exist locally before we can branch from it.
  if ! git rev-parse --verify "${base}" >/dev/null 2>&1; then
    _err "base branch '${base}' does not exist locally"
    return 1
  fi

  _log "Creating worktree: ${target} (branch ${branch} from ${base})"
  git worktree add "${target}" -b "${branch}" "${base}"
  _log "✓ worktree ready: cd ${target}"
}
|
|
64
|
+
|
|
65
|
+
# ──────────────────────────────────────────────
|
|
66
|
+
# worktree_list
|
|
67
|
+
# Lists all worktrees with their branch and HEAD.
|
|
68
|
+
# ──────────────────────────────────────────────
|
|
69
|
+
# List all worktrees as "  <path> <branch> <short-HEAD>" rows.
# Parses `git worktree list --porcelain`, whose records are blank-line
# separated blocks of "worktree ...", "HEAD ...", "branch ..." lines.
# NOTE(review): detached-HEAD worktrees emit no "branch" line, so `br`
# stays empty for them — confirm that blank column is acceptable output.
worktree_list() {
  git worktree list --porcelain | awk '
    /^worktree / {wt=$2}
    /^branch / {br=$2}
    /^HEAD / {hd=substr($2, 1, 8)}
    /^$/ {if (wt) printf " %-40s %-30s %s\n", wt, br, hd; wt=""; br=""; hd=""}
    END {if (wt) printf " %-40s %-30s %s\n", wt, br, hd}
  '
}
|
|
78
|
+
|
|
79
|
+
# ──────────────────────────────────────────────
|
|
80
|
+
# worktree_remove TICKET_ID
|
|
81
|
+
# Removes the worktree for a ticket. Refuses if uncommitted changes exist.
|
|
82
|
+
# ──────────────────────────────────────────────
|
|
83
|
+
worktree_remove() {
  local ticket="${1:?ticket required}"

  local repo_name worktree_dir
  repo_name=$(basename "$(git rev-parse --show-toplevel)")
  worktree_dir=$(worktree_path "${repo_name}" "${ticket}")

  # Idempotent: a missing worktree is treated as success.
  if [[ ! -d "${worktree_dir}" ]]; then
    _warn "worktree does not exist: ${worktree_dir}"
    return 0
  fi

  _log "Removing worktree: ${worktree_dir}"
  # No --force: `git worktree remove` itself refuses when the tree is dirty.
  git worktree remove "${worktree_dir}"
  _log "✓ removed"
}
|
|
100
|
+
|
|
101
|
+
# ──────────────────────────────────────────────
|
|
102
|
+
# worktree_cleanup
|
|
103
|
+
# Removes worktrees whose branch has been deleted upstream or merged.
|
|
104
|
+
# Dry-run by default; pass --apply to actually remove.
|
|
105
|
+
# ──────────────────────────────────────────────
|
|
106
|
+
# Remove worktrees whose branch no longer exists on origin.
# Dry-run by default; pass --apply as $1 to actually remove.
# NOTE(review): this parses the human-readable `git worktree list` output
# (path, HEAD, "[branch]") rather than --porcelain — fragile if the format
# or paths contain spaces; confirm before relying on it broadly.
worktree_cleanup() {
  local apply="${1:-}"

  # Drop stale administrative entries first.
  git worktree prune

  local removed=0
  while IFS= read -r line; do
    local wt branch
    # First column is the worktree path; branch is the bracketed suffix.
    wt=$(echo "${line}" | awk '{print $1}')
    branch=$(echo "${line}" | sed -n 's/.*\[\(.*\)\].*/\1/p')

    # Skip detached-HEAD rows (no [branch]) and the main checkout itself.
    [[ -z "${branch}" ]] && continue
    [[ "${wt}" == "$(git rev-parse --show-toplevel)" ]] && continue

    # If branch is gone upstream (ls-remote finds no matching head on origin)
    if ! git ls-remote --exit-code --heads origin "${branch}" >/dev/null 2>&1; then
      if [[ "${apply}" == "--apply" ]]; then
        _log "Removing ${wt} (branch ${branch} gone upstream)"
        git worktree remove "${wt}" --force
        # Local branch may already be gone; ignore failure deliberately.
        git branch -D "${branch}" 2>/dev/null || true
      else
        _warn "[dry-run] would remove: ${wt} (branch ${branch} gone)"
      fi
      # Counts candidates in dry-run mode too (drives the hint below).
      removed=$((removed + 1))
    fi
  done < <(git worktree list)

  if [[ "${apply}" != "--apply" && ${removed} -gt 0 ]]; then
    echo "Run with --apply to actually remove" >&2
  fi
}
|