@pennyfarthing/core 7.8.0 → 7.8.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/package.json +2 -1
- package/packages/core/dist/cli/commands/doctor.d.ts +3 -0
- package/packages/core/dist/cli/commands/doctor.d.ts.map +1 -1
- package/packages/core/dist/cli/commands/doctor.js +20 -9
- package/packages/core/dist/cli/commands/doctor.js.map +1 -1
- package/pennyfarthing-dist/scripts/core/agent-session.sh +2 -2
- package/pennyfarthing-dist/scripts/core/prime.sh +8 -0
- package/pennyfarthing_scripts/__init__.py +17 -0
- package/pennyfarthing_scripts/__pycache__/__init__.cpython-311.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/config.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/jira.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/jira_epic_creation.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/jira_sync.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/jira_sync_story.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/sprint.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/workflow.cpython-311.pyc +0 -0
- package/pennyfarthing_scripts/__pycache__/workflow.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/bellmode_hook.py +154 -0
- package/pennyfarthing_scripts/brownfield/__init__.py +35 -0
- package/pennyfarthing_scripts/brownfield/__main__.py +7 -0
- package/pennyfarthing_scripts/brownfield/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/brownfield/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/brownfield/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/brownfield/__pycache__/discover.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/brownfield/cli.py +131 -0
- package/pennyfarthing_scripts/brownfield/discover.py +753 -0
- package/pennyfarthing_scripts/common/__init__.py +49 -0
- package/pennyfarthing_scripts/common/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/common/__pycache__/config.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/common/__pycache__/output.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/common/config.py +65 -0
- package/pennyfarthing_scripts/common/output.py +180 -0
- package/pennyfarthing_scripts/config.py +21 -0
- package/pennyfarthing_scripts/git/__init__.py +29 -0
- package/pennyfarthing_scripts/git/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/git/__pycache__/create_branches.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/git/__pycache__/status_all.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/git/create_branches.py +439 -0
- package/pennyfarthing_scripts/git/status_all.py +310 -0
- package/pennyfarthing_scripts/hooks.py +455 -0
- package/pennyfarthing_scripts/jira/__init__.py +93 -0
- package/pennyfarthing_scripts/jira/__main__.py +10 -0
- package/pennyfarthing_scripts/jira/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/bidirectional.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/claim.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/client.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/compat.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/epic.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/mappings.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/models.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/story.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/__pycache__/sync.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/jira/bidirectional.py +561 -0
- package/pennyfarthing_scripts/jira/claim.py +211 -0
- package/pennyfarthing_scripts/jira/cli.py +150 -0
- package/pennyfarthing_scripts/jira/client.py +613 -0
- package/pennyfarthing_scripts/jira/epic.py +176 -0
- package/pennyfarthing_scripts/jira/story.py +219 -0
- package/pennyfarthing_scripts/jira/sync.py +350 -0
- package/pennyfarthing_scripts/jira_bidirectional_sync.py +37 -0
- package/pennyfarthing_scripts/jira_epic_creation.py +30 -0
- package/pennyfarthing_scripts/jira_sync.py +36 -0
- package/pennyfarthing_scripts/jira_sync_story.py +30 -0
- package/pennyfarthing_scripts/output.py +37 -0
- package/pennyfarthing_scripts/preflight/__init__.py +17 -0
- package/pennyfarthing_scripts/preflight/__main__.py +10 -0
- package/pennyfarthing_scripts/preflight/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/preflight/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/preflight/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/preflight/__pycache__/finish.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/preflight/cli.py +141 -0
- package/pennyfarthing_scripts/preflight/finish.py +382 -0
- package/pennyfarthing_scripts/pretooluse_hook.py +142 -0
- package/pennyfarthing_scripts/prime/__init__.py +38 -0
- package/pennyfarthing_scripts/prime/__main__.py +8 -0
- package/pennyfarthing_scripts/prime/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/loader.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/models.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/persona.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/session.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/__pycache__/workflow.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/prime/cli.py +220 -0
- package/pennyfarthing_scripts/prime/loader.py +239 -0
- package/pennyfarthing_scripts/sprint/__init__.py +66 -0
- package/pennyfarthing_scripts/sprint/__main__.py +10 -0
- package/pennyfarthing_scripts/sprint/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/__main__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/archive.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/cli.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/loader.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/status.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/validator.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/__pycache__/work.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/sprint/archive.py +108 -0
- package/pennyfarthing_scripts/sprint/cli.py +124 -0
- package/pennyfarthing_scripts/sprint/loader.py +193 -0
- package/pennyfarthing_scripts/sprint/status.py +122 -0
- package/pennyfarthing_scripts/sprint/validator.py +405 -0
- package/pennyfarthing_scripts/sprint/work.py +192 -0
- package/pennyfarthing_scripts/story/__init__.py +67 -0
- package/pennyfarthing_scripts/story/__main__.py +10 -0
- package/pennyfarthing_scripts/story/cli.py +105 -0
- package/pennyfarthing_scripts/story/create.py +167 -0
- package/pennyfarthing_scripts/story/size.py +113 -0
- package/pennyfarthing_scripts/story/template.py +151 -0
- package/pennyfarthing_scripts/swebench.py +216 -0
- package/pennyfarthing_scripts/tests/__init__.py +1 -0
- package/pennyfarthing_scripts/tests/__pycache__/__init__.cpython-314.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/conftest.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_brownfield.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_git_utils.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_prime.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/__pycache__/test_sprint_validator.cpython-314-pytest-9.0.2.pyc +0 -0
- package/pennyfarthing_scripts/tests/conftest.py +106 -0
- package/pennyfarthing_scripts/tests/test_brownfield.py +842 -0
- package/pennyfarthing_scripts/tests/test_cli_modules.py +245 -0
- package/pennyfarthing_scripts/tests/test_common.py +180 -0
- package/pennyfarthing_scripts/tests/test_git_utils.py +866 -0
- package/pennyfarthing_scripts/tests/test_jira_package.py +334 -0
- package/pennyfarthing_scripts/tests/test_package_structure.py +372 -0
- package/pennyfarthing_scripts/tests/test_prime.py +397 -0
- package/pennyfarthing_scripts/tests/test_sprint_package.py +236 -0
- package/pennyfarthing_scripts/tests/test_sprint_validator.py +675 -0
- package/pennyfarthing_scripts/tests/test_story_package.py +156 -0
- package/pennyfarthing_scripts/welcome_hook.py +157 -0
- package/pennyfarthing_scripts/workflow.py +183 -0
|
@@ -0,0 +1,382 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Async preflight checks for story finish.
|
|
3
|
+
|
|
4
|
+
Runs all checks in parallel using asyncio.gather() to guarantee
|
|
5
|
+
concurrent execution regardless of model behavior.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import asyncio
|
|
9
|
+
import json
|
|
10
|
+
import re
|
|
11
|
+
import sys
|
|
12
|
+
from dataclasses import dataclass, field
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Any
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@dataclass
class PreflightIssue:
    """A blocking issue found during preflight."""
    severity: str  # "critical" or "warning"
    issue: str  # human-readable description of what is blocking the finish
    fix: str | None = None  # suggested remediation command/action, if known
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@dataclass
class PRStatus:
    """PR status from GitHub."""
    state: str | None = None  # gh state string (e.g. "OPEN", "CLOSED") — None until fetched
    merged: bool = False  # derived from a non-null mergedAt field in gh output
    mergeable: str | None = None  # gh mergeability string, passed through as-is
    url: str | None = None  # PR web URL
    error: str | None = None  # set when the gh lookup failed; other fields then unreliable
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
@dataclass
class LintResult:
    """Lint check result."""
    clean: bool = False  # True iff `npm run lint` exited 0
    output: str = ""  # combined stdout + stderr of the lint run
    error: str | None = None  # set on lint failure or when the command could not run
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@dataclass
class JiraStatus:
    """Jira issue status."""
    current: str | None = None  # current workflow status name (fields.status.name)
    key: str | None = None  # Jira issue key the check ran against
    error: str | None = None  # set when the jira CLI call or JSON parse failed
    skipped: bool = False  # True when no Jira key was supplied, so no check ran
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
@dataclass
class AcceptanceCriteria:
    """Acceptance criteria check result."""
    total: int = 0  # checked + unchecked checkbox count in the session file
    checked: int = 0  # number of "- [x]" entries (case-insensitive)
    unchecked: list[str] = field(default_factory=list)  # text of each open "- [ ]" entry
    error: str | None = None  # set when the session file is missing or unreadable
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
@dataclass
class PreflightResult:
    """Aggregated preflight check results."""
    status: str  # "success" or "blocked"
    ready_to_finish: bool
    story_id: str
    pr: PRStatus = field(default_factory=PRStatus)
    lint: LintResult = field(default_factory=LintResult)
    jira: JiraStatus = field(default_factory=JiraStatus)
    acceptance_criteria: AcceptanceCriteria = field(default_factory=AcceptanceCriteria)
    issues: list[PreflightIssue] = field(default_factory=list)
    warnings: list[str] = field(default_factory=list)

    def to_dict(self) -> dict[str, Any]:
        """Serialize the result for JSON output, including actionable next steps."""
        payload: dict[str, Any] = {
            "status": self.status,
            "ready_to_finish": self.ready_to_finish,
            "story_id": self.story_id,
            "pr": {
                "state": self.pr.state,
                "merged": self.pr.merged,
                "mergeable": self.pr.mergeable,
                "url": self.pr.url,
            },
            "lint": {"clean": self.lint.clean},
            "acceptance_criteria": {
                "total": self.acceptance_criteria.total,
                "checked": self.acceptance_criteria.checked,
            },
        }

        if self.jira.skipped:
            payload["jira_skipped"] = True
            payload["jira"] = {"skipped": True}
        else:
            payload["jira"] = {"current": self.jira.current, "key": self.jira.key}

        if self.issues:
            payload["issues"] = [
                {"severity": item.severity, "issue": item.issue, "fix": item.fix}
                for item in self.issues
            ]

        if self.warnings:
            payload["warnings"] = self.warnings

        # next_steps guides the caller toward completion or remediation.
        if self.ready_to_finish:
            payload["next_steps"] = [
                "Preflight passed. Run finish-story.sh to complete.",
                f"Command: .pennyfarthing/scripts/core/run.sh workflow/finish-story.sh {self.story_id}",
                "Then commit and push sprint archive changes.",
            ]
        else:
            steps = [f"Cannot finish. {len(self.issues)} blocking issue(s)."]
            for blocker in self.issues[:3]:  # cap at three for readability
                steps.append(f"- {blocker.issue}")
                if blocker.fix:
                    steps.append(f"  Fix: {blocker.fix}")
            payload["next_steps"] = steps

        return payload
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
async def check_pr_status(branch: str, repo: str | None = None) -> PRStatus:
    """Check PR status via gh CLI."""
    status = PRStatus()

    # 'merged' is not a queryable gh field; merged state is derived from mergedAt.
    args = ["gh", "pr", "view", branch, "--json", "state,mergedAt,mergeable,url"]
    if repo:
        args += ["--repo", repo]

    try:
        proc = await asyncio.create_subprocess_exec(
            *args,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        out, err = await proc.communicate()

        if proc.returncode != 0:
            status.error = err.decode().strip() or "PR not found"
            return status

        payload = json.loads(out.decode())
        status.state = payload.get("state")
        # PR counts as merged exactly when mergedAt is non-null.
        status.merged = payload.get("mergedAt") is not None
        status.mergeable = payload.get("mergeable")
        status.url = payload.get("url")

    except Exception as exc:
        status.error = str(exc)

    return status
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
async def check_lint(project_root: Path | None = None) -> LintResult:
    """Run `npm run lint` and report whether the project lints clean.

    Args:
        project_root: Directory to run npm in (defaults to the current
            working directory).

    Returns:
        LintResult with combined stdout/stderr in ``output``, ``clean``
        derived from the exit code, and ``error`` set on failure.
    """
    result = LintResult()

    cwd = project_root or Path.cwd()

    try:
        proc = await asyncio.create_subprocess_exec(
            "npm", "run", "lint",
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            cwd=cwd,
        )
        stdout, stderr = await proc.communicate()

        # Lint output can embed arbitrary bytes from source files; strict
        # UTF-8 decoding could raise UnicodeDecodeError and mask the real
        # lint result, so replace undecodable bytes instead.
        result.output = stdout.decode(errors="replace") + stderr.decode(errors="replace")
        result.clean = proc.returncode == 0

        if not result.clean:
            result.error = "Lint errors found"

    except Exception as e:
        # e.g. npm not installed — surface as an error rather than crashing.
        result.error = str(e)

    return result
|
|
190
|
+
|
|
191
|
+
|
|
192
|
+
async def check_jira_status(jira_key: str) -> JiraStatus:
    """Check Jira issue status."""
    status = JiraStatus(key=jira_key)

    try:
        # --raw emits the full issue JSON, which is far easier to parse
        # than the human-readable view.
        proc = await asyncio.create_subprocess_exec(
            "jira", "issue", "view", jira_key, "--raw",
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        out, err = await proc.communicate()

        if proc.returncode != 0:
            status.error = err.decode().strip() or "Failed to fetch Jira issue"
        else:
            payload = json.loads(out.decode())
            # The workflow status name lives under fields.status.name.
            status.current = payload.get("fields", {}).get("status", {}).get("name")

    except json.JSONDecodeError as exc:
        status.error = f"Failed to parse Jira JSON: {exc}"
    except Exception as exc:
        status.error = str(exc)

    return status
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
async def check_acceptance_criteria(story_id: str, project_root: Path | None = None) -> AcceptanceCriteria:
    """Check acceptance criteria from session file."""
    criteria = AcceptanceCriteria()

    base = project_root or Path.cwd()
    session_file = base / ".session" / f"{story_id}-session.md"

    try:
        if not session_file.exists():
            criteria.error = f"Session file not found: {session_file}"
            return criteria

        text = session_file.read_text()

        # Markdown checkboxes: "- [x]" marks done (any case), "- [ ]" open.
        done_re = re.compile(r"^\s*-\s*\[x\]", re.MULTILINE | re.IGNORECASE)
        open_re = re.compile(r"^\s*-\s*\[ \]\s*(.+)$", re.MULTILINE)

        done_items = done_re.findall(text)
        open_items = open_re.findall(text)

        criteria.checked = len(done_items)
        criteria.total = criteria.checked + len(open_items)
        criteria.unchecked = [item.strip() for item in open_items]

    except Exception as exc:
        criteria.error = str(exc)

    return criteria
|
|
249
|
+
|
|
250
|
+
|
|
251
|
+
def aggregate_results(
    story_id: str,
    pr: PRStatus,
    lint: LintResult,
    jira: JiraStatus,
    acceptance: AcceptanceCriteria,
) -> PreflightResult:
    """Aggregate check results into final preflight result."""
    issues: list[PreflightIssue] = []
    warnings: list[str] = []

    def block(issue: str, fix: str | None = None) -> None:
        # Every blocking finding is critical; soft findings go to warnings.
        issues.append(PreflightIssue(severity="critical", issue=issue, fix=fix))

    # PR: missing PR blocks; other lookup failures only warn.
    if pr.error:
        if "no pull requests found" in pr.error.lower():
            block("No PR found for branch", "Create PR with: gh pr create")
        else:
            warnings.append(f"PR check failed: {pr.error}")
    elif not pr.merged:
        if pr.state == "OPEN":
            block("PR is still open (not merged)", "Merge the PR before finishing")
        elif pr.state == "CLOSED":
            block("PR was closed without merging", "Reopen and merge, or create new PR")

    # Lint failures block the finish.
    if lint.error and not lint.clean:
        block("Lint check failed", "Run 'npm run lint' and fix errors")

    # Jira problems never block, they only warn (and only when checked at all).
    if not jira.skipped:
        if jira.error:
            warnings.append(f"Jira check failed: {jira.error}")
        elif jira.current and jira.current.lower() == "done":
            warnings.append("Jira issue already marked as Done")

    # Unchecked acceptance criteria block; read errors only warn.
    if acceptance.error:
        warnings.append(f"Acceptance criteria check failed: {acceptance.error}")
    elif acceptance.unchecked:
        block(
            f"{len(acceptance.unchecked)} unchecked acceptance criteria",
            f"Complete: {acceptance.unchecked[0]}",
        )

    ready = not issues
    return PreflightResult(
        status="success" if ready else "blocked",
        ready_to_finish=ready,
        story_id=story_id,
        pr=pr,
        lint=lint,
        jira=jira,
        acceptance_criteria=acceptance,
        issues=issues,
        warnings=warnings,
    )
|
|
326
|
+
|
|
327
|
+
|
|
328
|
+
async def run_finish_preflight(
    story_id: str,
    branch: str,
    jira_key: str | None = None,
    repo: str | None = None,
    project_root: Path | None = None,
) -> PreflightResult:
    """
    Run all finish preflight checks in parallel.

    Args:
        story_id: Story identifier (e.g., "31-10")
        branch: Feature branch name
        jira_key: Jira issue key (optional, skips Jira checks if absent)
        repo: Repository name for PR lookup (optional)
        project_root: Project root path (defaults to cwd)

    Returns:
        PreflightResult with aggregated check results
    """
    root = Path(project_root) if project_root else Path.cwd()

    # Assemble the checks; the Jira check only runs when a key was given.
    tasks = [
        check_pr_status(branch, repo),
        check_lint(root),
        check_acceptance_criteria(story_id, root),
    ]
    if jira_key:
        tasks.append(check_jira_status(jira_key))

    # gather() guarantees concurrent execution; exceptions are returned
    # in-place rather than raised so one failed check cannot abort the rest.
    outcomes = await asyncio.gather(*tasks, return_exceptions=True)

    def or_error(value, fallback_cls):
        # Convert a raised exception into the check's error-carrying result.
        return fallback_cls(error=str(value)) if isinstance(value, Exception) else value

    pr_result = or_error(outcomes[0], PRStatus)
    lint_result = or_error(outcomes[1], LintResult)
    acceptance_result = or_error(outcomes[2], AcceptanceCriteria)
    jira_result = or_error(outcomes[3], JiraStatus) if jira_key else JiraStatus(skipped=True)

    return aggregate_results(
        story_id=story_id,
        pr=pr_result,
        lint=lint_result,
        jira=jira_result,
        acceptance=acceptance_result,
    )
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Cyclist PreToolUse Hook (Python)
|
|
4
|
+
|
|
5
|
+
This script is called by Claude Code before each tool execution.
|
|
6
|
+
It communicates with WheelHub (Cyclist's central coordination server)
|
|
7
|
+
via HTTP to get approval decisions.
|
|
8
|
+
|
|
9
|
+
Flow:
|
|
10
|
+
1. Claude Code calls this script with tool info via stdin (JSON)
|
|
11
|
+
2. Script reads port from .cyclist-port in project directory
|
|
12
|
+
3. Script sends request to WheelHub's /api/hook-request endpoint
|
|
13
|
+
4. WheelHub shows approval modal, user decides
|
|
14
|
+
5. Script receives response, outputs JSON decision to stdout
|
|
15
|
+
6. Claude Code proceeds or blocks based on decision
|
|
16
|
+
|
|
17
|
+
Per ADR-0004: All communication converges through WheelHub.
|
|
18
|
+
|
|
19
|
+
Story: MSSCI-12409 - Hook consistency and WheelHub consolidation
|
|
20
|
+
|
|
21
|
+
Usage:
|
|
22
|
+
Install in ~/.claude/settings.json or project .claude/settings.json:
|
|
23
|
+
{
|
|
24
|
+
"hooks": {
|
|
25
|
+
"PreToolUse": [{
|
|
26
|
+
"matcher": "Bash",
|
|
27
|
+
"hooks": [{
|
|
28
|
+
"type": "command",
|
|
29
|
+
"command": "python3 /path/to/pretooluse_hook.py"
|
|
30
|
+
}]
|
|
31
|
+
}]
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
"""
|
|
35
|
+
|
|
36
|
+
import sys
|
|
37
|
+
from pathlib import Path
|
|
38
|
+
|
|
39
|
+
# Add parent directory to path for imports
|
|
40
|
+
sys.path.insert(0, str(Path(__file__).parent))
|
|
41
|
+
|
|
42
|
+
from hooks import (
|
|
43
|
+
find_project_root,
|
|
44
|
+
get_cyclist_port,
|
|
45
|
+
send_to_cyclist,
|
|
46
|
+
read_stdin_json,
|
|
47
|
+
output_hook_response,
|
|
48
|
+
HookResponse,
|
|
49
|
+
load_settings,
|
|
50
|
+
is_cyclist_running,
|
|
51
|
+
get_context_state,
|
|
52
|
+
)
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def main() -> None:
    """Main entry point for PreToolUse hook.

    Reads the tool-use event from stdin, obtains an approval decision
    (locally or from WheelHub), and writes a JSON hook response to stdout.
    Always exits 0 — a hook failure must never block the user's tool call.
    """
    try:
        # Read tool data from Claude Code (JSON on stdin)
        tool_data = read_stdin_json()

        # Extract relevant fields
        tool_name = tool_data.get("tool_name", "")
        tool_id = tool_data.get("tool_use_id", "")
        tool_input = tool_data.get("tool_input", {})
        session_id = tool_data.get("session_id")

        # Find project root
        project_root = find_project_root()

        # Check if Cyclist is running
        if not is_cyclist_running(project_root):
            # No Cyclist - defer to Claude Code's built-in approval
            output_hook_response(HookResponse(
                event_name="PreToolUse",
                decision="ask",
                reason="Cyclist not running, deferring to Claude Code",
            ))
            sys.exit(0)

        # Load settings to check for auto-approval mode
        settings = load_settings(project_root)
        if settings.permission_mode == "accept":
            # Auto-accept mode - approve everything without contacting WheelHub
            output_hook_response(HookResponse(
                event_name="PreToolUse",
                decision="allow",
                reason="Auto-accept mode enabled",
            ))
            sys.exit(0)

        # Get context state for inclusion in request
        context = get_context_state(project_root)

        # Send approval request to WheelHub with context info
        response = send_to_cyclist(
            endpoint="/api/hook-request",
            data={
                "toolName": tool_name,
                "toolId": tool_id,
                "input": tool_input,
                "sessionId": session_id,
                "context": {
                    "percentage": context.percentage,
                    "isHigh": context.is_high,
                    "isCritical": context.is_critical,
                },
            },
            project_root=project_root,
        )

        if response is None:
            # Connection failed - defer to Claude Code
            output_hook_response(HookResponse(
                event_name="PreToolUse",
                decision="ask",
                reason="Could not connect to WheelHub",
            ))
            sys.exit(0)

        # Extract decision from response; default to "ask" if absent
        decision = response.get("decision", "ask")
        reason = response.get("reason", "")
        data = response.get("data")

        # Output decision (data, if present, replaces the tool input)
        output_hook_response(HookResponse(
            event_name="PreToolUse",
            decision=decision,
            reason=reason,
            updated_input=data,
        ))
        sys.exit(0)

    except Exception as e:
        # On error, output to stderr and exit with code 0 (allow)
        # We don't want hook failures to block the user
        print(f"[pretooluse-hook] Error: {e}", file=sys.stderr)
        sys.exit(0)
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
# Script entry point: run the hook when invoked directly by Claude Code.
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Prime - Load essential project context at agent activation.
|
|
3
|
+
|
|
4
|
+
This module provides context loading for Pennyfarthing agents, with
|
|
5
|
+
priority-based ordering optimized for attention (most important first).
|
|
6
|
+
|
|
7
|
+
Usage:
|
|
8
|
+
python -m pennyfarthing_scripts.prime [--agent <name>] [--minimal] [--full] [--quiet]
|
|
9
|
+
|
|
10
|
+
Public API:
|
|
11
|
+
prime() - Load and print context (main entry point)
|
|
12
|
+
load_agent_definition() - Load agent markdown
|
|
13
|
+
load_behavior_guide() - Load shared behavior guide
|
|
14
|
+
load_sprint_context() - Load sprint summary
|
|
15
|
+
load_session_context() - Load active session header and assessment
|
|
16
|
+
load_sidecars() - Load agent-specific patterns, gotchas, decisions
|
|
17
|
+
load_domain_docs() - Load domain documentation (--full only)
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
from pennyfarthing_scripts.prime.loader import (
|
|
21
|
+
load_agent_definition,
|
|
22
|
+
load_behavior_guide,
|
|
23
|
+
load_domain_docs,
|
|
24
|
+
load_session_context,
|
|
25
|
+
load_sidecars,
|
|
26
|
+
load_sprint_context,
|
|
27
|
+
)
|
|
28
|
+
from pennyfarthing_scripts.prime.cli import prime
|
|
29
|
+
|
|
30
|
+
# Public API of pennyfarthing_scripts.prime; keep in sync with the
# imports above and the module docstring.
__all__ = [
    "prime",
    "load_agent_definition",
    "load_behavior_guide",
    "load_sprint_context",
    "load_session_context",
    "load_sidecars",
    "load_domain_docs",
]
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|
|
Binary file
|