@hustle-together/api-dev-tools 3.12.3 → 4.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/adr-requests/.gitkeep +10 -0
- package/.claude/agents/adr-researcher.md +109 -0
- package/.claude/agents/visual-analyzer.md +183 -0
- package/.claude/api-dev-state.json +7 -463
- package/.claude/documentation-audit.json +114 -0
- package/.claude/registry.json +289 -0
- package/.claude/settings.json +45 -1
- package/.claude/workflow-logs/None.json +49 -0
- package/.claude/workflow-logs/session-20251230-143727.json +106 -0
- package/.skills/adr-deep-research/SKILL.md +351 -0
- package/.skills/api-create/SKILL.md +116 -17
- package/.skills/api-research/SKILL.md +130 -0
- package/.skills/docs-sync/SKILL.md +260 -0
- package/.skills/docs-update/SKILL.md +205 -0
- package/.skills/hustle-brand/SKILL.md +368 -0
- package/.skills/hustle-build/SKILL.md +786 -0
- package/.skills/hustle-build-review/SKILL.md +518 -0
- package/.skills/parallel-spawn/SKILL.md +212 -0
- package/.skills/ralph-continue/SKILL.md +151 -0
- package/.skills/ralph-loop/SKILL.md +341 -0
- package/.skills/ralph-status/SKILL.md +87 -0
- package/.skills/refactor/SKILL.md +59 -0
- package/.skills/shadcn/SKILL.md +522 -0
- package/.skills/test-all/SKILL.md +210 -0
- package/.skills/test-builds/SKILL.md +208 -0
- package/.skills/test-debug/SKILL.md +212 -0
- package/.skills/test-e2e/SKILL.md +168 -0
- package/.skills/test-review/SKILL.md +707 -0
- package/.skills/test-unit/SKILL.md +143 -0
- package/.skills/test-visual/SKILL.md +301 -0
- package/.skills/token-report/SKILL.md +132 -0
- package/CHANGELOG.md +575 -0
- package/README.md +426 -56
- package/bin/cli.js +1538 -88
- package/commands/hustle-api-create.md +22 -0
- package/commands/hustle-build.md +259 -0
- package/commands/hustle-combine.md +81 -2
- package/commands/hustle-ui-create-page.md +84 -2
- package/commands/hustle-ui-create.md +82 -2
- package/hooks/__pycache__/api-workflow-check.cpython-314.pyc +0 -0
- package/hooks/__pycache__/auto-answer.cpython-314.pyc +0 -0
- package/hooks/__pycache__/cache-research.cpython-314.pyc +0 -0
- package/hooks/__pycache__/check-api-routes.cpython-314.pyc +0 -0
- package/hooks/__pycache__/check-playwright-setup.cpython-314.pyc +0 -0
- package/hooks/__pycache__/check-storybook-setup.cpython-314.pyc +0 -0
- package/hooks/__pycache__/check-update.cpython-314.pyc +0 -0
- package/hooks/__pycache__/completion-promise-detector.cpython-314.pyc +0 -0
- package/hooks/__pycache__/context-capacity-warning.cpython-314.pyc +0 -0
- package/hooks/__pycache__/detect-interruption.cpython-314.pyc +0 -0
- package/hooks/__pycache__/docs-update-check.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-a11y-audit.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-brand-guide.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-component-type-confirm.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-deep-research.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-disambiguation.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-documentation.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-dry-run.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-environment.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-external-research.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-freshness.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-interview.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-page-components.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-page-data-schema.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-questions-sourced.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-refactor.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-research.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-schema-from-interview.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-schema.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-scope.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-tdd-red.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-ui-disambiguation.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-ui-interview.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-verify.cpython-314.pyc +0 -0
- package/hooks/__pycache__/generate-adr-options.cpython-314.pyc +0 -0
- package/hooks/__pycache__/generate-manifest-entry.cpython-314.pyc +0 -0
- package/hooks/__pycache__/hook_utils.cpython-314.pyc +0 -0
- package/hooks/__pycache__/notify-input-needed.cpython-314.pyc +0 -0
- package/hooks/__pycache__/notify-phase-complete.cpython-314.pyc +0 -0
- package/hooks/__pycache__/ntfy-on-question.cpython-314.pyc +0 -0
- package/hooks/__pycache__/orchestrator-completion.cpython-314.pyc +0 -0
- package/hooks/__pycache__/orchestrator-handoff.cpython-314.pyc +0 -0
- package/hooks/__pycache__/orchestrator-session-startup.cpython-314.pyc +0 -0
- package/hooks/__pycache__/parallel-orchestrator.cpython-314.pyc +0 -0
- package/hooks/__pycache__/periodic-reground.cpython-314.pyc +0 -0
- package/hooks/__pycache__/project-document-prompt.cpython-314.pyc +0 -0
- package/hooks/__pycache__/remote-question-proxy.cpython-314.pyc +0 -0
- package/hooks/__pycache__/remote-question-server.cpython-314.pyc +0 -0
- package/hooks/__pycache__/run-code-review.cpython-314.pyc +0 -0
- package/hooks/__pycache__/run-visual-qa.cpython-314.pyc +0 -0
- package/hooks/__pycache__/session-logger.cpython-314.pyc +0 -0
- package/hooks/__pycache__/session-startup.cpython-314.pyc +0 -0
- package/hooks/__pycache__/track-scope-coverage.cpython-314.pyc +0 -0
- package/hooks/__pycache__/track-token-usage.cpython-314.pyc +0 -0
- package/hooks/__pycache__/track-tool-use.cpython-314.pyc +0 -0
- package/hooks/__pycache__/update-adr-decision.cpython-314.pyc +0 -0
- package/hooks/__pycache__/update-api-showcase.cpython-314.pyc +0 -0
- package/hooks/__pycache__/update-registry.cpython-314.pyc +0 -0
- package/hooks/__pycache__/update-ui-showcase.cpython-314.pyc +0 -0
- package/hooks/__pycache__/verify-after-green.cpython-314.pyc +0 -0
- package/hooks/__pycache__/verify-implementation.cpython-314.pyc +0 -0
- package/hooks/api-workflow-check.py +34 -0
- package/hooks/auto-answer.py +305 -0
- package/hooks/check-update.py +132 -0
- package/hooks/completion-promise-detector.py +293 -0
- package/hooks/context-capacity-warning.py +171 -0
- package/hooks/docs-update-check.py +120 -0
- package/hooks/enforce-dry-run.py +134 -0
- package/hooks/enforce-external-research.py +25 -0
- package/hooks/enforce-interview.py +20 -0
- package/hooks/generate-adr-options.py +282 -0
- package/hooks/hook_utils.py +609 -0
- package/hooks/lib/__pycache__/__init__.cpython-314.pyc +0 -0
- package/hooks/lib/__pycache__/greptile.cpython-314.pyc +0 -0
- package/hooks/lib/__pycache__/ntfy.cpython-314.pyc +0 -0
- package/hooks/ntfy-on-question.py +240 -0
- package/hooks/orchestrator-completion.py +313 -0
- package/hooks/orchestrator-handoff.py +267 -0
- package/hooks/orchestrator-session-startup.py +146 -0
- package/hooks/parallel-orchestrator.py +451 -0
- package/hooks/periodic-reground.py +270 -67
- package/hooks/project-document-prompt.py +302 -0
- package/hooks/remote-question-proxy.py +284 -0
- package/hooks/remote-question-server.py +1224 -0
- package/hooks/run-code-review.py +176 -29
- package/hooks/run-visual-qa.py +338 -0
- package/hooks/session-logger.py +27 -1
- package/hooks/session-startup.py +113 -0
- package/hooks/update-adr-decision.py +236 -0
- package/hooks/update-api-showcase.py +13 -1
- package/hooks/update-testing-checklist.py +195 -0
- package/hooks/update-ui-showcase.py +13 -1
- package/package.json +7 -3
- package/scripts/extract-schema-docs.cjs +322 -0
- package/templates/.skills/hustle-interview/SKILL.md +174 -0
- package/templates/CLAUDE-SECTION.md +89 -64
- package/templates/adr-viewer/_components/ADRViewer.tsx +326 -0
- package/templates/api-dev-state.json +33 -1
- package/templates/api-showcase/_components/APIModal.tsx +100 -8
- package/templates/api-showcase/_components/APIShowcase.tsx +36 -4
- package/templates/api-showcase/_components/APITester.tsx +367 -58
- package/templates/brand-page/page.tsx +645 -0
- package/templates/component/Component.visual.spec.ts +30 -24
- package/templates/docs/page.tsx +230 -0
- package/templates/eslint-plugin-zod-schema/index.js +446 -0
- package/templates/eslint-plugin-zod-schema/package.json +26 -0
- package/templates/github-workflows/security.yml +274 -0
- package/templates/hustle-build-defaults.json +136 -0
- package/templates/hustle-dev-dashboard/page.tsx +365 -0
- package/templates/page/page.e2e.test.ts +30 -26
- package/templates/performance-budgets.json +63 -5
- package/templates/playwright-report/page.tsx +258 -0
- package/templates/registry.json +279 -3
- package/templates/review-dashboard/page.tsx +510 -0
- package/templates/settings.json +155 -7
- package/templates/test-results/page.tsx +237 -0
- package/templates/typedoc.json +19 -0
- package/templates/ui-showcase/_components/UIShowcase.tsx +48 -1
- package/templates/ui-showcase/_components/VisualTestingDashboard.tsx +579 -0
- package/templates/ui-showcase/page.tsx +1 -1
|
@@ -0,0 +1,609 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Shared utilities for Hustle Dev Tools hooks.
|
|
4
|
+
|
|
5
|
+
This module provides common functions used across multiple hooks:
|
|
6
|
+
- Workflow logging (events, decisions, phase transitions)
|
|
7
|
+
- State file management
|
|
8
|
+
- Configuration loading
|
|
9
|
+
- Path resolution
|
|
10
|
+
- Source repository detection
|
|
11
|
+
|
|
12
|
+
Version: 4.5.0
|
|
13
|
+
Updated: v3.12.13 - Added source repository detection
|
|
14
|
+
Updated: v4.5.0 - Added comprehensive logging, directory management, iteration tracking
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
import json
|
|
18
|
+
import os
|
|
19
|
+
import sys
|
|
20
|
+
import shutil
|
|
21
|
+
from datetime import datetime
|
|
22
|
+
from pathlib import Path
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
# =============================================================================
|
|
26
|
+
# PATH UTILITIES
|
|
27
|
+
# =============================================================================
|
|
28
|
+
|
|
29
|
+
def get_project_dir():
    """Return the project root: $CLAUDE_PROJECT_DIR when set, else the cwd."""
    env_dir = os.environ.get("CLAUDE_PROJECT_DIR")
    return env_dir if env_dir is not None else os.getcwd()
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def get_state_file_path():
    """Return the Path of .claude/api-dev-state.json under the project root."""
    return Path(get_project_dir(), ".claude", "api-dev-state.json")
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def get_config_file_path():
    """Locate hustle-build-defaults.json.

    Prefers the project-level copy under .claude/, then falls back to the
    bundled templates/ copy. Returns None when neither file exists.
    """
    root = Path(get_project_dir())
    candidates = (
        root / ".claude" / "hustle-build-defaults.json",
        root / "templates" / "hustle-build-defaults.json",
    )
    for candidate in candidates:
        if candidate.exists():
            return candidate
    return None
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
# =============================================================================
|
|
55
|
+
# STATE MANAGEMENT
|
|
56
|
+
# =============================================================================
|
|
57
|
+
|
|
58
|
+
def load_state():
    """Read api-dev-state.json; return {} when missing or unparseable."""
    try:
        # A missing file raises FileNotFoundError, a subclass of OSError,
        # so one except clause covers both "absent" and "corrupt".
        return json.loads(get_state_file_path().read_text())
    except (json.JSONDecodeError, OSError):
        return {}
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def save_state(state):
    """Serialize *state* to api-dev-state.json, creating .claude/ if needed."""
    target = get_state_file_path()
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(json.dumps(state, indent=2))
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def load_config():
    """Read hustle-build-defaults.json; return {} when absent or invalid."""
    path = get_config_file_path()
    if path is None or not path.exists():
        return {}
    try:
        return json.loads(path.read_text())
    except (json.JSONDecodeError, OSError):
        return {}
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
# =============================================================================
|
|
88
|
+
# WORKFLOW LOGGING
|
|
89
|
+
# =============================================================================
|
|
90
|
+
|
|
91
|
+
def get_workflow_id():
    """Return the workflow ID for this session, creating one if needed.

    A stored but falsy ID (None, "") is treated as missing and replaced.
    The previous membership test (`"workflow_id" in state`) returned a
    stored None verbatim, which leaked into log filenames as "None.json".

    Returns:
        str: The existing or newly generated session workflow ID.
    """
    state = load_state()
    existing = state.get("workflow_id")
    if existing:
        return existing

    # Generate a fresh timestamp-based ID and persist it for the session.
    workflow_id = f"session-{datetime.now().strftime('%Y%m%d-%H%M%S')}"
    state["workflow_id"] = workflow_id
    save_state(state)
    return workflow_id
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def get_workflow_log_path():
    """Return the current workflow's log file Path, creating the logs dir."""
    logs_dir = Path(get_project_dir()) / ".claude" / "workflow-logs"
    logs_dir.mkdir(parents=True, exist_ok=True)
    return logs_dir / f"{get_workflow_id()}.json"
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def log_workflow_event(event_type, data=None):
    """
    Append an event to the current session's workflow log file.

    Known event types include: "session_start", "phase_transition",
    "interview_decision", "auto_answer", "promise_emitted",
    "iteration_count", "dry_run_block", "resume_attempt",
    "directory_created", "registry_created".

    Args:
        event_type: Type of event being logged.
        data: Extra key/value pairs merged into the event record (dict).
    """
    log_file = get_workflow_log_path()

    # Reuse the existing log when it parses; otherwise start a fresh one.
    log_data = None
    if log_file.exists():
        try:
            log_data = json.loads(log_file.read_text())
        except (json.JSONDecodeError, OSError):
            log_data = None
    if log_data is None:
        log_data = {
            "workflow_id": get_workflow_id(),
            "started_at": datetime.now().isoformat(),
            "events": [],
        }

    record = {"timestamp": datetime.now().isoformat(), "type": event_type}
    record.update(data or {})
    log_data["events"].append(record)
    log_data["last_activity"] = datetime.now().isoformat()

    log_file.write_text(json.dumps(log_data, indent=2))
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
# =============================================================================
|
|
172
|
+
# DIRECTORY & REGISTRY MANAGEMENT
|
|
173
|
+
# =============================================================================
|
|
174
|
+
|
|
175
|
+
def ensure_directories():
    """
    Create the standard .claude/ subdirectories when any are missing.

    Covers:
    - .claude/workflow-logs/
    - .claude/adrs/
    - .claude/adr-requests/
    - .claude/research/

    Returns:
        list: Relative paths of directories newly created by this call.
    """
    root = Path(get_project_dir())
    wanted = (
        ".claude/workflow-logs",
        ".claude/adrs",
        ".claude/adr-requests",
        ".claude/research",
    )

    created = []
    for rel in wanted:
        target = root / rel
        if not target.exists():
            target.mkdir(parents=True, exist_ok=True)
            created.append(rel)
    return created
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
def ensure_registry():
    """
    Make sure .claude/registry.json exists, creating it when needed.

    Copies the bundled templates/registry.json when available; otherwise
    writes a minimal empty registry skeleton.

    Returns:
        tuple: (success: bool, created: bool) - whether a registry now
        exists, and whether this call had to create it.
    """
    root = Path(get_project_dir())
    registry_path = root / ".claude" / "registry.json"

    if registry_path.exists():
        return True, False  # already present, nothing created

    # Preferred source: the packaged template.
    template = root / "templates" / "registry.json"
    if template.exists():
        try:
            registry_path.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy(template, registry_path)
            return True, True
        except IOError:
            pass  # fall through to the minimal skeleton below

    # Last resort: write an empty skeleton registry.
    try:
        registry_path.parent.mkdir(parents=True, exist_ok=True)
        skeleton = {
            "version": "1.0.0",
            "updated_at": datetime.now().isoformat(),
            "apis": {},
            "components": {},
            "pages": {},
            "combined": {},
            "adrs": {},
        }
        registry_path.write_text(json.dumps(skeleton, indent=2))
        return True, True
    except IOError:
        return False, False
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
# =============================================================================
|
|
250
|
+
# DRY-RUN MODE
|
|
251
|
+
# =============================================================================
|
|
252
|
+
|
|
253
|
+
def check_dry_run_mode():
    """Return True when the persisted state has dry-run mode enabled."""
    return load_state().get("dry_run_mode", False)
|
|
257
|
+
|
|
258
|
+
|
|
259
|
+
def set_dry_run_mode(enabled=True):
    """Persist the dry-run flag into the workflow state."""
    current = load_state()
    current["dry_run_mode"] = enabled
    save_state(current)
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
# =============================================================================
|
|
267
|
+
# ITERATION TRACKING
|
|
268
|
+
# =============================================================================
|
|
269
|
+
|
|
270
|
+
def get_phase_iterations(phase):
    """Return how many iterations *phase* has run so far (0 if untracked)."""
    return load_state().get("phase_iterations", {}).get(phase, 0)
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
def increment_phase_iteration(phase):
    """
    Bump the iteration counter for *phase* and persist the new count.

    Returns:
        tuple: (current_iteration, max_iterations, limit_exceeded)
    """
    state = load_state()
    config = load_config()

    # Per-phase limit falls back to the global autonomous limit (default 25).
    default_limit = config.get("autonomous", {}).get("max_iterations", 25)
    per_phase_limits = config.get("max_iterations", {}).get("phases", {})
    limit = per_phase_limits.get(phase, default_limit)

    counters = state.get("phase_iterations", {})
    counters[phase] = counters.get(phase, 0) + 1
    state["phase_iterations"] = counters
    save_state(state)

    return counters[phase], limit, counters[phase] > limit
|
|
300
|
+
|
|
301
|
+
|
|
302
|
+
def reset_phase_iterations():
    """Clear every per-phase iteration counter in the persisted state."""
    state = load_state()
    state["phase_iterations"] = {}
    save_state(state)
|
|
307
|
+
|
|
308
|
+
|
|
309
|
+
# =============================================================================
|
|
310
|
+
# HOOK I/O HELPERS
|
|
311
|
+
# =============================================================================
|
|
312
|
+
|
|
313
|
+
def get_input_from_stdin():
    """Parse the hook's JSON payload from stdin; return {} on any failure.

    Besides malformed JSON, this also tolerates an unreadable or closed
    stdin (ValueError/OSError) so a hook never crashes while reading its
    input. JSONDecodeError is a ValueError subclass; both are listed for
    clarity.
    """
    try:
        return json.load(sys.stdin)
    except (json.JSONDecodeError, ValueError, OSError):
        return {}
|
|
319
|
+
|
|
320
|
+
|
|
321
|
+
def output_result(result):
    """Emit *result* to stdout as a single JSON line (hook protocol)."""
    sys.stdout.write(json.dumps(result) + "\n")
|
|
324
|
+
|
|
325
|
+
|
|
326
|
+
def allow_continue():
    """Emit the non-blocking "carry on" hook result."""
    output_result({"continue": True})
|
|
329
|
+
|
|
330
|
+
|
|
331
|
+
def block_with_reason(reason):
    """Emit a blocking hook result carrying *reason* for the user."""
    output_result({"continue": False, "reason": reason})
|
|
337
|
+
|
|
338
|
+
|
|
339
|
+
# =============================================================================
|
|
340
|
+
# RESUME FUNCTIONALITY (v4.5.0)
|
|
341
|
+
# =============================================================================
|
|
342
|
+
|
|
343
|
+
def handle_resume(workflow_id):
    """
    Resume a previous workflow by ID.

    Search order:
    1. The live api-dev-state.json (when its workflow_id matches).
    2. Archived workflow logs under .claude/workflow-logs/.

    Args:
        workflow_id: The workflow ID to resume (e.g. "session-20251230-143022").

    Returns:
        tuple: (state_dict or None, message_string)
    """
    project_dir = get_project_dir()

    # 1) The active state file.
    state_file = get_state_file_path()
    if state_file.exists():
        try:
            state = json.loads(state_file.read_text())
            if state.get("workflow_id") == workflow_id:
                # Report the first phase still marked in_progress, if any.
                pending = next(
                    (
                        name
                        for name, info in state.get("phases", {}).items()
                        if isinstance(info, dict)
                        and info.get("status") == "in_progress"
                    ),
                    None,
                )
                if pending:
                    return state, f"Resuming from phase: {pending}"
                return state, "Workflow found but all phases complete"
        except (json.JSONDecodeError, IOError):
            pass  # unreadable state file: fall through to the archives

    # 2) The archived workflow logs.
    log_file = Path(project_dir) / ".claude" / "workflow-logs" / f"{workflow_id}.json"
    if log_file.exists():
        try:
            log_data = json.loads(log_file.read_text())

            if "state_snapshot" in log_data:
                # Reinstate the archived state and record the resume.
                restored_state = log_data["state_snapshot"]
                save_state(restored_state)
                log_workflow_event("resume_attempt", {
                    "workflow_id": workflow_id,
                    "source": "workflow_log",
                    "success": True,
                })
                return restored_state, f"Restored workflow {workflow_id} from archive"

            # No snapshot available: surface the last recorded event instead.
            events = log_data.get("events", [])
            last_event = events[-1] if events else {}
            return None, f"Found log for {workflow_id} but no resumable state. Last event: {last_event.get('type', 'unknown')}"

        except (json.JSONDecodeError, IOError):
            pass  # unreadable archive: report not found below

    return None, f"Workflow {workflow_id} not found in state or logs"
|
|
412
|
+
|
|
413
|
+
|
|
414
|
+
def list_resumable_workflows():
    """
    Enumerate workflows that could potentially be resumed.

    Returns:
        list: One info dict per workflow - the active state file first,
        then archived logs (archives duplicating an already-listed ID are
        skipped).
    """
    workflows = []

    # Active workflow from the live state file.
    state_file = get_state_file_path()
    if state_file.exists():
        try:
            state = json.loads(state_file.read_text())
            if "workflow_id" in state:
                workflows.append({
                    "workflow_id": state["workflow_id"],
                    "source": "active",
                    "endpoint": state.get("active_endpoint"),
                    "last_modified": state_file.stat().st_mtime,
                })
        except Exception:
            pass  # best-effort: an unreadable state file is simply skipped

    # Archived workflows from .claude/workflow-logs/.
    logs_dir = Path(get_project_dir()) / ".claude" / "workflow-logs"
    if logs_dir.exists():
        seen = {entry["workflow_id"] for entry in workflows}
        for log_file in logs_dir.glob("*.json"):
            try:
                log_data = json.loads(log_file.read_text())
                wf_id = log_data.get("workflow_id", log_file.stem)
                if wf_id in seen:
                    continue
                workflows.append({
                    "workflow_id": wf_id,
                    "source": "archived",
                    "started_at": log_data.get("started_at"),
                    "last_activity": log_data.get("last_activity"),
                    "has_state_snapshot": "state_snapshot" in log_data,
                })
                seen.add(wf_id)
            except Exception:
                continue  # skip unreadable archives

    return workflows
|
|
462
|
+
|
|
463
|
+
|
|
464
|
+
def snapshot_state_to_log():
    """
    Copy the current state into the workflow log as "state_snapshot".

    This is what makes interrupted workflows resumable via handle_resume().

    Returns:
        bool: True when the snapshot was written, False when there is no
        state to snapshot or the log could not be written.
    """
    state = load_state()
    if not state:
        return False

    log_file = get_workflow_log_path()
    try:
        if log_file.exists():
            log_data = json.loads(log_file.read_text())
        else:
            # Consistency fix: match the log shape that log_workflow_event()
            # creates, so this fallback log also carries started_at.
            log_data = {
                "workflow_id": get_workflow_id(),
                "started_at": datetime.now().isoformat(),
                "events": [],
            }

        log_data["state_snapshot"] = state
        log_data["snapshot_at"] = datetime.now().isoformat()
        log_file.write_text(json.dumps(log_data, indent=2))
        return True
    except Exception:
        return False
|
|
486
|
+
|
|
487
|
+
|
|
488
|
+
# =============================================================================
|
|
489
|
+
# SOURCE REPOSITORY DETECTION (v3.12.13)
|
|
490
|
+
# =============================================================================
|
|
491
|
+
|
|
492
|
+
def is_source_repository() -> bool:
    """
    Detect whether the cwd is the api-dev-tools source repository itself.

    Hooks skip workflow enforcement there - we are developing the tools,
    not using them. Two signals are checked:
    1. package.json's "name" is @hustle-together/api-dev-tools.
    2. A templates/ directory exists (present only in the source repo,
       not in installed copies).

    Returns:
        True to skip enforcement (source repo), False to enforce normally.
    """
    try:
        cwd = Path.cwd()

        manifest = cwd / "package.json"
        if manifest.exists():
            info = json.loads(manifest.read_text())
            if info.get("name") == "@hustle-together/api-dev-tools":
                return True

        if (cwd / "templates").is_dir():
            return True

    except Exception:
        pass  # any probe failure means "assume target project"
    return False
|
|
520
|
+
|
|
521
|
+
|
|
522
|
+
def skip_if_source_repo() -> bool:
    """
    Early-exit helper for hooks: True means "we are in the source repo,
    bail out immediately".

    Usage at top of hook:
        from hook_utils import skip_if_source_repo
        if skip_if_source_repo():
            print(json.dumps({"decision": "approve"}))
            sys.exit(0)
    """
    return is_source_repository()
|
|
534
|
+
|
|
535
|
+
|
|
536
|
+
# =============================================================================
|
|
537
|
+
# NTFY NOTIFICATIONS (v4.6.0)
|
|
538
|
+
# =============================================================================
|
|
539
|
+
|
|
540
|
+
def get_ntfy_config():
    """
    Resolve NTFY settings, in priority order:
    1. Environment variables (NTFY_TOPIC, NTFY_SERVER)
    2. The "ntfy" section of hustle-build-defaults.json (when enabled)
    3. The project's .env file

    Returns:
        tuple: (topic, server) - topic is None when nothing is configured.
    """
    topic = os.environ.get("NTFY_TOPIC")
    server = os.environ.get("NTFY_SERVER", "https://ntfy.sh")

    if not topic:
        # Second choice: the ntfy section of hustle-build-defaults.json.
        ntfy = load_config().get("ntfy", {})
        if ntfy.get("enabled", False):
            topic = ntfy.get("topic")
            server = ntfy.get("server", server)

    if not topic:
        # Last resort: scan the project's .env file line by line.
        env_file = Path(get_project_dir()) / ".env"
        if env_file.exists():
            try:
                for raw_line in env_file.read_text().splitlines():
                    if raw_line.startswith("NTFY_TOPIC="):
                        topic = raw_line.split("=", 1)[1].strip().strip('"\'')
                    elif raw_line.startswith("NTFY_SERVER="):
                        server = raw_line.split("=", 1)[1].strip().strip('"\'')
            except Exception:
                pass  # unreadable .env: keep whatever we already have

    return topic, server
|
|
576
|
+
|
|
577
|
+
|
|
578
|
+
def send_ntfy_notification(title, message, priority="default", tags=None):
    """
    Send a push notification through NTFY.

    Uses urllib from the standard library instead of shelling out to an
    external curl binary, so notifications also work on systems where curl
    is not installed. The NTFY publish protocol is a plain HTTP POST of the
    message body with Title/Priority/Tags headers.

    Args:
        title: Notification title.
        message: Notification body.
        priority: "min", "low", "default", "high", "urgent".
        tags: Optional list of emoji tag names (e.g. ["robot", "warning"]).

    Returns:
        bool: True when the server accepted the notification.
    """
    import urllib.request

    topic, server = get_ntfy_config()
    if not topic:
        return False  # NTFY not configured: silently skip

    headers = {"Title": title, "Priority": priority}
    if tags:
        headers["Tags"] = ",".join(tags)

    try:
        request = urllib.request.Request(
            f"{server}/{topic}",
            data=message.encode("utf-8"),
            headers=headers,
            method="POST",
        )
        # Same 5-second budget the previous curl invocation used.
        with urllib.request.urlopen(request, timeout=5) as response:
            return 200 <= response.status < 300
    except Exception:
        return False  # notifications are best-effort; never crash the hook
|
|
Binary file
|
|
Binary file
|
|
Binary file
|