@hustle-together/api-dev-tools 3.12.3 → 4.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/adr-requests/.gitkeep +10 -0
- package/.claude/agents/adr-researcher.md +109 -0
- package/.claude/agents/visual-analyzer.md +183 -0
- package/.claude/api-dev-state.json +7 -463
- package/.claude/documentation-audit.json +114 -0
- package/.claude/registry.json +289 -0
- package/.claude/settings.json +45 -1
- package/.claude/workflow-logs/None.json +49 -0
- package/.claude/workflow-logs/session-20251230-143727.json +106 -0
- package/.skills/adr-deep-research/SKILL.md +351 -0
- package/.skills/api-create/SKILL.md +116 -17
- package/.skills/api-research/SKILL.md +130 -0
- package/.skills/docs-sync/SKILL.md +260 -0
- package/.skills/docs-update/SKILL.md +205 -0
- package/.skills/hustle-brand/SKILL.md +368 -0
- package/.skills/hustle-build/SKILL.md +786 -0
- package/.skills/hustle-build-review/SKILL.md +518 -0
- package/.skills/parallel-spawn/SKILL.md +212 -0
- package/.skills/ralph-continue/SKILL.md +151 -0
- package/.skills/ralph-loop/SKILL.md +341 -0
- package/.skills/ralph-status/SKILL.md +87 -0
- package/.skills/refactor/SKILL.md +59 -0
- package/.skills/shadcn/SKILL.md +522 -0
- package/.skills/test-all/SKILL.md +210 -0
- package/.skills/test-builds/SKILL.md +208 -0
- package/.skills/test-debug/SKILL.md +212 -0
- package/.skills/test-e2e/SKILL.md +168 -0
- package/.skills/test-review/SKILL.md +707 -0
- package/.skills/test-unit/SKILL.md +143 -0
- package/.skills/test-visual/SKILL.md +301 -0
- package/.skills/token-report/SKILL.md +132 -0
- package/CHANGELOG.md +575 -0
- package/README.md +426 -56
- package/bin/cli.js +1538 -88
- package/commands/hustle-api-create.md +22 -0
- package/commands/hustle-build.md +259 -0
- package/commands/hustle-combine.md +81 -2
- package/commands/hustle-ui-create-page.md +84 -2
- package/commands/hustle-ui-create.md +82 -2
- package/hooks/__pycache__/api-workflow-check.cpython-314.pyc +0 -0
- package/hooks/__pycache__/auto-answer.cpython-314.pyc +0 -0
- package/hooks/__pycache__/cache-research.cpython-314.pyc +0 -0
- package/hooks/__pycache__/check-api-routes.cpython-314.pyc +0 -0
- package/hooks/__pycache__/check-playwright-setup.cpython-314.pyc +0 -0
- package/hooks/__pycache__/check-storybook-setup.cpython-314.pyc +0 -0
- package/hooks/__pycache__/check-update.cpython-314.pyc +0 -0
- package/hooks/__pycache__/completion-promise-detector.cpython-314.pyc +0 -0
- package/hooks/__pycache__/context-capacity-warning.cpython-314.pyc +0 -0
- package/hooks/__pycache__/detect-interruption.cpython-314.pyc +0 -0
- package/hooks/__pycache__/docs-update-check.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-a11y-audit.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-brand-guide.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-component-type-confirm.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-deep-research.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-disambiguation.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-documentation.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-dry-run.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-environment.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-external-research.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-freshness.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-interview.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-page-components.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-page-data-schema.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-questions-sourced.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-refactor.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-research.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-schema-from-interview.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-schema.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-scope.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-tdd-red.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-ui-disambiguation.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-ui-interview.cpython-314.pyc +0 -0
- package/hooks/__pycache__/enforce-verify.cpython-314.pyc +0 -0
- package/hooks/__pycache__/generate-adr-options.cpython-314.pyc +0 -0
- package/hooks/__pycache__/generate-manifest-entry.cpython-314.pyc +0 -0
- package/hooks/__pycache__/hook_utils.cpython-314.pyc +0 -0
- package/hooks/__pycache__/notify-input-needed.cpython-314.pyc +0 -0
- package/hooks/__pycache__/notify-phase-complete.cpython-314.pyc +0 -0
- package/hooks/__pycache__/ntfy-on-question.cpython-314.pyc +0 -0
- package/hooks/__pycache__/orchestrator-completion.cpython-314.pyc +0 -0
- package/hooks/__pycache__/orchestrator-handoff.cpython-314.pyc +0 -0
- package/hooks/__pycache__/orchestrator-session-startup.cpython-314.pyc +0 -0
- package/hooks/__pycache__/parallel-orchestrator.cpython-314.pyc +0 -0
- package/hooks/__pycache__/periodic-reground.cpython-314.pyc +0 -0
- package/hooks/__pycache__/project-document-prompt.cpython-314.pyc +0 -0
- package/hooks/__pycache__/remote-question-proxy.cpython-314.pyc +0 -0
- package/hooks/__pycache__/remote-question-server.cpython-314.pyc +0 -0
- package/hooks/__pycache__/run-code-review.cpython-314.pyc +0 -0
- package/hooks/__pycache__/run-visual-qa.cpython-314.pyc +0 -0
- package/hooks/__pycache__/session-logger.cpython-314.pyc +0 -0
- package/hooks/__pycache__/session-startup.cpython-314.pyc +0 -0
- package/hooks/__pycache__/track-scope-coverage.cpython-314.pyc +0 -0
- package/hooks/__pycache__/track-token-usage.cpython-314.pyc +0 -0
- package/hooks/__pycache__/track-tool-use.cpython-314.pyc +0 -0
- package/hooks/__pycache__/update-adr-decision.cpython-314.pyc +0 -0
- package/hooks/__pycache__/update-api-showcase.cpython-314.pyc +0 -0
- package/hooks/__pycache__/update-registry.cpython-314.pyc +0 -0
- package/hooks/__pycache__/update-ui-showcase.cpython-314.pyc +0 -0
- package/hooks/__pycache__/verify-after-green.cpython-314.pyc +0 -0
- package/hooks/__pycache__/verify-implementation.cpython-314.pyc +0 -0
- package/hooks/api-workflow-check.py +34 -0
- package/hooks/auto-answer.py +305 -0
- package/hooks/check-update.py +132 -0
- package/hooks/completion-promise-detector.py +293 -0
- package/hooks/context-capacity-warning.py +171 -0
- package/hooks/docs-update-check.py +120 -0
- package/hooks/enforce-dry-run.py +134 -0
- package/hooks/enforce-external-research.py +25 -0
- package/hooks/enforce-interview.py +20 -0
- package/hooks/generate-adr-options.py +282 -0
- package/hooks/hook_utils.py +609 -0
- package/hooks/lib/__pycache__/__init__.cpython-314.pyc +0 -0
- package/hooks/lib/__pycache__/greptile.cpython-314.pyc +0 -0
- package/hooks/lib/__pycache__/ntfy.cpython-314.pyc +0 -0
- package/hooks/ntfy-on-question.py +240 -0
- package/hooks/orchestrator-completion.py +313 -0
- package/hooks/orchestrator-handoff.py +267 -0
- package/hooks/orchestrator-session-startup.py +146 -0
- package/hooks/parallel-orchestrator.py +451 -0
- package/hooks/periodic-reground.py +270 -67
- package/hooks/project-document-prompt.py +302 -0
- package/hooks/remote-question-proxy.py +284 -0
- package/hooks/remote-question-server.py +1224 -0
- package/hooks/run-code-review.py +176 -29
- package/hooks/run-visual-qa.py +338 -0
- package/hooks/session-logger.py +27 -1
- package/hooks/session-startup.py +113 -0
- package/hooks/update-adr-decision.py +236 -0
- package/hooks/update-api-showcase.py +13 -1
- package/hooks/update-testing-checklist.py +195 -0
- package/hooks/update-ui-showcase.py +13 -1
- package/package.json +7 -3
- package/scripts/extract-schema-docs.cjs +322 -0
- package/templates/.skills/hustle-interview/SKILL.md +174 -0
- package/templates/CLAUDE-SECTION.md +89 -64
- package/templates/adr-viewer/_components/ADRViewer.tsx +326 -0
- package/templates/api-dev-state.json +33 -1
- package/templates/api-showcase/_components/APIModal.tsx +100 -8
- package/templates/api-showcase/_components/APIShowcase.tsx +36 -4
- package/templates/api-showcase/_components/APITester.tsx +367 -58
- package/templates/brand-page/page.tsx +645 -0
- package/templates/component/Component.visual.spec.ts +30 -24
- package/templates/docs/page.tsx +230 -0
- package/templates/eslint-plugin-zod-schema/index.js +446 -0
- package/templates/eslint-plugin-zod-schema/package.json +26 -0
- package/templates/github-workflows/security.yml +274 -0
- package/templates/hustle-build-defaults.json +136 -0
- package/templates/hustle-dev-dashboard/page.tsx +365 -0
- package/templates/page/page.e2e.test.ts +30 -26
- package/templates/performance-budgets.json +63 -5
- package/templates/playwright-report/page.tsx +258 -0
- package/templates/registry.json +279 -3
- package/templates/review-dashboard/page.tsx +510 -0
- package/templates/settings.json +155 -7
- package/templates/test-results/page.tsx +237 -0
- package/templates/typedoc.json +19 -0
- package/templates/ui-showcase/_components/UIShowcase.tsx +48 -1
- package/templates/ui-showcase/_components/VisualTestingDashboard.tsx +579 -0
- package/templates/ui-showcase/page.tsx +1 -1
|
@@ -0,0 +1,451 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Parallel Orchestrator Hook
|
|
4
|
+
|
|
5
|
+
Coordinates parallel agent execution across git worktrees for the --parallel flag.
|
|
6
|
+
This enables multiple independent workflows to run simultaneously.
|
|
7
|
+
|
|
8
|
+
Hook Type: SessionStart (when --parallel detected)
|
|
9
|
+
UserPromptSubmit (for /parallel-spawn command)
|
|
10
|
+
|
|
11
|
+
Features:
|
|
12
|
+
- Creates git worktrees for isolated parallel execution
|
|
13
|
+
- Injects shared interview decisions into each worktree
|
|
14
|
+
- Tracks agent status and merges results
|
|
15
|
+
- Cleans up worktrees after completion
|
|
16
|
+
|
|
17
|
+
v4.5.0: Initial implementation
|
|
18
|
+
|
|
19
|
+
References:
|
|
20
|
+
- docs/AUTONOMOUS_LOOPS.md - Parallel execution section
|
|
21
|
+
"""
|
|
22
|
+
|
|
23
|
+
import json
|
|
24
|
+
import os
|
|
25
|
+
import re
|
|
26
|
+
import subprocess
|
|
27
|
+
import sys
|
|
28
|
+
from datetime import datetime
|
|
29
|
+
from pathlib import Path
|
|
30
|
+
|
|
31
|
+
# Import shared utilities
|
|
32
|
+
try:
|
|
33
|
+
from hook_utils import (
|
|
34
|
+
log_workflow_event,
|
|
35
|
+
load_state,
|
|
36
|
+
save_state,
|
|
37
|
+
get_project_dir
|
|
38
|
+
)
|
|
39
|
+
UTILS_AVAILABLE = True
|
|
40
|
+
except ImportError:
|
|
41
|
+
UTILS_AVAILABLE = False
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def get_current_branch():
    """Return the name of the currently checked-out git branch.

    Falls back to "main" whenever the branch cannot be determined: not a
    git repository, detached-HEAD errors reported by git, or the git
    binary missing from PATH entirely.

    Returns:
        str: The branch name, or "main" on any failure.
    """
    try:
        result = subprocess.run(
            ["git", "rev-parse", "--abbrev-ref", "HEAD"],
            capture_output=True,
            text=True,
            check=True
        )
        return result.stdout.strip()
    except (subprocess.CalledProcessError, OSError):
        # OSError covers FileNotFoundError when git is not installed;
        # the original only handled a non-zero git exit status.
        return "main"
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def setup_parallel_execution(workflows, shared_decisions=None):
    """
    Set up git worktrees for parallel agent execution.

    Each workflow gets its own worktree (a sibling directory of the project)
    on a fresh branch named "parallel-<name>-<index>". When shared interview
    decisions are provided, they are written into each worktree's
    .claude/api-dev-state.json so spawned agents do not re-interview.

    Args:
        workflows: List of workflow configs, each with:
            - name: Workflow identifier
            - type: "api", "component", "page"
            - config: Additional workflow config
        shared_decisions: Dict of interview decisions to share across all
            workflows (may be None/empty, in which case no state is seeded).

    Returns:
        list: Worktree info dicts; status is "pending" on success or
        "error" (with an "error" message) when worktree creation failed.
    """
    project_dir = get_project_dir()
    # Worktrees are created next to the project, not inside it, so they do
    # not pollute the main working tree. Hoisted out of the loop.
    parent_dir = Path(project_dir).parent
    worktrees = []

    for i, workflow in enumerate(workflows):
        worktree_name = f"parallel-{workflow.get('name', 'workflow')}-{i}"
        worktree_path = str(parent_dir / worktree_name)

        try:
            # Create the worktree together with a new branch of the same name.
            subprocess.run(
                ["git", "worktree", "add", worktree_path, "-b", worktree_name],
                cwd=project_dir,
                capture_output=True,
                check=True
            )

            # Per-agent state lives under .claude inside the worktree.
            claude_dir = Path(worktree_path) / ".claude"
            claude_dir.mkdir(parents=True, exist_ok=True)

            # Seed the worktree state with the shared interview decisions so
            # the agent in this worktree skips the interview phase.
            if shared_decisions:
                worktree_state = {
                    "workflow_id": f"parallel-{worktree_name}",
                    "workflow": workflow.get("type", "api-create"),
                    "active_endpoint": workflow.get("name"),
                    "shared_decisions": shared_decisions,
                    "parallel_execution": True,
                    "parent_worktree": project_dir
                }
                state_file = claude_dir / "api-dev-state.json"
                state_file.write_text(json.dumps(worktree_state, indent=2))

            worktrees.append({
                "name": worktree_name,
                "path": worktree_path,
                "workflow": workflow,
                "status": "pending",
                "created_at": datetime.now().isoformat()
            })

        except subprocess.CalledProcessError as e:
            # Surface git's stderr (str(e) only says "exit status N") so the
            # failure is actionable, then continue with the other worktrees.
            detail = e.stderr.decode(errors="replace").strip() if e.stderr else str(e)
            worktrees.append({
                "name": worktree_name,
                "path": worktree_path,
                "workflow": workflow,
                "status": "error",
                "error": detail
            })

    return worktrees
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def generate_spawn_instructions(worktrees, shared_decisions=None):
    """
    Build Task-tool invocation configs for spawning parallel agents.

    One instruction is produced per successfully created worktree; worktrees
    whose setup failed (status == "error") are skipped. The AI can pass the
    returned configs to multiple Task tools in a single message.

    Args:
        worktrees: Worktree info dicts from setup_parallel_execution.
        shared_decisions: Interview decisions shared by every workflow.

    Returns:
        list: Task tool invocation configs (subagent_type, prompt,
              description, run_in_background).
    """
    # The decisions block is identical for every prompt — render it once.
    decisions_text = (
        json.dumps(shared_decisions, indent=2) if shared_decisions else "None"
    )
    instructions = []

    for entry in worktrees:
        if entry.get("status") == "error":
            # This worktree was never created; nothing to spawn for it.
            continue

        wf = entry.get("workflow", {})
        wf_type = wf.get("type", "api-create")
        wf_name = wf.get("name", "unknown")

        prompt = f"""Execute workflow in parallel worktree: {entry['path']}

## Workflow Configuration
- Type: {wf_type}
- Name: {wf_name}
- Worktree: {entry['name']}

## Shared Decisions (DO NOT re-ask these questions)
{decisions_text}

## Instructions

1. Change to the worktree directory:
   cd {entry['path']}

2. Run the appropriate workflow:
   - For API: Execute /api-create {wf_name} (use shared decisions)
   - For Component: Execute /ui-create-component {wf_name}
   - For Page: Execute /ui-create-page {wf_name}

3. When complete, signal with:
   <promise>DONE</promise>

4. Important:
   - Use the shared decisions - do NOT re-interview
   - Stay within the worktree directory
   - Report any errors clearly
"""

        instructions.append({
            "subagent_type": "general-purpose",
            "prompt": prompt,
            "description": f"Parallel: {wf_name}",
            "run_in_background": True
        })

    return instructions
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def merge_parallel_results(worktrees):
    """
    Merge completed worktree branches back into the current branch.

    Only worktrees with status "complete" are processed. For each one the
    branch is merged with --no-ff (explicit merge commit), the worktree
    directory is removed, and the now-merged branch is deleted best-effort
    so branches do not accumulate across runs.

    Args:
        worktrees: List of worktree info dicts with status updates.

    Returns:
        dict: {"merged": [names], "failed": [{"name", "error"}],
               "cleaned": [names]}
    """
    project_dir = get_project_dir()
    results = {
        "merged": [],
        "failed": [],
        "cleaned": []
    }

    for wt in worktrees:
        if wt.get("status") != "complete":
            continue

        try:
            # Merge the worktree branch with an explicit merge commit.
            subprocess.run(
                ["git", "merge", wt["name"], "--no-ff", "-m", f"Merge parallel workflow: {wt['name']}"],
                cwd=project_dir,
                capture_output=True,
                check=True
            )
            results["merged"].append(wt["name"])

            # Remove the worktree directory.
            subprocess.run(
                ["git", "worktree", "remove", wt["path"]],
                cwd=project_dir,
                capture_output=True,
                check=True
            )
            # Best-effort branch cleanup: the branch is fully merged at this
            # point, so -d is safe; check=False because a failure here should
            # not mark the workflow as failed.
            subprocess.run(
                ["git", "branch", "-d", wt["name"]],
                cwd=project_dir,
                capture_output=True,
                check=False
            )
            results["cleaned"].append(wt["name"])

        except subprocess.CalledProcessError as e:
            # Include git's stderr (e.g. merge-conflict details) rather than
            # just the exit status, so the caller can act on the failure.
            detail = e.stderr.decode(errors="replace").strip() if e.stderr else str(e)
            results["failed"].append({
                "name": wt["name"],
                "error": detail
            })

    return results
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
def check_parallel_status():
    """
    Report the current parallel-execution status from persisted state.

    Returns:
        dict: {"active": False, "message": ...} when nothing is running;
        otherwise a summary with worktree count, per-status tallies, the
        raw worktree entries, shared decisions, and merge status.
        {"error": ...} when hook_utils could not be imported.
    """
    if not UTILS_AVAILABLE:
        return {"error": "hook_utils not available"}

    parallel = load_state().get("parallel_execution", {})

    if not parallel.get("enabled"):
        return {"active": False, "message": "No parallel execution in progress"}

    worktrees = parallel.get("worktrees", [])

    # Tally worktrees by status; statuses outside the known set still get
    # counted thanks to the .get() default.
    counts = {"pending": 0, "in_progress": 0, "complete": 0, "error": 0}
    for entry in worktrees:
        key = entry.get("status", "pending")
        counts[key] = counts.get(key, 0) + 1

    return {
        "active": True,
        "worktree_count": len(worktrees),
        "status_counts": counts,
        "worktrees": worktrees,
        "shared_decisions": parallel.get("shared_decisions", {}),
        "merge_status": parallel.get("merge_status", "pending")
    }
|
|
270
|
+
|
|
271
|
+
|
|
272
|
+
def handle_session_start():
    """Handle SessionStart: ensure the parallel_execution state block exists."""
    try:
        # Consume the hook payload from stdin; its content is not used here.
        json.loads(sys.stdin.read())
    except json.JSONDecodeError:
        print(json.dumps({"result": "continue"}))
        return

    # The --parallel flag itself is detected from the user's initial message;
    # this handler only initializes the parallel state structure if missing.
    if UTILS_AVAILABLE:
        state = load_state()
        if "parallel_execution" not in state:
            state["parallel_execution"] = {
                "enabled": False,
                "worktrees": [],
                "shared_decisions": {},
                "merge_status": "pending"
            }
            save_state(state)

    print(json.dumps({"result": "continue"}))
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
def handle_user_prompt():
    """Handle UserPromptSubmit - detect parallel commands.

    Recognizes three slash commands anywhere in the (lowercased) prompt:
    /parallel-spawn, /parallel-status, /parallel-merge. Each path prints a
    single JSON hook response with result "continue" and an optional
    informational message; the hook never blocks the prompt.
    """
    try:
        hook_input = json.loads(sys.stdin.read())
    except json.JSONDecodeError:
        # Malformed hook payload: never block the session.
        print(json.dumps({"result": "continue"}))
        return

    prompt = hook_input.get("prompt", "").lower()

    # Check for parallel spawn command
    if "/parallel-spawn" in prompt:
        # Extract workflows from the prompt
        # Format: /parallel-spawn api:users api:products component:chart
        workflows = parse_workflow_list(prompt)

        if workflows:
            status = check_parallel_status()

            if status.get("active"):
                # Refuse to stack a second parallel run on top of one that
                # is still in flight; point the user at status/merge.
                print(json.dumps({
                    "result": "continue",
                    "message": f"""
⚠️ Parallel execution already in progress!

Current status:
- Worktrees: {status.get('worktree_count', 0)}
- Pending: {status.get('status_counts', {}).get('pending', 0)}
- In Progress: {status.get('status_counts', {}).get('in_progress', 0)}
- Complete: {status.get('status_counts', {}).get('complete', 0)}

Use /parallel-status to see details.
Use /parallel-merge when all are complete.
"""
                }))
                return

            # Ready to start parallel execution
            print(json.dumps({
                "result": "continue",
                "message": f"""
Ready to spawn {len(workflows)} parallel workflows:

{chr(10).join(f"  - {w['type']}: {w['name']}" for w in workflows)}

Next steps:
1. The AI will create git worktrees for each workflow
2. Spawn background Task agents for each
3. Monitor progress with /parallel-status
4. Merge results with /parallel-merge when complete

Proceeding with parallel setup...
"""
            }))
            return

    # Check for parallel status command
    if "/parallel-status" in prompt:
        status = check_parallel_status()

        if not status.get("active"):
            print(json.dumps({
                "result": "continue",
                "message": "No parallel execution in progress.\nStart with: /parallel-spawn api:name1 api:name2 ..."
            }))
            return

        # One line per worktree, rendered with a status glyph.
        worktree_lines = []
        for wt in status.get("worktrees", []):
            status_icon = {"pending": "⏳", "in_progress": "🔄", "complete": "✓", "error": "❌"}.get(wt.get("status"), "?")
            worktree_lines.append(f"  {status_icon} {wt.get('name')}: {wt.get('status')}")

        # NOTE(review): the :<50 / :<51 / :<47 pad widths assume single-digit
        # counts to keep the box edges aligned — verify with larger counts.
        print(json.dumps({
            "result": "continue",
            "message": f"""
┌─────────────────────────────────────────────────────────────────┐
│ PARALLEL EXECUTION STATUS │
├─────────────────────────────────────────────────────────────────┤
│ │
│ Worktrees: {status.get('worktree_count', 0):<50} │
│ Pending: {status.get('status_counts', {}).get('pending', 0):<51} │
│ In Progress: {status.get('status_counts', {}).get('in_progress', 0):<47} │
│ Complete: {status.get('status_counts', {}).get('complete', 0):<50} │
│ │
│ Worktree Details: │
{chr(10).join(f"│{line:<64}│" for line in worktree_lines) if worktree_lines else "│ (none) │"}
│ │
└─────────────────────────────────────────────────────────────────┘
"""
        }))
        return

    # Check for parallel merge command
    if "/parallel-merge" in prompt:
        status = check_parallel_status()

        if not status.get("active"):
            print(json.dumps({
                "result": "continue",
                "message": "No parallel execution to merge."
            }))
            return

        # Block the merge while any workflow is still pending or running.
        incomplete = status.get("status_counts", {}).get("pending", 0) + status.get("status_counts", {}).get("in_progress", 0)
        if incomplete > 0:
            print(json.dumps({
                "result": "continue",
                "message": f"⚠️ Cannot merge: {incomplete} workflows still in progress.\nWait for all to complete or use /parallel-abort."
            }))
            return

        print(json.dumps({
            "result": "continue",
            "message": "Ready to merge parallel results. Proceeding with merge..."
        }))
        return

    # No parallel command present: pass the prompt through untouched.
    print(json.dumps({"result": "continue"}))
|
|
414
|
+
|
|
415
|
+
|
|
416
|
+
def parse_workflow_list(prompt):
    """
    Parse a workflow list out of a /parallel-spawn prompt.

    Format: /parallel-spawn api:users api:products component:chart

    Names may contain word characters and hyphens (e.g. "user-profile");
    previously hyphenated names were silently truncated at the first
    hyphen. Type and name are both normalized to lowercase.

    Args:
        prompt: Raw user prompt text.

    Returns:
        list: Dicts with "type" ("<kind>-create") and "name"; empty when
        no type:name tokens are present.
    """
    workflows = []

    # Pattern: type:name — [\w-] so multi-word hyphenated names survive.
    pattern = r'(api|component|page):([\w-]+)'
    matches = re.findall(pattern, prompt, re.IGNORECASE)

    for workflow_type, name in matches:
        workflows.append({
            "type": f"{workflow_type.lower()}-create",
            "name": name.lower()
        })

    return workflows
|
|
436
|
+
|
|
437
|
+
|
|
438
|
+
def main():
    """Dispatch to the correct handler based on the CLAUDE_HOOK_TYPE env var.

    Defaults to SessionStart when the variable is unset; any unrecognized
    hook type simply continues without side effects.
    """
    hook_type = os.environ.get("CLAUDE_HOOK_TYPE", "SessionStart")

    dispatch = {
        "SessionStart": handle_session_start,
        "UserPromptSubmit": handle_user_prompt,
    }
    handler = dispatch.get(hook_type)
    if handler is not None:
        handler()
    else:
        # Unknown hook type: never block the session.
        print(json.dumps({"result": "continue"}))
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
# Script entry point: this file is executed directly by the hook runner.
if __name__ == "__main__":
    main()
|