prizmkit 1.1.0 → 1.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bundled/VERSION.json +3 -3
- package/bundled/adapters/claude/agent-adapter.js +18 -0
- package/bundled/adapters/claude/command-adapter.js +1 -27
- package/bundled/agents/prizm-dev-team-critic.md +2 -0
- package/bundled/agents/prizm-dev-team-dev.md +2 -0
- package/bundled/agents/prizm-dev-team-reviewer.md +2 -0
- package/bundled/dev-pipeline/README.md +63 -63
- package/bundled/dev-pipeline/assets/feature-list-example.json +1 -1
- package/bundled/dev-pipeline/assets/prizm-dev-team-integration.md +1 -1
- package/bundled/dev-pipeline/{launch-daemon.sh → launch-feature-daemon.sh} +33 -33
- package/bundled/dev-pipeline/launch-refactor-daemon.sh +454 -0
- package/bundled/dev-pipeline/lib/branch.sh +1 -1
- package/bundled/dev-pipeline/reset-feature.sh +3 -3
- package/bundled/dev-pipeline/reset-refactor.sh +312 -0
- package/bundled/dev-pipeline/{retry-bug.sh → retry-bugfix.sh} +47 -59
- package/bundled/dev-pipeline/retry-feature.sh +41 -54
- package/bundled/dev-pipeline/retry-refactor.sh +358 -0
- package/bundled/dev-pipeline/run-bugfix.sh +6 -0
- package/bundled/dev-pipeline/{run.sh → run-feature.sh} +31 -31
- package/bundled/dev-pipeline/run-refactor.sh +787 -0
- package/bundled/dev-pipeline/scripts/generate-bootstrap-prompt.py +177 -10
- package/bundled/dev-pipeline/scripts/generate-refactor-prompt.py +419 -0
- package/bundled/dev-pipeline/scripts/init-refactor-pipeline.py +393 -0
- package/bundled/dev-pipeline/scripts/update-refactor-status.py +726 -0
- package/bundled/dev-pipeline/templates/agent-prompts/critic-code-challenge.md +13 -0
- package/bundled/dev-pipeline/templates/agent-prompts/critic-plan-challenge.md +7 -0
- package/bundled/dev-pipeline/templates/agent-prompts/dev-fix.md +7 -0
- package/bundled/dev-pipeline/templates/agent-prompts/dev-implement.md +26 -0
- package/bundled/dev-pipeline/templates/agent-prompts/dev-resume.md +5 -0
- package/bundled/dev-pipeline/templates/agent-prompts/reviewer-analyze.md +5 -0
- package/bundled/dev-pipeline/templates/agent-prompts/reviewer-review.md +12 -0
- package/bundled/dev-pipeline/templates/bootstrap-tier1.md +29 -2
- package/bundled/dev-pipeline/templates/bootstrap-tier2.md +8 -7
- package/bundled/dev-pipeline/templates/bootstrap-tier3.md +11 -10
- package/bundled/dev-pipeline/templates/bugfix-bootstrap-prompt.md +2 -3
- package/bundled/dev-pipeline/templates/feature-list-schema.json +1 -1
- package/bundled/dev-pipeline/templates/refactor-list-schema.json +159 -0
- package/bundled/dev-pipeline/templates/sections/ac-verification-checklist.md +13 -0
- package/bundled/dev-pipeline/templates/sections/feature-context.md +1 -1
- package/bundled/dev-pipeline/templates/sections/phase-analyze-agent.md +9 -8
- package/bundled/dev-pipeline/templates/sections/phase-analyze-full.md +9 -8
- package/bundled/dev-pipeline/templates/sections/phase-browser-verification.md +2 -1
- package/bundled/dev-pipeline/templates/sections/phase-critic-code.md +8 -10
- package/bundled/dev-pipeline/templates/sections/phase-critic-plan-full.md +9 -10
- package/bundled/dev-pipeline/templates/sections/phase-critic-plan.md +8 -9
- package/bundled/dev-pipeline/templates/sections/phase-implement-agent.md +7 -10
- package/bundled/dev-pipeline/templates/sections/phase-implement-full.md +8 -15
- package/bundled/dev-pipeline/templates/sections/phase-review-agent.md +7 -12
- package/bundled/dev-pipeline/templates/sections/phase-review-full.md +8 -19
- package/bundled/dev-pipeline/templates/sections/test-failure-recovery.md +75 -0
- package/bundled/skills/_metadata.json +33 -6
- package/bundled/skills/app-planner/SKILL.md +105 -320
- package/bundled/skills/app-planner/assets/app-design-guide.md +101 -0
- package/bundled/skills/app-planner/references/frontend-design-guide.md +1 -1
- package/bundled/skills/app-planner/references/project-brief-guide.md +49 -80
- package/bundled/skills/bug-fix-workflow/SKILL.md +2 -2
- package/bundled/skills/bug-planner/SKILL.md +68 -5
- package/bundled/skills/bug-planner/scripts/validate-bug-list.py +3 -2
- package/bundled/skills/bugfix-pipeline-launcher/SKILL.md +19 -5
- package/bundled/skills/{dev-pipeline-launcher → feature-pipeline-launcher}/SKILL.md +32 -32
- package/bundled/skills/feature-planner/SKILL.md +337 -0
- package/bundled/skills/{app-planner → feature-planner}/assets/evaluation-guide.md +4 -4
- package/bundled/skills/{app-planner → feature-planner}/assets/planning-guide.md +3 -171
- package/bundled/skills/{app-planner → feature-planner}/references/browser-interaction.md +6 -5
- package/bundled/skills/feature-planner/references/decomposition-patterns.md +75 -0
- package/bundled/skills/{app-planner → feature-planner}/references/error-recovery.md +8 -8
- package/bundled/skills/{app-planner → feature-planner}/references/incremental-feature-planning.md +1 -1
- package/bundled/skills/{app-planner/references/new-app-planning.md → feature-planner/references/new-project-planning.md} +1 -1
- package/bundled/skills/{app-planner → feature-planner}/scripts/validate-and-generate.py +4 -4
- package/bundled/skills/feature-workflow/SKILL.md +23 -23
- package/bundled/skills/prizm-kit/SKILL.md +1 -3
- package/bundled/skills/prizmkit-analyze/SKILL.md +2 -5
- package/bundled/skills/prizmkit-code-review/SKILL.md +2 -2
- package/bundled/skills/prizmkit-committer/SKILL.md +4 -8
- package/bundled/skills/prizmkit-deploy/SKILL.md +1 -5
- package/bundled/skills/prizmkit-implement/SKILL.md +3 -50
- package/bundled/skills/prizmkit-init/SKILL.md +5 -77
- package/bundled/skills/prizmkit-plan/SKILL.md +1 -12
- package/bundled/skills/prizmkit-prizm-docs/SKILL.md +6 -24
- package/bundled/skills/prizmkit-prizm-docs/assets/PRIZM-SPEC.md +21 -0
- package/bundled/skills/prizmkit-retrospective/SKILL.md +12 -117
- package/bundled/skills/recovery-workflow/SKILL.md +166 -316
- package/bundled/skills/recovery-workflow/evals/evals.json +29 -13
- package/bundled/skills/recovery-workflow/scripts/detect-recovery-state.py +232 -274
- package/bundled/skills/refactor-pipeline-launcher/SKILL.md +352 -0
- package/bundled/skills/refactor-planner/SKILL.md +436 -0
- package/bundled/skills/refactor-planner/assets/planning-guide.md +292 -0
- package/bundled/skills/refactor-planner/references/behavior-preservation.md +301 -0
- package/bundled/skills/refactor-planner/references/refactor-scoping-guide.md +221 -0
- package/bundled/skills/refactor-planner/scripts/validate-and-generate-refactor.py +786 -0
- package/bundled/skills/refactor-workflow/SKILL.md +299 -319
- package/package.json +1 -1
- package/src/clean.js +3 -3
- package/src/scaffold.js +6 -6
- /package/bundled/skills/{dev-pipeline-launcher → feature-pipeline-launcher}/scripts/preflight-check.py +0 -0
|
@@ -0,0 +1,726 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""Core state machine for updating refactor status in the refactor pipeline.
|
|
3
|
+
|
|
4
|
+
Handles six actions:
|
|
5
|
+
- get_next: Find the next refactor to process based on dependency order, priority, complexity
|
|
6
|
+
- update: Update a refactor's status based on session outcome
|
|
7
|
+
- status: Print a formatted overview of all refactors
|
|
8
|
+
- pause: Save pipeline state for graceful shutdown
|
|
9
|
+
- reset: Reset a refactor to pending (status + retry count)
|
|
10
|
+
- clean: Reset + delete session history + delete refactor artifacts
|
|
11
|
+
|
|
12
|
+
Usage:
|
|
13
|
+
python3 update-refactor-status.py \
|
|
14
|
+
--refactor-list <path> --state-dir <path> \
|
|
15
|
+
--action <get_next|update|status|pause|reset|clean> \
|
|
16
|
+
[--refactor-id <id>] [--session-status <status>] \
|
|
17
|
+
[--session-id <id>] [--max-retries <n>]
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
import argparse
|
|
21
|
+
import json
|
|
22
|
+
import os
|
|
23
|
+
import shutil
|
|
24
|
+
from datetime import datetime, timezone
|
|
25
|
+
|
|
26
|
+
from utils import (
|
|
27
|
+
load_json_file,
|
|
28
|
+
write_json_file,
|
|
29
|
+
error_out,
|
|
30
|
+
pad_right,
|
|
31
|
+
_build_progress_bar,
|
|
32
|
+
)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
# Every session outcome the runner may report via --session-status on the
# 'update' action. Anything other than "success" or "merge_conflict" is
# treated as a full-restart failure by action_update.
SESSION_STATUS_VALUES = [
    "success",
    "partial_resumable",
    "partial_not_resumable",
    "failed",
    "crashed",
    "timed_out",
    "merge_conflict",
]

# Refactor statuses that end processing. action_get_next skips these and
# also counts them as satisfied dependencies (see _dependencies_met).
TERMINAL_STATUSES = {"completed", "failed", "skipped"}

# Artifact directory names (relative to project root)
REFACTOR_ARTIFACTS_REL = os.path.join(".prizmkit", "refactor")
DEV_TEAM_DIR_NAME = ".dev-team"

# Priority ordering (lower number = higher priority).
# Used as a tie-breaker sort key in action_get_next.
PRIORITY_ORDER = {
    "critical": 0,
    "high": 1,
    "medium": 2,
    "low": 3,
}

# Complexity ordering (lower number = simpler, processed first).
# Used as the final tie-breaker sort key in action_get_next.
COMPLEXITY_ORDER = {
    "low": 0,
    "medium": 1,
    "high": 2,
}
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def parse_args():
    """Parse command-line arguments for the refactor status state machine.

    Argument registration order is preserved deliberately: argparse renders
    --help in registration order, so reordering would change user-visible
    output.

    Returns:
        argparse.Namespace with: refactor_list, state_dir, action,
        refactor_id, session_status, session_id, max_retries, project_root.
    """
    parser = argparse.ArgumentParser(
        description="Core state machine for refactor pipeline status management."
    )
    parser.add_argument("--refactor-list", required=True, help="Path to the refactor-list.json file")
    parser.add_argument("--state-dir", required=True, help="Path to the refactor-state/ directory")
    # NOTE(review): 'pause' is accepted here but its handler is not visible in
    # this portion of the file — confirm it is dispatched downstream.
    parser.add_argument(
        "--action", required=True,
        choices=["get_next", "update", "status", "pause", "reset", "clean"],
        help="Action to perform",
    )
    parser.add_argument("--refactor-id", default=None, help="Refactor ID (required for 'update'/'reset'/'clean' actions)")
    parser.add_argument(
        "--session-status", default=None, choices=SESSION_STATUS_VALUES,
        help="Session outcome status (required for 'update' action)",
    )
    parser.add_argument("--session-id", default=None, help="Session ID (optional, for 'update' action)")
    parser.add_argument("--max-retries", type=int, default=3, help="Maximum retry count (default: 3)")
    parser.add_argument("--project-root", default=None, help="Project root directory. Required for 'clean' action.")
    return parser.parse_args()
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def now_iso():
    """Return the current UTC time as an ISO-8601 timestamp, e.g. '2024-01-01T12:00:00Z'."""
    return "{:%Y-%m-%dT%H:%M:%SZ}".format(datetime.now(timezone.utc))
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
def _default_status(refactor_id):
    """Build the initial status record for a refactor with no state on disk.

    Both timestamps share a single now_iso() reading so created_at and
    updated_at start out identical.
    """
    timestamp = now_iso()
    record = {
        "refactor_id": refactor_id,
        "status": "pending",
        "retry_count": 0,
        "max_retries": 3,
        "sessions": [],
        "last_session_id": None,
        "resume_from_phase": None,
        "created_at": timestamp,
        "updated_at": timestamp,
    }
    return record
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def load_refactor_status(state_dir, refactor_id):
    """Read refactors/<id>/status.json from the state directory.

    A missing or unreadable file is treated as "never started": a fresh
    default status object is returned instead of raising.
    """
    path = os.path.join(state_dir, "refactors", refactor_id, "status.json")
    if os.path.isfile(path):
        data, err = load_json_file(path)
        if not err:
            return data
    # Missing or corrupt file: fall back to a pristine pending record.
    return _default_status(refactor_id)
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
def save_refactor_status(state_dir, refactor_id, status_data):
    """Persist a refactor's status object to refactors/<id>/status.json.

    Returns whatever write_json_file returns (callers treat a truthy value
    as an error message).
    """
    target = os.path.join(state_dir, "refactors", refactor_id, "status.json")
    return write_json_file(target, status_data)
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def update_refactor_in_list(refactor_list_path, refactor_id, new_status):
    """Set the top-level "status" field of one refactor in refactor-list.json.

    Returns an error string when the file cannot be read or the id is not
    present; otherwise returns the result of write_json_file.
    """
    data, err = load_json_file(refactor_list_path)
    if err:
        return err
    entries = data.get("refactors", [])
    match = next(
        (entry for entry in entries
         if isinstance(entry, dict) and entry.get("id") == refactor_id),
        None,
    )
    if match is None:
        return "Refactor '{}' not found in refactor-list.json".format(refactor_id)
    match["status"] = new_status
    return write_json_file(refactor_list_path, data)
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
# ---------------------------------------------------------------------------
|
|
141
|
+
# Action: get_next
|
|
142
|
+
# ---------------------------------------------------------------------------
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
def _dependencies_met(refactor, completed_set):
|
|
146
|
+
"""Check if all dependencies for a refactor are in terminal (completed) status."""
|
|
147
|
+
deps = refactor.get("dependencies", [])
|
|
148
|
+
if not deps or not isinstance(deps, list):
|
|
149
|
+
return True
|
|
150
|
+
return all(dep in completed_set for dep in deps)
|
|
151
|
+
|
|
152
|
+
|
|
153
|
+
def _count_unmet_deps(refactor, completed_set):
|
|
154
|
+
"""Count how many dependencies are not yet completed."""
|
|
155
|
+
deps = refactor.get("dependencies", [])
|
|
156
|
+
if not deps or not isinstance(deps, list):
|
|
157
|
+
return 0
|
|
158
|
+
return sum(1 for dep in deps if dep not in completed_set)
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
def action_get_next(refactor_list_data, state_dir):
    """Find the next refactor to process.

    Priority logic:
    1. Skip terminal statuses (completed, failed, skipped)
    2. Only consider refactors whose dependencies are all completed
    3. Prefer in_progress refactors (interrupted session resume) over pending
    4. Sort by: dependency order (no-dependency items first),
       then priority (critical > high > medium > low),
       then complexity (low first)

    Prints one of: "PIPELINE_COMPLETE" (nothing left to do),
    "PIPELINE_BLOCKED" (work remains but no candidate is eligible), or a
    JSON object describing the chosen refactor.
    """
    refactors = refactor_list_data.get("refactors", [])
    if not refactors:
        print("PIPELINE_COMPLETE")
        return

    # Build status map and completed set
    # status_map:      id -> status string; status_data_map: id -> full record
    status_map = {}
    status_data_map = {}
    for r in refactors:
        if not isinstance(r, dict):
            continue
        rid = r.get("id")
        if not rid:
            continue
        rs = load_refactor_status(state_dir, rid)
        status_map[rid] = rs.get("status", "pending")
        status_data_map[rid] = rs

    # NOTE(review): "completed" here means any TERMINAL status, so a refactor
    # whose dependency FAILED or was SKIPPED is still considered unblocked —
    # confirm this is intended.
    completed_set = {rid for rid, st in status_map.items() if st in TERMINAL_STATUSES}

    # Check if all refactors are terminal
    non_terminal = [
        r for r in refactors
        if isinstance(r, dict) and r.get("id")
        and status_map.get(r["id"], "pending") not in TERMINAL_STATUSES
    ]
    if not non_terminal:
        print("PIPELINE_COMPLETE")
        return

    # Filter to only those with met dependencies
    eligible = [r for r in non_terminal if _dependencies_met(r, completed_set)]
    if not eligible:
        print("PIPELINE_BLOCKED")
        return

    # Separate in_progress from pending. Statuses outside these two (e.g.
    # "merge_conflict") fall through both lists and are not scheduled here.
    in_progress_refactors = []
    pending_refactors = []
    for r in eligible:
        rid = r.get("id")
        rstatus = status_map.get(rid, "pending")
        if rstatus == "in_progress":
            in_progress_refactors.append(r)
        elif rstatus == "pending":
            pending_refactors.append(r)

    def sort_key(r):
        # Ascending tuple sort: fewest unmet deps, then highest priority,
        # then lowest complexity (see PRIORITY_ORDER / COMPLEXITY_ORDER).
        unmet = _count_unmet_deps(r, completed_set)
        priority = PRIORITY_ORDER.get(r.get("priority", "medium"), 2)
        complexity = COMPLEXITY_ORDER.get(r.get("complexity", "medium"), 1)
        return (unmet, priority, complexity)

    # Resume interrupted work before starting anything new.
    if in_progress_refactors:
        candidates = sorted(in_progress_refactors, key=sort_key)
    elif pending_refactors:
        candidates = sorted(pending_refactors, key=sort_key)
    else:
        print("PIPELINE_BLOCKED")
        return

    chosen = candidates[0]
    chosen_id = chosen["id"]
    chosen_status_data = status_data_map.get(chosen_id, {})

    result = {
        "refactor_id": chosen_id,
        "title": chosen.get("title", ""),
        "type": chosen.get("type", "restructure"),
        "priority": chosen.get("priority", "medium"),
        "complexity": chosen.get("complexity", "medium"),
        "retry_count": chosen_status_data.get("retry_count", 0),
        "resume_from_phase": chosen_status_data.get("resume_from_phase", None),
    }
    print(json.dumps(result, indent=2, ensure_ascii=False))
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
# ---------------------------------------------------------------------------
|
|
250
|
+
# Action: update
|
|
251
|
+
# ---------------------------------------------------------------------------
|
|
252
|
+
|
|
253
|
+
def action_update(args, refactor_list_path, state_dir):
    """Update a refactor's status from a session outcome (--session-status).

    Three outcome classes:
    - "success": mark completed, record the session id.
    - "merge_conflict": bump retry_count; stay in "merge_conflict" until
      max_retries, then fail. No artifact cleanup (finalization retry).
    - anything else: bump retry_count, delete intermediate artifacts, and
      reset to "pending" for a full restart (or "failed" at max_retries).

    Writes both the per-refactor status.json and refactor-list.json, then
    prints a JSON summary of the transition.
    """
    refactor_id = args.refactor_id
    session_status = args.session_status
    session_id = args.session_id
    max_retries = args.max_retries

    if not refactor_id:
        error_out("--refactor-id is required for 'update' action")
        return
    if not session_status:
        error_out("--session-status is required for 'update' action")
        return

    rs = load_refactor_status(state_dir, refactor_id)

    if session_status == "success":
        rs["status"] = "completed"
        rs["resume_from_phase"] = None
        err = update_refactor_in_list(refactor_list_path, refactor_id, "completed")
        if err:
            error_out("Failed to update refactor-list.json: {}".format(err))
            return
    elif session_status == "merge_conflict":
        rs["retry_count"] = rs.get("retry_count", 0) + 1

        if rs["retry_count"] >= max_retries:
            rs["status"] = "failed"
            target_status = "failed"
        else:
            # Keep the special "merge_conflict" status so the daemon can
            # retry only the finalization step instead of the whole run.
            rs["status"] = "merge_conflict"
            target_status = "merge_conflict"

        rs["resume_from_phase"] = None
        rs["sessions"] = []
        rs["last_session_id"] = None

        err = update_refactor_in_list(refactor_list_path, refactor_id, target_status)
        if err:
            error_out("Failed to update refactor-list.json: {}".format(err))
            return
    else:
        # All remaining outcomes (failed/crashed/timed_out/partial_*) are
        # treated as a full restart.
        # NOTE(review): "partial_resumable" also lands here and has its
        # session history wiped — confirm that is intended.
        rs["retry_count"] = rs.get("retry_count", 0) + 1

        # Remove intermediate artifacts before the restart.
        cleaned = cleanup_refactor_artifacts(
            state_dir=state_dir,
            refactor_id=refactor_id,
            project_root=args.project_root,
        )

        if rs["retry_count"] >= max_retries:
            rs["status"] = "failed"
            target_status = "failed"
        else:
            rs["status"] = "pending"
            target_status = "pending"

        rs["resume_from_phase"] = None
        rs["sessions"] = []
        rs["last_session_id"] = None

        err = update_refactor_in_list(refactor_list_path, refactor_id, target_status)
        if err:
            error_out("Failed to update refactor-list.json: {}".format(err))
            return

    # On success, append the session id to the history (idempotently).
    if session_status == "success" and session_id:
        sessions = rs.get("sessions", [])
        if session_id not in sessions:
            sessions.append(session_id)
        rs["sessions"] = sessions
        rs["last_session_id"] = session_id

    rs["updated_at"] = now_iso()

    err = save_refactor_status(state_dir, refactor_id, rs)
    if err:
        error_out("Failed to save refactor status: {}".format(err))
        return

    # NOTE(review): rs["retry_count"] assumes the key exists; default records
    # always have it, but a hand-edited status.json without it would raise —
    # confirm acceptable.
    summary = {
        "action": "update",
        "refactor_id": refactor_id,
        "session_status": session_status,
        "new_status": rs["status"],
        "retry_count": rs["retry_count"],
        "resume_from_phase": rs.get("resume_from_phase"),
        "updated_at": rs["updated_at"],
    }
    if session_status == "merge_conflict":
        summary["degraded_reason"] = "merge_conflict"
        summary["restart_policy"] = "finalization_retry"
    elif session_status != "success":
        # "cleaned" is always bound here: this branch is reachable only via
        # the else-arm above, which performed the cleanup.
        summary["restart_policy"] = "full_restart"
        summary["cleanup_performed"] = cleaned

    print(json.dumps(summary, indent=2, ensure_ascii=False))
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
def _default_project_root():
|
|
352
|
+
return os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
|
|
353
|
+
|
|
354
|
+
|
|
355
|
+
def cleanup_refactor_artifacts(state_dir, refactor_id, project_root=None):
    """Delete intermediate artifacts for a failed refactor run.

    Removes, in order: per-session state directories, transient entries in
    the refactor's state dir (status.json and sessions/ are preserved), the
    refactor's artifact dir under the project, and the shared dev-team
    workspace. Returns a list of human-readable messages describing what
    was deleted.
    """
    if not project_root:
        project_root = _default_project_root()

    messages = []
    refactor_dir = os.path.join(state_dir, "refactors", refactor_id)
    sessions_dir = os.path.join(refactor_dir, "sessions")

    # 1) Remove all session history (each session is a subdirectory).
    removed_sessions = 0
    if os.path.isdir(sessions_dir):
        for name in os.listdir(sessions_dir):
            candidate = os.path.join(sessions_dir, name)
            if os.path.isdir(candidate):
                shutil.rmtree(candidate)
                removed_sessions += 1
        messages.append("Deleted {} session(s) from {}".format(removed_sessions, sessions_dir))

    # 2) Remove transient entries under the refactor state dir, keeping the
    #    canonical status.json and the (now-emptied) sessions directory.
    if os.path.isdir(refactor_dir):
        for name in os.listdir(refactor_dir):
            if name in ("status.json", "sessions"):
                continue
            candidate = os.path.join(refactor_dir, name)
            if os.path.isdir(candidate):
                shutil.rmtree(candidate)
                messages.append("Deleted directory {}".format(candidate))
            elif os.path.isfile(candidate):
                os.remove(candidate)
                messages.append("Deleted file {}".format(candidate))

    # 3) Remove this refactor's artifacts under the project root.
    artifact_dir = os.path.join(project_root, REFACTOR_ARTIFACTS_REL, refactor_id)
    if os.path.isdir(artifact_dir):
        shutil.rmtree(artifact_dir)
        messages.append("Deleted {}".format(artifact_dir))

    # 4) Remove the shared dev-team workspace.
    workspace = os.path.join(project_root, DEV_TEAM_DIR_NAME)
    if os.path.isdir(workspace):
        shutil.rmtree(workspace)
        messages.append("Deleted {}".format(workspace))

    return messages
|
|
400
|
+
|
|
401
|
+
|
|
402
|
+
def load_session_status(state_dir, refactor_id, session_id):
    """Load a session's session-status.json from the state directory.

    Returns (data, None) on success, or (None, error_message) when the
    file is missing or unreadable.
    """
    path = os.path.join(
        state_dir,
        "refactors",
        refactor_id,
        "sessions",
        session_id,
        "session-status.json",
    )
    data, err = load_json_file(path)
    return (None, err) if err else (data, None)
|
|
411
|
+
|
|
412
|
+
|
|
413
|
+
# ---------------------------------------------------------------------------
|
|
414
|
+
# Action: status
|
|
415
|
+
# ---------------------------------------------------------------------------
|
|
416
|
+
|
|
417
|
+
# ANSI escape sequences used by action_status for colorized terminal output.
COLOR_GREEN = "\033[92m"
COLOR_YELLOW = "\033[93m"
COLOR_RED = "\033[91m"
COLOR_GRAY = "\033[90m"
COLOR_MAGENTA = "\033[95m"
COLOR_CYAN = "\033[96m"
COLOR_BOLD = "\033[1m"
COLOR_RESET = "\033[0m"

# Interior width (in characters) of the box drawn by action_status.
BOX_WIDTH = 72

# Emoji shown next to each refactor, keyed by the refactor's "type" field;
# unknown types fall back to a wrench in action_status.
TYPE_ICONS = {
    "extract": "📦",
    "rename": "🏷️",
    "restructure": "🏗️",
    "simplify": "✂️",
    "decouple": "🔗",
    "migrate": "🚀",
}

# Pre-colored complexity badges, keyed by the refactor's "complexity" field.
COMPLEXITY_BADGES = {
    "low": COLOR_GREEN + "[LOW]" + COLOR_RESET,
    "medium": COLOR_YELLOW + "[MED]" + COLOR_RESET,
    "high": COLOR_RED + "[HI]" + COLOR_RESET,
}
|
|
442
|
+
|
|
443
|
+
|
|
444
|
+
def action_status(refactor_list_data, state_dir):
    """Print a boxed, colorized overview of every refactor in the list.

    Loads each refactor's status.json via load_refactor_status, tallies
    per-status counts, and renders a summary header, a progress bar, and
    one line per refactor (status icon, type/complexity badges, colored
    title, and retry/resume detail).
    """
    refactors = refactor_list_data.get("refactors", [])
    project_name = refactor_list_data.get("project_name", "Unknown")

    counts = {"completed": 0, "in_progress": 0, "failed": 0, "pending": 0, "skipped": 0}
    refactor_lines = []

    for r in refactors:
        if not isinstance(r, dict):
            continue
        rid = r.get("id")
        title = r.get("title", "Untitled")
        rtype = r.get("type", "restructure")
        complexity = r.get("complexity", "medium")
        if not rid:
            continue

        rs = load_refactor_status(state_dir, rid)
        rstatus = rs.get("status", "pending")
        retry_count = rs.get("retry_count", 0)
        max_retries_val = rs.get("max_retries", 3)
        resume_phase = rs.get("resume_from_phase")

        # Unknown statuses (e.g. "merge_conflict") are tallied as pending.
        if rstatus in counts:
            counts[rstatus] += 1
        else:
            counts["pending"] += 1

        # Status icon
        if rstatus == "completed":
            icon = COLOR_GREEN + "[✓]" + COLOR_RESET
        elif rstatus == "in_progress":
            icon = COLOR_YELLOW + "[→]" + COLOR_RESET
        elif rstatus == "failed":
            icon = COLOR_RED + "[✗]" + COLOR_RESET
        elif rstatus == "skipped":
            icon = COLOR_GRAY + "[—]" + COLOR_RESET
        else:
            icon = COLOR_GRAY + "[ ]" + COLOR_RESET

        # Type badge
        type_icon = TYPE_ICONS.get(rtype, "🔧")
        type_badge = "[{}]".format(rtype[:6].upper())

        # Complexity badge
        cmplx_badge = COMPLEXITY_BADGES.get(complexity, "[MED]")

        # Detail suffix: retry/resume info for in-progress, retry count for failed.
        detail = ""
        if rstatus == "in_progress":
            parts = []
            if retry_count > 0:
                parts.append("retry {}/{}".format(retry_count, max_retries_val))
            if resume_phase is not None:
                parts.append("CP-RF-{}".format(resume_phase))
            if parts:
                detail = " ({})".format(", ".join(parts))
        elif rstatus == "failed":
            detail = " (failed after {} retries)".format(retry_count)

        # Colorize title based on status
        if rstatus == "completed":
            colored_title = COLOR_GREEN + title + COLOR_RESET
        elif rstatus == "in_progress":
            colored_title = COLOR_YELLOW + title + COLOR_RESET
        elif rstatus == "failed":
            colored_title = COLOR_RED + title + COLOR_RESET
        else:
            colored_title = COLOR_GRAY + title + COLOR_RESET

        # FIX: the original passed a vestigial "" positional here, producing a
        # stray double space between the title and the detail suffix.
        line_content = "{} {} {} {} {} {}{}".format(
            rid, icon, type_badge, cmplx_badge, type_icon, colored_title, detail
        )

        refactor_lines.append(line_content)

    # NOTE(review): total counts every list entry, including malformed ones
    # skipped above, so percent can understate progress on dirty input —
    # confirm acceptable.
    total = len(refactors)
    completed = counts["completed"]
    percent = round(completed / total * 100, 1) if total > 0 else 0.0
    progress_bar = _build_progress_bar(percent, width=24)

    summary_line = "Total: {} refactors | Completed: {} | In Progress: {}".format(
        total, completed, counts["in_progress"]
    )
    summary_line2 = "Failed: {} | Pending: {} | Skipped: {}".format(
        counts["failed"], counts["pending"], counts["skipped"]
    )

    # NOTE(review): pad_right receives strings containing ANSI escapes; if it
    # counts raw characters the box edges will misalign — confirm pad_right
    # accounts for escape sequences.
    inner = BOX_WIDTH - 2
    print("╔" + "═" * BOX_WIDTH + "╗")
    print("║" + pad_right(COLOR_BOLD + " Refactor Pipeline Status" + COLOR_RESET, inner) + " ║")
    print("╠" + "═" * BOX_WIDTH + "╣")
    print("║" + pad_right(" Project: {}".format(project_name), inner) + " ║")
    print("║" + pad_right(" {}".format(summary_line), inner) + " ║")
    print("║" + pad_right(" {}".format(summary_line2), inner) + " ║")
    print("╠" + "─" * BOX_WIDTH + "╣")
    print("║" + pad_right(" Progress: {}".format(progress_bar), inner) + " ║")
    print("╠" + "═" * BOX_WIDTH + "╣")
    for line in refactor_lines:
        print("║" + pad_right(" {}".format(line), inner) + " ║")
    print("╚" + "═" * BOX_WIDTH + "╝")
|
|
545
|
+
|
|
546
|
+
|
|
547
|
+
# ---------------------------------------------------------------------------
|
|
548
|
+
# Action: reset
|
|
549
|
+
# ---------------------------------------------------------------------------
|
|
550
|
+
|
|
551
|
+
def action_reset(args, refactor_list_path, state_dir):
    """Reset one refactor back to a clean 'pending' state.

    Clears the retry counter, session history, and resume checkpoint in the
    per-refactor status.json, mirrors "pending" into refactor-list.json,
    and prints a JSON summary of the transition.
    """
    refactor_id = args.refactor_id
    if not refactor_id:
        error_out("--refactor-id is required for 'reset' action")
        return

    record = load_refactor_status(state_dir, refactor_id)
    previous_status = record.get("status", "unknown")
    previous_retries = record.get("retry_count", 0)

    record.update(
        status="pending",
        retry_count=0,
        sessions=[],
        last_session_id=None,
        resume_from_phase=None,
        updated_at=now_iso(),
    )

    err = save_refactor_status(state_dir, refactor_id, record)
    if err:
        error_out("Failed to save refactor status: {}".format(err))
        return

    err = update_refactor_in_list(refactor_list_path, refactor_id, "pending")
    if err:
        error_out("Failed to update refactor-list.json: {}".format(err))
        return

    summary = {
        "action": "reset",
        "refactor_id": refactor_id,
        "old_status": previous_status,
        "old_retry_count": previous_retries,
        "new_status": "pending",
    }
    print(json.dumps(summary, indent=2, ensure_ascii=False))
|
|
586
|
+
|
|
587
|
+
|
|
588
|
+
# ---------------------------------------------------------------------------
|
|
589
|
+
# Action: clean
|
|
590
|
+
# ---------------------------------------------------------------------------
|
|
591
|
+
|
|
592
|
+
def action_clean(args, refactor_list_path, state_dir):
    """Delete a refactor's on-disk artifacts and reset it to 'pending'.

    Requires --refactor-id and --project-root. Removes session history,
    the refactor's artifact directory, and the shared dev-team workspace,
    then resets the status files and prints a JSON summary including what
    was cleaned.
    """
    refactor_id = args.refactor_id
    project_root = args.project_root

    if not refactor_id:
        error_out("--refactor-id is required for 'clean' action")
        return
    if not project_root:
        error_out("--project-root is required for 'clean' action")
        return

    cleaned = []

    # 1. Remove per-session working directories recorded for this refactor.
    sessions_dir = os.path.join(state_dir, "refactors", refactor_id, "sessions")
    sessions_deleted = 0
    if os.path.isdir(sessions_dir):
        for name in os.listdir(sessions_dir):
            candidate = os.path.join(sessions_dir, name)
            if os.path.isdir(candidate):
                shutil.rmtree(candidate)
                sessions_deleted += 1
    cleaned.append("Deleted {} session(s) from {}".format(sessions_deleted, sessions_dir))

    # 2. Remove this refactor's artifact directory inside the project.
    artifact_dir = os.path.join(project_root, REFACTOR_ARTIFACTS_REL, refactor_id)
    if os.path.isdir(artifact_dir):
        shutil.rmtree(artifact_dir)
        cleaned.append("Deleted {}".format(artifact_dir))

    # 3. Remove the shared dev-team workspace (rebuilt on the next run).
    workspace_dir = os.path.join(project_root, DEV_TEAM_DIR_NAME)
    if os.path.isdir(workspace_dir):
        shutil.rmtree(workspace_dir)
        cleaned.append("Deleted {}".format(workspace_dir))

    # 4. Reset the status document, same fields as the 'reset' action.
    status_doc = load_refactor_status(state_dir, refactor_id)
    previous_status = status_doc.get("status", "unknown")
    previous_retry = status_doc.get("retry_count", 0)

    status_doc.update(
        status="pending",
        retry_count=0,
        sessions=[],
        last_session_id=None,
        resume_from_phase=None,
        updated_at=now_iso(),
    )

    save_err = save_refactor_status(state_dir, refactor_id, status_doc)
    if save_err:
        error_out("Failed to save refactor status: {}".format(save_err))
        return

    list_err = update_refactor_in_list(refactor_list_path, refactor_id, "pending")
    if list_err:
        error_out("Failed to update refactor-list.json: {}".format(list_err))
        return

    summary = {
        "action": "clean",
        "refactor_id": refactor_id,
        "old_status": previous_status,
        "old_retry_count": previous_retry,
        "new_status": "pending",
        "sessions_deleted": sessions_deleted,
        "cleaned": cleaned,
    }
    print(json.dumps(summary, indent=2, ensure_ascii=False))
|
|
660
|
+
|
|
661
|
+
|
|
662
|
+
# ---------------------------------------------------------------------------
|
|
663
|
+
# Action: pause
|
|
664
|
+
# ---------------------------------------------------------------------------
|
|
665
|
+
|
|
666
|
+
def action_pause(state_dir):
    """Mark the whole pipeline as paused by stamping pipeline.json.

    Creates a fresh pipeline.json if none can be loaded; otherwise updates
    the existing document in place. Prints a JSON summary on success.
    """
    pipeline_path = os.path.join(state_dir, "pipeline.json")

    data, load_err = load_json_file(pipeline_path)
    if load_err:
        # No readable pipeline.json yet — start a minimal paused document.
        data = {"status": "paused", "paused_at": now_iso()}
    else:
        data["status"] = "paused"
        data["paused_at"] = now_iso()

    write_err = write_json_file(pipeline_path, data)
    if write_err:
        error_out("Failed to write pipeline.json: {}".format(write_err))
        return

    summary = {
        "action": "pause",
        "status": "paused",
        "paused_at": data["paused_at"],
    }
    print(json.dumps(summary, indent=2, ensure_ascii=False))
|
|
686
|
+
|
|
687
|
+
|
|
688
|
+
# ---------------------------------------------------------------------------
|
|
689
|
+
# Main
|
|
690
|
+
# ---------------------------------------------------------------------------
|
|
691
|
+
|
|
692
|
+
def main():
    """CLI entry point: validate arguments, then dispatch to the handler."""
    args = parse_args()
    action = args.action

    # Up-front flag validation; error_out reports which flag is missing.
    if action == "update":
        if not args.refactor_id:
            error_out("--refactor-id is required for 'update' action")
        if not args.session_status:
            error_out("--session-status is required for 'update' action")
    if action in ("reset", "clean") and not args.refactor_id:
        error_out("--refactor-id is required for '{}' action".format(action))
    if action == "clean" and not args.project_root:
        error_out("--project-root is required for 'clean' action")

    refactor_list_data, load_err = load_json_file(args.refactor_list)
    if load_err:
        error_out("Cannot load refactor list: {}".format(load_err))

    # Dispatch. get_next/status consume the parsed list; the mutating
    # actions take the list path so they can rewrite it on disk.
    if action == "get_next":
        action_get_next(refactor_list_data, args.state_dir)
    elif action == "update":
        action_update(args, args.refactor_list, args.state_dir)
    elif action == "status":
        action_status(refactor_list_data, args.state_dir)
    elif action == "reset":
        action_reset(args, args.refactor_list, args.state_dir)
    elif action == "clean":
        action_clean(args, args.refactor_list, args.state_dir)
    elif action == "pause":
        action_pause(args.state_dir)
|
|
723
|
+
|
|
724
|
+
|
|
725
|
+
# Standard script guard: only run the CLI when executed directly.
if __name__ == "__main__":
    main()