prizmkit 1.0.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bundled/VERSION.json +5 -0
- package/bundled/adapters/claude/agent-adapter.js +108 -0
- package/bundled/adapters/claude/command-adapter.js +104 -0
- package/bundled/adapters/claude/paths.js +35 -0
- package/bundled/adapters/claude/rules-adapter.js +77 -0
- package/bundled/adapters/claude/settings-adapter.js +73 -0
- package/bundled/adapters/claude/team-adapter.js +183 -0
- package/bundled/adapters/codebuddy/agent-adapter.js +43 -0
- package/bundled/adapters/codebuddy/paths.js +29 -0
- package/bundled/adapters/codebuddy/settings-adapter.js +47 -0
- package/bundled/adapters/codebuddy/skill-adapter.js +68 -0
- package/bundled/adapters/codebuddy/team-adapter.js +46 -0
- package/bundled/adapters/shared/frontmatter.js +77 -0
- package/bundled/agents/prizm-dev-team-coordinator.md +142 -0
- package/bundled/agents/prizm-dev-team-dev.md +99 -0
- package/bundled/agents/prizm-dev-team-pm.md +114 -0
- package/bundled/agents/prizm-dev-team-reviewer.md +119 -0
- package/bundled/dev-pipeline/README.md +482 -0
- package/bundled/dev-pipeline/assets/feature-list-example.json +147 -0
- package/bundled/dev-pipeline/assets/prizm-dev-team-integration.md +138 -0
- package/bundled/dev-pipeline/launch-bugfix-daemon.sh +425 -0
- package/bundled/dev-pipeline/launch-daemon.sh +549 -0
- package/bundled/dev-pipeline/reset-feature.sh +209 -0
- package/bundled/dev-pipeline/retry-bug.sh +344 -0
- package/bundled/dev-pipeline/retry-feature.sh +338 -0
- package/bundled/dev-pipeline/run-bugfix.sh +638 -0
- package/bundled/dev-pipeline/run.sh +845 -0
- package/bundled/dev-pipeline/scripts/check-session-status.py +158 -0
- package/bundled/dev-pipeline/scripts/detect-stuck.py +385 -0
- package/bundled/dev-pipeline/scripts/generate-bootstrap-prompt.py +598 -0
- package/bundled/dev-pipeline/scripts/generate-bugfix-prompt.py +402 -0
- package/bundled/dev-pipeline/scripts/init-bugfix-pipeline.py +294 -0
- package/bundled/dev-pipeline/scripts/init-dev-team.py +134 -0
- package/bundled/dev-pipeline/scripts/init-pipeline.py +335 -0
- package/bundled/dev-pipeline/scripts/update-bug-status.py +748 -0
- package/bundled/dev-pipeline/scripts/update-feature-status.py +1076 -0
- package/bundled/dev-pipeline/templates/bootstrap-prompt.md +262 -0
- package/bundled/dev-pipeline/templates/bug-fix-list-schema.json +159 -0
- package/bundled/dev-pipeline/templates/bugfix-bootstrap-prompt.md +291 -0
- package/bundled/dev-pipeline/templates/feature-list-schema.json +112 -0
- package/bundled/dev-pipeline/templates/session-status-schema.json +77 -0
- package/bundled/skills/_metadata.json +267 -0
- package/bundled/skills/app-planner/SKILL.md +580 -0
- package/bundled/skills/app-planner/assets/planning-guide.md +313 -0
- package/bundled/skills/app-planner/scripts/validate-and-generate.py +758 -0
- package/bundled/skills/bug-planner/SKILL.md +235 -0
- package/bundled/skills/bugfix-pipeline-launcher/SKILL.md +252 -0
- package/bundled/skills/dev-pipeline-launcher/SKILL.md +223 -0
- package/bundled/skills/prizm-kit/SKILL.md +151 -0
- package/bundled/skills/prizm-kit/assets/claude-md-template.md +38 -0
- package/bundled/skills/prizm-kit/assets/codebuddy-md-template.md +35 -0
- package/bundled/skills/prizm-kit/assets/hooks/prizm-commit-hook.json +15 -0
- package/bundled/skills/prizmkit-adr-manager/SKILL.md +68 -0
- package/bundled/skills/prizmkit-adr-manager/assets/adr-template.md +26 -0
- package/bundled/skills/prizmkit-analyze/SKILL.md +194 -0
- package/bundled/skills/prizmkit-api-doc-generator/SKILL.md +56 -0
- package/bundled/skills/prizmkit-bug-fix-workflow/SKILL.md +351 -0
- package/bundled/skills/prizmkit-bug-reproducer/SKILL.md +62 -0
- package/bundled/skills/prizmkit-ci-cd-generator/SKILL.md +54 -0
- package/bundled/skills/prizmkit-clarify/SKILL.md +52 -0
- package/bundled/skills/prizmkit-code-review/SKILL.md +70 -0
- package/bundled/skills/prizmkit-committer/SKILL.md +117 -0
- package/bundled/skills/prizmkit-db-migration/SKILL.md +65 -0
- package/bundled/skills/prizmkit-dependency-health/SKILL.md +123 -0
- package/bundled/skills/prizmkit-deployment-strategy/SKILL.md +58 -0
- package/bundled/skills/prizmkit-error-triage/SKILL.md +55 -0
- package/bundled/skills/prizmkit-implement/SKILL.md +47 -0
- package/bundled/skills/prizmkit-init/SKILL.md +156 -0
- package/bundled/skills/prizmkit-log-analyzer/SKILL.md +55 -0
- package/bundled/skills/prizmkit-monitoring-setup/SKILL.md +75 -0
- package/bundled/skills/prizmkit-onboarding-generator/SKILL.md +70 -0
- package/bundled/skills/prizmkit-perf-profiler/SKILL.md +55 -0
- package/bundled/skills/prizmkit-plan/SKILL.md +54 -0
- package/bundled/skills/prizmkit-plan/assets/plan-template.md +37 -0
- package/bundled/skills/prizmkit-prizm-docs/SKILL.md +140 -0
- package/bundled/skills/prizmkit-prizm-docs/assets/PRIZM-SPEC.md +943 -0
- package/bundled/skills/prizmkit-retrospective/SKILL.md +79 -0
- package/bundled/skills/prizmkit-security-audit/SKILL.md +130 -0
- package/bundled/skills/prizmkit-specify/SKILL.md +52 -0
- package/bundled/skills/prizmkit-specify/assets/spec-template.md +37 -0
- package/bundled/skills/prizmkit-summarize/SKILL.md +51 -0
- package/bundled/skills/prizmkit-summarize/assets/registry-template.md +18 -0
- package/bundled/skills/prizmkit-tasks/SKILL.md +50 -0
- package/bundled/skills/prizmkit-tasks/assets/tasks-template.md +21 -0
- package/bundled/skills/prizmkit-tech-debt-tracker/SKILL.md +139 -0
- package/bundled/team/prizm-dev-team.json +47 -0
- package/bundled/templates/claude-md-template.md +38 -0
- package/bundled/templates/codebuddy-md-template.md +35 -0
- package/package.json +2 -1
|
@@ -0,0 +1,1076 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""Core state machine for updating feature status in the dev-pipeline.
|
|
3
|
+
|
|
4
|
+
Handles seven actions:
|
|
5
|
+
- get_next: Find the next feature to process based on priority and dependencies
|
|
6
|
+
- update: Update a feature's status based on session outcome
|
|
7
|
+
- status: Print a formatted overview of all features
|
|
8
|
+
- pause: Save pipeline state for graceful shutdown
|
|
9
|
+
- reset: Reset a feature to pending (status + retry count)
|
|
10
|
+
- clean: Reset + delete session history + delete prizmkit artifacts
|
|
11
|
+
- complete: Shortcut for manually marking a feature as completed
|
|
12
|
+
|
|
13
|
+
Usage:
|
|
14
|
+
python3 update-feature-status.py \
|
|
15
|
+
--feature-list <path> --state-dir <path> \
|
|
16
|
+
--action <get_next|update|status|pause|reset|clean|complete> \
|
|
17
|
+
[--feature-id <id>] [--session-status <status>] \
|
|
18
|
+
[--session-id <id>] [--max-retries <n>]
|
|
19
|
+
"""
|
|
20
|
+
|
|
21
|
+
import argparse
|
|
22
|
+
import json
|
|
23
|
+
import os
|
|
24
|
+
import re
|
|
25
|
+
import shutil
|
|
26
|
+
import sys
|
|
27
|
+
from datetime import datetime, timezone
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
SESSION_STATUS_VALUES = [
|
|
31
|
+
"success",
|
|
32
|
+
"partial_resumable",
|
|
33
|
+
"partial_not_resumable",
|
|
34
|
+
"failed",
|
|
35
|
+
"crashed",
|
|
36
|
+
"timed_out",
|
|
37
|
+
]
|
|
38
|
+
|
|
39
|
+
TERMINAL_STATUSES = {"completed", "failed", "skipped"}
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def parse_args():
    """Parse and return the command-line arguments for this tool."""
    parser = argparse.ArgumentParser(
        description="Core state machine for dev-pipeline feature status management."
    )
    # Required inputs shared by every action.
    parser.add_argument(
        "--feature-list",
        required=True,
        help="Path to the feature-list.json file",
    )
    parser.add_argument(
        "--state-dir",
        required=True,
        help="Path to the state/ directory",
    )
    parser.add_argument(
        "--action",
        required=True,
        choices=["get_next", "update", "status", "pause", "reset", "clean", "complete"],
        help="Action to perform",
    )
    # Optional, action-specific inputs.
    parser.add_argument(
        "--feature-id",
        default=None,
        help="Feature ID (required for 'update' action)",
    )
    parser.add_argument(
        "--session-status",
        default=None,
        choices=SESSION_STATUS_VALUES,
        help="Session outcome status (required for 'update' action)",
    )
    parser.add_argument(
        "--session-id",
        default=None,
        help="Session ID (optional, for 'update' action)",
    )
    parser.add_argument(
        "--max-retries",
        type=int,
        default=3,
        help="Maximum retry count before marking as failed (default: 3)",
    )
    parser.add_argument(
        "--feature-slug",
        default=None,
        help="Feature slug (e.g. 007-import-export-desktop). Required for 'clean' action.",
    )
    parser.add_argument(
        "--project-root",
        default=None,
        help="Project root directory. Required for 'clean' action.",
    )
    return parser.parse_args()
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def now_iso():
    """Return the current UTC time as an ISO8601 string (e.g. 2024-01-01T00:00:00Z)."""
    return "{:%Y-%m-%dT%H:%M:%SZ}".format(datetime.now(timezone.utc))
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def load_json_file(path):
    """Parse a JSON file.

    Returns a ``(data, error)`` tuple; ``error`` is None on success,
    otherwise a human-readable message (missing file, bad JSON, I/O failure).
    """
    full_path = os.path.abspath(path)
    if not os.path.isfile(full_path):
        return None, "File not found: {}".format(full_path)
    try:
        with open(full_path, "r", encoding="utf-8") as handle:
            return json.load(handle), None
    except json.JSONDecodeError as exc:
        return None, "Invalid JSON: {}".format(str(exc))
    except IOError as exc:
        return None, "Cannot read file: {}".format(str(exc))
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def write_json_file(path, data):
    """Serialize ``data`` as pretty-printed JSON to ``path``.

    Parent directories are created on demand. Returns None on success,
    otherwise an error message string.
    """
    full_path = os.path.abspath(path)
    directory = os.path.dirname(full_path)
    if directory and not os.path.isdir(directory):
        try:
            os.makedirs(directory, exist_ok=True)
        except OSError as exc:
            return "Cannot create directory: {}".format(str(exc))
    try:
        with open(full_path, "w", encoding="utf-8") as handle:
            json.dump(data, handle, indent=2, ensure_ascii=False)
            handle.write("\n")
    except IOError as exc:
        return "Cannot write file: {}".format(str(exc))
    return None
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def load_feature_status(state_dir, feature_id):
    """Load the status.json for a feature.

    Falls back to a fresh default "pending" record when the file does not
    exist or cannot be read, so callers never have to handle a load failure.

    Args:
        state_dir: Path to the pipeline state/ directory.
        feature_id: ID of the feature whose status is requested.

    Returns:
        The parsed status dict, or a default pending status dict.
    """
    status_path = os.path.join(
        state_dir, "features", feature_id, "status.json"
    )
    if os.path.isfile(status_path):
        data, err = load_json_file(status_path)
        if not err:
            return data
        # Unreadable/corrupt status file: treat the feature as never started.
    return _default_pending_status(feature_id)


def _default_pending_status(feature_id):
    """Build the default status record for a feature that has not run yet."""
    now = now_iso()
    return {
        "feature_id": feature_id,
        "status": "pending",
        "retry_count": 0,
        "max_retries": 3,
        "sessions": [],
        "last_session_id": None,
        "resume_from_phase": None,
        "created_at": now,
        "updated_at": now,
    }
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def save_feature_status(state_dir, feature_id, status_data):
    """Persist ``status_data`` to the feature's status.json.

    Returns None on success, otherwise an error string from write_json_file.
    """
    return write_json_file(
        os.path.join(state_dir, "features", feature_id, "status.json"),
        status_data,
    )
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
def update_feature_in_list(feature_list_path, feature_id, new_status):
    """Set the ``status`` of one feature inside feature-list.json.

    The whole file is read, the matching feature is patched in place, and
    the file is written back. Returns None on success, or an error string.
    """
    data, err = load_json_file(feature_list_path)
    if err:
        return err
    target = next(
        (
            entry
            for entry in data.get("features", [])
            if isinstance(entry, dict) and entry.get("id") == feature_id
        ),
        None,
    )
    if target is None:
        return "Feature '{}' not found in feature-list.json".format(feature_id)
    target["status"] = new_status
    return write_json_file(feature_list_path, data)
|
|
207
|
+
|
|
208
|
+
|
|
209
|
+
def _default_project_root():
|
|
210
|
+
return os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def _build_feature_slug(feature_id, title):
|
|
214
|
+
numeric = feature_id.replace("F-", "").replace("f-", "").zfill(3)
|
|
215
|
+
cleaned = re.sub(r"[^a-z0-9\s-]", "", (title or "").lower())
|
|
216
|
+
cleaned = re.sub(r"[\s]+", "-", cleaned.strip())
|
|
217
|
+
cleaned = re.sub(r"-+", "-", cleaned).strip("-")
|
|
218
|
+
if not cleaned:
|
|
219
|
+
cleaned = "feature"
|
|
220
|
+
return "{}-{}".format(numeric, cleaned)
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
def _get_feature_slug(feature_list_path, feature_id):
    """Look up a feature by id in feature-list.json and return its slug.

    Returns None when the list cannot be read or the id is not present.
    """
    data, err = load_json_file(feature_list_path)
    if err:
        return None
    for entry in data.get("features", []):
        if isinstance(entry, dict) and entry.get("id") == feature_id:
            return _build_feature_slug(feature_id, entry.get("title", ""))
    return None
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
def cleanup_feature_artifacts(feature_list_path, state_dir, feature_id, project_root=None):
    """Delete intermediate artifacts for a failed feature run.

    Cleans session history, per-feature transient state, generated specs,
    current-session pointer, and .dev-team workspace to avoid context pollution.

    Args:
        feature_list_path: Path to feature-list.json (used to derive the slug).
        state_dir: Path to the pipeline state/ directory.
        feature_id: ID of the feature whose artifacts are removed.
        project_root: Project root; defaults to two levels above this script.

    Returns:
        A list of human-readable strings describing each deletion performed.

    NOTE(review): the rmtree/remove calls below are not wrapped in try/except,
    so a filesystem error (e.g. permissions) propagates to the caller.
    """
    if not project_root:
        project_root = _default_project_root()

    cleaned = []  # human-readable log of everything removed

    # 1) Remove all session history
    sessions_dir = os.path.join(state_dir, "features", feature_id, "sessions")
    sessions_deleted = 0
    if os.path.isdir(sessions_dir):
        for entry in os.listdir(sessions_dir):
            entry_path = os.path.join(sessions_dir, entry)
            if os.path.isdir(entry_path):
                shutil.rmtree(entry_path)
                sessions_deleted += 1
        cleaned.append("Deleted {} session(s) from {}".format(sessions_deleted, sessions_dir))

    # 2) Remove transient files under feature state dir (keep status.json)
    feature_dir = os.path.join(state_dir, "features", feature_id)
    if os.path.isdir(feature_dir):
        for entry in os.listdir(feature_dir):
            # status.json survives (retry bookkeeping); sessions handled above.
            if entry == "status.json" or entry == "sessions":
                continue
            entry_path = os.path.join(feature_dir, entry)
            if os.path.isdir(entry_path):
                shutil.rmtree(entry_path)
                cleaned.append("Deleted directory {}".format(entry_path))
            elif os.path.isfile(entry_path):
                os.remove(entry_path)
                cleaned.append("Deleted file {}".format(entry_path))

    # 3) Remove generated prizm specs for this feature
    feature_slug = _get_feature_slug(feature_list_path, feature_id)
    if feature_slug:
        specs_dir = os.path.join(project_root, ".prizmkit", "specs", feature_slug)
        if os.path.isdir(specs_dir):
            file_count = sum(len(files) for _, _, files in os.walk(specs_dir))
            shutil.rmtree(specs_dir)
            cleaned.append("Deleted {} ({} files)".format(specs_dir, file_count))

    # 4) Remove global dev-team workspace to avoid stale context contamination
    dev_team_dir = os.path.join(project_root, ".dev-team")
    if os.path.isdir(dev_team_dir):
        file_count = sum(len(files) for _, _, files in os.walk(dev_team_dir))
        shutil.rmtree(dev_team_dir)
        cleaned.append("Deleted {} ({} files)".format(dev_team_dir, file_count))

    # 5) Clear current-session pointer if it points to this feature
    current_session_path = os.path.join(state_dir, "current-session.json")
    if os.path.isfile(current_session_path):
        current_session, _ = load_json_file(current_session_path)
        if current_session and current_session.get("feature_id") == feature_id:
            os.remove(current_session_path)
            cleaned.append("Deleted {}".format(current_session_path))

    return cleaned
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
def load_session_status(state_dir, feature_id, session_id):
    """Read a session's session-status.json.

    Returns a ``(data, error)`` tuple mirroring load_json_file.
    """
    path = os.path.join(
        state_dir,
        "features",
        feature_id,
        "sessions",
        session_id,
        "session-status.json",
    )
    data, err = load_json_file(path)
    return (None, err) if err else (data, None)
|
|
306
|
+
|
|
307
|
+
|
|
308
|
+
# ---------------------------------------------------------------------------
|
|
309
|
+
# Action: get_next
|
|
310
|
+
# ---------------------------------------------------------------------------
|
|
311
|
+
|
|
312
|
+
def action_get_next(feature_list_data, state_dir):
    """Find the next feature to process and print the result to stdout.

    Priority logic:
    1. Skip terminal statuses (completed, failed, skipped)
    2. Check that all dependencies are completed
    3. Prefer in_progress features over pending ones (interrupted session resume)
    4. Among eligible features, pick lowest priority number (highest priority)

    Output protocol (consumed by the shell driver):
    - "PIPELINE_COMPLETE" when no feature list or nothing non-terminal remains
    - "PIPELINE_BLOCKED" when remaining features all have unmet dependencies
    - otherwise a JSON object with feature_id, title, retry_count,
      resume_from_phase for the chosen feature
    """
    features = feature_list_data.get("features", [])
    if not features:
        print("PIPELINE_COMPLETE")
        return

    # Build a map of feature statuses from state dir
    status_map = {}  # feature_id -> status string
    status_data_map = {}  # feature_id -> full status data
    for feature in features:
        if not isinstance(feature, dict):
            continue
        fid = feature.get("id")
        if not fid:
            continue
        fs = load_feature_status(state_dir, fid)
        status_map[fid] = fs.get("status", "pending")
        status_data_map[fid] = fs

    # Check if all features are in terminal state
    non_terminal = [
        f for f in features
        if isinstance(f, dict) and f.get("id")
        and status_map.get(f["id"], "pending") not in TERMINAL_STATUSES
    ]
    if not non_terminal:
        print("PIPELINE_COMPLETE")
        return

    # Find eligible features (dependencies all completed)
    eligible = []
    has_remaining = False
    for feature in non_terminal:
        fid = feature.get("id")
        if not fid:
            continue
        has_remaining = True
        deps = feature.get("dependencies", [])
        all_deps_completed = True
        for dep_id in deps:
            # A dependency with no recorded status counts as not completed.
            if status_map.get(dep_id, "pending") != "completed":
                all_deps_completed = False
                break
        if all_deps_completed:
            eligible.append(feature)

    if not eligible:
        if has_remaining:
            # Work remains but every remaining feature is waiting on a dep.
            print("PIPELINE_BLOCKED")
        else:
            print("PIPELINE_COMPLETE")
        return

    # Separate in_progress from pending
    in_progress_features = []
    pending_features = []
    for feature in eligible:
        fid = feature.get("id")
        fstatus = status_map.get(fid, "pending")
        if fstatus == "in_progress":
            in_progress_features.append(feature)
        else:
            pending_features.append(feature)

    # Prefer in_progress features, then pending; sort by priority (lowest number = highest priority)
    if in_progress_features:
        candidates = sorted(
            in_progress_features,
            key=lambda f: f.get("priority", 999)
        )
    else:
        candidates = sorted(
            pending_features,
            key=lambda f: f.get("priority", 999)
        )

    chosen = candidates[0]
    chosen_id = chosen["id"]
    chosen_status_data = status_data_map.get(chosen_id, {})

    result = {
        "feature_id": chosen_id,
        "title": chosen.get("title", ""),
        "retry_count": chosen_status_data.get("retry_count", 0),
        "resume_from_phase": chosen_status_data.get("resume_from_phase", None),
    }
    print(json.dumps(result, indent=2, ensure_ascii=False))
|
|
407
|
+
|
|
408
|
+
|
|
409
|
+
# ---------------------------------------------------------------------------
|
|
410
|
+
# Action: update
|
|
411
|
+
# ---------------------------------------------------------------------------
|
|
412
|
+
|
|
413
|
+
def action_update(args, feature_list_path, state_dir):
    """Update a feature's status based on session outcome.

    Failure policy:
    - Never continue from partial/failed session context
    - Always clean intermediate artifacts and restart from scratch

    On "success" the feature is marked completed (in both status.json and
    feature-list.json) and the session id is recorded. On any other session
    status the retry counter is bumped, artifacts are cleaned, and the
    feature returns to "pending" — or "failed" once max_retries is reached.
    Prints a JSON summary of the transition on success.
    """
    feature_id = args.feature_id
    session_status = args.session_status
    session_id = args.session_id
    max_retries = args.max_retries

    if not feature_id:
        error_out("--feature-id is required for 'update' action")
        return
    if not session_status:
        error_out("--session-status is required for 'update' action")
        return

    fs = load_feature_status(state_dir, feature_id)

    if session_status == "success":
        fs["status"] = "completed"
        fs["resume_from_phase"] = None
        err = update_feature_in_list(feature_list_path, feature_id, "completed")
        if err:
            error_out("Failed to update feature-list.json: {}".format(err))
            return
    else:
        # Every non-success outcome consumes one retry attempt.
        fs["retry_count"] = fs.get("retry_count", 0) + 1

        # Full-restart policy: remove all intermediate artifacts up front.
        cleaned = cleanup_feature_artifacts(
            feature_list_path=feature_list_path,
            state_dir=state_dir,
            feature_id=feature_id,
            project_root=args.project_root,
        )

        if fs["retry_count"] >= max_retries:
            fs["status"] = "failed"
            target_status = "failed"
        else:
            fs["status"] = "pending"
            target_status = "pending"

        # Drop all resume/session context so the next run starts clean.
        fs["resume_from_phase"] = None
        fs["sessions"] = []
        fs["last_session_id"] = None

        err = update_feature_in_list(feature_list_path, feature_id, target_status)
        if err:
            error_out("Failed to update feature-list.json: {}".format(err))
            return

    # Session ids are only retained for successful runs.
    if session_status == "success" and session_id:
        sessions = fs.get("sessions", [])
        if session_id not in sessions:
            sessions.append(session_id)
        fs["sessions"] = sessions
        fs["last_session_id"] = session_id

    fs["updated_at"] = now_iso()

    err = save_feature_status(state_dir, feature_id, fs)
    if err:
        error_out("Failed to save feature status: {}".format(err))
        return

    summary = {
        "action": "update",
        "feature_id": feature_id,
        "session_status": session_status,
        "new_status": fs["status"],
        "retry_count": fs["retry_count"],
        "resume_from_phase": fs.get("resume_from_phase"),
        "updated_at": fs["updated_at"],
    }
    if session_status != "success":
        summary["restart_policy"] = "full_restart"
        summary["cleanup_performed"] = cleaned
    print(json.dumps(summary, indent=2, ensure_ascii=False))
|
|
495
|
+
|
|
496
|
+
|
|
497
|
+
# ---------------------------------------------------------------------------
|
|
498
|
+
# Action: status
|
|
499
|
+
# ---------------------------------------------------------------------------
|
|
500
|
+
|
|
501
|
+
# ANSI color codes
|
|
502
|
+
COLOR_GREEN = "\033[92m"
|
|
503
|
+
COLOR_YELLOW = "\033[93m"
|
|
504
|
+
COLOR_RED = "\033[91m"
|
|
505
|
+
COLOR_GRAY = "\033[90m"
|
|
506
|
+
COLOR_BOLD = "\033[1m"
|
|
507
|
+
COLOR_RESET = "\033[0m"
|
|
508
|
+
|
|
509
|
+
BOX_WIDTH = 68
|
|
510
|
+
|
|
511
|
+
|
|
512
|
+
def pad_right(text, width):
    """Right-pad ``text`` with spaces to ``width`` visible columns.

    ANSI escape sequences (ESC ... 'm') are excluded from the visible
    length so colored strings line up in the box layout.
    """
    visible_len = 0
    pos = 0
    total = len(text)
    while pos < total:
        if text[pos] == "\033":
            # Consume the escape sequence up to and including the 'm'.
            while pos < total and text[pos] != "m":
                pos += 1
            pos += 1
        else:
            visible_len += 1
            pos += 1
    pad = width - visible_len
    return text + " " * pad if pad > 0 else text
|
|
531
|
+
|
|
532
|
+
|
|
533
|
+
def _calc_feature_duration(state_dir, feature_id):
    """Wall-clock duration in seconds of a completed feature.

    Computed from the created_at/updated_at timestamps in the feature's
    status.json. Returns None when the file, the timestamps, or a plausible
    delta (between 10 seconds and 24 hours) are unavailable.
    """
    status_path = os.path.join(state_dir, "features", feature_id, "status.json")
    if not os.path.isfile(status_path):
        return None
    data, err = load_json_file(status_path)
    if err or not data:
        return None

    started = data.get("created_at")
    finished = data.get("updated_at")
    if not started or not finished:
        return None

    fmt = "%Y-%m-%dT%H:%M:%SZ"
    try:
        elapsed = (
            datetime.strptime(finished, fmt) - datetime.strptime(started, fmt)
        ).total_seconds()
    except (ValueError, TypeError):
        return None
    # Discard implausible values: under 10 seconds or over 24 hours.
    return elapsed if 10 <= elapsed <= 86400 else None
|
|
563
|
+
|
|
564
|
+
|
|
565
|
+
def _format_duration(seconds):
|
|
566
|
+
"""将秒数格式化为人类可读的时间字符串。"""
|
|
567
|
+
if seconds is None:
|
|
568
|
+
return "N/A"
|
|
569
|
+
seconds = int(seconds)
|
|
570
|
+
if seconds < 60:
|
|
571
|
+
return "{}s".format(seconds)
|
|
572
|
+
elif seconds < 3600:
|
|
573
|
+
m = seconds // 60
|
|
574
|
+
s = seconds % 60
|
|
575
|
+
return "{}m{}s".format(m, s)
|
|
576
|
+
else:
|
|
577
|
+
h = seconds // 3600
|
|
578
|
+
m = (seconds % 3600) // 60
|
|
579
|
+
return "{}h{}m".format(h, m)
|
|
580
|
+
|
|
581
|
+
|
|
582
|
+
def _build_progress_bar(percent, width=20):
|
|
583
|
+
"""生成文本进度条。
|
|
584
|
+
|
|
585
|
+
例如: ████████░░░░░░░░░░░░ 40%
|
|
586
|
+
"""
|
|
587
|
+
filled = int(width * percent / 100)
|
|
588
|
+
empty = width - filled
|
|
589
|
+
bar = "█" * filled + "░" * empty
|
|
590
|
+
return "{} {:>3}%".format(bar, int(percent))
|
|
591
|
+
|
|
592
|
+
|
|
593
|
+
def _estimate_remaining_time(features, state_dir, counts):
    """Estimate remaining pipeline time from completed-feature history,
    weighted by estimated complexity.

    Strategy:
    1. Collect durations of all completed features, grouped by complexity.
    2. Estimate each remaining (non-terminal) feature with the average
       duration for its complexity.
    3. When a complexity has no historical data, fall back to the global
       average scaled by a fixed complexity weight ratio.

    Returns:
        An (estimated_seconds, confidence) tuple. estimated_seconds is None
        when no completed feature has a usable duration.
        confidence: "high" (>=50% completed), "medium" (>=25%), "low" (<25%).
    """
    # Complexity weights (used to scale estimates when a complexity bucket
    # has no historical data).
    COMPLEXITY_WEIGHT = {"low": 1.0, "medium": 2.0, "high": 4.0}

    # Collect completed-feature durations grouped by complexity.
    duration_by_complexity = {}  # complexity -> [duration_seconds]
    feature_complexity_map = {}  # feature_id -> complexity

    for feature in features:
        if not isinstance(feature, dict):
            continue
        fid = feature.get("id")
        if not fid:
            continue
        complexity = feature.get("estimated_complexity", "medium")
        feature_complexity_map[fid] = complexity

    all_durations = []
    for feature in features:
        if not isinstance(feature, dict):
            continue
        fid = feature.get("id")
        if not fid:
            continue
        fs = load_feature_status(state_dir, fid)
        if fs.get("status") != "completed":
            continue
        duration = _calc_feature_duration(state_dir, fid)
        if duration is None:
            continue
        complexity = feature_complexity_map.get(fid, "medium")
        if complexity not in duration_by_complexity:
            duration_by_complexity[complexity] = []
        duration_by_complexity[complexity].append(duration)
        all_durations.append(duration)

    if not all_durations:
        # No usable history yet: cannot estimate.
        return None, "low"

    # Average duration per complexity bucket.
    avg_by_complexity = {}
    for c, durations in duration_by_complexity.items():
        avg_by_complexity[c] = sum(durations) / len(durations)
    global_avg = sum(all_durations) / len(all_durations)

    # Estimate time for the remaining features.
    remaining_seconds = 0.0
    remaining_count = 0
    for feature in features:
        if not isinstance(feature, dict):
            continue
        fid = feature.get("id")
        if not fid:
            continue
        fs = load_feature_status(state_dir, fid)
        fstatus = fs.get("status", "pending")
        if fstatus in TERMINAL_STATUSES:
            continue
        remaining_count += 1
        complexity = feature_complexity_map.get(fid, "medium")
        if complexity in avg_by_complexity:
            remaining_seconds += avg_by_complexity[complexity]
        else:
            # No history for this complexity: scale the global average by the
            # weight ratio relative to "medium".
            weight = COMPLEXITY_WEIGHT.get(complexity, 2.0)
            base_weight = COMPLEXITY_WEIGHT.get("medium", 2.0)
            remaining_seconds += global_avg * (weight / base_weight)

    # Confidence based on the completion ratio.
    total = len([f for f in features if isinstance(f, dict) and f.get("id")])
    completed = counts.get("completed", 0)
    if total > 0:
        ratio = completed / total
        if ratio >= 0.5:
            confidence = "high"
        elif ratio >= 0.25:
            confidence = "medium"
        else:
            confidence = "low"
    else:
        confidence = "low"

    return remaining_seconds, confidence
|
|
686
|
+
|
|
687
|
+
|
|
688
|
+
def action_status(feature_list_data, state_dir):
    """Print a formatted box overview of all features and their status.

    Reads each feature's status.json from *state_dir*, tallies per-status
    counts, renders one colored line per feature (with retry / checkpoint /
    blocked-by details), and prints a summary header with a progress bar
    and an ETA estimate.
    """
    features = feature_list_data.get("features", [])
    app_name = feature_list_data.get("app_name", "Unknown")

    counts = {"completed": 0, "in_progress": 0, "failed": 0, "pending": 0, "skipped": 0}
    feature_lines = []

    # Keep only well-formed entries (dict with a truthy "id") so the counts,
    # the "Total" line and the percentage are all computed over the same
    # population.  (Fix: the original used len(features) for the total, which
    # included malformed entries the counting loops skipped, so the progress
    # percentage could understate completion.)
    valid_features = [f for f in features if isinstance(f, dict) and f.get("id")]

    # First pass: feature_id -> status, used below to detect features whose
    # dependencies are not yet completed.
    status_map = {}
    for feature in valid_features:
        fid = feature.get("id")
        fs = load_feature_status(state_dir, fid)
        status_map[fid] = fs.get("status", "pending")

    # Per-status icon glyph and color; pending and unknown statuses fall back
    # to a gray empty box.  The same color is applied to the title text.
    STATUS_STYLE = {
        "completed": ("[✓]", COLOR_GREEN),
        "in_progress": ("[→]", COLOR_YELLOW),
        "failed": ("[✗]", COLOR_RED),
        "skipped": ("[—]", COLOR_GRAY),
    }

    for feature in valid_features:
        fid = feature.get("id")
        title = feature.get("title", "Untitled")

        fs = load_feature_status(state_dir, fid)
        fstatus = fs.get("status", "pending")
        retry_count = fs.get("retry_count", 0)
        max_retries_val = fs.get("max_retries", 3)
        resume_phase = fs.get("resume_from_phase")

        # Count statuses; anything unrecognized is treated as pending.
        if fstatus in counts:
            counts[fstatus] += 1
        else:
            counts["pending"] += 1

        glyph, color = STATUS_STYLE.get(fstatus, ("[ ]", COLOR_GRAY))
        icon = color + glyph + COLOR_RESET

        # Build the detail suffix: retry / checkpoint info for in-progress,
        # retry total for failed, blocking dependencies for pending.
        detail = ""
        if fstatus == "in_progress":
            parts = []
            if retry_count > 0:
                parts.append("retry {}/{}".format(retry_count, max_retries_val))
            if resume_phase is not None:
                parts.append("CP-{}".format(resume_phase))
            if parts:
                detail = " ({})".format(", ".join(parts))
        elif fstatus == "failed":
            detail = " (failed after {} retries)".format(retry_count)
        elif fstatus == "pending":
            deps = feature.get("dependencies", [])
            blocking = [
                d for d in deps
                if status_map.get(d, "pending") != "completed"
            ]
            if blocking:
                detail = " (blocked by {})".format(", ".join(blocking))

        feature_lines.append("{} {} {}{}".format(
            fid, icon, color + title + COLOR_RESET, detail
        ))

    total = len(valid_features)
    completed = counts["completed"]

    # Completion percentage, guarded against an empty feature list.
    if total > 0:
        percent = round(completed / total * 100, 1)
    else:
        percent = 0.0

    # Render the progress bar.
    progress_bar = _build_progress_bar(percent, width=24)

    # Estimate remaining wall-clock time from completed-feature history.
    est_remaining, confidence = _estimate_remaining_time(
        features, state_dir, counts
    )

    summary_line = "Total: {} features | Completed: {} | In Progress: {}".format(
        total, completed, counts["in_progress"]
    )
    summary_line2 = "Failed: {} | Pending: {} | Skipped: {}".format(
        counts["failed"], counts["pending"], counts["skipped"]
    )

    # Build the estimated-remaining-time line.
    CONFIDENCE_ICONS = {"high": "●", "medium": "◐", "low": "○"}
    if est_remaining is not None:
        eta_str = _format_duration(est_remaining)
        conf_icon = CONFIDENCE_ICONS.get(confidence, "○")
        eta_line = "ETA: ~{} (confidence: {} {})".format(
            eta_str, conf_icon, confidence
        )
    else:
        eta_line = "ETA: calculating... (need >=1 completed feature)"

    # Print the box.
    inner = BOX_WIDTH - 2  # printable width between the vertical bars
    print("╔" + "═" * BOX_WIDTH + "╗")
    print("║" + pad_right(COLOR_BOLD + " Dev-Pipeline Status" + COLOR_RESET, inner) + " ║")
    print("╠" + "═" * BOX_WIDTH + "╣")
    print("║" + pad_right(" App: {}".format(app_name), inner) + " ║")
    print("║" + pad_right(" {}".format(summary_line), inner) + " ║")
    print("║" + pad_right(" {}".format(summary_line2), inner) + " ║")
    print("╠" + "─" * BOX_WIDTH + "╣")
    print("║" + pad_right(" Progress: {}".format(progress_bar), inner) + " ║")
    print("║" + pad_right(" {}".format(eta_line), inner) + " ║")
    print("╠" + "═" * BOX_WIDTH + "╣")
    for line in feature_lines:
        print("║" + pad_right(" {}".format(line), inner) + " ║")
    print("╚" + "═" * BOX_WIDTH + "╝")
|
|
832
|
+
|
|
833
|
+
|
|
834
|
+
# ---------------------------------------------------------------------------
|
|
835
|
+
# Action: reset
|
|
836
|
+
# ---------------------------------------------------------------------------
|
|
837
|
+
|
|
838
|
+
def action_reset(args, feature_list_path, state_dir):
    """Reset a feature back to the pending state.

    Rewrites status.json (status -> pending, retry_count -> 0, session
    history emptied, resume_from_phase cleared) and marks the feature as
    pending in feature-list.json. No files on disk are deleted.
    """
    feature_id = args.feature_id
    if not feature_id:
        error_out("--feature-id is required for 'reset' action")
        return

    # Capture the previous state for the report before overwriting it
    # (also preserves created_at, since we only touch the reset fields).
    status_doc = load_feature_status(state_dir, feature_id)
    previous_status = status_doc.get("status", "unknown")
    previous_retry = status_doc.get("retry_count", 0)

    status_doc.update({
        "status": "pending",
        "retry_count": 0,
        "sessions": [],
        "last_session_id": None,
        "resume_from_phase": None,
        "updated_at": now_iso(),
    })

    save_err = save_feature_status(state_dir, feature_id, status_doc)
    if save_err:
        error_out("Failed to save feature status: {}".format(save_err))
        return

    list_err = update_feature_in_list(feature_list_path, feature_id, "pending")
    if list_err:
        error_out("Failed to update feature-list.json: {}".format(list_err))
        return

    print(json.dumps({
        "action": "reset",
        "feature_id": feature_id,
        "old_status": previous_status,
        "old_retry_count": previous_retry,
        "new_status": "pending",
    }, indent=2, ensure_ascii=False))
|
|
883
|
+
|
|
884
|
+
|
|
885
|
+
# ---------------------------------------------------------------------------
|
|
886
|
+
# Action: clean
|
|
887
|
+
# ---------------------------------------------------------------------------
|
|
888
|
+
|
|
889
|
+
def action_clean(args, feature_list_path, state_dir):
    """Reset a feature AND delete all of its on-disk artifacts.

    Removes:
      - state/features/<id>/sessions/  (all recorded session history)
      - .prizmkit/specs/<slug>/        (spec, plan, tasks, contracts)
      - .dev-team/                     (shared AI transient workspace)
      - state/current-session.json     (only if it points at this feature)

    Afterwards performs the same state reset as action_reset.
    """
    feature_id = args.feature_id
    feature_slug = args.feature_slug
    project_root = args.project_root

    if not feature_id:
        error_out("--feature-id is required for 'clean' action")
        return
    if not feature_slug:
        error_out("--feature-slug is required for 'clean' action")
        return
    if not project_root:
        error_out("--project-root is required for 'clean' action")
        return

    removal_log = []

    # 1. Wipe this feature's recorded session history.
    sessions_dir = os.path.join(state_dir, "features", feature_id, "sessions")
    sessions_deleted = 0
    if os.path.isdir(sessions_dir):
        for entry in os.listdir(sessions_dir):
            candidate = os.path.join(sessions_dir, entry)
            if os.path.isdir(candidate):
                shutil.rmtree(candidate)
                sessions_deleted += 1
        removal_log.append("Deleted {} session(s) from {}".format(
            sessions_deleted, sessions_dir
        ))

    # 2. Remove the prizmkit spec directory for this feature.
    specs_dir = os.path.join(project_root, ".prizmkit", "specs", feature_slug)
    if os.path.isdir(specs_dir):
        file_count = sum(len(names) for _, _, names in os.walk(specs_dir))
        shutil.rmtree(specs_dir)
        removal_log.append("Deleted {} ({} files)".format(specs_dir, file_count))

    # 3. Remove the global dev-team workspace (shared AI transient context).
    dev_team_dir = os.path.join(project_root, ".dev-team")
    if os.path.isdir(dev_team_dir):
        file_count = sum(len(names) for _, _, names in os.walk(dev_team_dir))
        shutil.rmtree(dev_team_dir)
        removal_log.append("Deleted {} ({} files)".format(dev_team_dir, file_count))

    # 4. Drop the current-session pointer when it references this feature.
    current_session_path = os.path.join(state_dir, "current-session.json")
    if os.path.isfile(current_session_path):
        pointer, _ = load_json_file(current_session_path)
        if pointer and pointer.get("feature_id") == feature_id:
            os.remove(current_session_path)
            removal_log.append("Deleted {}".format(current_session_path))

    # 5. Reset the persisted status (mirrors action_reset).
    status_doc = load_feature_status(state_dir, feature_id)
    previous_status = status_doc.get("status", "unknown")
    previous_retry = status_doc.get("retry_count", 0)

    status_doc["status"] = "pending"
    status_doc["retry_count"] = 0
    status_doc["sessions"] = []
    status_doc["last_session_id"] = None
    status_doc["resume_from_phase"] = None
    status_doc["updated_at"] = now_iso()

    save_err = save_feature_status(state_dir, feature_id, status_doc)
    if save_err:
        error_out("Failed to save feature status: {}".format(save_err))
        return

    list_err = update_feature_in_list(feature_list_path, feature_id, "pending")
    if list_err:
        error_out("Failed to update feature-list.json: {}".format(list_err))
        return

    print(json.dumps({
        "action": "clean",
        "feature_id": feature_id,
        "feature_slug": feature_slug,
        "old_status": previous_status,
        "old_retry_count": previous_retry,
        "new_status": "pending",
        "sessions_deleted": sessions_deleted,
        "cleaned": removal_log,
    }, indent=2, ensure_ascii=False))
|
|
984
|
+
|
|
985
|
+
|
|
986
|
+
# ---------------------------------------------------------------------------
|
|
987
|
+
# Action: pause
|
|
988
|
+
# ---------------------------------------------------------------------------
|
|
989
|
+
|
|
990
|
+
def action_pause(state_dir):
    """Persist a 'paused' marker in pipeline.json for graceful shutdown."""
    pipeline_path = os.path.join(state_dir, "pipeline.json")

    data, load_err = load_json_file(pipeline_path)
    if load_err:
        # No readable pipeline.json yet: start from a minimal document.
        data = {"status": "paused", "paused_at": now_iso()}
    else:
        data["status"] = "paused"
        data["paused_at"] = now_iso()

    write_err = write_json_file(pipeline_path, data)
    if write_err:
        error_out("Failed to write pipeline.json: {}".format(write_err))
        return

    print(json.dumps({
        "action": "pause",
        "status": "paused",
        "paused_at": data["paused_at"],
    }, indent=2, ensure_ascii=False))
|
|
1016
|
+
|
|
1017
|
+
|
|
1018
|
+
# ---------------------------------------------------------------------------
|
|
1019
|
+
# Helpers
|
|
1020
|
+
# ---------------------------------------------------------------------------
|
|
1021
|
+
|
|
1022
|
+
def error_out(message):
    """Emit *message* as a JSON error document and terminate with exit code 1."""
    print(json.dumps({"error": message}, indent=2, ensure_ascii=False))
    sys.exit(1)
|
|
1027
|
+
|
|
1028
|
+
|
|
1029
|
+
# ---------------------------------------------------------------------------
|
|
1030
|
+
# Main
|
|
1031
|
+
# ---------------------------------------------------------------------------
|
|
1032
|
+
|
|
1033
|
+
def main():
    """Parse CLI arguments, validate them per action, and dispatch."""
    args = parse_args()
    action = args.action

    # Per-action argument validation (error_out exits the process on failure).
    if action == "update":
        if not args.feature_id:
            error_out("--feature-id is required for 'update' action")
        if not args.session_status:
            error_out("--session-status is required for 'update' action")
    if action in ("reset", "clean", "complete") and not args.feature_id:
        error_out("--feature-id is required for '{}' action".format(action))
    if action == "clean":
        if not args.feature_slug:
            error_out("--feature-slug is required for 'clean' action")
        if not args.project_root:
            error_out("--project-root is required for 'clean' action")

    # Load the feature list shared by every action.
    feature_list_data, load_err = load_json_file(args.feature_list)
    if load_err:
        error_out("Cannot load feature list: {}".format(load_err))

    # Dispatch to the matching action handler.
    if action == "get_next":
        action_get_next(feature_list_data, args.state_dir)
    elif action == "status":
        action_status(feature_list_data, args.state_dir)
    elif action == "pause":
        action_pause(args.state_dir)
    elif action == "update":
        action_update(args, args.feature_list, args.state_dir)
    elif action == "complete":
        # 'complete' is shorthand for 'update --session-status success'.
        args.session_status = "success"
        action_update(args, args.feature_list, args.state_dir)
    elif action == "reset":
        action_reset(args, args.feature_list, args.state_dir)
    elif action == "clean":
        action_clean(args, args.feature_list, args.state_dir)
|
|
1073
|
+
|
|
1074
|
+
|
|
1075
|
+
# Standard script entry point: run main() only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
|