prizmkit 1.0.13 → 1.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. package/bin/create-prizmkit.js +4 -1
  2. package/bundled/VERSION.json +3 -3
  3. package/bundled/adapters/claude/command-adapter.js +35 -4
  4. package/bundled/adapters/claude/rules-adapter.js +6 -58
  5. package/bundled/adapters/claude/team-adapter.js +2 -2
  6. package/bundled/adapters/codebuddy/agent-adapter.js +0 -1
  7. package/bundled/adapters/codebuddy/rules-adapter.js +30 -0
  8. package/bundled/adapters/shared/frontmatter.js +3 -1
  9. package/bundled/dev-pipeline/README.md +13 -3
  10. package/bundled/dev-pipeline/launch-bugfix-daemon.sh +10 -0
  11. package/bundled/dev-pipeline/launch-daemon.sh +18 -4
  12. package/bundled/dev-pipeline/lib/common.sh +105 -0
  13. package/bundled/dev-pipeline/run-bugfix.sh +57 -57
  14. package/bundled/dev-pipeline/run.sh +75 -59
  15. package/bundled/dev-pipeline/scripts/check-session-status.py +47 -2
  16. package/bundled/dev-pipeline/scripts/cleanup-logs.py +192 -0
  17. package/bundled/dev-pipeline/scripts/detect-stuck.py +15 -3
  18. package/bundled/dev-pipeline/scripts/generate-bootstrap-prompt.py +32 -27
  19. package/bundled/dev-pipeline/scripts/generate-bugfix-prompt.py +23 -23
  20. package/bundled/dev-pipeline/scripts/update-feature-status.py +50 -2
  21. package/bundled/dev-pipeline/scripts/utils.py +22 -0
  22. package/bundled/dev-pipeline/templates/bootstrap-tier1.md +18 -1
  23. package/bundled/dev-pipeline/templates/bootstrap-tier2.md +19 -1
  24. package/bundled/dev-pipeline/templates/bootstrap-tier3.md +18 -2
  25. package/bundled/dev-pipeline/templates/session-status-schema.json +7 -1
  26. package/bundled/dev-pipeline/tests/__init__.py +0 -0
  27. package/bundled/dev-pipeline/tests/conftest.py +133 -0
  28. package/bundled/dev-pipeline/tests/test_check_session.py +127 -0
  29. package/bundled/dev-pipeline/tests/test_cleanup_logs.py +119 -0
  30. package/bundled/dev-pipeline/tests/test_detect_stuck.py +207 -0
  31. package/bundled/dev-pipeline/tests/test_generate_bugfix_prompt.py +181 -0
  32. package/bundled/dev-pipeline/tests/test_generate_prompt.py +190 -0
  33. package/bundled/dev-pipeline/tests/test_init_bugfix_pipeline.py +153 -0
  34. package/bundled/dev-pipeline/tests/test_init_pipeline.py +241 -0
  35. package/bundled/dev-pipeline/tests/test_update_bug_status.py +142 -0
  36. package/bundled/dev-pipeline/tests/test_update_feature_status.py +277 -0
  37. package/bundled/dev-pipeline/tests/test_utils.py +141 -0
  38. package/bundled/rules/USAGE.md +153 -0
  39. package/bundled/rules/_rules-metadata.json +43 -0
  40. package/bundled/rules/general/prefer-linux-commands.md +9 -0
  41. package/bundled/rules/prizm/prizm-commit-workflow.md +10 -0
  42. package/bundled/rules/prizm/prizm-documentation.md +19 -0
  43. package/bundled/rules/prizm/prizm-progressive-loading.md +11 -0
  44. package/bundled/skills/_metadata.json +130 -67
  45. package/bundled/skills/app-planner/SKILL.md +252 -499
  46. package/bundled/skills/app-planner/assets/evaluation-guide.md +44 -0
  47. package/bundled/skills/app-planner/scripts/validate-and-generate.py +143 -4
  48. package/bundled/skills/bug-planner/SKILL.md +58 -13
  49. package/bundled/skills/bugfix-pipeline-launcher/SKILL.md +5 -7
  50. package/bundled/skills/dev-pipeline-launcher/SKILL.md +16 -7
  51. package/bundled/skills/feature-workflow/SKILL.md +175 -234
  52. package/bundled/skills/prizm-kit/SKILL.md +17 -31
  53. package/bundled/skills/{prizmkit-adr-manager → prizmkit-tool-adr-manager}/SKILL.md +6 -7
  54. package/bundled/skills/{prizmkit-api-doc-generator → prizmkit-tool-api-doc-generator}/SKILL.md +4 -5
  55. package/bundled/skills/{prizmkit-bug-reproducer → prizmkit-tool-bug-reproducer}/SKILL.md +4 -5
  56. package/bundled/skills/{prizmkit-ci-cd-generator → prizmkit-tool-ci-cd-generator}/SKILL.md +4 -5
  57. package/bundled/skills/{prizmkit-db-migration → prizmkit-tool-db-migration}/SKILL.md +4 -5
  58. package/bundled/skills/{prizmkit-dependency-health → prizmkit-tool-dependency-health}/SKILL.md +3 -4
  59. package/bundled/skills/{prizmkit-deployment-strategy → prizmkit-tool-deployment-strategy}/SKILL.md +4 -5
  60. package/bundled/skills/{prizmkit-error-triage → prizmkit-tool-error-triage}/SKILL.md +4 -5
  61. package/bundled/skills/{prizmkit-log-analyzer → prizmkit-tool-log-analyzer}/SKILL.md +4 -5
  62. package/bundled/skills/{prizmkit-monitoring-setup → prizmkit-tool-monitoring-setup}/SKILL.md +4 -5
  63. package/bundled/skills/{prizmkit-onboarding-generator → prizmkit-tool-onboarding-generator}/SKILL.md +4 -5
  64. package/bundled/skills/{prizmkit-perf-profiler → prizmkit-tool-perf-profiler}/SKILL.md +4 -5
  65. package/bundled/skills/{prizmkit-security-audit → prizmkit-tool-security-audit}/SKILL.md +3 -4
  66. package/bundled/skills/{prizmkit-tech-debt-tracker → prizmkit-tool-tech-debt-tracker}/SKILL.md +3 -4
  67. package/bundled/skills/refactor-skill/SKILL.md +371 -0
  68. package/bundled/skills/refactor-workflow/SKILL.md +17 -119
  69. package/package.json +1 -1
  70. package/src/external-skills.js +71 -0
  71. package/src/index.js +62 -4
  72. package/src/metadata.js +36 -0
  73. package/src/scaffold.js +136 -32
  74. package/bundled/skills/prizmkit-bug-fix-workflow/SKILL.md +0 -356
  75. package/bundled/templates/claude-md-template.md +0 -38
  76. package/bundled/templates/codebuddy-md-template.md +0 -35
  77. package/bundled/skills/{prizmkit-adr-manager → prizmkit-tool-adr-manager}/assets/adr-template.md +0 -0
@@ -0,0 +1,241 @@
1
+ """Tests for init-pipeline.py."""
2
+
3
+ import json
4
+ import os
5
+ import sys
6
+ import importlib
7
+ import pytest
8
+
9
+
10
# The target script's filename contains a hyphen, so it cannot be loaded
# with a plain ``import`` statement; use importlib machinery instead.
def _import_init_pipeline():
    import importlib.util
    script_path = os.path.join(
        os.path.dirname(__file__), "..", "scripts", "init-pipeline.py"
    )
    spec = importlib.util.spec_from_file_location("init_pipeline", script_path)
    module = importlib.util.module_from_spec(spec)
    # Register under a stable name before executing the module body
    # (presumably so intra-module lookups resolve — confirm against script).
    sys.modules["init_pipeline"] = module
    spec.loader.exec_module(module)
    return module
22
+
23
+
24
# Load the script under test once at import time and re-export the
# functions the test classes below exercise directly.
init_pipeline = _import_init_pipeline()
validate_schema = init_pipeline.validate_schema
validate_features = init_pipeline.validate_features
check_dag = init_pipeline.check_dag
create_state_directory = init_pipeline.create_state_directory
29
+
30
+
31
class TestValidateSchema:
    """validate_schema(): top-level feature-list document checks."""

    @staticmethod
    def _document(**overrides):
        # Baseline valid top-level document; tests override or delete fields.
        doc = {
            "$schema": "dev-pipeline-feature-list-v1",
            "app_name": "Test",
            "features": [],
        }
        doc.update(overrides)
        return doc

    def test_valid_schema(self, sample_feature_list):
        assert validate_schema(sample_feature_list) == []

    def test_wrong_schema_value(self):
        errors = validate_schema(self._document(**{"$schema": "wrong-schema"}))
        assert len(errors) == 1
        assert "Invalid $schema" in errors[0]

    def test_missing_schema(self):
        doc = self._document()
        del doc["$schema"]
        errors = validate_schema(doc)
        assert any("$schema" in e for e in errors)

    def test_missing_app_name(self):
        doc = self._document()
        del doc["app_name"]
        errors = validate_schema(doc)
        assert any("app_name" in e for e in errors)

    def test_empty_app_name(self):
        errors = validate_schema(self._document(app_name=" "))
        assert any("app_name" in e for e in errors)

    def test_missing_features(self):
        doc = self._document()
        del doc["features"]
        errors = validate_schema(doc)
        assert any("features" in e for e in errors)

    def test_features_not_array(self):
        errors = validate_schema(self._document(features="not an array"))
        assert any("features must be an array" in e for e in errors)
84
+
85
+
86
class TestValidateFeatures:
    """validate_features(): per-feature field and cross-reference checks."""

    @staticmethod
    def _feature(**overrides):
        # Minimal valid feature record; tests override individual fields.
        record = {
            "id": "F-001",
            "title": "A",
            "description": "D",
            "priority": 1,
            "dependencies": [],
            "acceptance_criteria": [],
            "status": "pending",
        }
        record.update(overrides)
        return record

    def test_valid_features(self, sample_feature_list):
        errors, ids = validate_features(sample_feature_list["features"])
        assert errors == []
        assert ids == {"F-001", "F-002", "F-003"}

    def test_duplicate_ids(self):
        errors, _ = validate_features(
            [self._feature(), self._feature(title="B", priority=2)]
        )
        assert any("Duplicate" in e for e in errors)

    def test_invalid_id_format(self):
        errors, _ = validate_features([self._feature(id="X-001")])
        assert any("invalid id" in e for e in errors)

    def test_missing_required_fields(self):
        errors, _ = validate_features([{"id": "F-001"}])
        # Missing title, description, priority, etc. should all be reported.
        assert len(errors) > 0
        assert any("missing required field" in e for e in errors)

    def test_non_object_feature(self):
        errors, _ = validate_features(["not a dict"])
        assert any("not an object" in e for e in errors)

    def test_unknown_dependency(self):
        errors, _ = validate_features([self._feature(dependencies=["F-999"])])
        assert any("unknown feature" in e for e in errors)

    def test_dependencies_not_array(self):
        errors, _ = validate_features([self._feature(dependencies="not-a-list")])
        assert any("dependencies must be an array" in e for e in errors)
137
+
138
+
139
class TestCheckDag:
    """check_dag(): dependency-graph cycle detection."""

    @staticmethod
    def _graph(edges):
        # edges: iterable of (feature_id, dependency_list) pairs.
        return [{"id": fid, "dependencies": deps} for fid, deps in edges]

    def test_simple_chain(self):
        graph = self._graph(
            [("F-001", []), ("F-002", ["F-001"]), ("F-003", ["F-002"])]
        )
        assert check_dag(graph) == []

    def test_diamond_dependency(self):
        graph = self._graph([
            ("F-001", []),
            ("F-002", ["F-001"]),
            ("F-003", ["F-001"]),
            ("F-004", ["F-002", "F-003"]),
        ])
        assert check_dag(graph) == []

    def test_circular_dependency(self):
        graph = self._graph(
            [("F-001", ["F-003"]), ("F-002", ["F-001"]), ("F-003", ["F-002"])]
        )
        errors = check_dag(graph)
        assert len(errors) == 1
        assert "cycle" in errors[0].lower()

    def test_self_dependency(self):
        errors = check_dag(self._graph([("F-001", ["F-001"])]))
        assert len(errors) == 1
        assert "cycle" in errors[0].lower()

    def test_no_features(self):
        assert check_dag([]) == []

    def test_independent_features(self):
        graph = self._graph([("F-001", []), ("F-002", [])])
        assert check_dag(graph) == []
188
+
189
+
190
class TestCreateStateDirectory:
    """create_state_directory(): on-disk pipeline state layout."""

    @staticmethod
    def _bootstrap(tmp_path, feature_list, list_name):
        # Serialize the feature list to disk, then initialize state.
        state_dir = str(tmp_path / "state")
        list_path = str(tmp_path / list_name)
        with open(list_path, "w") as fh:
            json.dump(feature_list, fh)
        result = create_state_directory(
            state_dir, list_path, feature_list["features"]
        )
        return result, state_dir

    def test_creates_expected_structure(self, tmp_path, sample_feature_list):
        result, _ = self._bootstrap(
            tmp_path, sample_feature_list, "feature-list.json"
        )

        assert os.path.isdir(result)
        assert os.path.isfile(os.path.join(result, "pipeline.json"))

        # Every feature gets a directory holding status.json and sessions/.
        for feature in sample_feature_list["features"]:
            fdir = os.path.join(result, "features", feature["id"])
            assert os.path.isdir(fdir)
            assert os.path.isfile(os.path.join(fdir, "status.json"))
            assert os.path.isdir(os.path.join(fdir, "sessions"))

    def test_pipeline_json_contents(self, tmp_path, sample_feature_list):
        _, state_dir = self._bootstrap(tmp_path, sample_feature_list, "fl.json")

        with open(os.path.join(state_dir, "pipeline.json")) as fh:
            pipeline = json.load(fh)

        assert pipeline["status"] == "initialized"
        assert pipeline["total_features"] == 3
        assert pipeline["completed_features"] == 0
        assert pipeline["run_id"].startswith("run-")

    def test_feature_status_contents(self, tmp_path, sample_feature_list):
        _, state_dir = self._bootstrap(tmp_path, sample_feature_list, "fl.json")

        with open(os.path.join(state_dir, "features", "F-001", "status.json")) as fh:
            status = json.load(fh)

        assert status["feature_id"] == "F-001"
        assert status["status"] == "pending"
        assert status["retry_count"] == 0
@@ -0,0 +1,142 @@
1
+ """Tests for update-bug-status.py."""
2
+
3
+ import json
4
+ import os
5
+ import re
6
+ import sys
7
+ import pytest
8
+
9
+
10
def _import_update_bug_status():
    # The script's filename contains a hyphen, so a plain ``import``
    # cannot load it; fall back to importlib machinery.
    import importlib.util
    script_path = os.path.join(
        os.path.dirname(__file__), "..", "scripts", "update-bug-status.py"
    )
    spec = importlib.util.spec_from_file_location("update_bug_status", script_path)
    module = importlib.util.module_from_spec(spec)
    # Register under a stable name before executing the module body.
    sys.modules["update_bug_status"] = module
    spec.loader.exec_module(module)
    return module
20
+
21
+
22
# Load the script under test once at import time and re-export the
# helpers the test classes below exercise.
ubs = _import_update_bug_status()
now_iso = ubs.now_iso
load_bug_status = ubs.load_bug_status
save_bug_status = ubs.save_bug_status
action_get_next = ubs.action_get_next
SEVERITY_PRIORITY = ubs.SEVERITY_PRIORITY
28
+
29
+
30
class TestNowIso:
    """now_iso() must emit second-resolution UTC-style timestamp strings."""

    def test_valid_format(self):
        stamp = now_iso()
        iso_re = re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z$")
        assert iso_re.match(stamp) is not None
35
+
36
+
37
class TestLoadAndSaveBugStatus:
    """Round-trip and fallback behavior of bug-status persistence."""

    @staticmethod
    def _bug_dir(root, bug_id):
        # Ensure the per-bug state directory exists and return its path.
        path = os.path.join(root, "bugs", bug_id)
        os.makedirs(path, exist_ok=True)
        return path

    def test_round_trip(self, bugfix_state_dir):
        bug_id = "B-001"
        self._bug_dir(bugfix_state_dir, bug_id)

        record = {
            "bug_id": bug_id,
            "status": "in_progress",
            "retry_count": 2,
            "max_retries": 3,
            "sessions": ["s-001"],
            "last_session_id": "s-001",
            "resume_from_phase": None,
            "created_at": "2024-01-01T00:00:00Z",
            "updated_at": "2024-01-01T01:00:00Z",
        }
        assert save_bug_status(bugfix_state_dir, bug_id, record) is None

        reloaded = load_bug_status(bugfix_state_dir, bug_id)
        assert reloaded["bug_id"] == bug_id
        assert reloaded["status"] == "in_progress"
        assert reloaded["retry_count"] == 2

    def test_load_missing_returns_default(self, bugfix_state_dir):
        record = load_bug_status(bugfix_state_dir, "B-999")
        assert record["bug_id"] == "B-999"
        assert record["status"] == "pending"
        assert record["retry_count"] == 0

    def test_load_invalid_json_returns_default(self, bugfix_state_dir):
        path = self._bug_dir(bugfix_state_dir, "B-BAD")
        with open(os.path.join(path, "status.json"), "w") as fh:
            fh.write("not json!")
        record = load_bug_status(bugfix_state_dir, "B-BAD")
        assert record["status"] == "pending"
76
+
77
+
78
class TestActionGetNext:
    """Bug selection: in-progress first, then severity, then priority."""

    @staticmethod
    def _mark(root, bug_id, payload):
        # Write a status.json for a bug so selection sees prior state.
        path = os.path.join(root, "bugs", bug_id)
        os.makedirs(path, exist_ok=True)
        with open(os.path.join(path, "status.json"), "w") as fh:
            json.dump(payload, fh)

    @staticmethod
    def _next(bug_list, root, capsys):
        # Run the action and hand back whatever it printed.
        action_get_next(bug_list, root)
        return capsys.readouterr().out

    def test_selects_critical_before_medium(self, bugfix_state_dir, capsys):
        bug_list = {
            "bugs": [
                {"id": "B-001", "title": "Low", "severity": "medium", "priority": 1},
                {"id": "B-002", "title": "High", "severity": "critical", "priority": 2},
            ]
        }
        chosen = json.loads(self._next(bug_list, bugfix_state_dir, capsys))
        assert chosen["bug_id"] == "B-002"
        assert chosen["severity"] == "critical"

    def test_same_severity_sorted_by_priority(self, bugfix_state_dir, capsys):
        bug_list = {
            "bugs": [
                {"id": "B-001", "title": "P3", "severity": "high", "priority": 3},
                {"id": "B-002", "title": "P1", "severity": "high", "priority": 1},
            ]
        }
        chosen = json.loads(self._next(bug_list, bugfix_state_dir, capsys))
        assert chosen["bug_id"] == "B-002"

    def test_all_completed_prints_pipeline_complete(self, bugfix_state_dir, capsys):
        bug_list = {"bugs": [{"id": "B-001", "title": "Done", "severity": "low"}]}
        self._mark(bugfix_state_dir, "B-001", {"status": "completed"})
        assert "PIPELINE_COMPLETE" in self._next(bug_list, bugfix_state_dir, capsys)

    def test_empty_bugs_prints_pipeline_complete(self, bugfix_state_dir, capsys):
        assert "PIPELINE_COMPLETE" in self._next({"bugs": []}, bugfix_state_dir, capsys)

    def test_prefers_in_progress_over_pending(self, bugfix_state_dir, capsys):
        bug_list = {
            "bugs": [
                {"id": "B-001", "title": "Pending", "severity": "critical", "priority": 1},
                {"id": "B-002", "title": "In Progress", "severity": "low", "priority": 2},
            ]
        }
        self._mark(bugfix_state_dir, "B-002", {"status": "in_progress", "retry_count": 0})
        chosen = json.loads(self._next(bug_list, bugfix_state_dir, capsys))
        assert chosen["bug_id"] == "B-002"
136
+
137
+
138
class TestSeverityPriority:
    """Severity ranks must strictly ascend from critical down to low."""

    def test_order(self):
        ranks = [SEVERITY_PRIORITY[s] for s in ("critical", "high", "medium", "low")]
        for higher, lower in zip(ranks, ranks[1:]):
            assert higher < lower
@@ -0,0 +1,277 @@
1
+ """Tests for update-feature-status.py."""
2
+
3
+ import json
4
+ import os
5
+ import re
6
+ import sys
7
+ import pytest
8
+ from types import SimpleNamespace
9
+
10
+
11
def _import_update_feature_status():
    # The script's filename contains a hyphen, so a plain ``import``
    # cannot load it; fall back to importlib machinery.
    import importlib.util
    script_path = os.path.join(
        os.path.dirname(__file__), "..", "scripts", "update-feature-status.py"
    )
    spec = importlib.util.spec_from_file_location("update_feature_status", script_path)
    module = importlib.util.module_from_spec(spec)
    # Register under a stable name before executing the module body.
    sys.modules["update_feature_status"] = module
    spec.loader.exec_module(module)
    return module
21
+
22
+
23
# Load the script under test once at import time and re-export the
# public and private helpers exercised by the test classes below.
ufs = _import_update_feature_status()
now_iso = ufs.now_iso
load_feature_status = ufs.load_feature_status
save_feature_status = ufs.save_feature_status
_build_feature_slug = ufs._build_feature_slug
_format_duration = ufs._format_duration
_calc_feature_duration = ufs._calc_feature_duration
30
+
31
+
32
class TestSessionStatusValues:
    """Degraded finalization outcomes must be recognized session statuses."""

    def test_contains_commit_missing(self):
        values = ufs.SESSION_STATUS_VALUES
        assert "commit_missing" in values

    def test_contains_docs_missing(self):
        values = ufs.SESSION_STATUS_VALUES
        assert "docs_missing" in values
38
+
39
+
40
class TestNowIso:
    """now_iso() must emit second-resolution UTC-style timestamp strings."""

    def test_returns_valid_iso_format(self):
        stamp = now_iso()
        # Expected shape: YYYY-MM-DDTHH:MM:SSZ
        iso_re = re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z$")
        assert iso_re.match(stamp) is not None

    def test_returns_string(self):
        assert isinstance(now_iso(), str)
49
+
50
+
51
class TestLoadAndSaveFeatureStatus:
    """Round-trip and fallback behavior of feature-status persistence."""

    @staticmethod
    def _feature_dir(root, fid):
        # Ensure the per-feature state directory exists; return its path.
        path = os.path.join(root, "features", fid)
        os.makedirs(path, exist_ok=True)
        return path

    def test_round_trip(self, state_dir):
        fid = "F-001"
        self._feature_dir(state_dir, fid)

        record = {
            "feature_id": fid,
            "status": "in_progress",
            "retry_count": 1,
            "max_retries": 3,
            "sessions": ["s-001"],
            "last_session_id": "s-001",
            "resume_from_phase": "3",
            "created_at": "2024-01-01T00:00:00Z",
            "updated_at": "2024-01-01T01:00:00Z",
        }
        assert save_feature_status(state_dir, fid, record) is None

        reloaded = load_feature_status(state_dir, fid)
        assert reloaded["feature_id"] == fid
        assert reloaded["status"] == "in_progress"
        assert reloaded["retry_count"] == 1

    def test_load_missing_returns_default(self, state_dir):
        record = load_feature_status(state_dir, "F-999")
        assert record["feature_id"] == "F-999"
        assert record["status"] == "pending"
        assert record["retry_count"] == 0

    def test_load_invalid_json_returns_default(self, state_dir):
        path = self._feature_dir(state_dir, "F-BAD")
        with open(os.path.join(path, "status.json"), "w") as fh:
            fh.write("{invalid json")
        record = load_feature_status(state_dir, "F-BAD")
        assert record["status"] == "pending"
91
+
92
+
93
class TestBuildFeatureSlug:
    """_build_feature_slug(): zero-padded number plus kebab-cased title."""

    def test_basic(self):
        assert _build_feature_slug("F-001", "Project Setup") == "001-project-setup"

    def test_special_chars(self):
        assert _build_feature_slug("F-002", "Auth (OAuth2.0)") == "002-auth-oauth20"

    def test_empty_title(self):
        assert _build_feature_slug("F-003", "") == "003-feature"

    def test_none_title(self):
        assert _build_feature_slug("F-003", None) == "003-feature"

    def test_lowercase_f(self):
        assert _build_feature_slug("f-5", "Test") == "005-test"

    def test_numeric_padding(self):
        assert _build_feature_slug("F-1", "A").startswith("001-")
116
+
117
+
118
class TestFormatDuration:
    """_format_duration(): human-readable rendering of a seconds count."""

    @staticmethod
    def _check(seconds, expected):
        assert _format_duration(seconds) == expected

    def test_none(self):
        self._check(None, "N/A")

    def test_seconds(self):
        self._check(45, "45s")

    def test_minutes(self):
        self._check(125, "2m5s")

    def test_hours(self):
        self._check(3661, "1h1m")

    def test_zero(self):
        self._check(0, "0s")

    def test_exact_minute(self):
        self._check(60, "1m0s")

    def test_exact_hour(self):
        self._check(3600, "1h0m")
139
+
140
+
141
class TestCalcFeatureDuration:
    """_calc_feature_duration(): elapsed seconds from status timestamps."""

    @staticmethod
    def _seed(state_dir, fid, payload):
        # Write a status.json for the feature under the state directory.
        fdir = os.path.join(state_dir, "features", fid)
        os.makedirs(fdir, exist_ok=True)
        with open(os.path.join(fdir, "status.json"), "w") as fh:
            json.dump(payload, fh)

    def test_valid_duration(self, state_dir):
        self._seed(state_dir, "F-001", {
            "created_at": "2024-01-01T00:00:00Z",
            "updated_at": "2024-01-01T00:05:00Z",  # 300 seconds apart
        })
        assert _calc_feature_duration(state_dir, "F-001") == 300.0

    def test_too_short_duration(self, state_dir):
        self._seed(state_dir, "F-002", {
            "created_at": "2024-01-01T00:00:00Z",
            "updated_at": "2024-01-01T00:00:05Z",  # 5s — below the 10s threshold
        })
        assert _calc_feature_duration(state_dir, "F-002") is None

    def test_missing_file(self, state_dir):
        assert _calc_feature_duration(state_dir, "F-MISSING") is None

    def test_missing_timestamps(self, state_dir):
        self._seed(state_dir, "F-003", {"status": "pending"})
        assert _calc_feature_duration(state_dir, "F-003") is None
180
+
181
+
182
class TestActionUpdateDegradedStatuses:
    """action_update() on degraded session outcomes: state changes, no cleanup."""

    def _run(self, feature_list_file, state_dir, monkeypatch, capsys,
             session_status, session_id, max_retries):
        # Stub out artifact cleanup and count invocations; degraded
        # finalization statuses must never trigger it.
        calls = {"cleanup": 0}

        def _no_cleanup(**_ignored):
            calls["cleanup"] += 1
            return []

        monkeypatch.setattr(ufs, "cleanup_feature_artifacts", _no_cleanup)

        args = SimpleNamespace(
            feature_id="F-001",
            session_status=session_status,
            session_id=session_id,
            max_retries=max_retries,
            project_root=None,
        )
        ufs.action_update(args, feature_list_file, state_dir)
        summary = json.loads(capsys.readouterr().out)
        return summary, calls

    def test_commit_missing_updates_feature_state_without_cleanup(
        self, feature_list_file, state_dir, monkeypatch, capsys
    ):
        summary, calls = self._run(
            feature_list_file, state_dir, monkeypatch, capsys,
            "commit_missing", "s-commit-missing", 3,
        )

        record = ufs.load_feature_status(state_dir, "F-001")
        assert record["status"] == "commit_missing"
        assert record["retry_count"] == 1
        assert summary["new_status"] == "commit_missing"
        assert summary["degraded_reason"] == "commit_missing"
        assert summary["restart_policy"] == "finalization_retry"
        assert calls["cleanup"] == 0

        with open(feature_list_file, "r", encoding="utf-8") as fh:
            listing = json.load(fh)
        entry = next(x for x in listing["features"] if x["id"] == "F-001")
        assert entry["status"] == "commit_missing"

    def test_docs_missing_reaches_failed_when_retry_exhausted(
        self, feature_list_file, state_dir, monkeypatch, capsys
    ):
        summary, calls = self._run(
            feature_list_file, state_dir, monkeypatch, capsys,
            "docs_missing", "s-docs-missing", 1,
        )

        record = ufs.load_feature_status(state_dir, "F-001")
        assert record["status"] == "failed"
        assert record["retry_count"] == 1
        assert summary["new_status"] == "failed"
        assert summary["degraded_reason"] == "docs_missing"
        assert summary["restart_policy"] == "finalization_retry"
        assert calls["cleanup"] == 0

        with open(feature_list_file, "r", encoding="utf-8") as fh:
            listing = json.load(fh)
        entry = next(x for x in listing["features"] if x["id"] == "F-001")
        assert entry["status"] == "failed"
254
+
255
+
256
class TestActionStatusForDegradedStates:
    """action_status() must surface degraded counters and per-feature rows."""

    def test_status_output_contains_degraded_counters(self, feature_list_file, state_dir, capsys):
        # Seed one commit_missing and one docs_missing feature status.
        seeds = {
            "F-001": {"status": "commit_missing", "retry_count": 1, "max_retries": 3},
            "F-002": {"status": "docs_missing", "retry_count": 2, "max_retries": 3},
        }
        for fid, payload in seeds.items():
            fdir = os.path.join(state_dir, "features", fid)
            os.makedirs(fdir, exist_ok=True)
            with open(os.path.join(fdir, "status.json"), "w", encoding="utf-8") as fh:
                json.dump(payload, fh)

        with open(feature_list_file, "r", encoding="utf-8") as fh:
            listing = json.load(fh)

        ufs.action_status(listing, state_dir)
        output = capsys.readouterr().out

        assert "Commit Missing: 1 | Docs Missing: 1" in output
        assert "commit missing, retry 1/3" in output
        assert "docs missing, retry 2/3" in output