prizmkit 1.0.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bundled/VERSION.json +5 -0
- package/bundled/adapters/claude/agent-adapter.js +108 -0
- package/bundled/adapters/claude/command-adapter.js +104 -0
- package/bundled/adapters/claude/paths.js +35 -0
- package/bundled/adapters/claude/rules-adapter.js +77 -0
- package/bundled/adapters/claude/settings-adapter.js +73 -0
- package/bundled/adapters/claude/team-adapter.js +183 -0
- package/bundled/adapters/codebuddy/agent-adapter.js +43 -0
- package/bundled/adapters/codebuddy/paths.js +29 -0
- package/bundled/adapters/codebuddy/settings-adapter.js +47 -0
- package/bundled/adapters/codebuddy/skill-adapter.js +68 -0
- package/bundled/adapters/codebuddy/team-adapter.js +46 -0
- package/bundled/adapters/shared/frontmatter.js +77 -0
- package/bundled/agents/prizm-dev-team-coordinator.md +142 -0
- package/bundled/agents/prizm-dev-team-dev.md +99 -0
- package/bundled/agents/prizm-dev-team-pm.md +114 -0
- package/bundled/agents/prizm-dev-team-reviewer.md +119 -0
- package/bundled/dev-pipeline/README.md +482 -0
- package/bundled/dev-pipeline/assets/feature-list-example.json +147 -0
- package/bundled/dev-pipeline/assets/prizm-dev-team-integration.md +138 -0
- package/bundled/dev-pipeline/launch-bugfix-daemon.sh +425 -0
- package/bundled/dev-pipeline/launch-daemon.sh +549 -0
- package/bundled/dev-pipeline/reset-feature.sh +209 -0
- package/bundled/dev-pipeline/retry-bug.sh +344 -0
- package/bundled/dev-pipeline/retry-feature.sh +338 -0
- package/bundled/dev-pipeline/run-bugfix.sh +638 -0
- package/bundled/dev-pipeline/run.sh +845 -0
- package/bundled/dev-pipeline/scripts/check-session-status.py +158 -0
- package/bundled/dev-pipeline/scripts/detect-stuck.py +385 -0
- package/bundled/dev-pipeline/scripts/generate-bootstrap-prompt.py +598 -0
- package/bundled/dev-pipeline/scripts/generate-bugfix-prompt.py +402 -0
- package/bundled/dev-pipeline/scripts/init-bugfix-pipeline.py +294 -0
- package/bundled/dev-pipeline/scripts/init-dev-team.py +134 -0
- package/bundled/dev-pipeline/scripts/init-pipeline.py +335 -0
- package/bundled/dev-pipeline/scripts/update-bug-status.py +748 -0
- package/bundled/dev-pipeline/scripts/update-feature-status.py +1076 -0
- package/bundled/dev-pipeline/templates/bootstrap-prompt.md +262 -0
- package/bundled/dev-pipeline/templates/bug-fix-list-schema.json +159 -0
- package/bundled/dev-pipeline/templates/bugfix-bootstrap-prompt.md +291 -0
- package/bundled/dev-pipeline/templates/feature-list-schema.json +112 -0
- package/bundled/dev-pipeline/templates/session-status-schema.json +77 -0
- package/bundled/skills/_metadata.json +267 -0
- package/bundled/skills/app-planner/SKILL.md +580 -0
- package/bundled/skills/app-planner/assets/planning-guide.md +313 -0
- package/bundled/skills/app-planner/scripts/validate-and-generate.py +758 -0
- package/bundled/skills/bug-planner/SKILL.md +235 -0
- package/bundled/skills/bugfix-pipeline-launcher/SKILL.md +252 -0
- package/bundled/skills/dev-pipeline-launcher/SKILL.md +223 -0
- package/bundled/skills/prizm-kit/SKILL.md +151 -0
- package/bundled/skills/prizm-kit/assets/claude-md-template.md +38 -0
- package/bundled/skills/prizm-kit/assets/codebuddy-md-template.md +35 -0
- package/bundled/skills/prizm-kit/assets/hooks/prizm-commit-hook.json +15 -0
- package/bundled/skills/prizmkit-adr-manager/SKILL.md +68 -0
- package/bundled/skills/prizmkit-adr-manager/assets/adr-template.md +26 -0
- package/bundled/skills/prizmkit-analyze/SKILL.md +194 -0
- package/bundled/skills/prizmkit-api-doc-generator/SKILL.md +56 -0
- package/bundled/skills/prizmkit-bug-fix-workflow/SKILL.md +351 -0
- package/bundled/skills/prizmkit-bug-reproducer/SKILL.md +62 -0
- package/bundled/skills/prizmkit-ci-cd-generator/SKILL.md +54 -0
- package/bundled/skills/prizmkit-clarify/SKILL.md +52 -0
- package/bundled/skills/prizmkit-code-review/SKILL.md +70 -0
- package/bundled/skills/prizmkit-committer/SKILL.md +117 -0
- package/bundled/skills/prizmkit-db-migration/SKILL.md +65 -0
- package/bundled/skills/prizmkit-dependency-health/SKILL.md +123 -0
- package/bundled/skills/prizmkit-deployment-strategy/SKILL.md +58 -0
- package/bundled/skills/prizmkit-error-triage/SKILL.md +55 -0
- package/bundled/skills/prizmkit-implement/SKILL.md +47 -0
- package/bundled/skills/prizmkit-init/SKILL.md +156 -0
- package/bundled/skills/prizmkit-log-analyzer/SKILL.md +55 -0
- package/bundled/skills/prizmkit-monitoring-setup/SKILL.md +75 -0
- package/bundled/skills/prizmkit-onboarding-generator/SKILL.md +70 -0
- package/bundled/skills/prizmkit-perf-profiler/SKILL.md +55 -0
- package/bundled/skills/prizmkit-plan/SKILL.md +54 -0
- package/bundled/skills/prizmkit-plan/assets/plan-template.md +37 -0
- package/bundled/skills/prizmkit-prizm-docs/SKILL.md +140 -0
- package/bundled/skills/prizmkit-prizm-docs/assets/PRIZM-SPEC.md +943 -0
- package/bundled/skills/prizmkit-retrospective/SKILL.md +79 -0
- package/bundled/skills/prizmkit-security-audit/SKILL.md +130 -0
- package/bundled/skills/prizmkit-specify/SKILL.md +52 -0
- package/bundled/skills/prizmkit-specify/assets/spec-template.md +37 -0
- package/bundled/skills/prizmkit-summarize/SKILL.md +51 -0
- package/bundled/skills/prizmkit-summarize/assets/registry-template.md +18 -0
- package/bundled/skills/prizmkit-tasks/SKILL.md +50 -0
- package/bundled/skills/prizmkit-tasks/assets/tasks-template.md +21 -0
- package/bundled/skills/prizmkit-tech-debt-tracker/SKILL.md +139 -0
- package/bundled/team/prizm-dev-team.json +47 -0
- package/bundled/templates/claude-md-template.md +38 -0
- package/bundled/templates/codebuddy-md-template.md +35 -0
- package/package.json +2 -1
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""Initialize prizmkit directory structures for a feature.
|
|
3
|
+
|
|
4
|
+
Creates the standard directory layout expected by prizm-dev-team agents:
|
|
5
|
+
- .prizmkit/specs/<feature-slug>/ ← per-feature subdirectory
|
|
6
|
+
|
|
7
|
+
Usage:
|
|
8
|
+
python3 init-dev-team.py --project-root <path> --feature-slug <slug>
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import argparse
|
|
12
|
+
import json
|
|
13
|
+
import os
|
|
14
|
+
import sys
|
|
15
|
+
from datetime import datetime, timezone
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def parse_args():
    """Parse CLI options for the dev-team initializer.

    Options: --project-root (required), --feature-id, --feature-slug.
    """
    cli = argparse.ArgumentParser(
        description="Initialize dev-team and prizmkit directories"
    )
    # (flag, required?, help) table keeps the three options in one place.
    option_table = (
        ("--project-root", True, "Project root directory"),
        ("--feature-id", False, "Feature ID (e.g. F-001)"),
        ("--feature-slug", False,
         "Feature slug for per-feature directory (e.g. 001-project-infrastructure-setup)"),
    )
    for flag, mandatory, help_text in option_table:
        if mandatory:
            cli.add_argument(flag, required=True, help=help_text)
        else:
            cli.add_argument(flag, default=None, help=help_text)
    return cli.parse_args()
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def create_directories(project_root, feature_slug=None):
    """Create the prizmkit directory layout under *project_root*.

    When *feature_slug* is given, a per-feature spec subdirectory is
    created; otherwise only the flat ``.prizmkit/specs`` directory is.
    Returns the list of relative paths that were newly created
    (already-existing paths are skipped and not reported).
    """
    if feature_slug:
        wanted = [".prizmkit/specs/{}".format(feature_slug)]
    else:
        # Fallback: flat layout (not recommended).
        wanted = [".prizmkit/specs"]

    newly_made = []
    for rel_path in wanted:
        target = os.path.join(project_root, rel_path)
        if os.path.exists(target):
            continue  # already present: nothing to report
        os.makedirs(target, exist_ok=True)
        newly_made.append(rel_path)

    return newly_made
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def init_prizmkit_config(project_root, feature_id):
    """Initialize or update .prizmkit/config.json.

    When the config already exists, records *feature_id* as the current
    feature and stamps ``updated_at``.  Otherwise creates a fresh config,
    deriving the project name from package.json's "name" field when
    available, else from the directory basename.

    Returns the path of the config file that was written.
    """
    prizmkit_dir = os.path.join(project_root, ".prizmkit")
    config_path = os.path.join(prizmkit_dir, "config.json")
    # Single timestamp string shared by both branches.
    now = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")

    if os.path.exists(config_path):
        # Update existing config in place.
        with open(config_path, "r", encoding="utf-8") as f:
            config = json.load(f)
        config["current_feature"] = feature_id
        config["updated_at"] = now
    else:
        # Create new config.
        # Try to get project name from package.json or directory name.
        project_name = os.path.basename(project_root)
        pkg_json_path = os.path.join(project_root, "package.json")
        if os.path.exists(pkg_json_path):
            try:
                with open(pkg_json_path, "r", encoding="utf-8") as f:
                    pkg = json.load(f)
                project_name = pkg.get("name", project_name)
            except (json.JSONDecodeError, IOError):
                # Best-effort: an unreadable package.json is not fatal.
                pass

        config = {
            "adoption_mode": "active",
            "speckit_hooks_enabled": True,
            "project_name": project_name,
            "initialized_at": now,
            "feature_prefix": "F-",
            "current_feature": feature_id,
        }

    # Robustness fix: ensure .prizmkit/ exists before writing, so this
    # function also works when create_directories() has not run first
    # (previously open() raised FileNotFoundError in that case).
    os.makedirs(prizmkit_dir, exist_ok=True)
    with open(config_path, "w", encoding="utf-8") as f:
        json.dump(config, f, indent=2)

    return config_path
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def main():
    """CLI entry point: create directories, write config, emit a JSON report."""
    args = parse_args()
    root = os.path.abspath(args.project_root)

    # Guard clause: refuse to run against a nonexistent project root.
    if not os.path.isdir(root):
        print(json.dumps(
            {
                "success": False,
                "error": f"Project root does not exist: {root}",
            },
            indent=2,
        ))
        sys.exit(1)

    # Create directories first, then initialize the config (dict literal
    # evaluates values in order, preserving the original call sequence).
    report = {
        "success": True,
        "project_root": root,
        "feature_slug": args.feature_slug,
        "directories_created": create_directories(root, args.feature_slug),
        "config_path": init_prizmkit_config(root, args.feature_id),
    }
    print(json.dumps(report, indent=2))
    sys.exit(0)
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
# Script entry point: run only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,335 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""Initialize the dev-pipeline state directory from a feature-list.json file.
|
|
3
|
+
|
|
4
|
+
Validates the feature list schema, checks dependency DAG for cycles,
|
|
5
|
+
and creates the state directory structure with pipeline and feature status files.
|
|
6
|
+
|
|
7
|
+
Usage:
|
|
8
|
+
python3 init-pipeline.py --feature-list <path> --state-dir <path>
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import argparse
|
|
12
|
+
import json
|
|
13
|
+
import os
|
|
14
|
+
import re
|
|
15
|
+
import sys
|
|
16
|
+
from collections import deque
|
|
17
|
+
from datetime import datetime, timezone
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
# Identifier the feature list's "$schema" field must match exactly.
EXPECTED_SCHEMA = "dev-pipeline-feature-list-v1"
# Feature IDs must look like "F-001" (literal "F-" plus exactly three digits).
FEATURE_ID_PATTERN = re.compile(r"^F-\d{3}$")

# Fields every feature object in the list is required to carry.
REQUIRED_FEATURE_FIELDS = [
    "id",
    "title",
    "description",
    "priority",
    "dependencies",
    "acceptance_criteria",
    "status",
]
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def parse_args():
    """Parse the two required CLI paths (feature list and state dir)."""
    cli = argparse.ArgumentParser(
        description="Initialize dev-pipeline state from a feature-list.json file."
    )
    # Both flags are mandatory; register them from a small table.
    for flag, help_text in (
        ("--feature-list", "Path to the feature-list.json file"),
        ("--state-dir", "Path to the state directory to create/initialize"),
    ):
        cli.add_argument(flag, required=True, help=help_text)
    return cli.parse_args()
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def load_feature_list(path):
    """Read *path* as a JSON document.

    Returns ``(data, errors)``: the parsed document with an empty error
    list on success, or ``(None, [message])`` when the file is missing,
    unreadable, or not valid JSON.
    """
    resolved = os.path.abspath(path)
    if not os.path.isfile(resolved):
        return None, ["Feature list file not found: {}".format(resolved)]
    try:
        with open(resolved, "r", encoding="utf-8") as handle:
            parsed = json.load(handle)
    except json.JSONDecodeError as exc:
        return None, ["Invalid JSON in feature list: {}".format(str(exc))]
    except IOError as exc:
        return None, ["Cannot read feature list file: {}".format(str(exc))]
    return parsed, []
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def validate_schema(data):
    """Check the feature list's top-level shape; return a list of error strings."""
    problems = []

    # "$schema" must exactly match the supported schema identifier.
    declared = data.get("$schema")
    if declared != EXPECTED_SCHEMA:
        problems.append(
            "Invalid $schema: expected '{}', got '{}'".format(EXPECTED_SCHEMA, declared)
        )

    # "app_name" must be present and a non-blank string.
    if "app_name" not in data:
        problems.append("Missing required field: app_name")
    elif not isinstance(data["app_name"], str) or not data["app_name"].strip():
        problems.append("app_name must be a non-empty string")

    # "features" must be present and an array.
    if "features" not in data:
        problems.append("Missing required field: features")
    elif not isinstance(data["features"], list):
        problems.append("features must be an array")

    return problems
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def validate_features(features):
    """Validate each feature object and cross-reference dependencies.

    Returns ``(errors, feature_ids)`` where *errors* is a list of
    human-readable messages and *feature_ids* is the set of ids that
    passed the F-NNN format/uniqueness checks.
    """
    errors = []
    feature_ids = set()  # ids usable as dependency targets in pass two
    seen_ids = set()     # ids encountered so far, for duplicate detection

    # First pass: collect all feature IDs and validate structure
    for i, feature in enumerate(features):
        if not isinstance(feature, dict):
            errors.append("Feature at index {} is not an object".format(i))
            continue

        # Check required fields
        for field in REQUIRED_FEATURE_FIELDS:
            if field not in feature:
                errors.append(
                    "Feature at index {} missing required field: {}".format(i, field)
                )

        # Validate feature ID format
        # Order matters: format is checked before duplication, so an
        # invalid-format id is reported once and never enters the sets.
        fid = feature.get("id")
        if fid is not None:
            if not isinstance(fid, str) or not FEATURE_ID_PATTERN.match(fid):
                errors.append(
                    "Feature at index {} has invalid id '{}' "
                    "(must match F-NNN pattern)".format(i, fid)
                )
            elif fid in seen_ids:
                errors.append("Duplicate feature id: {}".format(fid))
            else:
                seen_ids.add(fid)
                feature_ids.add(fid)

        # Validate dependencies is a list
        deps = feature.get("dependencies")
        if deps is not None and not isinstance(deps, list):
            errors.append(
                "Feature '{}' dependencies must be an array".format(
                    fid if fid else "index {}".format(i)
                )
            )

        # Validate acceptance_criteria is a list
        ac = feature.get("acceptance_criteria")
        if ac is not None and not isinstance(ac, list):
            errors.append(
                "Feature '{}' acceptance_criteria must be an array".format(
                    fid if fid else "index {}".format(i)
                )
            )

    # Second pass: validate dependency references
    # NOTE: deps naming a feature whose id failed the format check are
    # reported as unknown, since such ids never reach feature_ids.
    for feature in features:
        if not isinstance(feature, dict):
            continue
        fid = feature.get("id", "unknown")
        deps = feature.get("dependencies", [])
        if not isinstance(deps, list):
            continue  # already reported as non-array in pass one
        for dep in deps:
            if dep not in feature_ids:
                errors.append(
                    "Feature '{}' depends on unknown feature '{}'".format(fid, dep)
                )

    return errors, feature_ids
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def check_dag(features):
    """Verify the feature dependency graph is acyclic (Kahn's algorithm).

    Returns a single-element error list naming the cycle members when a
    cycle exists, otherwise an empty list.
    """
    # successors[x]: features that depend on x; pending[x]: count of x's
    # declared (and known) dependencies not yet processed.
    successors = {}
    pending = {}
    known = {}

    for item in features:
        if not isinstance(item, dict):
            continue
        node = item.get("id")
        if node is None:
            continue
        known[node] = item
        successors.setdefault(node, [])
        pending.setdefault(node, 0)

    for item in features:
        if not isinstance(item, dict):
            continue
        node = item.get("id")
        requires = item.get("dependencies", [])
        if node is None or not isinstance(requires, list):
            continue
        for req in requires:
            if req in successors:
                # Edge req -> node: req must be processed before node.
                successors[req].append(node)
                pending[node] = pending.get(node, 0) + 1

    # Kahn's algorithm: repeatedly peel off nodes with no unmet dependencies.
    ready = deque(node for node, count in pending.items() if count == 0)
    visited = 0
    while ready:
        current = ready.popleft()
        visited += 1
        for nxt in successors.get(current, []):
            pending[nxt] -= 1
            if pending[nxt] == 0:
                ready.append(nxt)

    if visited != len(known):
        # Anything with unmet dependencies left over sits on (or behind) a cycle.
        stuck = [node for node, count in pending.items() if count > 0]
        return [
            "Dependency cycle detected involving features: {}".format(
                ", ".join(sorted(stuck))
            )
        ]

    return []
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
def create_state_directory(state_dir, feature_list_path, features):
    """Materialize the pipeline state tree.

    Writes ``<state_dir>/pipeline.json`` plus, for every feature that has
    an id, ``features/<id>/status.json`` and an empty ``sessions/``
    directory.  Returns the absolute path of the state directory.
    """
    state_root = os.path.abspath(state_dir)
    list_path = os.path.abspath(feature_list_path)
    per_feature_root = os.path.join(state_root, "features")

    stamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    run_id = "run-" + datetime.now(timezone.utc).strftime("%Y%m%d-%H%M%S")

    os.makedirs(state_root, exist_ok=True)
    os.makedirs(per_feature_root, exist_ok=True)

    def _dump(path, payload):
        # All state files are pretty-printed JSON with a trailing newline.
        with open(path, "w", encoding="utf-8") as fh:
            json.dump(payload, fh, indent=2, ensure_ascii=False)
            fh.write("\n")

    # Top-level pipeline state.
    _dump(
        os.path.join(state_root, "pipeline.json"),
        {
            "run_id": run_id,
            "status": "initialized",
            "feature_list_path": list_path,
            "created_at": stamp,
            "total_features": len(features),
            "completed_features": 0,
        },
    )

    # Per-feature status file and sessions directory.
    for entry in features:
        if not isinstance(entry, dict):
            continue
        fid = entry.get("id")
        if fid is None:
            continue

        feature_home = os.path.join(per_feature_root, fid)
        os.makedirs(os.path.join(feature_home, "sessions"), exist_ok=True)
        _dump(
            os.path.join(feature_home, "status.json"),
            {
                "feature_id": fid,
                "status": "pending",
                "retry_count": 0,
                "max_retries": 3,
                "sessions": [],
                "last_session_id": None,
                "resume_from_phase": None,
                "created_at": stamp,
                "updated_at": stamp,
            },
        )

    return state_root
|
|
280
|
+
|
|
281
|
+
|
|
282
|
+
def main():
    """CLI entry point: validate the feature list, then build the state tree."""

    def _bail(messages):
        # Every failure path emits the same JSON shape and exits nonzero.
        print(json.dumps({"valid": False, "errors": messages},
                         indent=2, ensure_ascii=False))
        sys.exit(1)

    args = parse_args()

    # Load feature list
    data, load_errors = load_feature_list(args.feature_list)
    if load_errors:
        _bail(load_errors)

    # Validate top-level schema
    schema_errors = validate_schema(data)
    if schema_errors:
        _bail(schema_errors)

    # Validate individual features and their cross-references
    features = data["features"]
    feature_errors, feature_ids = validate_features(features)
    if feature_errors:
        _bail(feature_errors)

    # Reject dependency cycles
    dag_errors = check_dag(features)
    if dag_errors:
        _bail(dag_errors)

    # Create state directory
    try:
        state_root = create_state_directory(args.state_dir, args.feature_list, features)
    except (IOError, OSError) as exc:
        _bail(["Failed to create state directory: {}".format(str(exc))])

    # Success output
    print(json.dumps(
        {
            "valid": True,
            "features_count": len(features),
            "state_dir": state_root,
        },
        indent=2,
        ensure_ascii=False,
    ))
    sys.exit(0)
|
|
332
|
+
|
|
333
|
+
|
|
334
|
+
# Script entry point: run only when executed directly, not on import.
if __name__ == "__main__":
    main()
|