atdd 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atdd/__init__.py +0 -0
- atdd/cli.py +404 -0
- atdd/coach/__init__.py +0 -0
- atdd/coach/commands/__init__.py +0 -0
- atdd/coach/commands/add_persistence_metadata.py +215 -0
- atdd/coach/commands/analyze_migrations.py +188 -0
- atdd/coach/commands/consumers.py +720 -0
- atdd/coach/commands/infer_governance_status.py +149 -0
- atdd/coach/commands/initializer.py +177 -0
- atdd/coach/commands/interface.py +1078 -0
- atdd/coach/commands/inventory.py +565 -0
- atdd/coach/commands/migration.py +240 -0
- atdd/coach/commands/registry.py +1560 -0
- atdd/coach/commands/session.py +430 -0
- atdd/coach/commands/sync.py +405 -0
- atdd/coach/commands/test_interface.py +399 -0
- atdd/coach/commands/test_runner.py +141 -0
- atdd/coach/commands/tests/__init__.py +1 -0
- atdd/coach/commands/tests/test_telemetry_array_validation.py +235 -0
- atdd/coach/commands/traceability.py +4264 -0
- atdd/coach/conventions/session.convention.yaml +754 -0
- atdd/coach/overlays/__init__.py +2 -0
- atdd/coach/overlays/claude.md +2 -0
- atdd/coach/schemas/config.schema.json +34 -0
- atdd/coach/schemas/manifest.schema.json +101 -0
- atdd/coach/templates/ATDD.md +282 -0
- atdd/coach/templates/SESSION-TEMPLATE.md +327 -0
- atdd/coach/utils/__init__.py +0 -0
- atdd/coach/utils/graph/__init__.py +0 -0
- atdd/coach/utils/graph/urn.py +875 -0
- atdd/coach/validators/__init__.py +0 -0
- atdd/coach/validators/shared_fixtures.py +365 -0
- atdd/coach/validators/test_enrich_wagon_registry.py +167 -0
- atdd/coach/validators/test_registry.py +575 -0
- atdd/coach/validators/test_session_validation.py +1183 -0
- atdd/coach/validators/test_traceability.py +448 -0
- atdd/coach/validators/test_update_feature_paths.py +108 -0
- atdd/coach/validators/test_validate_contract_consumers.py +297 -0
- atdd/coder/__init__.py +1 -0
- atdd/coder/conventions/adapter.recipe.yaml +88 -0
- atdd/coder/conventions/backend.convention.yaml +460 -0
- atdd/coder/conventions/boundaries.convention.yaml +666 -0
- atdd/coder/conventions/commons.convention.yaml +460 -0
- atdd/coder/conventions/complexity.recipe.yaml +109 -0
- atdd/coder/conventions/component-naming.convention.yaml +178 -0
- atdd/coder/conventions/design.convention.yaml +327 -0
- atdd/coder/conventions/design.recipe.yaml +273 -0
- atdd/coder/conventions/dto.convention.yaml +660 -0
- atdd/coder/conventions/frontend.convention.yaml +542 -0
- atdd/coder/conventions/green.convention.yaml +1012 -0
- atdd/coder/conventions/presentation.convention.yaml +587 -0
- atdd/coder/conventions/refactor.convention.yaml +535 -0
- atdd/coder/conventions/technology.convention.yaml +206 -0
- atdd/coder/conventions/tests/__init__.py +0 -0
- atdd/coder/conventions/tests/test_adapter_recipe.py +302 -0
- atdd/coder/conventions/tests/test_complexity_recipe.py +289 -0
- atdd/coder/conventions/tests/test_component_taxonomy.py +278 -0
- atdd/coder/conventions/tests/test_component_urn_naming.py +165 -0
- atdd/coder/conventions/tests/test_thinness_recipe.py +286 -0
- atdd/coder/conventions/thinness.recipe.yaml +82 -0
- atdd/coder/conventions/train.convention.yaml +325 -0
- atdd/coder/conventions/verification.protocol.yaml +53 -0
- atdd/coder/schemas/design_system.schema.json +361 -0
- atdd/coder/validators/__init__.py +0 -0
- atdd/coder/validators/test_commons_structure.py +485 -0
- atdd/coder/validators/test_complexity.py +416 -0
- atdd/coder/validators/test_cross_language_consistency.py +431 -0
- atdd/coder/validators/test_design_system_compliance.py +413 -0
- atdd/coder/validators/test_dto_testing_patterns.py +268 -0
- atdd/coder/validators/test_green_cross_stack_layers.py +168 -0
- atdd/coder/validators/test_green_layer_dependencies.py +148 -0
- atdd/coder/validators/test_green_python_layer_structure.py +103 -0
- atdd/coder/validators/test_green_supabase_layer_structure.py +103 -0
- atdd/coder/validators/test_import_boundaries.py +396 -0
- atdd/coder/validators/test_init_file_urns.py +593 -0
- atdd/coder/validators/test_preact_layer_boundaries.py +221 -0
- atdd/coder/validators/test_presentation_convention.py +260 -0
- atdd/coder/validators/test_python_architecture.py +674 -0
- atdd/coder/validators/test_quality_metrics.py +420 -0
- atdd/coder/validators/test_station_master_pattern.py +244 -0
- atdd/coder/validators/test_train_infrastructure.py +454 -0
- atdd/coder/validators/test_train_urns.py +293 -0
- atdd/coder/validators/test_typescript_architecture.py +616 -0
- atdd/coder/validators/test_usecase_structure.py +421 -0
- atdd/coder/validators/test_wagon_boundaries.py +586 -0
- atdd/conftest.py +126 -0
- atdd/planner/__init__.py +1 -0
- atdd/planner/conventions/acceptance.convention.yaml +538 -0
- atdd/planner/conventions/appendix.convention.yaml +187 -0
- atdd/planner/conventions/artifact-naming.convention.yaml +852 -0
- atdd/planner/conventions/component.convention.yaml +670 -0
- atdd/planner/conventions/criteria.convention.yaml +141 -0
- atdd/planner/conventions/feature.convention.yaml +371 -0
- atdd/planner/conventions/interface.convention.yaml +382 -0
- atdd/planner/conventions/steps.convention.yaml +141 -0
- atdd/planner/conventions/train.convention.yaml +552 -0
- atdd/planner/conventions/wagon.convention.yaml +275 -0
- atdd/planner/conventions/wmbt.convention.yaml +258 -0
- atdd/planner/schemas/acceptance.schema.json +336 -0
- atdd/planner/schemas/appendix.schema.json +78 -0
- atdd/planner/schemas/component.schema.json +114 -0
- atdd/planner/schemas/feature.schema.json +197 -0
- atdd/planner/schemas/train.schema.json +192 -0
- atdd/planner/schemas/wagon.schema.json +281 -0
- atdd/planner/schemas/wmbt.schema.json +59 -0
- atdd/planner/validators/__init__.py +0 -0
- atdd/planner/validators/conftest.py +5 -0
- atdd/planner/validators/test_draft_wagon_registry.py +374 -0
- atdd/planner/validators/test_plan_cross_refs.py +240 -0
- atdd/planner/validators/test_plan_uniqueness.py +224 -0
- atdd/planner/validators/test_plan_urn_resolution.py +268 -0
- atdd/planner/validators/test_plan_wagons.py +174 -0
- atdd/planner/validators/test_train_validation.py +514 -0
- atdd/planner/validators/test_wagon_urn_chain.py +648 -0
- atdd/planner/validators/test_wmbt_consistency.py +327 -0
- atdd/planner/validators/test_wmbt_vocabulary.py +632 -0
- atdd/tester/__init__.py +1 -0
- atdd/tester/conventions/artifact.convention.yaml +257 -0
- atdd/tester/conventions/contract.convention.yaml +1009 -0
- atdd/tester/conventions/filename.convention.yaml +555 -0
- atdd/tester/conventions/migration.convention.yaml +509 -0
- atdd/tester/conventions/red.convention.yaml +797 -0
- atdd/tester/conventions/routing.convention.yaml +51 -0
- atdd/tester/conventions/telemetry.convention.yaml +458 -0
- atdd/tester/schemas/a11y.tmpl.json +17 -0
- atdd/tester/schemas/artifact.schema.json +189 -0
- atdd/tester/schemas/contract.schema.json +591 -0
- atdd/tester/schemas/contract.tmpl.json +95 -0
- atdd/tester/schemas/db.tmpl.json +20 -0
- atdd/tester/schemas/e2e.tmpl.json +17 -0
- atdd/tester/schemas/edge_function.tmpl.json +17 -0
- atdd/tester/schemas/event.tmpl.json +17 -0
- atdd/tester/schemas/http.tmpl.json +19 -0
- atdd/tester/schemas/job.tmpl.json +18 -0
- atdd/tester/schemas/load.tmpl.json +21 -0
- atdd/tester/schemas/metric.tmpl.json +19 -0
- atdd/tester/schemas/pack.schema.json +139 -0
- atdd/tester/schemas/realtime.tmpl.json +20 -0
- atdd/tester/schemas/rls.tmpl.json +18 -0
- atdd/tester/schemas/script.tmpl.json +16 -0
- atdd/tester/schemas/sec.tmpl.json +18 -0
- atdd/tester/schemas/storage.tmpl.json +18 -0
- atdd/tester/schemas/telemetry.schema.json +128 -0
- atdd/tester/schemas/telemetry_tracking_manifest.schema.json +143 -0
- atdd/tester/schemas/test_filename.schema.json +194 -0
- atdd/tester/schemas/test_intent.schema.json +179 -0
- atdd/tester/schemas/unit.tmpl.json +18 -0
- atdd/tester/schemas/visual.tmpl.json +18 -0
- atdd/tester/schemas/ws.tmpl.json +17 -0
- atdd/tester/utils/__init__.py +0 -0
- atdd/tester/utils/filename.py +300 -0
- atdd/tester/validators/__init__.py +0 -0
- atdd/tester/validators/cleanup_duplicate_headers.py +116 -0
- atdd/tester/validators/cleanup_duplicate_headers_v2.py +135 -0
- atdd/tester/validators/conftest.py +5 -0
- atdd/tester/validators/coverage_gap_report.py +321 -0
- atdd/tester/validators/fix_dual_ac_references.py +179 -0
- atdd/tester/validators/remove_duplicate_lines.py +93 -0
- atdd/tester/validators/test_acceptance_urn_filename_mapping.py +359 -0
- atdd/tester/validators/test_acceptance_urn_separator.py +166 -0
- atdd/tester/validators/test_artifact_naming_category.py +307 -0
- atdd/tester/validators/test_contract_schema_compliance.py +706 -0
- atdd/tester/validators/test_contracts_structure.py +200 -0
- atdd/tester/validators/test_coverage_adequacy.py +797 -0
- atdd/tester/validators/test_dual_ac_reference.py +225 -0
- atdd/tester/validators/test_fixture_validity.py +372 -0
- atdd/tester/validators/test_isolation.py +487 -0
- atdd/tester/validators/test_migration_coverage.py +204 -0
- atdd/tester/validators/test_migration_criteria.py +276 -0
- atdd/tester/validators/test_migration_generation.py +116 -0
- atdd/tester/validators/test_python_test_naming.py +410 -0
- atdd/tester/validators/test_red_layer_validation.py +95 -0
- atdd/tester/validators/test_red_python_layer_structure.py +87 -0
- atdd/tester/validators/test_red_supabase_layer_structure.py +90 -0
- atdd/tester/validators/test_telemetry_structure.py +634 -0
- atdd/tester/validators/test_typescript_test_naming.py +301 -0
- atdd/tester/validators/test_typescript_test_structure.py +84 -0
- atdd-0.1.0.dist-info/METADATA +191 -0
- atdd-0.1.0.dist-info/RECORD +183 -0
- atdd-0.1.0.dist-info/WHEEL +5 -0
- atdd-0.1.0.dist-info/entry_points.txt +2 -0
- atdd-0.1.0.dist-info/licenses/LICENSE +674 -0
- atdd-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,1078 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Producer-Contract Traceability Validator
|
|
4
|
+
|
|
5
|
+
Validates bidirectional traceability between wagon produce declarations
|
|
6
|
+
and contract schemas, following artifact naming conventions.
|
|
7
|
+
|
|
8
|
+
Features:
|
|
9
|
+
- Scans wagon/feature produce declarations
|
|
10
|
+
- Validates contract schemas against meta-schema
|
|
11
|
+
- Checks producer/consumer relationships
|
|
12
|
+
- Generates missing contract schemas (with --fix)
|
|
13
|
+
- Reports orphaned contracts without producers
|
|
14
|
+
"""
|
|
15
|
+
|
|
16
|
+
import re
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
from typing import Dict, List, Set, Tuple, Any, Optional
|
|
19
|
+
import yaml
|
|
20
|
+
import json
|
|
21
|
+
from jsonschema import Draft7Validator
|
|
22
|
+
from dataclasses import dataclass, field
|
|
23
|
+
|
|
24
|
+
# Repository layout constants. This file lives four directory levels below
# the repo root (atdd/coach/commands/<file>.py), hence four .parent hops.
REPO_ROOT = Path(__file__).parent.parent.parent.parent
PLAN_DIR = REPO_ROOT / "plan"            # wagon/feature plan manifests
CONTRACTS_DIR = REPO_ROOT / "contracts"  # contract *.schema.json files
# Meta-schema every contract schema is validated against.
ARTIFACT_SCHEMA_PATH = REPO_ROOT / ".claude/schemas/tester/artifact.schema.json"
MANIFEST_FILE = REPO_ROOT / "manifest.yaml"
REGISTRY_FILE = REPO_ROOT / "plan/_wagons.yaml"  # wagon registry (active/draft slugs)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@dataclass
class ProduceDeclaration:
    """Represents a produce declaration from a wagon/feature"""
    wagon_slug: str              # slug of the producing wagon
    wagon_theme: str             # wagon theme; "unknown" when it cannot be resolved
    artifact_name: str           # declared artifact name, e.g. "commons:player.identity"
    contract_urn: Optional[str]  # "contract:..." URN, or None when not declared
    source_file: Path            # manifest file the declaration was read from
    source_type: str  # 'wagon' or 'feature'
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@dataclass
class ContractSchema:
    """Represents a contract schema file"""
    file_path: Path          # location of the *.schema.json file
    schema_id: str           # the schema's "$id" value ("" when absent)
    domain: str              # x-artifact-metadata.domain
    resource: str            # x-artifact-metadata.resource
    producer: Optional[str]  # x-artifact-metadata.producer, e.g. "wagon:<slug>"
    consumers: List[str]     # x-artifact-metadata.consumers
    valid: bool              # True when the file passed meta-schema validation
    validation_errors: List[str] = field(default_factory=list)  # meta-schema error messages
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class ProducerValidator:
|
|
57
|
+
"""Validates producer-contract traceability and generates missing contracts"""
|
|
58
|
+
|
|
59
|
+
def __init__(self, auto_fix: bool = False, verbose: bool = False):
    """Create a validator.

    Args:
        auto_fix: when True, missing contract schemas are generated on disk.
        verbose: when True, progress is printed while scanning.
    """
    self.auto_fix = auto_fix
    self.verbose = verbose
    self.errors: List[str] = []    # hard failures reported to the caller
    self.warnings: List[str] = []  # non-fatal traceability issues
    self.produce_declarations: List[ProduceDeclaration] = []
    self.contract_schemas: List[ContractSchema] = []
    # Meta-schema dict; populated by load_artifact_schema(), None until then.
    self.artifact_schema: Optional[Any] = None
    self.wagon_registry: Set[str] = set()  # slugs of active wagons
    self.draft_wagons: Set[str] = set()    # slugs of draft wagons
|
|
69
|
+
|
|
70
|
+
def load_wagon_registry(self):
    """Load active and draft wagon slugs from the registry file.

    Populates ``self.wagon_registry`` (status "active") and
    ``self.draft_wagons`` (status "draft"). The registry's "wagons" key
    may be either a list of wagon mappings or a dict keyed by slug; both
    shapes are normalized to one iteration here (previously duplicated
    branch bodies). An empty registry file (yaml.safe_load -> None) is
    treated as "no wagons" instead of raising inside the broad handler.
    """
    try:
        with open(REGISTRY_FILE) as f:
            registry_data = yaml.safe_load(f)

        # safe_load returns None for an empty file; avoid .get on None.
        wagons = (registry_data or {}).get("wagons", [])

        # Normalize both supported shapes to an iterable of mappings.
        if isinstance(wagons, dict):
            entries = wagons.values()
        elif isinstance(wagons, list):
            entries = wagons
        else:
            entries = []

        for wagon_data in entries:
            status = wagon_data.get("status", "active")
            # Dict-form entries may carry the slug under "slug";
            # list-form entries use "wagon". Accept either key.
            slug = wagon_data.get("slug") or wagon_data.get("wagon", "")

            if status == "active":
                self.wagon_registry.add(slug)
            elif status == "draft":
                self.draft_wagons.add(slug)

    except FileNotFoundError:
        self.warnings.append(f"Registry file not found: {REGISTRY_FILE}")
    except Exception as e:
        self.errors.append(f"Error loading wagon registry: {e}")
|
|
102
|
+
|
|
103
|
+
def load_artifact_schema(self):
    """Read and parse the artifact meta-schema used to check contract files.

    On any failure (missing file, bad JSON, ...) the exception is recorded
    in self.errors and self.artifact_schema stays None.
    """
    try:
        self.artifact_schema = json.loads(ARTIFACT_SCHEMA_PATH.read_text())
    except Exception as e:
        self.errors.append(f"Error loading artifact schema: {e}")
|
|
110
|
+
|
|
111
|
+
def scan_wagons(self):
    """Scan all wagon manifests for produce declarations.

    Wagon manifests follow the plan/<dir>/_<name>.yaml layout. Each entry
    under the manifest's "produce" key becomes a ProduceDeclaration with
    source_type "wagon". Read/parse failures are recorded in self.errors
    and scanning continues with the next file.
    """
    wagon_files = list(PLAN_DIR.glob("*/_*.yaml"))

    if self.verbose:
        print(f" Scanning {len(wagon_files)} wagon manifests...")

    for wagon_file in wagon_files:
        try:
            with open(wagon_file) as f:
                wagon_data = yaml.safe_load(f)

            wagon_slug = wagon_data.get("wagon")
            wagon_theme = wagon_data.get("theme", "unknown")

            # Check if wagon is in registry or draft. Unregistered wagons
            # are still scanned; this only emits a verbose-mode note.
            if wagon_slug not in self.wagon_registry and wagon_slug not in self.draft_wagons:
                if self.verbose:
                    print(f" Note: {wagon_slug} not in registry (will still scan)")

            # Get produce declarations
            produce_items = wagon_data.get("produce", [])
            for item in produce_items:
                artifact_name = item.get("name")
                contract_urn = item.get("contract")

                # Entries without a name are silently skipped.
                if artifact_name:
                    self.produce_declarations.append(ProduceDeclaration(
                        wagon_slug=wagon_slug,
                        wagon_theme=wagon_theme,
                        artifact_name=artifact_name,
                        contract_urn=contract_urn,
                        source_file=wagon_file,
                        source_type="wagon"
                    ))
        except Exception as e:
            self.errors.append(f"Error reading wagon {wagon_file.name}: {e}")
|
|
148
|
+
|
|
149
|
+
def scan_features(self):
    """Scan feature files for produce declarations.

    Feature files live under plan/*/features/. The owning wagon is parsed
    from the feature's URN; each entry under "produces" becomes a
    ProduceDeclaration with source_type "feature". Read/parse failures are
    recorded in self.errors and scanning continues.
    """
    feature_files = list(PLAN_DIR.glob("*/features/**/*.yaml"))

    if self.verbose:
        print(f" Scanning {len(feature_files)} feature files...")

    for feature_file in feature_files:
        try:
            with open(feature_file) as f:
                feature_data = yaml.safe_load(f)

            # Extract wagon from feature URN
            feature_urn = feature_data.get("urn", "")
            # urn format: feature:wagon-slug:feature-name
            parts = feature_urn.split(":")
            if len(parts) >= 2:
                wagon_slug = parts[1]
            else:
                # No parseable wagon segment: skip this feature file.
                continue

            # Determine wagon theme (falls back to "unknown").
            wagon_theme = self._get_wagon_theme(wagon_slug)

            # Get produces from feature
            produces = feature_data.get("produces", [])
            for item in produces:
                artifact_name = item.get("name")
                contract_urn = item.get("contract")

                # Entries without a name are silently skipped.
                if artifact_name:
                    self.produce_declarations.append(ProduceDeclaration(
                        wagon_slug=wagon_slug,
                        wagon_theme=wagon_theme,
                        artifact_name=artifact_name,
                        contract_urn=contract_urn,
                        source_file=feature_file,
                        source_type="feature"
                    ))
        except Exception as e:
            self.errors.append(f"Error reading feature {feature_file.name}: {e}")
|
|
190
|
+
|
|
191
|
+
def _get_wagon_theme(self, wagon_slug: str) -> str:
    """Return the theme declared in a wagon's manifest, or "unknown".

    The manifest is expected at plan/{dir}/_{dir}.yaml where {dir} is the
    slug with hyphens replaced by underscores.

    Args:
        wagon_slug: wagon slug as it appears in URNs (kebab-case).
    """
    wagon_dir = wagon_slug.replace("-", "_")
    wagon_file = PLAN_DIR / wagon_dir / f"_{wagon_dir}.yaml"

    if wagon_file.exists():
        try:
            with open(wagon_file) as f:
                wagon_data = yaml.safe_load(f)
            # Guard against an empty manifest (safe_load -> None).
            return (wagon_data or {}).get("theme", "unknown")
        except Exception:
            # Unreadable/malformed manifest: fall through to "unknown".
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
            pass

    return "unknown"
|
|
205
|
+
|
|
206
|
+
def scan_contracts(self):
    """Scan all contract schema files under contracts/.

    Each *.schema.json file is parsed, its x-artifact-metadata extracted,
    and — when the artifact meta-schema is loaded — validated against it.
    One ContractSchema record is appended per readable file; JSON or I/O
    failures are recorded in self.errors instead.
    """
    contract_files = list(CONTRACTS_DIR.glob("**/*.schema.json"))

    if self.verbose:
        print(f" Scanning {len(contract_files)} contract schemas...")

    for contract_file in contract_files:
        try:
            with open(contract_file) as f:
                schema = json.load(f)

            schema_id = schema.get("$id", "")
            metadata = schema.get("x-artifact-metadata", {})

            domain = metadata.get("domain", "")
            resource = metadata.get("resource", "")
            producer = metadata.get("producer")
            consumers = metadata.get("consumers", [])

            # Validate against meta-schema (skipped when it failed to load;
            # the contract is then recorded as valid by default).
            valid = True
            validation_errors = []

            if self.artifact_schema:
                validator = Draft7Validator(self.artifact_schema)
                errors = list(validator.iter_errors(schema))
                if errors:
                    valid = False
                    validation_errors = [e.message for e in errors]

            self.contract_schemas.append(ContractSchema(
                file_path=contract_file,
                schema_id=schema_id,
                domain=domain,
                resource=resource,
                producer=producer,
                consumers=consumers,
                valid=valid,
                validation_errors=validation_errors
            ))

        except json.JSONDecodeError as e:
            self.errors.append(f"Invalid JSON in {contract_file.name}: {e}")
        except Exception as e:
            self.errors.append(f"Error reading contract {contract_file.name}: {e}")
|
|
252
|
+
|
|
253
|
+
def validate_bidirectional_traceability(self):
    """Check produce → contract and contract → produce traceability.

    Phase 1 (produce → contract): every produce declaration with a real
    contract URN must resolve to an existing schema file; misses are
    warned about and collected.

    Phase 2 (contract → produce): every contract's producer wagon must
    have a produce declaration whose contract URN resolves back to that
    contract file. A known wagon without the declaration is a warning;
    an unknown wagon is an error.

    Returns:
        List of (ProduceDeclaration, expected Path) pairs for contracts
        that are declared but missing on disk (input for --fix generation).
    """

    # Check produce → contract (missing contracts)
    missing_contracts = []
    for prod in self.produce_declarations:
        # "null" appears as a literal string in some manifests; skip it
        # like a missing URN.
        if prod.contract_urn and prod.contract_urn != "null":
            expected_path = self._resolve_contract_urn_to_path(prod.contract_urn)

            if expected_path and not expected_path.exists():
                missing_contracts.append((prod, expected_path))
                self.warnings.append(
                    f"Missing contract for wagon:{prod.wagon_slug}:\n"
                    f" Artifact: {prod.artifact_name}\n"
                    f" Contract URN: {prod.contract_urn}\n"
                    f" Expected: {expected_path.relative_to(REPO_ROOT)}\n"
                    f" Source: {prod.source_file.relative_to(REPO_ROOT)} ({prod.source_type})"
                )

    # Check contract → produce (orphaned contracts)
    for contract in self.contract_schemas:
        if not contract.producer:
            self.warnings.append(
                f"Contract missing producer field:\n"
                f" File: {contract.file_path.relative_to(REPO_ROOT)}\n"
                f" Schema ID: {contract.schema_id}"
            )
            continue

        # Find matching produce declaration.
        # Producer is stored as "wagon:<slug>"; strip the prefix.
        wagon_slug = contract.producer.replace("wagon:", "")
        found = False

        for prod in self.produce_declarations:
            if prod.wagon_slug == wagon_slug:
                # A match requires the declaration's contract URN to
                # resolve to exactly this contract's file path.
                if prod.contract_urn:
                    expected_path = self._resolve_contract_urn_to_path(prod.contract_urn)
                    if expected_path == contract.file_path:
                        found = True
                        break

        if not found:
            # Check if wagon exists (active or draft)
            if wagon_slug in self.wagon_registry or wagon_slug in self.draft_wagons:
                self.warnings.append(
                    f"Contract has producer but no matching produce declaration:\n"
                    f" File: {contract.file_path.relative_to(REPO_ROOT)}\n"
                    f" Producer: {contract.producer}\n"
                    f" Schema ID: {contract.schema_id}\n"
                    f" Note: Wagon {wagon_slug} exists but doesn't declare this artifact"
                )
            else:
                self.errors.append(
                    f"Contract references unknown wagon:\n"
                    f" File: {contract.file_path.relative_to(REPO_ROOT)}\n"
                    f" Producer: {contract.producer}\n"
                    f" Wagon not found in registry or plan/"
                )

    return missing_contracts
|
|
314
|
+
|
|
315
|
+
def _resolve_contract_urn_to_path(self, contract_urn: str) -> Optional[Path]:
    """Map a contract URN to its schema file path.

    Pattern: contract:{theme}:{domain}.{facet}
             → contracts/{theme}/{domain}/{facet}.schema.json

    Examples:
        contract:commons:player.identity → contracts/commons/player/identity.schema.json
        contract:mechanic:decision.choice → contracts/mechanic/decision/choice.schema.json

    Legacy URNs resolve by the same rule:
        contract:system:identifiers.username → contracts/system/identifiers/username.schema.json
        contract:player:identity → contracts/player/identity.schema.json

    Returns None for empty, "null", non-contract, or malformed URNs.
    """
    prefix = "contract:"
    if not contract_urn or contract_urn == "null" or not contract_urn.startswith(prefix):
        return None

    # Drop the scheme, then separate the theme from the remainder.
    # commons:player.identity → theme="commons", rest="player.identity"
    theme, _, domain_facet = contract_urn[len(prefix):].partition(":")
    if not domain_facet:
        # No second segment → malformed URN.
        return None

    # Dots become path separators: player.identity → player/identity
    relative = domain_facet.replace(".", "/")

    # contracts/{theme}/{domain}/{facet}.schema.json
    return CONTRACTS_DIR / theme / f"{relative}.schema.json"
|
|
355
|
+
|
|
356
|
+
def validate_contract_schemas(self):
    """Turn every failed meta-schema validation into an entry in self.errors."""
    for contract in self.contract_schemas:
        if contract.valid:
            continue

        # Cap the detail at three messages so one bad file can't flood
        # the report; note how many were suppressed.
        shown = contract.validation_errors[:3]
        hidden = len(contract.validation_errors) - 3
        if hidden > 0:
            shown.append(f"... and {hidden} more errors")

        detail = "\n".join(f"  - {err}" for err in shown)
        self.errors.append(
            f"Invalid contract schema: {contract.file_path.relative_to(REPO_ROOT)}\n{detail}"
        )
|
|
371
|
+
|
|
372
|
+
def generate_missing_contracts(self, missing_contracts: List[Tuple[ProduceDeclaration, Path]]):
    """Write template schemas for declared artifacts whose contract file is absent.

    No-op unless --fix mode is enabled and there is something to generate.
    Individual generation failures are recorded in self.errors; the rest
    of the batch still runs.
    """
    if not (self.auto_fix and missing_contracts):
        return

    print(f"\n🔧 Generating {len(missing_contracts)} missing contracts...")

    for prod, file_path in missing_contracts:
        try:
            self._generate_contract_schema(prod, file_path)
        except Exception as e:
            self.errors.append(f"Failed to generate {file_path.name}: {e}")
        else:
            print(f" ✅ {file_path.relative_to(REPO_ROOT)}")
|
|
385
|
+
|
|
386
|
+
def _generate_contract_schema(self, prod: ProduceDeclaration, file_path: Path):
    """Generate a contract schema template following NEW artifact naming conventions.

    NEW CONVENTION:
        Artifact name: commons:player.identity (theme:domain.facet)
        Contract URN:  contract:commons:player.identity
        Schema $id:    urn:contract:commons:player.identity (current) or commons:player.identity:v1 (spec)
        File path:     contracts/commons/player/identity.schema.json

    Writes the generated JSON schema to file_path, creating parent
    directories as needed. Raises on I/O failure (caller records it).
    """

    # Parse artifact name: {theme}:{domain}.{facet}
    # Examples: commons:player.identity, mechanic:decision.choice
    artifact_parts = prod.artifact_name.split(":", 1)

    if len(artifact_parts) >= 2:
        theme = artifact_parts[0]
        domain_facet = artifact_parts[1]

        # Split domain.facet; a dotless name uses the whole string for both.
        if "." in domain_facet:
            domain, facet = domain_facet.split(".", 1)
        else:
            domain = domain_facet
            facet = domain_facet

        resource = domain_facet  # Keep as is: player.identity
    else:
        # Fallback if no theme prefix: reuse the wagon's theme and the raw
        # artifact name for every derived field.
        theme = prod.wagon_theme
        domain = prod.artifact_name
        facet = prod.artifact_name
        resource = prod.artifact_name

    # Generate schema $id following contract.convention.yaml
    # Format: {theme}:{resource} (NO version suffix)
    # Example: commons:player.identity, ux:foundations
    schema_id = f"{theme}:{resource}"

    # Determine API path from domain.facet
    # player.identity → /player/identity
    api_path = "/" + resource.replace(".", "/")

    # Infer HTTP method based on REST best practices
    http_method = self._infer_http_method(prod.artifact_name, resource)

    # Generate title, e.g. "Commons Player Identity Contract"
    title = self._titlecase(f"{theme} {resource}") + " Contract"

    # Build API operations array per contract.convention.yaml
    # Build responses based on method: GET reads return 200, writes return 201.
    success_code = "200" if http_method == "GET" else "201"
    responses = {
        success_code: {
            "description": "Success",
            "schema": f"$ref: #/definitions/{self._titlecase(resource).replace(' ', '')}"
        },
        "400": {
            "description": "Bad Request"
        }
    }

    # Add method-specific error responses
    if http_method == "GET":
        responses["404"] = {"description": "Not Found"}
    else:
        responses["500"] = {"description": "Internal Server Error"}

    operations = [
        {
            "method": http_method,
            "path": api_path,
            "description": f"{'Retrieve' if http_method == 'GET' else 'Submit'} {resource}",
            "responses": responses,
            "idempotent": http_method in ["GET", "PUT", "DELETE"]
        }
    ]

    # Add request body for non-GET operations
    if http_method != "GET":
        operations[0]["requestBody"] = {
            "schema": f"$ref: #/definitions/{self._titlecase(resource).replace(' ', '')}",
            "required": True,
            "contentType": "application/json"
        }

    # Build traceability per contract.convention.yaml (REQUIRED)
    wagon_dir = prod.wagon_slug.replace("-", "_")
    traceability = {
        "wagon_ref": f"plan/{wagon_dir}/_{wagon_dir}.yaml",
        "feature_refs": [f"feature:{prod.wagon_slug}:TODO"],  # Placeholder
        "acceptance_refs": []
    }

    # Build testing metadata per contract.convention.yaml (REQUIRED)
    testing = {
        "directory": f"contracts/{theme}/{domain}/tests/",
        "schema_tests": [f"{facet}_schema_test.json"]
    }

    # Generate schema with versioning per contract.convention.yaml
    # New contracts start at v1.0.0 with status="draft"
    schema = {
        "$schema": "http://json-schema.org/draft-07/schema#",
        "$id": schema_id,
        "version": "1.0.0",
        "title": title,
        "description": f"Contract schema for {prod.artifact_name} artifact",
        "type": "object",
        "properties": {
            "_version": {
                "type": "string",
                "description": "Contract version for backward compatibility handling. Default '1' for v1.x data.",
                "default": "1"
            }
        },
        "required": [],
        "x-artifact-metadata": {
            "domain": domain,
            "resource": resource,
            "version": "1.0.0",
            "producer": f"wagon:{prod.wagon_slug}",
            "consumers": [],
            "dependencies": [],
            "api": {
                "version": "v1",
                "operations": operations
            },
            "traceability": traceability,
            "testing": testing,
            "governance": {
                "status": "draft",
                "stability": "experimental"
            }
        }
    }

    # Create directory if needed
    file_path.parent.mkdir(parents=True, exist_ok=True)

    # Write schema
    with open(file_path, 'w') as f:
        json.dump(schema, f, indent=2)
        f.write('\n')  # Add trailing newline
|
|
529
|
+
|
|
530
|
+
def _titlecase(self, text: str) -> str:
|
|
531
|
+
"""Convert kebab-case or dot notation to Title Case"""
|
|
532
|
+
return " ".join(word.capitalize() for word in text.replace("-", " ").replace("_", " ").replace(".", " ").split())
|
|
533
|
+
|
|
534
|
+
def _infer_http_method(self, artifact_name: str, resource: str) -> str:
|
|
535
|
+
"""Infer HTTP method based on REST best practices
|
|
536
|
+
|
|
537
|
+
Rules:
|
|
538
|
+
- GET: Retrieve state/data (identity, config, remaining, final, etc.)
|
|
539
|
+
- POST: Create/submit/trigger (choice, registered, started, agreement, etc.)
|
|
540
|
+
- PUT: Update entire resource
|
|
541
|
+
- PATCH: Partial update
|
|
542
|
+
- DELETE: Remove resource
|
|
543
|
+
|
|
544
|
+
Examples:
|
|
545
|
+
commons:player.identity → GET (retrieve identity)
|
|
546
|
+
mechanic:decision.choice → POST (submit choice)
|
|
547
|
+
match:score.final → GET (retrieve final score)
|
|
548
|
+
commons:account:registered → POST (registration event)
|
|
549
|
+
"""
|
|
550
|
+
# Extract facet from resource (last part after dot)
|
|
551
|
+
facet = resource.split(".")[-1] if "." in resource else resource
|
|
552
|
+
|
|
553
|
+
# GET patterns - retrieving state or data (nouns/adjectives)
|
|
554
|
+
get_patterns = [
|
|
555
|
+
"identity", "identities",
|
|
556
|
+
"claims",
|
|
557
|
+
"config", "configuration",
|
|
558
|
+
"remaining", "exhausted",
|
|
559
|
+
"final", "score",
|
|
560
|
+
"result", "results",
|
|
561
|
+
"current", "active", # State queries (not events)
|
|
562
|
+
"impact",
|
|
563
|
+
"evaluation-score", "evaluation",
|
|
564
|
+
"manifest", "catalog",
|
|
565
|
+
"foundations", "primitives", "components", "templates",
|
|
566
|
+
"audio", "animation", "haptics", "themes", "fallback",
|
|
567
|
+
"fragments", "fragment",
|
|
568
|
+
"data", "stream",
|
|
569
|
+
"raw", "presentation", "layer",
|
|
570
|
+
"profile", "personas"
|
|
571
|
+
]
|
|
572
|
+
|
|
573
|
+
# POST patterns - creating, submitting, triggering events (verbs/past participles)
|
|
574
|
+
post_patterns = [
|
|
575
|
+
"choice", "decision",
|
|
576
|
+
"registered", "registration",
|
|
577
|
+
"terminated", "termination",
|
|
578
|
+
"started", "finished", "paused", "resumed",
|
|
579
|
+
"committed", "updated", "changed", # State transition events
|
|
580
|
+
"succeeded", "failed", # Outcome events
|
|
581
|
+
"closed", "opened", # Session events
|
|
582
|
+
"turn-started", "turn-ended",
|
|
583
|
+
"agreement", "agreements",
|
|
584
|
+
"mapping", "attribution",
|
|
585
|
+
"rephrased",
|
|
586
|
+
"new", "create", "created",
|
|
587
|
+
"paired",
|
|
588
|
+
"uuid", "username" # Generators
|
|
589
|
+
]
|
|
590
|
+
|
|
591
|
+
# Check facet against patterns
|
|
592
|
+
facet_lower = facet.lower()
|
|
593
|
+
|
|
594
|
+
if any(pattern in facet_lower for pattern in get_patterns):
|
|
595
|
+
return "GET"
|
|
596
|
+
elif any(pattern in facet_lower for pattern in post_patterns):
|
|
597
|
+
return "POST"
|
|
598
|
+
|
|
599
|
+
# Default heuristics based on common patterns
|
|
600
|
+
if facet_lower.endswith(("ed", "ing")): # Past tense or gerund = event
|
|
601
|
+
return "POST"
|
|
602
|
+
elif facet_lower.endswith(("s", "list", "collection")): # Plural = list
|
|
603
|
+
return "GET"
|
|
604
|
+
|
|
605
|
+
# Default to GET for unknown patterns (prefer idempotent operations)
|
|
606
|
+
return "GET"
|
|
607
|
+
|
|
608
|
+
def run(self) -> bool:
    """Run the full producer-contract validation pipeline.

    Loads the wagon registry, scans produce declarations and contract
    schemas, validates schemas and bidirectional traceability, optionally
    auto-generates missing contracts (when ``self.auto_fix`` is set), and
    prints a human-readable summary report.

    Returns:
        bool: True when no errors were recorded; warnings alone do not
        fail the run.
    """
    print("🔍 Producer-Contract Traceability Validation")
    print("=" * 80)

    # Phase 1: wagon registry (populates wagon_registry / draft_wagons).
    print("\n📋 Loading wagon registry...")
    self.load_wagon_registry()
    print(f" Active wagons: {len(self.wagon_registry)}")
    print(f" Draft wagons: {len(self.draft_wagons)}")

    # Phase 2: collect produce declarations from wagons and features.
    print("\n🔍 Scanning produce declarations...")
    self.load_artifact_schema()
    self.scan_wagons()
    self.scan_features()
    print(f" Found {len(self.produce_declarations)} produce declarations")

    # Phase 3: collect contract schema files.
    print("\n🔍 Scanning contract schemas...")
    self.scan_contracts()
    print(f" Found {len(self.contract_schemas)} contract schemas")

    # Phase 4: structural validation against the contract meta-schema.
    print("\n🔍 Validating contract schemas against meta-schema...")
    self.validate_contract_schemas()

    # Phase 5: every produce declaration must have a contract and vice versa.
    print("\n🔍 Validating bidirectional traceability...")
    missing_contracts = self.validate_bidirectional_traceability()

    # Optionally scaffold the contracts that validation found missing.
    if self.auto_fix and missing_contracts:
        self.generate_missing_contracts(missing_contracts)

    # Print summary
    print("\n" + "=" * 80)
    print("VALIDATION SUMMARY")
    print("=" * 80)

    stats = {
        "produce_declarations": len(self.produce_declarations),
        "contract_schemas": len(self.contract_schemas),
        "valid_contracts": len([c for c in self.contract_schemas if c.valid]),
        "invalid_contracts": len([c for c in self.contract_schemas if not c.valid]),
        # After auto-fix the missing contracts were generated, so report 0.
        "missing_contracts": len(missing_contracts) if not self.auto_fix else 0,
        # Orphans are detected upstream and surface here as warning text.
        "orphaned_contracts": len([w for w in self.warnings if "no matching produce" in w]),
    }

    print(f"\n📊 Statistics:")
    print(f" Produce declarations: {stats['produce_declarations']}")
    print(f" Contract schemas: {stats['contract_schemas']}")
    print(f" Valid contracts: {stats['valid_contracts']}")
    print(f" Invalid contracts: {stats['invalid_contracts']}")
    if not self.auto_fix:
        print(f" Missing contracts: {stats['missing_contracts']}")
    print(f" Orphaned contracts: {stats['orphaned_contracts']}")

    if self.errors:
        print(f"\n❌ Errors ({len(self.errors)}):")
        for error in self.errors:
            print(f"\n{error}")

    if self.warnings:
        print(f"\n⚠️ Warnings ({len(self.warnings)}):")
        for warning in self.warnings:
            print(f"\n{warning}")

    if not self.errors and not self.warnings:
        print("\n✅ All producer-contract traceability checks passed!")
    elif not self.errors:
        print(f"\n✅ No errors, but {len(self.warnings)} warnings")

    print("\n" + "=" * 80)

    # Exit status mirrors the CLI contract: only errors fail the run.
    return len(self.errors) == 0
|
|
678
|
+
|
|
679
|
+
|
|
680
|
+
def scaffold_contract_metadata(
    artifact_urn: str,
    plan_dir: Path,
    contracts_dir: Path,
    convention_path: Optional[Path] = None
) -> Dict[str, Any]:
    """
    Generate complete contract metadata from wagon and feature interfaces.

    Implements SPEC-COACH-UTILS-0294: Parse artifact URN, scan wagon/feature
    manifests, generate complete x-artifact-metadata, and create the contract
    scaffold file on disk.

    Args:
        artifact_urn: Artifact URN (e.g. "mechanic:timebank.exhausted")
        plan_dir: Path to plan/ directory containing wagon manifests
        contracts_dir: Path to contracts/ directory
        convention_path: Optional path to artifact-naming.convention.yaml
            (currently unused; reserved for convention-driven overrides)

    Returns:
        Dict with keys: created (bool), path (str), metadata (dict)
    """
    # Parse artifact URN according to artifact-naming.convention.yaml
    # Pattern: theme(:category)*:aspect(.variant)?
    # Examples: mechanic:timebank.exhausted → theme=mechanic, aspect=timebank, variant=exhausted
    #           commons:ux:foundations:color → theme=commons, categories=[ux,foundations], aspect=color
    urn_parts = artifact_urn.split(":")
    theme = urn_parts[0]
    has_variant = "." in urn_parts[-1] if urn_parts else False

    if has_variant:
        aspect_variant = urn_parts[-1]
        aspect_base, variant = aspect_variant.split(".", 1)
        resource = aspect_variant
    else:
        aspect_base = urn_parts[-1] if len(urn_parts) > 1 else theme
        variant = None
        resource = artifact_urn.split(":", 1)[1] if ":" in artifact_urn else artifact_urn

    # Convert URN to contract file path:
    # mechanic:timebank.exhausted → contracts/mechanic/timebank/exhausted.schema.json
    path_segments = []
    for i, part in enumerate(urn_parts):
        if "." in part and i == len(urn_parts) - 1:
            # Last segment with variant: the variant becomes its own directory level.
            base, var = part.split(".", 1)
            path_segments.append(base)
            path_segments.append(var)
        else:
            path_segments.append(part)

    contract_path = contracts_dir / "/".join(path_segments[:-1]) / f"{path_segments[-1]}.schema.json"

    # Single pass over all wagon manifests: locate the (first) producer and
    # collect every wagon that consumes this contract. sorted() makes the
    # producer choice and consumer order deterministic across filesystems.
    producer_wagon = None
    producer_consume = []
    producer_features = []
    consumers = []

    for wagon_file in sorted(plan_dir.glob("*/_*.yaml")):
        with open(wagon_file) as f:
            # safe_load returns None for an empty document; normalize to {}.
            wagon_data = yaml.safe_load(f) or {}

        wagon_slug = wagon_data.get("wagon")

        if producer_wagon is None and any(
            item.get("name") == artifact_urn
            for item in wagon_data.get("produce", [])
        ):
            producer_wagon = wagon_slug
            producer_consume = wagon_data.get("consume", [])
            producer_features = wagon_data.get("features", [])

        if any(
            item.get("name") == f"contract:{artifact_urn}"
            for item in wagon_data.get("consume", [])
        ):
            consumers.append(f"wagon:{wagon_slug}")

    # Dependencies come from the producer's own consume[] declarations.
    dependencies = [item.get("name") for item in producer_consume if item.get("name")]

    # Infer API method from aspect/variant.
    http_method = _infer_http_method_for_scaffold(artifact_urn, aspect_base, variant)

    # Generate API path: colon separators become path segments; a variant
    # ("timebank.exhausted") becomes a trailing segment of its own.
    api_path = "/" + "/".join(urn_parts)
    if has_variant:
        api_path = "/" + "/".join(urn_parts[:-1]) + "/" + urn_parts[-1].replace(".", "/")

    # Traceability back to the producing wagon manifest and its features.
    wagon_snake = producer_wagon.replace("-", "_") if producer_wagon else "unknown"
    wagon_ref = f"plan/{wagon_snake}/_{wagon_snake}.yaml"
    feature_refs = [f.get("name") for f in producer_features if f.get("name")]

    # Testing paths, expressed relative to the repository root
    # (assumed to be contracts_dir.parent — TODO confirm against callers).
    test_dir_path = contract_path.parent / "tests"
    test_dir = str(test_dir_path.relative_to(contracts_dir.parent)) + "/"
    test_file = f"{path_segments[-1]}_schema_test.json"

    # Build x-artifact-metadata.
    # Domain is the base aspect without variant:
    # mechanic:timebank.exhausted → domain=timebank, resource=timebank.exhausted
    domain = aspect_base if has_variant else (urn_parts[1] if len(urn_parts) > 1 else theme)

    metadata = {
        "domain": domain,
        "resource": resource,
        "version": "1.0.0",
        "producer": f"wagon:{producer_wagon}" if producer_wagon else "wagon:unknown",
        "consumers": consumers,
        "dependencies": dependencies,
        "api": {
            "operations": [{
                "method": http_method,
                "path": api_path,
                "responses": {
                    "200": {"description": "Success"}
                }
            }]
        },
        "traceability": {
            "wagon_ref": wagon_ref,
            "feature_refs": feature_refs
        },
        "testing": {
            "directory": test_dir,
            "schema_tests": [test_file]
        },
        "governance": {
            "status": "draft",
            "stability": "experimental"
        }
    }

    # Create contract schema — new contracts start at v1.0.0 with draft status.
    schema = {
        "$schema": "http://json-schema.org/draft-07/schema#",
        "$id": artifact_urn,
        "version": "1.0.0",
        "title": _titlecase_scaffold(artifact_urn),
        "description": f"Contract schema for {artifact_urn}",
        "type": "object",
        "properties": {
            "_version": {
                "type": "string",
                "description": "Contract version for backward compatibility handling. Default '1' for v1.x data.",
                "default": "1"
            }
        },
        "x-artifact-metadata": metadata
    }

    # Write the contract file (pretty-printed JSON with trailing newline).
    contract_path.parent.mkdir(parents=True, exist_ok=True)
    with open(contract_path, 'w') as f:
        json.dump(schema, f, indent=2)
        f.write('\n')

    return {
        "created": True,
        "path": str(contract_path),
        "metadata": metadata
    }
|
|
858
|
+
|
|
859
|
+
|
|
860
|
+
def _infer_http_method_for_scaffold(urn: str, aspect: str, variant: Optional[str]) -> str:
|
|
861
|
+
"""Infer HTTP method from artifact URN patterns"""
|
|
862
|
+
check_term = variant if variant else aspect
|
|
863
|
+
check_term_lower = check_term.lower()
|
|
864
|
+
|
|
865
|
+
# Event patterns (POST)
|
|
866
|
+
if any(term in check_term_lower for term in ["exhausted", "started", "ended", "committed", "registered"]):
|
|
867
|
+
return "POST"
|
|
868
|
+
|
|
869
|
+
# State patterns (GET)
|
|
870
|
+
if any(term in check_term_lower for term in ["current", "remaining", "config", "state", "identity"]):
|
|
871
|
+
return "GET"
|
|
872
|
+
|
|
873
|
+
return "GET" # Default to GET
|
|
874
|
+
|
|
875
|
+
|
|
876
|
+
def _titlecase_scaffold(text: str) -> str:
|
|
877
|
+
"""Convert URN to title case"""
|
|
878
|
+
return " ".join(word.capitalize() for word in text.replace(":", " ").replace(".", " ").split())
|
|
879
|
+
|
|
880
|
+
|
|
881
|
+
def validate_and_update_contract_metadata(
    contract_path: Path,
    plan_dir: Path,
    contracts_dir: Path
) -> Dict[str, Any]:
    """
    Validate and update existing contract metadata completeness.

    Implements SPEC-COACH-UTILS-0295: re-scan wagon manifests, detect
    missing/outdated fields, update only what is needed, and preserve
    user customizations.

    Args:
        contract_path: Path to existing contract schema file
        plan_dir: Path to plan/ directory
        contracts_dir: Path to contracts/ directory

    Returns:
        Dict with keys: updates (dict), preserved_customizations (int)
    """
    # Load the contract as it currently exists on disk.
    with open(contract_path) as fh:
        contract = json.load(fh)

    current_meta = contract.get("x-artifact-metadata", {})
    artifact_urn = contract.get("$id", "")

    # Regenerate metadata from the current wagon manifests.
    # NOTE(review): scaffolding re-writes the contract file as a side
    # effect; the merged document below overwrites it again before return.
    fresh_meta = scaffold_contract_metadata(
        artifact_urn=artifact_urn,
        plan_dir=plan_dir,
        contracts_dir=contracts_dir
    )["metadata"]

    changes = {}
    custom_count = 0

    # Consumers: union existing with freshly discovered; report additions only.
    known = set(current_meta.get("consumers", []))
    discovered = set(fresh_meta.get("consumers", []))
    if discovered != known:
        current_meta["consumers"] = list(known | discovered)
        changes["consumers"] = list(discovered - known)

    # Fill in traceability.feature_refs only when absent or empty.
    trace = current_meta.get("traceability", {})
    if not trace.get("feature_refs"):
        trace["feature_refs"] = fresh_meta["traceability"]["feature_refs"]
        current_meta["traceability"] = trace
        changes["traceability.feature_refs"] = fresh_meta["traceability"]["feature_refs"]

    # Fill in testing.schema_tests only when absent or empty.
    tests = current_meta.get("testing", {})
    if not tests.get("schema_tests"):
        tests["schema_tests"] = fresh_meta["testing"]["schema_tests"]
        current_meta["testing"] = tests
        changes["testing.schema_tests"] = fresh_meta["testing"]["schema_tests"]

    # Count user customizations we deliberately left untouched.
    if contract.get("description") and "CUSTOM" in contract["description"]:
        custom_count += 1

    operations = current_meta.get("api", {}).get("operations", [])
    if operations and operations[0].get("description") and "CUSTOM" in operations[0]["description"]:
        custom_count += 1

    # Persist the merged document.
    contract["x-artifact-metadata"] = current_meta
    with open(contract_path, 'w') as fh:
        json.dump(contract, fh, indent=2)
        fh.write('\n')

    return {
        "updates": changes,
        "preserved_customizations": custom_count
    }
|
|
962
|
+
|
|
963
|
+
|
|
964
|
+
def create_placeholder_test_files(
    contract_path: Path,
    contracts_dir: Path
) -> Dict[str, Any]:
    """
    Create placeholder test files for scaffolded contracts.

    Implements SPEC-COACH-UTILS-0296: create the test directory, generate
    placeholder test files, and avoid overwriting existing tests.

    Args:
        contract_path: Path to contract schema file
        contracts_dir: Path to contracts/ directory

    Returns:
        Dict with keys: created (int), skipped (int), created_files (list),
        skipped_files (list)
    """
    # Read contract to get testing metadata.
    with open(contract_path) as f:
        contract = json.load(f)

    metadata = contract.get("x-artifact-metadata", {})
    testing = metadata.get("testing", {})

    test_dir_rel = testing.get("directory", "")
    test_files = testing.get("schema_tests", [])

    # Nothing to scaffold without both a directory and at least one file name.
    if not test_dir_rel or not test_files:
        return {
            "created": 0,
            "skipped": 0,
            "created_files": [],
            "skipped_files": []
        }

    # The metadata path is rooted at the repo ("contracts/..."); strip only
    # the LEADING "contracts/" — a global str.replace would also mangle any
    # nested directory that happens to be named "contracts".
    prefix = "contracts/"
    if test_dir_rel.startswith(prefix):
        test_dir_rel = test_dir_rel[len(prefix):]
    test_dir = contracts_dir / test_dir_rel
    test_dir.mkdir(parents=True, exist_ok=True)

    created_files = []
    skipped_files = []
    artifact_urn = contract.get("$id", "")

    for test_file in test_files:
        test_path = test_dir / test_file

        if test_path.exists():
            # Never clobber a test the user may have already filled in.
            skipped_files.append(test_file)
            continue

        # Minimal placeholder shape: one TODO case expected to validate.
        placeholder = {
            "description": f"TODO: Implement schema tests for {artifact_urn}",
            "contract": artifact_urn,
            "test_cases": [
                {
                    "name": "TODO: Add test case",
                    "input": {},
                    "expected": "valid"
                }
            ]
        }

        with open(test_path, 'w') as f:
            json.dump(placeholder, f, indent=2)
            f.write('\n')

        created_files.append(test_file)

    return {
        "created": len(created_files),
        "skipped": len(skipped_files),
        "created_files": created_files,
        "skipped_files": skipped_files
    }
|
|
1038
|
+
|
|
1039
|
+
|
|
1040
|
+
def main():
    """CLI entry point: run the producer-contract validator.

    Returns 0 when validation succeeds (no errors), 1 otherwise.
    """
    import argparse

    cli = argparse.ArgumentParser(
        description="Validate producer-contract traceability and generate missing contracts",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
 # Validate traceability
 python atdd/coach/commands/producer.py

 # Auto-generate missing contracts
 python atdd/coach/commands/producer.py --fix

 # Verbose output
 python atdd/coach/commands/producer.py --verbose
"""
    )
    cli.add_argument(
        "--fix",
        action="store_true",
        help="Auto-generate missing contract schemas"
    )
    cli.add_argument(
        "--verbose", "-v",
        action="store_true",
        help="Enable verbose output"
    )
    options = cli.parse_args()

    validator = ProducerValidator(auto_fix=options.fix, verbose=options.verbose)
    return 0 if validator.run() else 1
|
|
1075
|
+
|
|
1076
|
+
|
|
1077
|
+
if __name__ == "__main__":
    # Raise SystemExit directly: the built-in exit() is injected by the
    # site module and is not guaranteed to exist in every runtime.
    raise SystemExit(main())
|