atdd 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atdd/__init__.py +6 -0
- atdd/__main__.py +4 -0
- atdd/cli.py +404 -0
- atdd/coach/__init__.py +0 -0
- atdd/coach/commands/__init__.py +0 -0
- atdd/coach/commands/add_persistence_metadata.py +215 -0
- atdd/coach/commands/analyze_migrations.py +188 -0
- atdd/coach/commands/consumers.py +720 -0
- atdd/coach/commands/infer_governance_status.py +149 -0
- atdd/coach/commands/initializer.py +177 -0
- atdd/coach/commands/interface.py +1078 -0
- atdd/coach/commands/inventory.py +565 -0
- atdd/coach/commands/migration.py +240 -0
- atdd/coach/commands/registry.py +1560 -0
- atdd/coach/commands/session.py +430 -0
- atdd/coach/commands/sync.py +405 -0
- atdd/coach/commands/test_interface.py +399 -0
- atdd/coach/commands/test_runner.py +141 -0
- atdd/coach/commands/tests/__init__.py +1 -0
- atdd/coach/commands/tests/test_telemetry_array_validation.py +235 -0
- atdd/coach/commands/traceability.py +4264 -0
- atdd/coach/conventions/session.convention.yaml +754 -0
- atdd/coach/overlays/__init__.py +2 -0
- atdd/coach/overlays/claude.md +2 -0
- atdd/coach/schemas/config.schema.json +34 -0
- atdd/coach/schemas/manifest.schema.json +101 -0
- atdd/coach/templates/ATDD.md +282 -0
- atdd/coach/templates/SESSION-TEMPLATE.md +327 -0
- atdd/coach/utils/__init__.py +0 -0
- atdd/coach/utils/graph/__init__.py +0 -0
- atdd/coach/utils/graph/urn.py +875 -0
- atdd/coach/validators/__init__.py +0 -0
- atdd/coach/validators/shared_fixtures.py +365 -0
- atdd/coach/validators/test_enrich_wagon_registry.py +167 -0
- atdd/coach/validators/test_registry.py +575 -0
- atdd/coach/validators/test_session_validation.py +1183 -0
- atdd/coach/validators/test_traceability.py +448 -0
- atdd/coach/validators/test_update_feature_paths.py +108 -0
- atdd/coach/validators/test_validate_contract_consumers.py +297 -0
- atdd/coder/__init__.py +1 -0
- atdd/coder/conventions/adapter.recipe.yaml +88 -0
- atdd/coder/conventions/backend.convention.yaml +460 -0
- atdd/coder/conventions/boundaries.convention.yaml +666 -0
- atdd/coder/conventions/commons.convention.yaml +460 -0
- atdd/coder/conventions/complexity.recipe.yaml +109 -0
- atdd/coder/conventions/component-naming.convention.yaml +178 -0
- atdd/coder/conventions/design.convention.yaml +327 -0
- atdd/coder/conventions/design.recipe.yaml +273 -0
- atdd/coder/conventions/dto.convention.yaml +660 -0
- atdd/coder/conventions/frontend.convention.yaml +542 -0
- atdd/coder/conventions/green.convention.yaml +1012 -0
- atdd/coder/conventions/presentation.convention.yaml +587 -0
- atdd/coder/conventions/refactor.convention.yaml +535 -0
- atdd/coder/conventions/technology.convention.yaml +206 -0
- atdd/coder/conventions/tests/__init__.py +0 -0
- atdd/coder/conventions/tests/test_adapter_recipe.py +302 -0
- atdd/coder/conventions/tests/test_complexity_recipe.py +289 -0
- atdd/coder/conventions/tests/test_component_taxonomy.py +278 -0
- atdd/coder/conventions/tests/test_component_urn_naming.py +165 -0
- atdd/coder/conventions/tests/test_thinness_recipe.py +286 -0
- atdd/coder/conventions/thinness.recipe.yaml +82 -0
- atdd/coder/conventions/train.convention.yaml +325 -0
- atdd/coder/conventions/verification.protocol.yaml +53 -0
- atdd/coder/schemas/design_system.schema.json +361 -0
- atdd/coder/validators/__init__.py +0 -0
- atdd/coder/validators/test_commons_structure.py +485 -0
- atdd/coder/validators/test_complexity.py +416 -0
- atdd/coder/validators/test_cross_language_consistency.py +431 -0
- atdd/coder/validators/test_design_system_compliance.py +413 -0
- atdd/coder/validators/test_dto_testing_patterns.py +268 -0
- atdd/coder/validators/test_green_cross_stack_layers.py +168 -0
- atdd/coder/validators/test_green_layer_dependencies.py +148 -0
- atdd/coder/validators/test_green_python_layer_structure.py +103 -0
- atdd/coder/validators/test_green_supabase_layer_structure.py +103 -0
- atdd/coder/validators/test_import_boundaries.py +396 -0
- atdd/coder/validators/test_init_file_urns.py +593 -0
- atdd/coder/validators/test_preact_layer_boundaries.py +221 -0
- atdd/coder/validators/test_presentation_convention.py +260 -0
- atdd/coder/validators/test_python_architecture.py +674 -0
- atdd/coder/validators/test_quality_metrics.py +420 -0
- atdd/coder/validators/test_station_master_pattern.py +244 -0
- atdd/coder/validators/test_train_infrastructure.py +454 -0
- atdd/coder/validators/test_train_urns.py +293 -0
- atdd/coder/validators/test_typescript_architecture.py +616 -0
- atdd/coder/validators/test_usecase_structure.py +421 -0
- atdd/coder/validators/test_wagon_boundaries.py +586 -0
- atdd/conftest.py +126 -0
- atdd/planner/__init__.py +1 -0
- atdd/planner/conventions/acceptance.convention.yaml +538 -0
- atdd/planner/conventions/appendix.convention.yaml +187 -0
- atdd/planner/conventions/artifact-naming.convention.yaml +852 -0
- atdd/planner/conventions/component.convention.yaml +670 -0
- atdd/planner/conventions/criteria.convention.yaml +141 -0
- atdd/planner/conventions/feature.convention.yaml +371 -0
- atdd/planner/conventions/interface.convention.yaml +382 -0
- atdd/planner/conventions/steps.convention.yaml +141 -0
- atdd/planner/conventions/train.convention.yaml +552 -0
- atdd/planner/conventions/wagon.convention.yaml +275 -0
- atdd/planner/conventions/wmbt.convention.yaml +258 -0
- atdd/planner/schemas/acceptance.schema.json +336 -0
- atdd/planner/schemas/appendix.schema.json +78 -0
- atdd/planner/schemas/component.schema.json +114 -0
- atdd/planner/schemas/feature.schema.json +197 -0
- atdd/planner/schemas/train.schema.json +192 -0
- atdd/planner/schemas/wagon.schema.json +281 -0
- atdd/planner/schemas/wmbt.schema.json +59 -0
- atdd/planner/validators/__init__.py +0 -0
- atdd/planner/validators/conftest.py +5 -0
- atdd/planner/validators/test_draft_wagon_registry.py +374 -0
- atdd/planner/validators/test_plan_cross_refs.py +240 -0
- atdd/planner/validators/test_plan_uniqueness.py +224 -0
- atdd/planner/validators/test_plan_urn_resolution.py +268 -0
- atdd/planner/validators/test_plan_wagons.py +174 -0
- atdd/planner/validators/test_train_validation.py +514 -0
- atdd/planner/validators/test_wagon_urn_chain.py +648 -0
- atdd/planner/validators/test_wmbt_consistency.py +327 -0
- atdd/planner/validators/test_wmbt_vocabulary.py +632 -0
- atdd/tester/__init__.py +1 -0
- atdd/tester/conventions/artifact.convention.yaml +257 -0
- atdd/tester/conventions/contract.convention.yaml +1009 -0
- atdd/tester/conventions/filename.convention.yaml +555 -0
- atdd/tester/conventions/migration.convention.yaml +509 -0
- atdd/tester/conventions/red.convention.yaml +797 -0
- atdd/tester/conventions/routing.convention.yaml +51 -0
- atdd/tester/conventions/telemetry.convention.yaml +458 -0
- atdd/tester/schemas/a11y.tmpl.json +17 -0
- atdd/tester/schemas/artifact.schema.json +189 -0
- atdd/tester/schemas/contract.schema.json +591 -0
- atdd/tester/schemas/contract.tmpl.json +95 -0
- atdd/tester/schemas/db.tmpl.json +20 -0
- atdd/tester/schemas/e2e.tmpl.json +17 -0
- atdd/tester/schemas/edge_function.tmpl.json +17 -0
- atdd/tester/schemas/event.tmpl.json +17 -0
- atdd/tester/schemas/http.tmpl.json +19 -0
- atdd/tester/schemas/job.tmpl.json +18 -0
- atdd/tester/schemas/load.tmpl.json +21 -0
- atdd/tester/schemas/metric.tmpl.json +19 -0
- atdd/tester/schemas/pack.schema.json +139 -0
- atdd/tester/schemas/realtime.tmpl.json +20 -0
- atdd/tester/schemas/rls.tmpl.json +18 -0
- atdd/tester/schemas/script.tmpl.json +16 -0
- atdd/tester/schemas/sec.tmpl.json +18 -0
- atdd/tester/schemas/storage.tmpl.json +18 -0
- atdd/tester/schemas/telemetry.schema.json +128 -0
- atdd/tester/schemas/telemetry_tracking_manifest.schema.json +143 -0
- atdd/tester/schemas/test_filename.schema.json +194 -0
- atdd/tester/schemas/test_intent.schema.json +179 -0
- atdd/tester/schemas/unit.tmpl.json +18 -0
- atdd/tester/schemas/visual.tmpl.json +18 -0
- atdd/tester/schemas/ws.tmpl.json +17 -0
- atdd/tester/utils/__init__.py +0 -0
- atdd/tester/utils/filename.py +300 -0
- atdd/tester/validators/__init__.py +0 -0
- atdd/tester/validators/cleanup_duplicate_headers.py +116 -0
- atdd/tester/validators/cleanup_duplicate_headers_v2.py +135 -0
- atdd/tester/validators/conftest.py +5 -0
- atdd/tester/validators/coverage_gap_report.py +321 -0
- atdd/tester/validators/fix_dual_ac_references.py +179 -0
- atdd/tester/validators/remove_duplicate_lines.py +93 -0
- atdd/tester/validators/test_acceptance_urn_filename_mapping.py +359 -0
- atdd/tester/validators/test_acceptance_urn_separator.py +166 -0
- atdd/tester/validators/test_artifact_naming_category.py +307 -0
- atdd/tester/validators/test_contract_schema_compliance.py +706 -0
- atdd/tester/validators/test_contracts_structure.py +200 -0
- atdd/tester/validators/test_coverage_adequacy.py +797 -0
- atdd/tester/validators/test_dual_ac_reference.py +225 -0
- atdd/tester/validators/test_fixture_validity.py +372 -0
- atdd/tester/validators/test_isolation.py +487 -0
- atdd/tester/validators/test_migration_coverage.py +204 -0
- atdd/tester/validators/test_migration_criteria.py +276 -0
- atdd/tester/validators/test_migration_generation.py +116 -0
- atdd/tester/validators/test_python_test_naming.py +410 -0
- atdd/tester/validators/test_red_layer_validation.py +95 -0
- atdd/tester/validators/test_red_python_layer_structure.py +87 -0
- atdd/tester/validators/test_red_supabase_layer_structure.py +90 -0
- atdd/tester/validators/test_telemetry_structure.py +634 -0
- atdd/tester/validators/test_typescript_test_naming.py +301 -0
- atdd/tester/validators/test_typescript_test_structure.py +84 -0
- atdd-0.2.1.dist-info/METADATA +221 -0
- atdd-0.2.1.dist-info/RECORD +184 -0
- atdd-0.2.1.dist-info/WHEEL +5 -0
- atdd-0.2.1.dist-info/entry_points.txt +2 -0
- atdd-0.2.1.dist-info/licenses/LICENSE +674 -0
- atdd-0.2.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,1560 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Unified Registry System - Load and build all artifact registries.
|
|
3
|
+
|
|
4
|
+
Architecture: 4-Layer Clean Architecture (single file)
|
|
5
|
+
- Domain: Pure business logic (change detection, validation)
|
|
6
|
+
- Integration: File I/O adapters (YAML, file scanning)
|
|
7
|
+
- Application: Use cases (load registry, build registry)
|
|
8
|
+
- Presentation: CLI facades (RegistryLoader, RegistryBuilder)
|
|
9
|
+
|
|
10
|
+
Registries:
|
|
11
|
+
- plan/_wagons.yaml from wagon manifests
|
|
12
|
+
- contracts/_artifacts.yaml from contract schemas
|
|
13
|
+
- telemetry/_signals.yaml from telemetry signals
|
|
14
|
+
- atdd/tester/_tests.yaml from test files
|
|
15
|
+
- python/_implementations.yaml from Python files
|
|
16
|
+
- supabase/_functions.yaml from function files
|
|
17
|
+
|
|
18
|
+
This command helps maintain coherence between source files and registries.
|
|
19
|
+
"""
|
|
20
|
+
import yaml
|
|
21
|
+
import json
|
|
22
|
+
import re
|
|
23
|
+
import ast
|
|
24
|
+
import sys
|
|
25
|
+
from pathlib import Path
|
|
26
|
+
from typing import Dict, List, Any, Optional
|
|
27
|
+
|
|
28
|
+
# Import URNBuilder for URN generation (following conventions)
|
|
29
|
+
try:
|
|
30
|
+
from atdd.coach.utils.graph.urn import URNBuilder
|
|
31
|
+
except ImportError:
|
|
32
|
+
# Fallback if URNBuilder not available
|
|
33
|
+
class URNBuilder:
|
|
34
|
+
@staticmethod
|
|
35
|
+
def test(wagon: str, file: str, func: str) -> str:
|
|
36
|
+
return f"test:{wagon}:{file}::{func}"
|
|
37
|
+
|
|
38
|
+
@staticmethod
|
|
39
|
+
def impl(wagon: str, layer: str, component: str, lang: str) -> str:
|
|
40
|
+
return f"impl:{wagon}:{layer}:{component}:{lang}"
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
# ============================================================================
|
|
44
|
+
# PRESENTATION LAYER - CLI Facades
|
|
45
|
+
# ============================================================================
|
|
46
|
+
# Public API for loading and building registries.
|
|
47
|
+
# Delegates to application layer use cases.
|
|
48
|
+
# ============================================================================
|
|
49
|
+
|
|
50
|
+
class RegistryLoader:
    """Loads and queries registries (read-only).

    Each ``load_*`` method reads one registry YAML file and returns its
    parsed content.  When the file is missing or empty, an empty registry
    mapping (``{<key>: []}``) is returned instead of raising, so callers
    can always iterate the collection.
    """

    def __init__(self, repo_root: Path):
        """
        Args:
            repo_root: Repository root directory containing the registry
                subdirectories (plan/, contracts/, telemetry/, ...).
        """
        self.repo_root = repo_root
        self.plan_dir = repo_root / "plan"
        self.contracts_dir = repo_root / "contracts"
        self.telemetry_dir = repo_root / "telemetry"
        self.tester_dir = repo_root / "atdd" / "tester"
        self.python_dir = repo_root / "python"
        self.supabase_dir = repo_root / "supabase"

    def _load_registry(self, registry_path: Path, key: str) -> Dict[str, Any]:
        """
        Load one registry YAML file with a uniform empty-registry fallback.

        Args:
            registry_path: Path to the registry YAML file
            key: Top-level collection key used for the empty fallback

        Returns:
            Parsed registry mapping, or ``{key: []}`` when the file is
            absent or parses to a falsy value (e.g. an empty file).
        """
        if not registry_path.exists():
            return {key: []}

        with open(registry_path) as f:
            # yaml.safe_load returns None for an empty document.
            return yaml.safe_load(f) or {key: []}

    def load_all(self) -> Dict[str, Any]:
        """Load all registries without distinction."""
        return {
            "plan": self.load_planner(),
            "contracts": self.load_contracts(),
            "telemetry": self.load_telemetry(),
            "tester": self.load_tester(),
            "coder": self.load_coder(),
            "supabase": self.load_supabase()
        }

    def load_planner(self) -> Dict[str, Any]:
        """Load planner registry (plan/_wagons.yaml)."""
        return self._load_registry(self.plan_dir / "_wagons.yaml", "wagons")

    def load_contracts(self) -> Dict[str, Any]:
        """Load contracts registry (contracts/_artifacts.yaml)."""
        return self._load_registry(self.contracts_dir / "_artifacts.yaml", "artifacts")

    def load_telemetry(self) -> Dict[str, Any]:
        """Load telemetry registry (telemetry/_signals.yaml)."""
        return self._load_registry(self.telemetry_dir / "_signals.yaml", "signals")

    def load_tester(self) -> Dict[str, Any]:
        """Load tester registry (atdd/tester/_tests.yaml)."""
        return self._load_registry(self.tester_dir / "_tests.yaml", "tests")

    def load_coder(self) -> Dict[str, Any]:
        """Load coder implementation registry (python/_implementations.yaml)."""
        return self._load_registry(self.python_dir / "_implementations.yaml", "implementations")

    def load_supabase(self) -> Dict[str, Any]:
        """Load supabase functions registry (supabase/_functions.yaml)."""
        return self._load_registry(self.supabase_dir / "_functions.yaml", "functions")

    def find_implementations_for_spec(self, spec_urn: str) -> List[Dict]:
        """Find all implementations linked to a spec URN."""
        coder_data = self.load_coder()
        return [
            impl for impl in coder_data.get("implementations", [])
            if impl.get("spec_urn") == spec_urn
        ]

    def find_tests_for_implementation(self, impl_urn: str) -> Optional[str]:
        """Find test URN linked to an implementation, or None if not found."""
        coder_data = self.load_coder()
        for impl in coder_data.get("implementations", []):
            if impl.get("urn") == impl_urn:
                return impl.get("test_urn")
        return None
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
# ============================================================================
|
|
145
|
+
# APPLICATION LAYER - Use Cases & Orchestration
|
|
146
|
+
# ============================================================================
|
|
147
|
+
# Coordinates domain and integration layers.
|
|
148
|
+
# Contains registry building logic and workflow orchestration.
|
|
149
|
+
# ============================================================================
|
|
150
|
+
|
|
151
|
+
class RegistryBuilder:
|
|
152
|
+
"""Builds and updates registries from source files (formerly RegistryUpdater)."""
|
|
153
|
+
|
|
154
|
+
def __init__(self, repo_root: Path):
|
|
155
|
+
self.repo_root = repo_root
|
|
156
|
+
self.plan_dir = repo_root / "plan"
|
|
157
|
+
self.contracts_dir = repo_root / "contracts"
|
|
158
|
+
self.telemetry_dir = repo_root / "telemetry"
|
|
159
|
+
self.tester_dir = repo_root / "atdd" / "tester"
|
|
160
|
+
self.python_dir = repo_root / "python"
|
|
161
|
+
self.supabase_dir = repo_root / "supabase"
|
|
162
|
+
|
|
163
|
+
# ========================================================================
|
|
164
|
+
# DOMAIN LAYER - Pure Business Logic (Change Detection)
|
|
165
|
+
# ========================================================================
|
|
166
|
+
# No I/O, no side effects - pure functions for detecting changes
|
|
167
|
+
# ========================================================================
|
|
168
|
+
|
|
169
|
+
def _detect_changes(self, slug: str, old_entry: Dict, new_entry: Dict) -> List[str]:
|
|
170
|
+
"""
|
|
171
|
+
Detect field-level changes between old and new wagon entries.
|
|
172
|
+
|
|
173
|
+
Returns:
|
|
174
|
+
List of changed field names
|
|
175
|
+
"""
|
|
176
|
+
changed_fields = []
|
|
177
|
+
|
|
178
|
+
# Fields to compare
|
|
179
|
+
compare_fields = ["description", "theme", "subject", "context", "action",
|
|
180
|
+
"goal", "outcome", "produce", "consume", "wmbt", "total"]
|
|
181
|
+
|
|
182
|
+
for field in compare_fields:
|
|
183
|
+
old_val = old_entry.get(field)
|
|
184
|
+
new_val = new_entry.get(field)
|
|
185
|
+
|
|
186
|
+
if old_val != new_val:
|
|
187
|
+
changed_fields.append(field)
|
|
188
|
+
|
|
189
|
+
return changed_fields
|
|
190
|
+
|
|
191
|
+
def _detect_contract_changes(self, artifact_id: str, old_entry: Dict, new_entry: Dict) -> List[str]:
|
|
192
|
+
"""
|
|
193
|
+
Detect field-level changes between old and new contract entries.
|
|
194
|
+
|
|
195
|
+
Returns:
|
|
196
|
+
List of changed field names
|
|
197
|
+
"""
|
|
198
|
+
changed_fields = []
|
|
199
|
+
|
|
200
|
+
# Fields to compare
|
|
201
|
+
compare_fields = ["urn", "version", "title", "description", "path", "producer", "consumers"]
|
|
202
|
+
|
|
203
|
+
for field in compare_fields:
|
|
204
|
+
old_val = old_entry.get(field)
|
|
205
|
+
new_val = new_entry.get(field)
|
|
206
|
+
|
|
207
|
+
if old_val != new_val:
|
|
208
|
+
changed_fields.append(field)
|
|
209
|
+
|
|
210
|
+
return changed_fields
|
|
211
|
+
|
|
212
|
+
def _detect_telemetry_changes(self, signal_id: str, old_entry: Dict, new_entry: Dict) -> List[str]:
|
|
213
|
+
"""
|
|
214
|
+
Detect field-level changes between old and new telemetry signal entries.
|
|
215
|
+
|
|
216
|
+
Returns:
|
|
217
|
+
List of changed field names
|
|
218
|
+
"""
|
|
219
|
+
changed_fields = []
|
|
220
|
+
|
|
221
|
+
# Fields to compare
|
|
222
|
+
compare_fields = ["type", "description", "path"]
|
|
223
|
+
|
|
224
|
+
for field in compare_fields:
|
|
225
|
+
old_val = old_entry.get(field)
|
|
226
|
+
new_val = new_entry.get(field)
|
|
227
|
+
|
|
228
|
+
if old_val != new_val:
|
|
229
|
+
changed_fields.append(field)
|
|
230
|
+
|
|
231
|
+
return changed_fields
|
|
232
|
+
|
|
233
|
+
def _extract_features_from_manifest(self, manifest: Dict, wagon_slug: str) -> List[Dict]:
|
|
234
|
+
"""
|
|
235
|
+
Extract features list from wagon manifest (DOMAIN logic).
|
|
236
|
+
|
|
237
|
+
Args:
|
|
238
|
+
manifest: Wagon manifest data
|
|
239
|
+
wagon_slug: Wagon slug for legacy format conversion
|
|
240
|
+
|
|
241
|
+
Returns:
|
|
242
|
+
List of feature objects with 'urn' key, empty list if no features
|
|
243
|
+
"""
|
|
244
|
+
if "features" not in manifest or not manifest["features"]:
|
|
245
|
+
return []
|
|
246
|
+
|
|
247
|
+
features_data = manifest["features"]
|
|
248
|
+
|
|
249
|
+
# Handle array format (current)
|
|
250
|
+
if isinstance(features_data, list):
|
|
251
|
+
return features_data
|
|
252
|
+
|
|
253
|
+
# Handle legacy dict format
|
|
254
|
+
if isinstance(features_data, dict):
|
|
255
|
+
return [{"urn": f"feature:{wagon_slug}.{k}"} for k in features_data.keys()]
|
|
256
|
+
|
|
257
|
+
return []
|
|
258
|
+
|
|
259
|
+
def _extract_wmbt_total_from_manifest(self, manifest: Dict) -> int:
|
|
260
|
+
"""
|
|
261
|
+
Extract WMBT total count from wagon manifest (DOMAIN logic).
|
|
262
|
+
|
|
263
|
+
Args:
|
|
264
|
+
manifest: Wagon manifest data
|
|
265
|
+
|
|
266
|
+
Returns:
|
|
267
|
+
Total WMBT count, 0 if not found
|
|
268
|
+
"""
|
|
269
|
+
# Try wmbt.total first (current location)
|
|
270
|
+
if "wmbt" in manifest and isinstance(manifest["wmbt"], dict):
|
|
271
|
+
return manifest["wmbt"].get("total", 0)
|
|
272
|
+
|
|
273
|
+
# Fallback to root-level total (legacy)
|
|
274
|
+
return manifest.get("total", 0)
|
|
275
|
+
|
|
276
|
+
def _parse_feature_urn(self, urn: str) -> tuple[str, str]:
|
|
277
|
+
"""
|
|
278
|
+
Parse feature URN to extract wagon and feature slugs (DOMAIN logic).
|
|
279
|
+
|
|
280
|
+
Args:
|
|
281
|
+
urn: Feature URN in format feature:wagon-slug:feature-slug or feature:wagon-slug.feature-slug
|
|
282
|
+
|
|
283
|
+
Returns:
|
|
284
|
+
Tuple of (wagon_slug, feature_slug)
|
|
285
|
+
"""
|
|
286
|
+
if not urn or not urn.startswith("feature:"):
|
|
287
|
+
return ("", "")
|
|
288
|
+
|
|
289
|
+
# Remove 'feature:' prefix
|
|
290
|
+
rest = urn.replace("feature:", "")
|
|
291
|
+
|
|
292
|
+
# Try colon separator first (current format), then dot (legacy format)
|
|
293
|
+
if ":" in rest:
|
|
294
|
+
parts = rest.split(":", 1)
|
|
295
|
+
elif "." in rest:
|
|
296
|
+
parts = rest.split(".", 1)
|
|
297
|
+
else:
|
|
298
|
+
return ("", "")
|
|
299
|
+
|
|
300
|
+
if len(parts) != 2:
|
|
301
|
+
return ("", "")
|
|
302
|
+
|
|
303
|
+
return (parts[0], parts[1])
|
|
304
|
+
|
|
305
|
+
def _kebab_to_snake(self, text: str) -> str:
|
|
306
|
+
"""
|
|
307
|
+
Convert kebab-case to snake_case (DOMAIN logic).
|
|
308
|
+
|
|
309
|
+
Args:
|
|
310
|
+
text: String in kebab-case (e.g., 'maintain-ux')
|
|
311
|
+
|
|
312
|
+
Returns:
|
|
313
|
+
String in snake_case (e.g., 'maintain_ux')
|
|
314
|
+
"""
|
|
315
|
+
return text.replace("-", "_")
|
|
316
|
+
|
|
317
|
+
def _find_implementation_paths(self, wagon_snake: str, feature_snake: str) -> List[str]:
|
|
318
|
+
"""
|
|
319
|
+
Find existing implementation directories for a feature (INTEGRATION logic).
|
|
320
|
+
|
|
321
|
+
Args:
|
|
322
|
+
wagon_snake: Wagon name in snake_case
|
|
323
|
+
feature_snake: Feature name in snake_case
|
|
324
|
+
|
|
325
|
+
Returns:
|
|
326
|
+
List of relative paths to existing implementation directories
|
|
327
|
+
"""
|
|
328
|
+
paths = []
|
|
329
|
+
|
|
330
|
+
# Check each potential implementation location
|
|
331
|
+
locations = [
|
|
332
|
+
self.repo_root / "python" / wagon_snake / feature_snake,
|
|
333
|
+
self.repo_root / "lib" / wagon_snake / feature_snake,
|
|
334
|
+
self.repo_root / "supabase" / "functions" / wagon_snake / feature_snake,
|
|
335
|
+
self.repo_root / "packages" / wagon_snake / feature_snake
|
|
336
|
+
]
|
|
337
|
+
|
|
338
|
+
for location in locations:
|
|
339
|
+
if location.exists() and location.is_dir():
|
|
340
|
+
# Store as relative path with trailing slash
|
|
341
|
+
rel_path = location.relative_to(self.repo_root)
|
|
342
|
+
paths.append(str(rel_path) + "/")
|
|
343
|
+
|
|
344
|
+
return sorted(paths)
|
|
345
|
+
|
|
346
|
+
# ========================================================================
|
|
347
|
+
# PRESENTATION LAYER - Output Formatting
|
|
348
|
+
# ========================================================================
|
|
349
|
+
# CLI output formatting and user interaction
|
|
350
|
+
# ========================================================================
|
|
351
|
+
|
|
352
|
+
    def _print_change_report(self, changes: List[Dict], preserved_drafts: List[str]):
        """
        Print detailed change report.

        Emits a human-readable summary to stdout, grouped into new wagons,
        updated wagons (with per-field diffs), an unchanged count, and
        preserved draft wagons. Prints nothing at all when both inputs
        are empty.

        Args:
            changes: List of change records; each has "type" ("new" or
                "updated"), "wagon" (slug), and for updates "fields"
                (changed field names)
            preserved_drafts: List of preserved draft wagon slugs
        """
        # Fully silent when there is nothing to report.
        if not changes and not preserved_drafts:
            return

        print("\n" + "=" * 60)
        print("DETAILED CHANGE REPORT")
        print("=" * 60)

        # Group changes by type
        new_wagons = [c for c in changes if c["type"] == "new"]
        updated_wagons = [c for c in changes if c["type"] == "updated"]

        # Report new wagons
        if new_wagons:
            print(f"\nđ NEW WAGONS ({len(new_wagons)}):")
            # Sorted by slug for stable, scannable output.
            for change in sorted(new_wagons, key=lambda x: x["wagon"]):
                print(f" ⢠{change['wagon']}")

        # Report updated wagons with field changes
        if updated_wagons:
            print(f"\nđ UPDATED WAGONS ({len(updated_wagons)}):")
            for change in sorted(updated_wagons, key=lambda x: x["wagon"]):
                fields = ", ".join(change["fields"])
                print(f" ⢠{change['wagon']}")
                print(f" Changed fields: {fields}")

        # Report unchanged wagons (synced but no changes)
        # NOTE(review): entries counted here ("updated" with empty fields)
        # also appear in the UPDATED section above with a blank field list —
        # confirm that double listing is intended.
        unchanged_count = len([c for c in changes if c["type"] == "updated" and not c["fields"]])
        if unchanged_count > 0:
            print(f"\nâ UNCHANGED (synced, no changes): {unchanged_count} wagons")

        # Report preserved drafts
        if preserved_drafts:
            print(f"\nđ PRESERVED DRAFT WAGONS ({len(preserved_drafts)}):")
            for slug in sorted(preserved_drafts):
                print(f" ⢠{slug}")

        print("\n" + "=" * 60)
|
|
397
|
+
|
|
398
|
+
    def _print_contract_change_report(self, changes: List[Dict]):
        """
        Print detailed change report for contracts.

        Emits a human-readable summary to stdout, grouped into new and
        updated artifacts (with per-field diffs). Prints nothing when
        `changes` is empty.

        Args:
            changes: List of change records; each has "type" ("new" or
                "updated"), "artifact" (identifier), and for updates
                "fields" (changed field names)
        """
        # Fully silent when there is nothing to report.
        if not changes:
            return

        print("\n" + "=" * 60)
        print("DETAILED CHANGE REPORT")
        print("=" * 60)

        # Group changes by type
        new_artifacts = [c for c in changes if c["type"] == "new"]
        updated_artifacts = [c for c in changes if c["type"] == "updated"]

        # Report new artifacts
        if new_artifacts:
            print(f"\nđ NEW ARTIFACTS ({len(new_artifacts)}):")
            # Sorted by artifact id for stable output.
            for change in sorted(new_artifacts, key=lambda x: x["artifact"]):
                print(f" ⢠{change['artifact']}")

        # Report updated artifacts with field changes
        if updated_artifacts:
            print(f"\nđ UPDATED ARTIFACTS ({len(updated_artifacts)}):")
            for change in sorted(updated_artifacts, key=lambda x: x["artifact"]):
                fields = ", ".join(change["fields"])
                print(f" ⢠{change['artifact']}")
                print(f" Changed fields: {fields}")

        print("\n" + "=" * 60)
|
|
431
|
+
|
|
432
|
+
    def _print_telemetry_change_report(self, changes: List[Dict]):
        """
        Print detailed change report for telemetry signals.

        Emits a human-readable summary to stdout, grouped into new and
        updated signals (with per-field diffs). Prints nothing when
        `changes` is empty.

        Args:
            changes: List of change records; each has "type" ("new" or
                "updated"), "signal" (identifier), and for updates
                "fields" (changed field names)
        """
        # Fully silent when there is nothing to report.
        if not changes:
            return

        print("\n" + "=" * 60)
        print("DETAILED CHANGE REPORT")
        print("=" * 60)

        # Group changes by type
        new_signals = [c for c in changes if c["type"] == "new"]
        updated_signals = [c for c in changes if c["type"] == "updated"]

        # Report new signals
        if new_signals:
            print(f"\nđ NEW SIGNALS ({len(new_signals)}):")
            # Sorted by signal id for stable output.
            for change in sorted(new_signals, key=lambda x: x["signal"]):
                print(f" ⢠{change['signal']}")

        # Report updated signals with field changes
        if updated_signals:
            print(f"\nđ UPDATED SIGNALS ({len(updated_signals)}):")
            for change in sorted(updated_signals, key=lambda x: x["signal"]):
                fields = ", ".join(change["fields"])
                print(f" ⢠{change['signal']}")
                print(f" Changed fields: {fields}")

        print("\n" + "=" * 60)
|
|
465
|
+
|
|
466
|
+
# ========================================================================
|
|
467
|
+
# INTEGRATION LAYER - File I/O & Source Scanning
|
|
468
|
+
# ========================================================================
|
|
469
|
+
# Reads/writes YAML files, scans directories for source files
|
|
470
|
+
# ========================================================================
|
|
471
|
+
|
|
472
|
+
def update_wagon_registry(self, preview_only: bool = False) -> Dict[str, Any]:
    """
    Update plan/_wagons.yaml from wagon manifest files.

    Scans ``plan/*/_*.yaml`` manifests, rebuilds registry entries from
    them, preserves "draft" wagons (entries with neither a manifest nor
    a path), prints a preview and detailed change report, and — unless
    ``preview_only`` — asks for interactive confirmation before
    rewriting the registry file.

    Args:
        preview_only: If True, only show what would change without applying

    Returns:
        Statistics about the update: "total_manifests", "updated",
        "new", "preserved_drafts", "changes" (per-wagon change records),
        plus "cancelled" when the user declines.
    """
    print("đ Analyzing wagon registry from manifest files...")

    # Load existing registry so we can diff against it and keep drafts.
    registry_path = self.plan_dir / "_wagons.yaml"
    if registry_path.exists():
        with open(registry_path) as f:
            registry_data = yaml.safe_load(f)
        existing_wagons = {w.get("wagon"): w for w in registry_data.get("wagons", [])}
    else:
        existing_wagons = {}

    # Scan for wagon manifests: any plan/<wagon>/_*.yaml except the
    # registry file itself.
    manifest_files = list(self.plan_dir.glob("*/_*.yaml"))
    manifest_files = [f for f in manifest_files if f.name != "_wagons.yaml"]

    updated_wagons = []
    stats = {
        "total_manifests": len(manifest_files),
        "updated": 0,
        "new": 0,
        "preserved_drafts": 0,
        "changes": []  # Track detailed changes
    }

    for manifest_path in sorted(manifest_files):
        try:
            with open(manifest_path) as f:
                manifest = yaml.safe_load(f)

            # A manifest without a wagon slug cannot be registered.
            slug = manifest.get("wagon", "")
            if not slug:
                print(f" â ď¸ Skipping {manifest_path}: no wagon slug found")
                continue

            # Get paths relative to the repo root for portable registry entries.
            wagon_dir = manifest_path.parent
            rel_manifest = str(manifest_path.relative_to(self.repo_root))
            rel_dir = str(wagon_dir.relative_to(self.repo_root)) + "/"

            # Build registry entry; missing manifest keys default to
            # empty values so the entry shape is always uniform.
            entry = {
                "wagon": slug,
                "description": manifest.get("description", ""),
                "theme": manifest.get("theme", ""),
                "subject": manifest.get("subject", ""),
                "context": manifest.get("context", ""),
                "action": manifest.get("action", ""),
                "goal": manifest.get("goal", ""),
                "outcome": manifest.get("outcome", ""),
                "produce": manifest.get("produce", []),
                "consume": manifest.get("consume", []),
                "wmbt": manifest.get("wmbt", {}),
                "total": manifest.get("total", 0),
                "manifest": rel_manifest,
                "path": rel_dir
            }

            # Check if updating an existing entry or adding a new one.
            # NOTE(review): "updated" counts every re-scanned wagon even
            # when no field actually changed; only stats["changes"]
            # reflects real differences.
            if slug in existing_wagons:
                stats["updated"] += 1
                # Track field-level changes
                changes = self._detect_changes(slug, existing_wagons[slug], entry)
                if changes:
                    stats["changes"].append({
                        "wagon": slug,
                        "type": "updated",
                        "fields": changes
                    })
            else:
                stats["new"] += 1
                stats["changes"].append({
                    "wagon": slug,
                    "type": "new",
                    "fields": ["all fields (new wagon)"]
                })

            updated_wagons.append(entry)

        except Exception as e:
            # Best-effort: a broken manifest is reported and skipped.
            print(f" â Error processing {manifest_path}: {e}")

    # Preserve draft wagons (existing entries without manifests).
    # NOTE(review): entries that HAVE a manifest/path but whose manifest
    # file disappeared are silently dropped here — confirm intended.
    preserved_drafts = []
    for slug, wagon in existing_wagons.items():
        if not wagon.get("manifest") and not wagon.get("path"):
            updated_wagons.append(wagon)
            preserved_drafts.append(slug)
            stats["preserved_drafts"] += 1

    # Sort by wagon slug for stable, diff-friendly output.
    updated_wagons.sort(key=lambda w: w.get("wagon", ""))

    # Show preview
    print(f"\nđ PREVIEW:")
    print(f" ⢠{stats['updated']} wagons will be updated")
    print(f" ⢠{stats['new']} new wagons will be added")
    print(f" ⢠{stats['preserved_drafts']} draft wagons will be preserved")

    # Print detailed change report
    self._print_change_report(stats["changes"], preserved_drafts)

    # If preview only, return early without touching the file.
    if preview_only:
        print("\nâ ď¸ Preview mode - no changes applied")
        return stats

    # Ask for user approval (interactive; requires literal "yes").
    print("\nâ Do you want to apply these changes to the registry?")
    print(" Type 'yes' to confirm, or anything else to cancel:")
    response = input(" > ").strip().lower()

    if response != "yes":
        print("\nâ Update cancelled by user")
        stats["cancelled"] = True
        return stats

    # Write updated registry
    output = {"wagons": updated_wagons}
    with open(registry_path, "w") as f:
        yaml.dump(output, f, default_flow_style=False, sort_keys=False, allow_unicode=True)

    print(f"\nâ Registry updated successfully!")
    print(f" ⢠Updated {stats['updated']} wagons")
    print(f" ⢠Added {stats['new']} new wagons")
    print(f" ⢠Preserved {stats['preserved_drafts']} draft wagons")
    print(f" đ Registry: {registry_path}")

    return stats
|
|
610
|
+
|
|
611
|
+
def update_contract_registry(self, preview_only: bool = False) -> Dict[str, Any]:
    """
    Update contracts/_artifacts.yaml from contract schema files.

    Scans ``contracts/**/*.schema.json``, builds one artifact entry per
    schema, prints a preview and detailed change report, and — unless
    ``preview_only`` — asks for interactive confirmation before
    rewriting the registry file.

    Args:
        preview_only: If True, only show what would change without applying

    Returns:
        Statistics about the update: "total_schemas", "processed",
        "updated", "new", "errors", "changes", plus "cancelled" when
        the user declines.
    """
    print("\nđ Analyzing contract registry from schema files...")

    # Load existing registry so we can diff against it.
    registry_path = self.contracts_dir / "_artifacts.yaml"
    existing_artifacts = {}
    if registry_path.exists():
        with open(registry_path) as f:
            registry_data = yaml.safe_load(f)
        existing_artifacts = {a.get("id"): a for a in registry_data.get("artifacts", [])}

    artifacts = []
    stats = {
        "total_schemas": 0,
        "processed": 0,
        "updated": 0,
        "new": 0,
        "errors": 0,
        "changes": []
    }

    # Scan for contract schemas
    schema_files = list(self.contracts_dir.glob("**/*.schema.json"))
    stats["total_schemas"] = len(schema_files)

    for schema_path in sorted(schema_files):
        try:
            with open(schema_path) as f:
                schema = json.load(f)

            # Extract metadata from the JSON Schema document.
            schema_id = schema.get("$id", "")
            version = schema.get("version", "1.0.0")
            title = schema.get("title", "")
            description = schema.get("description", "")
            metadata = schema.get("x-artifact-metadata", {})

            # Build artifact entry; path is repo-root relative.
            rel_path = str(schema_path.relative_to(self.repo_root))

            artifact_id = schema_id  # No :v1 suffix - version tracked separately
            artifact = {
                "id": artifact_id,
                "urn": f"contract:{schema_id}",
                "version": version,
                "title": title,
                "description": description,
                "path": rel_path,
                "producer": metadata.get("producer", ""),
                "consumers": metadata.get("consumers", []),
            }

            # Track changes against the previously-registered entry.
            if artifact_id in existing_artifacts:
                stats["updated"] += 1
                changes = self._detect_contract_changes(artifact_id, existing_artifacts[artifact_id], artifact)
                if changes:
                    stats["changes"].append({
                        "artifact": artifact_id,
                        "type": "updated",
                        "fields": changes
                    })
            else:
                stats["new"] += 1
                stats["changes"].append({
                    "artifact": artifact_id,
                    "type": "new",
                    "fields": ["all fields (new artifact)"]
                })

            artifacts.append(artifact)
            stats["processed"] += 1

        except Exception as e:
            # Best-effort: a broken schema is reported and skipped.
            # NOTE(review): skipped schemas mean their old entries are
            # dropped from the rewritten registry — confirm intended.
            print(f" â ď¸ Error processing {schema_path}: {e}")
            stats["errors"] += 1

    # Show preview
    print(f"\nđ PREVIEW:")
    print(f" ⢠{stats['updated']} artifacts will be updated")
    print(f" ⢠{stats['new']} new artifacts will be added")
    if stats["errors"] > 0:
        print(f" â ď¸ {stats['errors']} errors encountered")

    # Print detailed change report
    self._print_contract_change_report(stats["changes"])

    # If preview only, return early without touching the file.
    if preview_only:
        print("\nâ ď¸ Preview mode - no changes applied")
        return stats

    # Ask for user approval (interactive; requires literal "yes").
    print("\nâ Do you want to apply these changes to the contract registry?")
    print(" Type 'yes' to confirm, or anything else to cancel:")
    response = input(" > ").strip().lower()

    if response != "yes":
        print("\nâ Update cancelled by user")
        stats["cancelled"] = True
        return stats

    # Write registry (registry_path was already resolved above; the
    # original redundantly recomputed it here).
    output = {"artifacts": artifacts}

    with open(registry_path, "w") as f:
        yaml.dump(output, f, default_flow_style=False, sort_keys=False, allow_unicode=True)

    print(f"\nâ Contract registry updated successfully!")
    print(f" ⢠Updated {stats['updated']} artifacts")
    print(f" ⢠Added {stats['new']} new artifacts")
    print(f" đ Registry: {registry_path}")

    return stats
|
|
735
|
+
|
|
736
|
+
def update_telemetry_registry(self, preview_only: bool = False) -> Dict[str, Any]:
    """
    Update telemetry/_signals.yaml from telemetry signal files.

    Scans ``telemetry/**/*.json`` and ``telemetry/**/*.yaml`` (skipping
    the registry file itself), builds one signal entry per file, prints
    a preview and detailed change report, and — unless ``preview_only``
    — asks for interactive confirmation before rewriting the registry.

    Args:
        preview_only: If True, only show what would change without applying

    Returns:
        Statistics about the update: "total_files", "processed",
        "updated", "new", "errors", "changes", plus "cancelled" when
        the user declines.
    """
    print("\nđ Analyzing telemetry registry from signal files...")

    # Load existing registry so we can diff against it.
    registry_path = self.telemetry_dir / "_signals.yaml"
    existing_signals = {}
    if registry_path.exists():
        with open(registry_path) as f:
            registry_data = yaml.safe_load(f)
        existing_signals = {s.get("id"): s for s in registry_data.get("signals", [])}

    signals = []
    stats = {
        "total_files": 0,
        "processed": 0,
        "updated": 0,
        "new": 0,
        "errors": 0,
        "changes": []
    }

    # Scan for telemetry signal files (JSON or YAML); exclude any file
    # whose name contains "_signals" (the registry itself).
    json_files = list(self.telemetry_dir.glob("**/*.json"))
    yaml_files = list(self.telemetry_dir.glob("**/*.yaml"))
    signal_files = [f for f in (json_files + yaml_files) if "_signals" not in f.name]

    stats["total_files"] = len(signal_files)

    for signal_path in sorted(signal_files):
        try:
            # Load signal file by extension (JSON vs YAML).
            if signal_path.suffix == ".json":
                with open(signal_path) as f:
                    signal_data = json.load(f)
            else:
                with open(signal_path) as f:
                    signal_data = yaml.safe_load(f)

            # Extract metadata; "$id" (JSON-Schema style) takes
            # precedence over a plain "id" key.
            signal_id = signal_data.get("$id", signal_data.get("id", ""))
            signal_type = signal_data.get("type", "event")
            description = signal_data.get("description", "")

            # Build signal entry; path is repo-root relative.
            rel_path = str(signal_path.relative_to(self.repo_root))

            signal = {
                "id": signal_id,
                "type": signal_type,
                "description": description,
                "path": rel_path,
            }

            # Track changes against the previously-registered entry.
            if signal_id in existing_signals:
                stats["updated"] += 1
                changes = self._detect_telemetry_changes(signal_id, existing_signals[signal_id], signal)
                if changes:
                    stats["changes"].append({
                        "signal": signal_id,
                        "type": "updated",
                        "fields": changes
                    })
            else:
                stats["new"] += 1
                stats["changes"].append({
                    "signal": signal_id,
                    "type": "new",
                    "fields": ["all fields (new signal)"]
                })

            signals.append(signal)
            stats["processed"] += 1

        except Exception as e:
            # Best-effort: a broken signal file is reported and skipped.
            print(f" â ď¸ Error processing {signal_path}: {e}")
            stats["errors"] += 1

    # Show preview
    print(f"\nđ PREVIEW:")
    print(f" ⢠{stats['updated']} signals will be updated")
    print(f" ⢠{stats['new']} new signals will be added")
    if stats["errors"] > 0:
        print(f" â ď¸ {stats['errors']} errors encountered")

    # Print detailed change report
    self._print_telemetry_change_report(stats["changes"])

    # If preview only, return early without touching the file.
    if preview_only:
        print("\nâ ď¸ Preview mode - no changes applied")
        return stats

    # Ask for user approval (interactive; requires literal "yes").
    print("\nâ Do you want to apply these changes to the telemetry registry?")
    print(" Type 'yes' to confirm, or anything else to cancel:")
    response = input(" > ").strip().lower()

    if response != "yes":
        print("\nâ Update cancelled by user")
        stats["cancelled"] = True
        return stats

    # Write registry
    # NOTE(review): registry_path is recomputed here although it was
    # already resolved above — redundant but harmless.
    registry_path = self.telemetry_dir / "_signals.yaml"
    output = {"signals": signals}

    with open(registry_path, "w") as f:
        yaml.dump(output, f, default_flow_style=False, sort_keys=False, allow_unicode=True)

    print(f"\nâ Telemetry registry updated successfully!")
    print(f" ⢠Updated {stats['updated']} signals")
    print(f" ⢠Added {stats['new']} new signals")
    print(f" đ Registry: {registry_path}")

    return stats
|
|
861
|
+
|
|
862
|
+
# Alias methods for unified API
|
|
863
|
+
def build_planner(self, preview_only: bool = False) -> Dict[str, Any]:
    """Build the planner registry.

    Thin alias for the unified API; delegates to
    ``update_wagon_registry`` and returns its stats.
    """
    return self.update_wagon_registry(preview_only=preview_only)
|
|
866
|
+
|
|
867
|
+
def build_contracts(self, preview_only: bool = False) -> Dict[str, Any]:
    """Build the contracts registry.

    Thin alias for the unified API; delegates to
    ``update_contract_registry`` and returns its stats.
    """
    return self.update_contract_registry(preview_only=preview_only)
|
|
870
|
+
|
|
871
|
+
def build_telemetry(self, preview_only: bool = False) -> Dict[str, Any]:
    """Build the telemetry registry.

    Thin alias for the unified API; delegates to
    ``update_telemetry_registry`` and returns its stats.
    """
    return self.update_telemetry_registry(preview_only=preview_only)
|
|
874
|
+
|
|
875
|
+
def build_tester(self, preview_only: bool = False) -> Dict[str, Any]:
    """
    Build tester registry from test files.

    Scans ``atdd/tester/**`` for ``*_test.py`` and ``test_*.py`` files,
    extracts ``URN:`` / ``Spec:`` / ``Acceptance:`` markers from their
    text, prints a preview, and — unless ``preview_only`` — asks for
    interactive confirmation before writing ``_tests.yaml``.

    Args:
        preview_only: If True, only show what would change without applying

    Returns:
        Statistics: "total_files", "processed" (URN entries, not files),
        "updated", "new", "errors", "changes", plus "cancelled" when the
        user declines.
    """
    print("\nđ Analyzing tester registry from test files...")

    # Load existing registry so we can classify entries as new/updated.
    registry_path = self.tester_dir / "_tests.yaml"
    existing_tests = {}
    if registry_path.exists():
        with open(registry_path) as f:
            registry_data = yaml.safe_load(f)
        existing_tests = {t.get("urn"): t for t in registry_data.get("tests", [])}

    tests = []
    stats = {
        "total_files": 0,
        "processed": 0,
        "updated": 0,
        "new": 0,
        "errors": 0,
        "changes": []
    }

    # Scan for test files
    if self.tester_dir.exists():
        # Look for both test_*.py and *_test.py patterns. Collect into
        # a set first: a name like "test_foo_test.py" matches BOTH
        # globs and would otherwise be processed (and registered) twice.
        test_files = set(self.tester_dir.glob("**/*_test.py"))
        test_files.update(self.tester_dir.glob("**/test_*.py"))
        test_files = [f for f in test_files if not f.name.startswith("_")]
        stats["total_files"] = len(test_files)

        for test_file in sorted(test_files):
            try:
                with open(test_file) as f:
                    content = f.read()

                # Extract URN markers from docstrings or comments.
                urns = re.findall(r'URN:\s*(\S+)', content)
                spec_urns = re.findall(r'Spec:\s*(\S+)', content)
                acceptance_urns = re.findall(r'Acceptance:\s*(\S+)', content)

                # Extract wagon from the first path component under
                # tester_dir; files directly in tester_dir get "unknown".
                rel_path = test_file.relative_to(self.tester_dir)
                wagon = rel_path.parts[0] if len(rel_path.parts) > 1 else "unknown"

                # Build one registry entry per URN found in the file.
                for urn in urns:
                    test_entry = {
                        "urn": urn,
                        "file": str(test_file.relative_to(self.repo_root)),
                        "wagon": wagon
                    }

                    # Only the first Spec/Acceptance marker is linked.
                    if spec_urns:
                        test_entry["spec_urn"] = spec_urns[0]
                    if acceptance_urns:
                        test_entry["acceptance_urn"] = acceptance_urns[0]

                    # Track changes against the previous registry.
                    if urn in existing_tests:
                        stats["updated"] += 1
                    else:
                        stats["new"] += 1
                        stats["changes"].append({
                            "test": urn,
                            "type": "new",
                            "fields": ["all fields (new test)"]
                        })

                    tests.append(test_entry)
                    # NOTE: "processed" counts URN entries, not files.
                    stats["processed"] += 1

            except Exception as e:
                print(f" â ď¸ Error processing {test_file}: {e}")
                stats["errors"] += 1

    # Show preview
    print(f"\nđ PREVIEW:")
    print(f" ⢠{stats['updated']} tests will be updated")
    print(f" ⢠{stats['new']} new tests will be added")
    if stats["errors"] > 0:
        print(f" â ď¸ {stats['errors']} errors encountered")

    if preview_only:
        print("\nâ ď¸ Preview mode - no changes applied")
        return stats

    # Ask for confirmation (interactive; requires literal "yes").
    print("\nâ Do you want to apply these changes to the tester registry?")
    print(" Type 'yes' to confirm, or anything else to cancel:")
    response = input(" > ").strip().lower()

    if response != "yes":
        print("\nâ Update cancelled by user")
        stats["cancelled"] = True
        return stats

    # Write registry
    output = {"tests": tests}
    registry_path.parent.mkdir(parents=True, exist_ok=True)
    with open(registry_path, "w") as f:
        yaml.dump(output, f, default_flow_style=False, sort_keys=False, allow_unicode=True)

    print(f"\nâ Tester registry updated successfully!")
    print(f" ⢠Updated {stats['updated']} tests")
    print(f" ⢠Added {stats['new']} new tests")
    print(f" đ Registry: {registry_path}")

    return stats
|
|
986
|
+
|
|
987
|
+
def build_coder(self, preview_only: bool = False) -> Dict[str, Any]:
    """
    Build coder implementation registry from Python files.

    Scans ``python/**/*.py`` (excluding dunder/underscore files, caches,
    and test files/directories), derives a wagon and architectural layer
    from each file's path, and registers an ``impl:`` URN per file.
    Prints a preview and — unless ``preview_only`` — asks for
    interactive confirmation before writing ``_implementations.yaml``.

    Args:
        preview_only: If True, only show what would change without applying

    Returns:
        Statistics: "total_files", "processed", "updated", "new",
        "errors", "changes", plus "cancelled" when the user declines.
    """
    print("\nđ Analyzing coder registry from Python files...")

    # Load existing registry so we can classify entries as new/updated.
    registry_path = self.python_dir / "_implementations.yaml"
    existing_impls = {}
    if registry_path.exists():
        with open(registry_path) as f:
            registry_data = yaml.safe_load(f)
        existing_impls = {i.get("urn"): i for i in registry_data.get("implementations", [])}

    implementations = []
    stats = {
        "total_files": 0,
        "processed": 0,
        "updated": 0,
        "new": 0,
        "errors": 0,
        "changes": []
    }

    # Scan for Python implementation files
    if self.python_dir.exists():
        py_files = list(self.python_dir.glob("**/*.py"))
        # Filter out __init__, __pycache__, and test files/directories.
        # Use Path.parts for directory checks: substring tests against
        # str(f) with "/" are not portable to Windows path separators.
        py_files = [
            f for f in py_files
            if not f.name.startswith("_")
            and "__pycache__" not in f.parts
            and "tests" not in f.parts[:-1]
            and "test" not in f.parts[:-1]
            and not f.name.endswith("_test.py")
            and not f.name.startswith("test_")
        ]
        stats["total_files"] = len(py_files)

        for py_file in sorted(py_files):
            try:
                with open(py_file) as f:
                    content = f.read()

                # Extract traceability markers from docstrings/comments.
                spec_urns = re.findall(r'Spec:\s*(\S+)', content)
                test_urns = re.findall(r'Test:\s*(\S+)', content)

                # Extract wagon from the first path component under
                # python_dir. Files directly in python_dir have no wagon
                # directory, so they get "unknown" (matches build_tester;
                # the original `len(parts) > 0` was always true and made
                # the file name itself the wagon).
                rel_path = py_file.relative_to(self.python_dir)
                parts = rel_path.parts

                wagon = parts[0] if len(parts) > 1 else "unknown"
                layer = "unknown"

                # Try to detect the architectural layer from the path.
                if "domain" in str(py_file):
                    layer = "domain"
                elif "application" in str(py_file):
                    layer = "application"
                elif "integration" in str(py_file) or "infrastructure" in str(py_file):
                    layer = "integration"
                elif "presentation" in str(py_file):
                    layer = "presentation"

                # Generate URN from wagon, layer, and module name.
                component = py_file.stem
                impl_urn = f"impl:{wagon}:{layer}:{component}:python"

                # Build implementation entry
                impl_entry = {
                    "urn": impl_urn,
                    "file": str(py_file.relative_to(self.repo_root)),
                    "wagon": wagon,
                    "layer": layer,
                    "component_type": "entity",  # Default
                    "language": "python"
                }

                if spec_urns:
                    impl_entry["spec_urn"] = spec_urns[0]
                if test_urns:
                    impl_entry["test_urn"] = test_urns[0]

                # Track changes against the previous registry.
                if impl_urn in existing_impls:
                    stats["updated"] += 1
                else:
                    stats["new"] += 1
                    stats["changes"].append({
                        "impl": impl_urn,
                        "type": "new",
                        "fields": ["all fields (new implementation)"]
                    })

                implementations.append(impl_entry)
                stats["processed"] += 1

            except Exception as e:
                print(f" â ď¸ Error processing {py_file}: {e}")
                stats["errors"] += 1

    # Show preview
    print(f"\nđ PREVIEW:")
    print(f" ⢠{stats['updated']} implementations will be updated")
    print(f" ⢠{stats['new']} new implementations will be added")
    if stats["errors"] > 0:
        print(f" â ď¸ {stats['errors']} errors encountered")

    if preview_only:
        print("\nâ ď¸ Preview mode - no changes applied")
        return stats

    # Ask for confirmation (interactive; requires literal "yes").
    print("\nâ Do you want to apply these changes to the coder registry?")
    print(" Type 'yes' to confirm, or anything else to cancel:")
    response = input(" > ").strip().lower()

    if response != "yes":
        print("\nâ Update cancelled by user")
        stats["cancelled"] = True
        return stats

    # Write registry
    output = {"implementations": implementations}
    registry_path.parent.mkdir(parents=True, exist_ok=True)
    with open(registry_path, "w") as f:
        yaml.dump(output, f, default_flow_style=False, sort_keys=False, allow_unicode=True)

    print(f"\nâ Coder registry updated successfully!")
    print(f" ⢠Updated {stats['updated']} implementations")
    print(f" ⢠Added {stats['new']} new implementations")
    print(f" đ Registry: {registry_path}")

    return stats
|
|
1123
|
+
|
|
1124
|
+
def build_supabase(self, preview_only: bool = False) -> Dict[str, Any]:
    """
    Build supabase functions registry.

    Scans ``supabase/functions/*/`` directories that contain an
    ``index.ts`` entry point, registers one entry per function, prints
    a preview, and — unless ``preview_only`` — asks for interactive
    confirmation before writing ``_functions.yaml``.

    Args:
        preview_only: If True, only show what would change without applying

    Returns:
        Statistics: "total_dirs", "processed", "updated", "new",
        "errors", "changes", plus "cancelled" when the user declines.
    """
    print("\nđ Analyzing supabase registry from function files...")

    # Load existing registry so we can classify entries as new/updated.
    registry_path = self.supabase_dir / "_functions.yaml"
    existing_funcs = {}
    if registry_path.exists():
        with open(registry_path) as f:
            registry_data = yaml.safe_load(f)
        existing_funcs = {f.get("id"): f for f in registry_data.get("functions", [])}

    functions = []
    stats = {
        "total_dirs": 0,
        "processed": 0,
        "updated": 0,
        "new": 0,
        "errors": 0,
        "changes": []
    }

    # Scan for function directories (one directory per edge function).
    functions_dir = self.supabase_dir / "functions"
    if functions_dir.exists():
        func_dirs = [d for d in functions_dir.iterdir() if d.is_dir()]
        stats["total_dirs"] = len(func_dirs)

        for func_dir in sorted(func_dirs):
            try:
                func_id = func_dir.name
                index_file = func_dir / "index.ts"

                # Directories without an index.ts are not functions.
                if not index_file.exists():
                    continue

                rel_path = str(index_file.relative_to(self.repo_root))

                func_entry = {
                    "id": func_id,
                    "path": rel_path,
                    "description": f"Supabase function: {func_id}"
                }

                # Track changes against the previous registry.
                if func_id in existing_funcs:
                    stats["updated"] += 1
                else:
                    stats["new"] += 1
                    stats["changes"].append({
                        "function": func_id,
                        "type": "new",
                        "fields": ["all fields (new function)"]
                    })

                functions.append(func_entry)
                stats["processed"] += 1

            except Exception as e:
                print(f" â ď¸ Error processing {func_dir}: {e}")
                stats["errors"] += 1

    # Show preview
    print(f"\nđ PREVIEW:")
    print(f" ⢠{stats['updated']} functions will be updated")
    print(f" ⢠{stats['new']} new functions will be added")

    if preview_only:
        print("\nâ ď¸ Preview mode - no changes applied")
        return stats

    # Ask for confirmation (interactive; requires literal "yes").
    print("\nâ Do you want to apply these changes to the supabase registry?")
    print(" Type 'yes' to confirm, or anything else to cancel:")
    response = input(" > ").strip().lower()

    if response != "yes":
        print("\nâ Update cancelled by user")
        stats["cancelled"] = True
        return stats

    # Write registry
    output = {"functions": functions}
    registry_path.parent.mkdir(parents=True, exist_ok=True)
    with open(registry_path, "w") as f:
        yaml.dump(output, f, default_flow_style=False, sort_keys=False, allow_unicode=True)

    print(f"\nâ Supabase registry updated successfully!")
    print(f" ⢠Updated {stats['updated']} functions")
    print(f" ⢠Added {stats['new']} new functions")
    print(f" đ Registry: {registry_path}")

    return stats
|
|
1220
|
+
|
|
1221
|
+
def build_python_manifest(self, preview_only: bool = False) -> Dict[str, Any]:
    """
    Build python/_manifest.yaml from Python modules.

    Discovers top-level Python module directories under ``python/`` and
    generates a package-configuration manifest, asking for interactive
    confirmation before writing unless ``preview_only`` is set.

    Args:
        preview_only: If True, only show what would change without applying.

    Returns:
        Statistics about the manifest generation: "total_modules",
        "manifest_created", plus "cancelled" when the user declines.
    """
    print("\nđ Building Python manifest from discovered modules...")

    # Check if python directory exists; nothing to do otherwise.
    if not self.python_dir.exists():
        print(" â ď¸ No python/ directory found")
        return {"total_modules": 0, "manifest_created": False}

    # Discover Python modules: top-level non-hidden, non-underscore
    # directories that contain an __init__.py or any .py file.
    modules = []
    for item in self.python_dir.iterdir():
        if item.is_dir() and not item.name.startswith('.') and not item.name.startswith('_'):
            if (item / '__init__.py').exists() or any(item.rglob('*.py')):
                modules.append(item.name)

    modules = sorted(modules)

    stats = {
        "total_modules": len(modules),
        "manifest_created": False
    }

    # Generate manifest data structure.
    # NOTE(review): project name/description/dependencies are hard-coded
    # ("jel-extractor") rather than derived from the repository —
    # confirm this is intentional for this tool's target project.
    manifest_data = {
        "project": {
            "name": "jel-extractor",
            "version": "0.1.0",
            "description": "Job Element Extractor - Knowledge graph construction from narrative materials",
            "requires_python": ">=3.10",
            "authors": [
                {"name": "JEL Extractor Team"}
            ]
        },
        "dependencies": [
            "pydantic>=2.0",
            "pyyaml>=6.0",
            "openai>=1.0",
            "anthropic>=0.18.0"
        ],
        "dev_dependencies": [
            "pytest>=7.0",
            "pytest-cov>=4.0",
            "black>=23.0",
            "ruff>=0.1.0",
            "mypy>=1.0"
        ],
        "modules": modules,
        "test": {
            "testpaths": ["python"],
            "python_files": "test_*.py",
            "python_classes": "Test*",
            "python_functions": "test_*"
        },
        "formatting": {
            "line_length": 100,
            "target_version": "py310"
        }
    }

    # Show preview
    print(f"\nđ PREVIEW:")
    print(f" ⢠{stats['total_modules']} Python modules discovered")
    print(f" ⢠Modules: {', '.join(modules)}")

    if preview_only:
        print("\nâ ď¸ Preview mode - no changes applied")
        return stats

    # Ask for confirmation (interactive; requires literal "yes").
    print("\nâ Do you want to generate python/_manifest.yaml?")
    print(" Type 'yes' to confirm, or anything else to cancel:")
    response = input(" > ").strip().lower()

    if response != "yes":
        print("\nâ Manifest generation cancelled by user")
        stats["cancelled"] = True
        return stats

    # Write manifest
    manifest_path = self.python_dir / "_manifest.yaml"
    with open(manifest_path, "w") as f:
        yaml.dump(manifest_data, f, default_flow_style=False, sort_keys=False, allow_unicode=True)

    stats["manifest_created"] = True

    print(f"\nâ Python manifest generated successfully!")
    print(f" ⢠Discovered {stats['total_modules']} modules")
    print(f" ⢠Modules: {', '.join(modules)}")
    print(f" đ Manifest: {manifest_path}")

    return stats
|
|
1319
|
+
|
|
1320
|
+
def build_all(self) -> Dict[str, Any]:
    """Run every registry builder in sequence and collect their results.

    Returns:
        Mapping of registry name ("plan", "contracts", "telemetry",
        "tester", "coder", "supabase") to the corresponding builder result.
    """
    banner = "=" * 60
    print(banner)
    print("Unified Registry Builder - Synchronizing from source files")
    print(banner)

    # Builders run in this fixed order; the dict preserves it.
    builders = [
        ("plan", self.build_planner),
        ("contracts", self.build_contracts),
        ("telemetry", self.build_telemetry),
        ("tester", self.build_tester),
        ("coder", self.build_coder),
        ("supabase", self.build_supabase),
    ]
    results = {name: build() for name, build in builders}

    print("\n" + banner)
    print("Registry Build Complete")
    print(banner)

    return results
|
|
1340
|
+
|
|
1341
|
+
def enrich_wagon_registry(self, preview_only: bool = False) -> Dict[str, Any]:
    """
    Enrich _wagons.yaml with features and simplified WMBT totals.

    SPEC-COACH-UTILS-0290: Add features section and simplify WMBT counts

    Adds a features: list sourced from each wagon manifest and replaces the
    detailed wmbt entries with a {total: N, coverage: 0} structure.  The
    legacy root-level "total" field is removed from every entry.

    Args:
        preview_only: If True, only show what would change without applying

    Returns:
        Statistics about the enrichment
    """
    print("\n🔄 Enriching wagon registry with features and WMBT totals...")

    # Load existing registry; nothing to enrich if it has not been built.
    registry_path = self.plan_dir / "_wagons.yaml"
    if not registry_path.exists():
        print("  ⚠️ No _wagons.yaml found")
        return {"total": 0, "enriched": 0}

    with open(registry_path) as f:
        registry_data = yaml.safe_load(f)

    wagons = registry_data.get("wagons", [])
    enriched_wagons = []
    stats = {
        "total": len(wagons),
        "enriched": 0,
        "with_features": 0,
        "wmbt_simplified": 0,
    }

    for wagon_entry in wagons:
        slug = wagon_entry.get("wagon", "")

        # Locate the wagon manifest: prefer the explicit path recorded in
        # the registry entry, otherwise derive it from the wagon slug.
        if "manifest" in wagon_entry:
            manifest_path = self.repo_root / wagon_entry["manifest"]
        else:
            # Fallback: construct from slug
            dirname = slug.replace("-", "_")
            manifest_path = self.plan_dir / dirname / f"_{dirname}.yaml"

        enriched_entry = wagon_entry.copy()

        if manifest_path and manifest_path.exists():
            try:
                with open(manifest_path) as f:
                    manifest = yaml.safe_load(f)

                # Extract features from manifest (DOMAIN)
                features = self._extract_features_from_manifest(manifest, slug)
                enriched_entry["features"] = features
                if features:
                    stats["with_features"] += 1

                # Extract WMBT total from manifest (DOMAIN)
                wmbt_total = self._extract_wmbt_total_from_manifest(manifest)

                # Replace a detailed wmbt section with total + coverage.
                # NOTE(review): entries without an existing truthy "wmbt"
                # key receive no wmbt structure on this path, unlike the
                # error/no-manifest branches below — behavior preserved
                # as-is; confirm whether that asymmetry is intentional.
                if enriched_entry.get("wmbt"):
                    stats["wmbt_simplified"] += 1
                    enriched_entry["wmbt"] = {
                        "total": wmbt_total,
                        "coverage": 0,  # To be computed later
                    }

                stats["enriched"] += 1

            except Exception as e:
                print(f"  ⚠️ Error processing {slug}: {e}")
                # Keep the original entry fields but fall back to an empty
                # enrichment on error.
                enriched_entry["features"] = []
                enriched_entry["wmbt"] = {"total": 0, "coverage": 0}
        else:
            # No manifest: empty features; carry any legacy total forward.
            enriched_entry["features"] = []
            enriched_entry["wmbt"] = {"total": wagon_entry.get("total", 0), "coverage": 0}

        # Remove the legacy root-level total field.  This was previously
        # duplicated verbatim in all three branches above; hoisted to a
        # single pop() with identical behavior.
        enriched_entry.pop("total", None)

        enriched_wagons.append(enriched_entry)

    # Show preview
    print(f"\n📊 PREVIEW:")
    print(f"  • {stats['enriched']} wagons will be enriched")
    print(f"  • {stats['with_features']} wagons have features")
    print(f"  • {stats['wmbt_simplified']} WMBT sections simplified")

    if preview_only:
        print("\n⚠️ Preview mode - no changes applied")
        return stats

    # Write enriched registry
    output = {"wagons": enriched_wagons}
    with open(registry_path, "w") as f:
        yaml.dump(output, f, default_flow_style=False, sort_keys=False, allow_unicode=True)

    print(f"\n✅ Wagon registry enriched successfully!")
    print(f"  • Enriched {stats['enriched']} wagons")
    print(f"  • Added features to {stats['with_features']} wagons")
    print(f"  • Simplified {stats['wmbt_simplified']} WMBT sections")
    print(f"  📄 Registry: {registry_path}")

    return stats
|
|
1457
|
+
|
|
1458
|
+
def update_feature_implementation_paths(self, preview_only: bool = False) -> Dict[str, Any]:
    """
    Update feature manifest files with implementation paths from filesystem.

    SPEC-COACH-UTILS-0291: Add implementation paths array to feature manifests

    Scans the filesystem for implementation directories and writes a paths
    array into each feature manifest found at
    plan/{wagon_snake}/features/{feature_snake}.yaml.

    Args:
        preview_only: If True, only show what would change without applying

    Returns:
        Statistics about the update
    """
    print("\n🔄 Updating feature manifests with implementation paths...")

    # Every feature manifest lives at plan/<wagon>/features/<feature>.yaml
    manifest_files = list(self.plan_dir.glob("*/features/*.yaml"))

    stats = {
        "total_features": len(manifest_files),
        "updated": 0,
        "with_paths": 0,
        "errors": 0,
    }

    for manifest_file in sorted(manifest_files):
        try:
            with open(manifest_file) as handle:
                data = yaml.safe_load(handle)

            # Skip empty manifests and manifests without a URN.
            if not data:
                continue
            urn = data.get("urn", "")
            if not urn:
                continue

            # Parse URN to get wagon and feature slugs (DOMAIN)
            wagon_slug, feature_slug = self._parse_feature_urn(urn)
            if not wagon_slug or not feature_slug:
                continue

            # Convert to snake_case for filesystem (DOMAIN)
            snake_wagon = self._kebab_to_snake(wagon_slug)
            snake_feature = self._kebab_to_snake(feature_slug)

            # Find existing implementation paths (INTEGRATION)
            paths = self._find_implementation_paths(snake_wagon, snake_feature)
            data["paths"] = paths
            if paths:
                stats["with_paths"] += 1

            if not preview_only:
                # Write updated feature manifest back in place.
                with open(manifest_file, "w") as handle:
                    yaml.dump(data, handle, default_flow_style=False, sort_keys=False, allow_unicode=True)

            stats["updated"] += 1

        except Exception as exc:
            print(f"  ⚠️ Error processing {manifest_file}: {exc}")
            stats["errors"] += 1

    # Show summary
    print(f"\n📊 SUMMARY:")
    print(f"  • {stats['updated']} features processed")
    print(f"  • {stats['with_paths']} features have implementations")
    print(f"  • {stats['total_features'] - stats['with_paths']} features have no implementations yet")
    if stats["errors"] > 0:
        print(f"  ⚠️ {stats['errors']} errors encountered")

    if preview_only:
        print("\n⚠️ Preview mode - no changes applied")
    else:
        print(f"\n✅ Feature manifests updated successfully!")

    return stats
|
|
1541
|
+
|
|
1542
|
+
def update_all(self) -> Dict[str, Any]:
    """Rebuild every registry.

    Backward-compatible alias retained for callers of the old updater API;
    simply delegates to build_all().
    """
    return self.build_all()
|
|
1545
|
+
|
|
1546
|
+
|
|
1547
|
+
# Backward compatibility alias: older call sites import RegistryUpdater;
# it is the very same class object as RegistryBuilder, so isinstance
# checks and behavior are identical.
RegistryUpdater = RegistryBuilder
|
|
1549
|
+
|
|
1550
|
+
|
|
1551
|
+
def main(repo_root: Path):
    """Build all registries for the repository rooted at *repo_root*.

    Returns:
        The per-registry results produced by RegistryBuilder.build_all().
    """
    return RegistryBuilder(repo_root).build_all()
|
|
1555
|
+
|
|
1556
|
+
|
|
1557
|
+
if __name__ == "__main__":
    from pathlib import Path

    # Climb four directory levels up from this file to reach the repo root
    # (assumed layout: <root>/atdd/coach/commands/... — TODO confirm).
    main(Path(__file__).resolve().parents[4])
|