atdd 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- atdd/__init__.py +6 -0
- atdd/__main__.py +4 -0
- atdd/cli.py +404 -0
- atdd/coach/__init__.py +0 -0
- atdd/coach/commands/__init__.py +0 -0
- atdd/coach/commands/add_persistence_metadata.py +215 -0
- atdd/coach/commands/analyze_migrations.py +188 -0
- atdd/coach/commands/consumers.py +720 -0
- atdd/coach/commands/infer_governance_status.py +149 -0
- atdd/coach/commands/initializer.py +177 -0
- atdd/coach/commands/interface.py +1078 -0
- atdd/coach/commands/inventory.py +565 -0
- atdd/coach/commands/migration.py +240 -0
- atdd/coach/commands/registry.py +1560 -0
- atdd/coach/commands/session.py +430 -0
- atdd/coach/commands/sync.py +405 -0
- atdd/coach/commands/test_interface.py +399 -0
- atdd/coach/commands/test_runner.py +141 -0
- atdd/coach/commands/tests/__init__.py +1 -0
- atdd/coach/commands/tests/test_telemetry_array_validation.py +235 -0
- atdd/coach/commands/traceability.py +4264 -0
- atdd/coach/conventions/session.convention.yaml +754 -0
- atdd/coach/overlays/__init__.py +2 -0
- atdd/coach/overlays/claude.md +2 -0
- atdd/coach/schemas/config.schema.json +34 -0
- atdd/coach/schemas/manifest.schema.json +101 -0
- atdd/coach/templates/ATDD.md +282 -0
- atdd/coach/templates/SESSION-TEMPLATE.md +327 -0
- atdd/coach/utils/__init__.py +0 -0
- atdd/coach/utils/graph/__init__.py +0 -0
- atdd/coach/utils/graph/urn.py +875 -0
- atdd/coach/validators/__init__.py +0 -0
- atdd/coach/validators/shared_fixtures.py +365 -0
- atdd/coach/validators/test_enrich_wagon_registry.py +167 -0
- atdd/coach/validators/test_registry.py +575 -0
- atdd/coach/validators/test_session_validation.py +1183 -0
- atdd/coach/validators/test_traceability.py +448 -0
- atdd/coach/validators/test_update_feature_paths.py +108 -0
- atdd/coach/validators/test_validate_contract_consumers.py +297 -0
- atdd/coder/__init__.py +1 -0
- atdd/coder/conventions/adapter.recipe.yaml +88 -0
- atdd/coder/conventions/backend.convention.yaml +460 -0
- atdd/coder/conventions/boundaries.convention.yaml +666 -0
- atdd/coder/conventions/commons.convention.yaml +460 -0
- atdd/coder/conventions/complexity.recipe.yaml +109 -0
- atdd/coder/conventions/component-naming.convention.yaml +178 -0
- atdd/coder/conventions/design.convention.yaml +327 -0
- atdd/coder/conventions/design.recipe.yaml +273 -0
- atdd/coder/conventions/dto.convention.yaml +660 -0
- atdd/coder/conventions/frontend.convention.yaml +542 -0
- atdd/coder/conventions/green.convention.yaml +1012 -0
- atdd/coder/conventions/presentation.convention.yaml +587 -0
- atdd/coder/conventions/refactor.convention.yaml +535 -0
- atdd/coder/conventions/technology.convention.yaml +206 -0
- atdd/coder/conventions/tests/__init__.py +0 -0
- atdd/coder/conventions/tests/test_adapter_recipe.py +302 -0
- atdd/coder/conventions/tests/test_complexity_recipe.py +289 -0
- atdd/coder/conventions/tests/test_component_taxonomy.py +278 -0
- atdd/coder/conventions/tests/test_component_urn_naming.py +165 -0
- atdd/coder/conventions/tests/test_thinness_recipe.py +286 -0
- atdd/coder/conventions/thinness.recipe.yaml +82 -0
- atdd/coder/conventions/train.convention.yaml +325 -0
- atdd/coder/conventions/verification.protocol.yaml +53 -0
- atdd/coder/schemas/design_system.schema.json +361 -0
- atdd/coder/validators/__init__.py +0 -0
- atdd/coder/validators/test_commons_structure.py +485 -0
- atdd/coder/validators/test_complexity.py +416 -0
- atdd/coder/validators/test_cross_language_consistency.py +431 -0
- atdd/coder/validators/test_design_system_compliance.py +413 -0
- atdd/coder/validators/test_dto_testing_patterns.py +268 -0
- atdd/coder/validators/test_green_cross_stack_layers.py +168 -0
- atdd/coder/validators/test_green_layer_dependencies.py +148 -0
- atdd/coder/validators/test_green_python_layer_structure.py +103 -0
- atdd/coder/validators/test_green_supabase_layer_structure.py +103 -0
- atdd/coder/validators/test_import_boundaries.py +396 -0
- atdd/coder/validators/test_init_file_urns.py +593 -0
- atdd/coder/validators/test_preact_layer_boundaries.py +221 -0
- atdd/coder/validators/test_presentation_convention.py +260 -0
- atdd/coder/validators/test_python_architecture.py +674 -0
- atdd/coder/validators/test_quality_metrics.py +420 -0
- atdd/coder/validators/test_station_master_pattern.py +244 -0
- atdd/coder/validators/test_train_infrastructure.py +454 -0
- atdd/coder/validators/test_train_urns.py +293 -0
- atdd/coder/validators/test_typescript_architecture.py +616 -0
- atdd/coder/validators/test_usecase_structure.py +421 -0
- atdd/coder/validators/test_wagon_boundaries.py +586 -0
- atdd/conftest.py +126 -0
- atdd/planner/__init__.py +1 -0
- atdd/planner/conventions/acceptance.convention.yaml +538 -0
- atdd/planner/conventions/appendix.convention.yaml +187 -0
- atdd/planner/conventions/artifact-naming.convention.yaml +852 -0
- atdd/planner/conventions/component.convention.yaml +670 -0
- atdd/planner/conventions/criteria.convention.yaml +141 -0
- atdd/planner/conventions/feature.convention.yaml +371 -0
- atdd/planner/conventions/interface.convention.yaml +382 -0
- atdd/planner/conventions/steps.convention.yaml +141 -0
- atdd/planner/conventions/train.convention.yaml +552 -0
- atdd/planner/conventions/wagon.convention.yaml +275 -0
- atdd/planner/conventions/wmbt.convention.yaml +258 -0
- atdd/planner/schemas/acceptance.schema.json +336 -0
- atdd/planner/schemas/appendix.schema.json +78 -0
- atdd/planner/schemas/component.schema.json +114 -0
- atdd/planner/schemas/feature.schema.json +197 -0
- atdd/planner/schemas/train.schema.json +192 -0
- atdd/planner/schemas/wagon.schema.json +281 -0
- atdd/planner/schemas/wmbt.schema.json +59 -0
- atdd/planner/validators/__init__.py +0 -0
- atdd/planner/validators/conftest.py +5 -0
- atdd/planner/validators/test_draft_wagon_registry.py +374 -0
- atdd/planner/validators/test_plan_cross_refs.py +240 -0
- atdd/planner/validators/test_plan_uniqueness.py +224 -0
- atdd/planner/validators/test_plan_urn_resolution.py +268 -0
- atdd/planner/validators/test_plan_wagons.py +174 -0
- atdd/planner/validators/test_train_validation.py +514 -0
- atdd/planner/validators/test_wagon_urn_chain.py +648 -0
- atdd/planner/validators/test_wmbt_consistency.py +327 -0
- atdd/planner/validators/test_wmbt_vocabulary.py +632 -0
- atdd/tester/__init__.py +1 -0
- atdd/tester/conventions/artifact.convention.yaml +257 -0
- atdd/tester/conventions/contract.convention.yaml +1009 -0
- atdd/tester/conventions/filename.convention.yaml +555 -0
- atdd/tester/conventions/migration.convention.yaml +509 -0
- atdd/tester/conventions/red.convention.yaml +797 -0
- atdd/tester/conventions/routing.convention.yaml +51 -0
- atdd/tester/conventions/telemetry.convention.yaml +458 -0
- atdd/tester/schemas/a11y.tmpl.json +17 -0
- atdd/tester/schemas/artifact.schema.json +189 -0
- atdd/tester/schemas/contract.schema.json +591 -0
- atdd/tester/schemas/contract.tmpl.json +95 -0
- atdd/tester/schemas/db.tmpl.json +20 -0
- atdd/tester/schemas/e2e.tmpl.json +17 -0
- atdd/tester/schemas/edge_function.tmpl.json +17 -0
- atdd/tester/schemas/event.tmpl.json +17 -0
- atdd/tester/schemas/http.tmpl.json +19 -0
- atdd/tester/schemas/job.tmpl.json +18 -0
- atdd/tester/schemas/load.tmpl.json +21 -0
- atdd/tester/schemas/metric.tmpl.json +19 -0
- atdd/tester/schemas/pack.schema.json +139 -0
- atdd/tester/schemas/realtime.tmpl.json +20 -0
- atdd/tester/schemas/rls.tmpl.json +18 -0
- atdd/tester/schemas/script.tmpl.json +16 -0
- atdd/tester/schemas/sec.tmpl.json +18 -0
- atdd/tester/schemas/storage.tmpl.json +18 -0
- atdd/tester/schemas/telemetry.schema.json +128 -0
- atdd/tester/schemas/telemetry_tracking_manifest.schema.json +143 -0
- atdd/tester/schemas/test_filename.schema.json +194 -0
- atdd/tester/schemas/test_intent.schema.json +179 -0
- atdd/tester/schemas/unit.tmpl.json +18 -0
- atdd/tester/schemas/visual.tmpl.json +18 -0
- atdd/tester/schemas/ws.tmpl.json +17 -0
- atdd/tester/utils/__init__.py +0 -0
- atdd/tester/utils/filename.py +300 -0
- atdd/tester/validators/__init__.py +0 -0
- atdd/tester/validators/cleanup_duplicate_headers.py +116 -0
- atdd/tester/validators/cleanup_duplicate_headers_v2.py +135 -0
- atdd/tester/validators/conftest.py +5 -0
- atdd/tester/validators/coverage_gap_report.py +321 -0
- atdd/tester/validators/fix_dual_ac_references.py +179 -0
- atdd/tester/validators/remove_duplicate_lines.py +93 -0
- atdd/tester/validators/test_acceptance_urn_filename_mapping.py +359 -0
- atdd/tester/validators/test_acceptance_urn_separator.py +166 -0
- atdd/tester/validators/test_artifact_naming_category.py +307 -0
- atdd/tester/validators/test_contract_schema_compliance.py +706 -0
- atdd/tester/validators/test_contracts_structure.py +200 -0
- atdd/tester/validators/test_coverage_adequacy.py +797 -0
- atdd/tester/validators/test_dual_ac_reference.py +225 -0
- atdd/tester/validators/test_fixture_validity.py +372 -0
- atdd/tester/validators/test_isolation.py +487 -0
- atdd/tester/validators/test_migration_coverage.py +204 -0
- atdd/tester/validators/test_migration_criteria.py +276 -0
- atdd/tester/validators/test_migration_generation.py +116 -0
- atdd/tester/validators/test_python_test_naming.py +410 -0
- atdd/tester/validators/test_red_layer_validation.py +95 -0
- atdd/tester/validators/test_red_python_layer_structure.py +87 -0
- atdd/tester/validators/test_red_supabase_layer_structure.py +90 -0
- atdd/tester/validators/test_telemetry_structure.py +634 -0
- atdd/tester/validators/test_typescript_test_naming.py +301 -0
- atdd/tester/validators/test_typescript_test_structure.py +84 -0
- atdd-0.2.1.dist-info/METADATA +221 -0
- atdd-0.2.1.dist-info/RECORD +184 -0
- atdd-0.2.1.dist-info/WHEEL +5 -0
- atdd-0.2.1.dist-info/entry_points.txt +2 -0
- atdd-0.2.1.dist-info/licenses/LICENSE +674 -0
- atdd-0.2.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,565 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Generate comprehensive repository inventory.
|
|
4
|
+
|
|
5
|
+
Catalogs all artifacts across the ATDD lifecycle:
|
|
6
|
+
- Platform: .claude/ infrastructure (conventions, schemas, commands, agents, utils, actions)
|
|
7
|
+
- Planning: Trains, wagons, features, WMBT acceptance (C/L/E/P patterns)
|
|
8
|
+
- Testing: Contracts, telemetry, test files (meta + feature tests)
|
|
9
|
+
- Coding: Implementation files (Python, Dart, TypeScript)
|
|
10
|
+
- Tracking: Facts/logs, ATDD documentation
|
|
11
|
+
|
|
12
|
+
Usage:
|
|
13
|
+
python atdd/inventory.py > atdd/INVENTORY.yaml
|
|
14
|
+
pytest atdd/ --inventory
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
import yaml
|
|
18
|
+
import json
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
from datetime import datetime
|
|
21
|
+
from collections import defaultdict
|
|
22
|
+
from typing import Dict, List, Any
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class RepositoryInventory:
|
|
26
|
+
"""Generate comprehensive repository inventory."""
|
|
27
|
+
|
|
28
|
+
def __init__(self, repo_root: Path = None):
    """Initialize the scanner and seed the result document.

    Args:
        repo_root: Root directory to scan.  Defaults to two levels above
            this file when not supplied.
            NOTE(review): ``None`` is an accepted value, so the
            annotation should really be ``Optional[Path]`` — confirm.
    """
    # Falsy values (None) fall back to the computed default root.
    self.repo_root = repo_root or Path(__file__).parent.parent
    # Skeleton of the report; each scan_* pass adds a section under the
    # top-level "inventory" key (see generate()).
    self.inventory = {
        "inventory": {
            "generated_at": datetime.now().isoformat(),  # local-time ISO-8601 stamp
            "repository": str(self.repo_root.name),
        }
    }
|
|
36
|
+
|
|
37
|
+
def scan_platform_infrastructure(self) -> Dict[str, Any]:
    """Count platform infrastructure files housed under .claude/.

    Tallies conventions, schemas, commands, agents, utils, and actions,
    returning per-category counts plus a grand total.  Returns just
    ``{"total": 0}`` when .claude/ does not exist.
    """
    claude_dir = self.repo_root / ".claude"

    if not claude_dir.exists():
        return {"total": 0}

    def _collect(subdir: str, patterns) -> list:
        # Recursive glob of every pattern under one .claude/ subtree.
        found = []
        for pattern in patterns:
            found.extend(claude_dir.glob(f"{subdir}/**/{pattern}"))
        return found

    conventions = _collect("conventions", ("*.yaml", "*.yml"))
    schemas = _collect("schemas", ("*.json",))
    # Commands may be any file type; keep files only.
    commands = [p for p in claude_dir.glob("commands/**/*") if p.is_file()]
    agents = _collect("agents", ("*.yaml", "*.json"))
    # Utils are restricted to YAML/JSON artifacts.
    utils = [
        p for p in claude_dir.glob("utils/**/*")
        if p.is_file() and p.suffix in (".yaml", ".json", ".yml")
    ]
    actions = _collect("actions", ("*.yaml", "*.json"))

    counts = {
        "conventions": len(conventions),
        "schemas": len(schemas),
        "commands": len(commands),
        "agents": len(agents),
        "utils": len(utils),
        "actions": len(actions),
    }
    return {"total": sum(counts.values()), **counts}
|
|
76
|
+
|
|
77
|
+
def scan_trains(self) -> Dict[str, Any]:
    """Scan plan/ for train manifests (aggregations of wagons).

    Reads the nested registry at plan/_trains.yaml, flattens it into a
    single list of trains, and summarizes counts by theme (derived from
    the leading digit of each train_id).

    Returns:
        Dict with the total train count, per-theme counts, the flat list
        of train_ids, and the number of per-train detail files found in
        plan/_trains/.
    """
    plan_dir = self.repo_root / "plan"

    if not plan_dir.exists():
        return {"total": 0, "trains": []}

    trains_file = plan_dir / "_trains.yaml"
    all_trains = []

    if trains_file.exists():
        with open(trains_file) as f:
            # Guard against an empty registry: yaml.safe_load returns
            # None for an empty document, which previously crashed the
            # .get() call below with AttributeError.
            data = yaml.safe_load(f) or {}
        trains_data = data.get("trains", {})

        # Flatten the nested structure:
        #   {"0-commons": {"00-commons-nominal": [train1, ...], ...}, ...}
        # into a flat list of all trains, skipping malformed entries.
        for categories in trains_data.values():
            if isinstance(categories, dict):
                for trains_list in categories.values():
                    if isinstance(trains_list, list):
                        all_trains.extend(trains_list)

    # The leading digit of a train_id selects its theme.
    theme_map = {
        "0": "commons", "1": "mechanic", "2": "scenario", "3": "match",
        "4": "sensory", "5": "player", "6": "league", "7": "audience",
        "8": "monetization", "9": "partnership"
    }

    by_theme = defaultdict(int)
    train_ids = []

    for train in all_trains:
        train_id = train.get("train_id", "unknown")
        train_ids.append(train_id)
        if train_id and train_id[0].isdigit():
            by_theme[theme_map.get(train_id[0], "unknown")] += 1

    # Per-train detail files live in plan/_trains/.
    detail_dir = plan_dir / "_trains"
    train_detail_files = list(detail_dir.glob("*.yaml")) if detail_dir.exists() else []

    return {
        "total": len(all_trains),
        "by_theme": dict(by_theme),
        "train_ids": train_ids,
        "detail_files": len(train_detail_files)
    }
|
|
130
|
+
|
|
131
|
+
def scan_wagons(self) -> Dict[str, Any]:
    """Scan plan/_wagons.yaml for wagon manifests.

    Returns:
        Dict with the total wagon count, counts of active/draft wagons,
        per-theme counts, and the list of manifest paths.
    """
    plan_dir = self.repo_root / "plan"

    if not plan_dir.exists():
        return {"total": 0, "wagons": []}

    wagons_file = plan_dir / "_wagons.yaml"
    wagons_data = []

    if wagons_file.exists():
        with open(wagons_file) as f:
            # Empty YAML documents load as None; guard both the document
            # and the "wagons" value so an empty registry doesn't crash.
            data = yaml.safe_load(f) or {}
        wagons_data = data.get("wagons", []) or []

    by_status = defaultdict(int)
    by_theme = defaultdict(int)

    for wagon in wagons_data:
        by_status[wagon.get("status", "unknown")] += 1
        by_theme[wagon.get("theme", "unknown")] += 1

    return {
        "total": len(wagons_data),
        "active": by_status.get("active", 0),
        "draft": by_status.get("draft", 0),
        "by_theme": dict(by_theme),
        "manifests": [w.get("manifest") for w in wagons_data]
    }
|
|
165
|
+
|
|
166
|
+
def scan_contracts(self) -> Dict[str, Any]:
    """Scan contracts/ for *.schema.json contract schemas.

    Groups schemas by their top-level domain directory and records each
    schema's "$id" ("unknown" when the key is absent, "error" when the
    file cannot be read or parsed).
    """
    contracts_dir = self.repo_root / "contracts"

    if not contracts_dir.exists():
        return {"total": 0, "by_domain": {}}

    schema_files = list(contracts_dir.glob("**/*.schema.json"))
    by_domain = defaultdict(list)

    for schema_file in schema_files:
        rel_path = schema_file.relative_to(contracts_dir)
        # First path component names the domain.
        domain = rel_path.parts[0] if rel_path.parts else "unknown"

        # A broken file is recorded with id "error" rather than aborting
        # the scan.  Narrowed from a bare `except:` (which also caught
        # KeyboardInterrupt/SystemExit) to read/parse failures only;
        # AttributeError covers a valid-JSON document that isn't a dict.
        try:
            with open(schema_file) as f:
                schema = json.load(f)
            schema_id = schema.get("$id", "unknown")
        except (OSError, ValueError, AttributeError):
            schema_id = "error"

        by_domain[domain].append({"path": str(rel_path), "id": schema_id})

    return {
        "total": len(schema_files),
        "by_domain": {
            domain: {
                "count": len(schemas),
                "schemas": [s["id"] for s in schemas]
            }
            for domain, schemas in by_domain.items()
        }
    }
|
|
208
|
+
|
|
209
|
+
def scan_telemetry(self) -> Dict[str, Any]:
    """Tally *.signal.yaml telemetry definitions, grouped by domain.

    The domain is the first path component under telemetry/.
    """
    telemetry_dir = self.repo_root / "telemetry"

    if not telemetry_dir.exists():
        return {"total": 0, "by_domain": {}}

    domain_counts = defaultdict(int)
    total = 0

    for signal_file in telemetry_dir.glob("**/*.signal.yaml"):
        total += 1
        parts = signal_file.relative_to(telemetry_dir).parts
        domain_counts[parts[0] if parts else "unknown"] += 1

    return {"total": total, "by_domain": dict(domain_counts)}
|
|
230
|
+
|
|
231
|
+
def count_test_cases_in_file(self, test_file: Path) -> int:
|
|
232
|
+
"""Count number of test functions/cases in a test file."""
|
|
233
|
+
try:
|
|
234
|
+
with open(test_file, 'r', encoding='utf-8') as f:
|
|
235
|
+
content = f.read()
|
|
236
|
+
# Count test functions (def test_* or async def test_*)
|
|
237
|
+
import re
|
|
238
|
+
pattern = r'^\s*(?:async\s+)?def\s+test_\w+'
|
|
239
|
+
matches = re.findall(pattern, content, re.MULTILINE)
|
|
240
|
+
return len(matches)
|
|
241
|
+
except:
|
|
242
|
+
return 0
|
|
243
|
+
|
|
244
|
+
def scan_tests(self) -> Dict[str, Any]:
    """Scan all test files and count test cases across the repository.

    Covers meta-tests under atdd/ and .claude/, plus feature tests in
    python/, web/, and supabase/.  Only Python test cases (functions)
    are counted; Dart/TypeScript files are tallied but not parsed.

    Fix: the result dict referenced ``dart_tests`` which was never
    assigned, so every call raised NameError.  It is now defined (empty
    until a Dart source root is wired in).
    """
    def _direct_tests(directory: Path) -> List[Path]:
        # test_*.py files directly inside `directory` (non-recursive).
        return list(directory.glob("test_*.py")) if directory.exists() else []

    # Meta-tests in atdd/
    atdd_dir = self.repo_root / "atdd"
    planner_tests: List[Path] = []
    tester_tests: List[Path] = []
    coder_tests: List[Path] = []
    if atdd_dir.exists():
        planner_tests = _direct_tests(atdd_dir / "planner")
        tester_tests = _direct_tests(atdd_dir / "tester")
        coder_tests = _direct_tests(atdd_dir / "coder")

    # Python feature tests: test_*.py anywhere under python/ that sits
    # inside a test/ directory somewhere in its path.
    python_tests: List[Path] = []
    python_root = self.repo_root / "python"
    if python_root.exists():
        for test_file in python_root.rglob("test_*.py"):
            path_str = str(test_file)
            if "/test/" in path_str or "\\test\\" in path_str:
                python_tests.append(test_file)

    # Dart feature tests.  Previously referenced but never collected
    # (NameError); no Dart tree is scanned here yet, so this stays
    # empty until a Dart source root is added.
    dart_tests: List[Path] = []

    # TypeScript feature tests in web/ and supabase/.
    ts_tests: List[Path] = []
    web_root = self.repo_root / "web"
    if web_root.exists():
        ts_tests.extend(web_root.glob("**/*.test.ts"))
        ts_tests.extend(web_root.glob("**/*.test.tsx"))
    supabase_root = self.repo_root / "supabase"
    if supabase_root.exists():
        ts_tests.extend(supabase_root.glob("**/*.test.ts"))

    # Platform/infrastructure tests (in .claude/).
    claude_root = self.repo_root / ".claude"
    platform_tests = list(claude_root.rglob("test_*.py")) if claude_root.exists() else []

    # Count test cases (functions) in the Python test files.
    planner_cases = sum(self.count_test_cases_in_file(f) for f in planner_tests)
    tester_cases = sum(self.count_test_cases_in_file(f) for f in tester_tests)
    coder_cases = sum(self.count_test_cases_in_file(f) for f in coder_tests)
    platform_cases = sum(self.count_test_cases_in_file(f) for f in platform_tests)
    python_cases = sum(self.count_test_cases_in_file(f) for f in python_tests)

    meta_files = len(planner_tests) + len(tester_tests) + len(coder_tests) + len(platform_tests)
    feature_files = len(python_tests) + len(dart_tests) + len(ts_tests)

    meta_cases = planner_cases + tester_cases + coder_cases + platform_cases
    feature_cases = python_cases  # Dart/TS case counting would require parsing those languages

    return {
        "total_files": meta_files + feature_files,
        "total_cases": meta_cases + feature_cases,
        "meta_tests": {
            "files": {
                "planner": len(planner_tests),
                "tester": len(tester_tests),
                "coder": len(coder_tests),
                "platform": len(platform_tests),
                "total": meta_files
            },
            "cases": {
                "planner": planner_cases,
                "tester": tester_cases,
                "coder": coder_cases,
                "platform": platform_cases,
                "total": meta_cases
            }
        },
        "feature_tests": {
            "files": {
                "python": len(python_tests),
                "dart": len(dart_tests),
                "typescript": len(ts_tests),
                "total": feature_files
            },
            "cases": {
                "python": python_cases,
                "dart": "not_counted",
                "typescript": "not_counted",
                "total": feature_cases
            }
        }
    }
|
|
327
|
+
|
|
328
|
+
def scan_features(self) -> Dict[str, Any]:
    """Count feature YAML definitions under plan/, grouped by wagon.

    Features live at plan/<wagon>/.../features/<name>.yaml; the first
    relative path component names the wagon.
    """
    plan_dir = self.repo_root / "plan"

    if not plan_dir.exists():
        return {"total": 0, "by_wagon": {}}

    wagon_counts = defaultdict(int)
    total = 0

    for feature_file in plan_dir.glob("**/features/*.yaml"):
        total += 1
        parts = feature_file.relative_to(plan_dir).parts
        wagon_counts[parts[0] if parts else "unknown"] += 1

    return {"total": total, "by_wagon": dict(wagon_counts)}
|
|
349
|
+
|
|
350
|
+
def scan_wmbt_acceptance(self) -> Dict[str, Any]:
    """Count WMBT (Write Meaningful Before Tests) acceptance files.

    WMBT files are named by category prefix plus a number, e.g.
    C001.yaml — C (Contract), L (Logic), E (Edge), P (Performance).
    Counts are reported per category and per wagon (first path
    component under plan/).
    """
    plan_dir = self.repo_root / "plan"

    if not plan_dir.exists():
        return {"total": 0, "by_category": {}, "by_wagon": {}}

    prefixes = (
        ("contract", "C"),
        ("logic", "L"),
        ("edge", "E"),
        ("performance", "P"),
    )

    by_category = {}
    by_wagon = defaultdict(lambda: defaultdict(int))
    total = 0

    for category, prefix in prefixes:
        # Files like C001.yaml, L012.yaml, anywhere under plan/.
        matches = list(plan_dir.glob(f"**/{prefix}[0-9]*.yaml"))
        by_category[category] = len(matches)
        total += len(matches)

        for wmbt_file in matches:
            parts = wmbt_file.relative_to(plan_dir).parts
            by_wagon[parts[0] if parts else "unknown"][category] += 1

    return {
        "total": total,
        "by_category": by_category,
        "by_wagon": {wagon: dict(cats) for wagon, cats in by_wagon.items()}
    }
|
|
389
|
+
|
|
390
|
+
def scan_acceptance_criteria(self) -> Dict[str, Any]:
    """Count traditional AC-*.yaml acceptance criteria under plan/.

    WMBT-style criteria are handled separately by
    scan_wmbt_acceptance(); this pass only matches the AC-* pattern.
    """
    plan_dir = self.repo_root / "plan"

    if not plan_dir.exists():
        return {"total": 0, "by_wagon": {}}

    ac_files = list(plan_dir.glob("**/AC-*.yaml"))
    wagon_counts = defaultdict(int)

    for ac_file in ac_files:
        parts = ac_file.relative_to(plan_dir).parts
        wagon_counts[parts[0] if parts else "unknown"] += 1

    return {"total": len(ac_files), "by_wagon": dict(wagon_counts)}
|
|
411
|
+
|
|
412
|
+
def scan_facts(self) -> Dict[str, Any]:
    """Catalog files under facts/ (audit logs and state tracking).

    Buckets each file by suffix (logs / yaml / json / other) and lists
    all relative paths, sorted.
    """
    facts_dir = self.repo_root / "facts"

    if not facts_dir.exists():
        return {"total": 0, "files": []}

    # Suffix -> report bucket; anything unrecognized lands in "other".
    bucket_for = {".log": "logs", ".yaml": "yaml", ".yml": "yaml", ".json": "json"}

    by_type = defaultdict(int)
    file_list = []

    for fact_file in facts_dir.glob("**/*"):
        if not fact_file.is_file():
            continue
        file_list.append(str(fact_file.relative_to(facts_dir)))
        by_type[bucket_for.get(fact_file.suffix, "other")] += 1

    return {
        "total": len(file_list),
        "by_type": dict(by_type),
        "files": sorted(file_list)
    }
|
|
442
|
+
|
|
443
|
+
def scan_atdd_docs(self) -> Dict[str, Any]:
    """List documentation files (md/rst/txt) at the top level of atdd/.

    Non-recursive on purpose: only docs directly inside atdd/ are
    cataloged.
    """
    atdd_dir = self.repo_root / "atdd"

    if not atdd_dir.exists():
        return {"total": 0, "docs": []}

    doc_files = [
        f
        for pattern in ("*.md", "*.rst", "*.txt")
        for f in atdd_dir.glob(pattern)
    ]

    return {"total": len(doc_files), "docs": sorted(f.name for f in doc_files)}
|
|
464
|
+
|
|
465
|
+
def scan_implementations(self) -> Dict[str, Any]:
    """Count implementation source files (Python and TypeScript).

    NOTE(review): the Python filter excludes any path containing the
    substring "test" anywhere — this also drops non-test files whose
    path merely mentions "test"; behavior preserved as-is.
    """

    def _is_impl_py(path: Path) -> bool:
        text = str(path)
        return "test" not in text and "__pycache__" not in text

    python_root = self.repo_root / "python"
    python_files = (
        [f for f in python_root.glob("**/*.py") if _is_impl_py(f)]
        if python_root.exists()
        else []
    )

    ts_files = []
    supabase_root = self.repo_root / "supabase"
    if supabase_root.exists():
        ts_files.extend(
            f for f in supabase_root.glob("**/*.ts") if not f.name.endswith(".test.ts")
        )
    web_root = self.repo_root / "web"
    if web_root.exists():
        ts_files.extend(
            f for f in web_root.glob("**/*.ts") if not f.name.endswith(".test.ts")
        )
        ts_files.extend(
            f for f in web_root.glob("**/*.tsx") if not f.name.endswith(".test.tsx")
        )

    return {
        "total": len(python_files) + len(ts_files),
        "python": len(python_files),
        "typescript": len(ts_files)
    }
|
|
498
|
+
|
|
499
|
+
def generate(self) -> Dict[str, Any]:
    """Generate complete inventory.

    Runs every scan_* pass in lifecycle order (platform -> planning ->
    acceptance -> testing -> implementation -> tracking), storing each
    result under self.inventory["inventory"] and printing one progress
    line per pass.

    Returns:
        The fully populated inventory mapping (same object as
        self.inventory).
    """

    print("🔍 Scanning repository...", flush=True)

    # Platform infrastructure
    self.inventory["inventory"]["platform"] = self.scan_platform_infrastructure()
    print(f" ✓ Found {self.inventory['inventory']['platform']['total']} platform infrastructure files")

    # Planning artifacts
    self.inventory["inventory"]["trains"] = self.scan_trains()
    print(f" ✓ Found {self.inventory['inventory']['trains']['total']} trains")

    self.inventory["inventory"]["wagons"] = self.scan_wagons()
    print(f" ✓ Found {self.inventory['inventory']['wagons']['total']} wagons")

    self.inventory["inventory"]["features"] = self.scan_features()
    print(f" ✓ Found {self.inventory['inventory']['features']['total']} features")

    # Acceptance criteria (both traditional and WMBT)
    self.inventory["inventory"]["wmbt_acceptance"] = self.scan_wmbt_acceptance()
    print(f" ✓ Found {self.inventory['inventory']['wmbt_acceptance']['total']} WMBT acceptance files")

    self.inventory["inventory"]["acceptance_criteria"] = self.scan_acceptance_criteria()
    print(f" ✓ Found {self.inventory['inventory']['acceptance_criteria']['total']} traditional acceptance criteria")

    # Testing artifacts
    self.inventory["inventory"]["contracts"] = self.scan_contracts()
    print(f" ✓ Found {self.inventory['inventory']['contracts']['total']} contracts")

    self.inventory["inventory"]["telemetry"] = self.scan_telemetry()
    print(f" ✓ Found {self.inventory['inventory']['telemetry']['total']} telemetry signals")

    self.inventory["inventory"]["tests"] = self.scan_tests()
    test_files = self.inventory['inventory']['tests']['total_files']
    test_cases = self.inventory['inventory']['tests']['total_cases']
    print(f" ✓ Found {test_files} test files with {test_cases} test cases")

    # Implementation artifacts
    self.inventory["inventory"]["implementations"] = self.scan_implementations()
    print(f" ✓ Found {self.inventory['inventory']['implementations']['total']} implementation files")

    # Facts and documentation
    self.inventory["inventory"]["facts"] = self.scan_facts()
    print(f" ✓ Found {self.inventory['inventory']['facts']['total']} facts/logs")

    self.inventory["inventory"]["atdd_docs"] = self.scan_atdd_docs()
    print(f" ✓ Found {self.inventory['inventory']['atdd_docs']['total']} ATDD documentation files")

    return self.inventory
|
|
549
|
+
|
|
550
|
+
|
|
551
|
+
def main():
    """Generate the inventory and emit it as YAML on stdout."""
    data = RepositoryInventory().generate()

    banner = "=" * 60
    print("\n" + banner)
    print("Repository Inventory Generated")
    print(banner + "\n")

    # Emit the full inventory document, preserving insertion order.
    print(yaml.dump(data, default_flow_style=False, sort_keys=False))
|
|
562
|
+
|
|
563
|
+
|
|
564
|
+
if __name__ == "__main__":
|
|
565
|
+
main()
|