delimit-cli 2.3.2 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113):
  1. package/.dockerignore +7 -0
  2. package/.github/workflows/ci.yml +22 -0
  3. package/CHANGELOG.md +33 -0
  4. package/CODE_OF_CONDUCT.md +48 -0
  5. package/CONTRIBUTING.md +67 -0
  6. package/Dockerfile +9 -0
  7. package/LICENSE +21 -0
  8. package/README.md +51 -130
  9. package/SECURITY.md +42 -0
  10. package/adapters/codex-forge.js +107 -0
  11. package/adapters/codex-jamsons.js +142 -0
  12. package/adapters/codex-security.js +94 -0
  13. package/adapters/gemini-forge.js +120 -0
  14. package/adapters/gemini-jamsons.js +152 -0
  15. package/bin/delimit-cli.js +52 -2
  16. package/bin/delimit-setup.js +258 -0
  17. package/gateway/ai/backends/__init__.py +0 -0
  18. package/gateway/ai/backends/async_utils.py +21 -0
  19. package/gateway/ai/backends/deploy_bridge.py +150 -0
  20. package/gateway/ai/backends/gateway_core.py +261 -0
  21. package/gateway/ai/backends/generate_bridge.py +38 -0
  22. package/gateway/ai/backends/governance_bridge.py +196 -0
  23. package/gateway/ai/backends/intel_bridge.py +59 -0
  24. package/gateway/ai/backends/memory_bridge.py +93 -0
  25. package/gateway/ai/backends/ops_bridge.py +137 -0
  26. package/gateway/ai/backends/os_bridge.py +82 -0
  27. package/gateway/ai/backends/repo_bridge.py +117 -0
  28. package/gateway/ai/backends/ui_bridge.py +118 -0
  29. package/gateway/ai/backends/vault_bridge.py +129 -0
  30. package/gateway/ai/server.py +1182 -0
  31. package/gateway/core/__init__.py +3 -0
  32. package/gateway/core/__pycache__/__init__.cpython-310.pyc +0 -0
  33. package/gateway/core/__pycache__/auto_baseline.cpython-310.pyc +0 -0
  34. package/gateway/core/__pycache__/ci_formatter.cpython-310.pyc +0 -0
  35. package/gateway/core/__pycache__/contract_ledger.cpython-310.pyc +0 -0
  36. package/gateway/core/__pycache__/dependency_graph.cpython-310.pyc +0 -0
  37. package/gateway/core/__pycache__/dependency_manifest.cpython-310.pyc +0 -0
  38. package/gateway/core/__pycache__/diff_engine_v2.cpython-310.pyc +0 -0
  39. package/gateway/core/__pycache__/event_backbone.cpython-310.pyc +0 -0
  40. package/gateway/core/__pycache__/event_schema.cpython-310.pyc +0 -0
  41. package/gateway/core/__pycache__/explainer.cpython-310.pyc +0 -0
  42. package/gateway/core/__pycache__/gateway.cpython-310.pyc +0 -0
  43. package/gateway/core/__pycache__/gateway_v2.cpython-310.pyc +0 -0
  44. package/gateway/core/__pycache__/gateway_v3.cpython-310.pyc +0 -0
  45. package/gateway/core/__pycache__/impact_analyzer.cpython-310.pyc +0 -0
  46. package/gateway/core/__pycache__/policy_engine.cpython-310.pyc +0 -0
  47. package/gateway/core/__pycache__/registry.cpython-310.pyc +0 -0
  48. package/gateway/core/__pycache__/registry_v2.cpython-310.pyc +0 -0
  49. package/gateway/core/__pycache__/registry_v3.cpython-310.pyc +0 -0
  50. package/gateway/core/__pycache__/semver_classifier.cpython-310.pyc +0 -0
  51. package/gateway/core/__pycache__/spec_detector.cpython-310.pyc +0 -0
  52. package/gateway/core/__pycache__/surface_bridge.cpython-310.pyc +0 -0
  53. package/gateway/core/auto_baseline.py +304 -0
  54. package/gateway/core/ci_formatter.py +283 -0
  55. package/gateway/core/complexity_analyzer.py +386 -0
  56. package/gateway/core/contract_ledger.py +345 -0
  57. package/gateway/core/dependency_graph.py +218 -0
  58. package/gateway/core/dependency_manifest.py +223 -0
  59. package/gateway/core/diff_engine_v2.py +477 -0
  60. package/gateway/core/diff_engine_v2.py.bak +426 -0
  61. package/gateway/core/event_backbone.py +268 -0
  62. package/gateway/core/event_schema.py +258 -0
  63. package/gateway/core/explainer.py +438 -0
  64. package/gateway/core/gateway.py +128 -0
  65. package/gateway/core/gateway_v2.py +154 -0
  66. package/gateway/core/gateway_v3.py +224 -0
  67. package/gateway/core/impact_analyzer.py +163 -0
  68. package/gateway/core/policies/default.yml +13 -0
  69. package/gateway/core/policies/relaxed.yml +48 -0
  70. package/gateway/core/policies/strict.yml +55 -0
  71. package/gateway/core/policy_engine.py +464 -0
  72. package/gateway/core/registry.py +52 -0
  73. package/gateway/core/registry_v2.py +132 -0
  74. package/gateway/core/registry_v3.py +134 -0
  75. package/gateway/core/semver_classifier.py +152 -0
  76. package/gateway/core/spec_detector.py +130 -0
  77. package/gateway/core/surface_bridge.py +307 -0
  78. package/gateway/core/zero_spec/__init__.py +4 -0
  79. package/gateway/core/zero_spec/__pycache__/__init__.cpython-310.pyc +0 -0
  80. package/gateway/core/zero_spec/__pycache__/detector.cpython-310.pyc +0 -0
  81. package/gateway/core/zero_spec/__pycache__/express_extractor.cpython-310.pyc +0 -0
  82. package/gateway/core/zero_spec/__pycache__/fastapi_extractor.cpython-310.pyc +0 -0
  83. package/gateway/core/zero_spec/__pycache__/nestjs_extractor.cpython-310.pyc +0 -0
  84. package/gateway/core/zero_spec/detector.py +353 -0
  85. package/gateway/core/zero_spec/express_extractor.py +483 -0
  86. package/gateway/core/zero_spec/fastapi_extractor.py +254 -0
  87. package/gateway/core/zero_spec/nestjs_extractor.py +369 -0
  88. package/gateway/tasks/__init__.py +1 -0
  89. package/gateway/tasks/__pycache__/__init__.cpython-310.pyc +0 -0
  90. package/gateway/tasks/__pycache__/check_policy.cpython-310.pyc +0 -0
  91. package/gateway/tasks/__pycache__/check_policy_v2.cpython-310.pyc +0 -0
  92. package/gateway/tasks/__pycache__/check_policy_v3.cpython-310.pyc +0 -0
  93. package/gateway/tasks/__pycache__/explain_diff.cpython-310.pyc +0 -0
  94. package/gateway/tasks/__pycache__/explain_diff_v2.cpython-310.pyc +0 -0
  95. package/gateway/tasks/__pycache__/validate_api.cpython-310.pyc +0 -0
  96. package/gateway/tasks/__pycache__/validate_api_v2.cpython-310.pyc +0 -0
  97. package/gateway/tasks/__pycache__/validate_api_v3.cpython-310.pyc +0 -0
  98. package/gateway/tasks/check_policy.py +177 -0
  99. package/gateway/tasks/check_policy_v2.py +255 -0
  100. package/gateway/tasks/check_policy_v3.py +255 -0
  101. package/gateway/tasks/explain_diff.py +305 -0
  102. package/gateway/tasks/explain_diff_v2.py +267 -0
  103. package/gateway/tasks/validate_api.py +131 -0
  104. package/gateway/tasks/validate_api_v2.py +208 -0
  105. package/gateway/tasks/validate_api_v3.py +163 -0
  106. package/package.json +3 -3
  107. package/adapters/codex-skill.js +0 -87
  108. package/adapters/cursor-extension.js +0 -190
  109. package/adapters/gemini-action.js +0 -93
  110. package/adapters/openai-function.js +0 -112
  111. package/adapters/xai-plugin.js +0 -151
  112. package/test-decision-engine.js +0 -181
  113. package/test-hook.js +0 -27
@@ -0,0 +1,345 @@
1
+ """
2
+ Delimit Contract Ledger
3
+ Reads, validates, and queries the append-only JSONL event ledger.
4
+ Optional SQLite index for fast lookups (never required for CI).
5
+
6
+ Per Jamsons Doctrine:
7
+ - Deterministic outputs
8
+ - Append-only artifacts
9
+ - SQLite index is optional, not required for CI
10
+ - No telemetry collection
11
+ """
12
+
13
+ import json
14
+ import logging
15
+ import sqlite3
16
+ from pathlib import Path
17
+ from typing import Any, Dict, List, Optional
18
+
19
+ from .event_schema import compute_event_hash, validate_event
20
+
21
+ logger = logging.getLogger("delimit.contract_ledger")
22
+
23
+ GENESIS_HASH = "GENESIS"
24
+
25
+
26
class ChainValidationError(Exception):
    """Raised when the ledger hash chain is broken.

    Attributes:
        index: Zero-based position of the offending event in the ledger.
        expected: The hash value that should have been present.
        actual: The hash value that was actually found.
    """

    def __init__(self, index: int, expected: str, actual: str):
        self.index = index
        self.expected = expected
        self.actual = actual
        message = (
            f"Hash chain broken at event {index}: "
            f"expected previous_hash={expected!r}, got={actual!r}"
        )
        super().__init__(message)
37
+
38
+
39
class ContractLedger:
    """Read, validate, and query the append-only JSONL event ledger.

    The JSONL file is the source of truth; every method degrades
    gracefully (empty result, None, or zero) when the file is missing,
    empty, or unreadable.
    """

    def __init__(self, ledger_path: str):
        """Initialize with path to the JSONL ledger file.

        Args:
            ledger_path: Path to events.jsonl file.
        """
        self._ledger_path = Path(ledger_path)

    @property
    def ledger_path(self) -> Path:
        """Path of the backing JSONL ledger file."""
        return self._ledger_path

    def exists(self) -> bool:
        """Return True if the ledger file exists on disk."""
        return self._ledger_path.exists()

    def read_events(self) -> List[Dict[str, Any]]:
        """Read every event from the JSONL ledger.

        Blank lines are ignored; malformed JSON lines are logged and
        skipped so one bad record never poisons the whole read.

        Returns:
            Events in chronological (file) order. Empty list if the
            ledger does not exist or is empty.
        """
        if not self._ledger_path.exists():
            return []

        parsed: List[Dict[str, Any]] = []
        try:
            with open(self._ledger_path, "r", encoding="utf-8") as handle:
                for line_num, raw in enumerate(handle, 1):
                    text = raw.strip()
                    if not text:
                        continue
                    try:
                        parsed.append(json.loads(text))
                    except json.JSONDecodeError as e:
                        logger.warning(
                            "Skipping malformed JSON at line %d: %s", line_num, e
                        )
        except OSError as e:
            logger.warning("Failed to read ledger %s: %s", self._ledger_path, e)

        return parsed

    def get_latest_event(self) -> Optional[Dict[str, Any]]:
        """Return the most recent event, or None if the ledger is empty."""
        if not self._ledger_path.exists():
            return None

        tail = ""
        try:
            with open(self._ledger_path, "r", encoding="utf-8") as handle:
                # Remember the last non-blank line seen; a full scan keeps
                # memory flat regardless of ledger size.
                for raw in handle:
                    text = raw.strip()
                    if text:
                        tail = text
        except OSError as e:
            logger.warning("Failed to read ledger: %s", e)
            return None

        if not tail:
            return None
        try:
            return json.loads(tail)
        except json.JSONDecodeError:
            return None

    def get_event_count(self) -> int:
        """Return the number of non-blank lines in the ledger.

        NOTE: this is a cheap line count — malformed lines are included
        here even though read_events() skips them.
        """
        if not self._ledger_path.exists():
            return 0

        total = 0
        try:
            with open(self._ledger_path, "r", encoding="utf-8") as handle:
                for raw in handle:
                    if raw.strip():
                        total += 1
        except OSError:
            pass
        return total

    def validate_chain(self) -> bool:
        """Validate the entire hash chain integrity.

        Checks that:
        1. First event has previous_hash == GENESIS
        2. Each subsequent event's previous_hash matches the prior event_hash
        3. Each event's event_hash is correctly computed

        Returns:
            True if the chain is valid.

        Raises:
            ChainValidationError: If the chain is broken.
        """
        events = self.read_events()
        if not events:
            return True

        for index, event in enumerate(events):
            stored_prev = event.get("previous_hash", "")
            # The first event must anchor to GENESIS; each later event
            # must point at the event_hash of the event just before it.
            expected_prev = (
                GENESIS_HASH if index == 0
                else events[index - 1].get("event_hash", "")
            )
            if stored_prev != expected_prev:
                raise ChainValidationError(
                    index=index,
                    expected=expected_prev,
                    actual=stored_prev,
                )

            # Recompute the hash from canonical fields and compare with
            # what was stored at append time.
            recomputed = compute_event_hash(
                previous_hash=event.get("previous_hash", ""),
                spec_hash=event.get("spec_hash", ""),
                diff_summary=event.get("diff_summary", []),
                commit=event.get("commit", ""),
                timestamp=event.get("timestamp", ""),
            )
            stored = event.get("event_hash", "")
            if stored != recomputed:
                raise ChainValidationError(
                    index=index,
                    expected=f"computed={recomputed}",
                    actual=f"stored={stored}",
                )

        return True

    def get_api_timeline(self, api_name: str) -> List[Dict[str, Any]]:
        """Return all events for one API in chronological order.

        Args:
            api_name: The API name to filter by.

        Returns:
            List of events matching the api_name.
        """
        return [e for e in self.read_events() if e.get("api_name") == api_name]

    def get_events_by_type(self, event_type: str) -> List[Dict[str, Any]]:
        """Return all events of a specific type."""
        return [e for e in self.read_events() if e.get("event_type") == event_type]

    def get_events_by_repository(self, repository: str) -> List[Dict[str, Any]]:
        """Return all events for a specific repository."""
        return [e for e in self.read_events() if e.get("repository") == repository]
208
+
209
+
210
class SQLiteIndex:
    """Optional SQLite index for fast ledger queries.

    This is a convenience layer that is NEVER required for CI execution.
    The JSONL ledger is the source of truth; the index can always be
    rebuilt from it via rebuild_from_ledger().
    """

    SCHEMA_SQL = """
    CREATE TABLE IF NOT EXISTS events (
        rowid INTEGER PRIMARY KEY AUTOINCREMENT,
        event_hash TEXT UNIQUE NOT NULL,
        event_type TEXT NOT NULL,
        api_name TEXT NOT NULL,
        repository TEXT NOT NULL,
        version TEXT NOT NULL,
        timestamp TEXT NOT NULL,
        commit_sha TEXT NOT NULL,
        actor TEXT NOT NULL,
        spec_hash TEXT NOT NULL,
        previous_hash TEXT NOT NULL,
        policy_result TEXT NOT NULL,
        complexity_score INTEGER NOT NULL,
        complexity_class TEXT NOT NULL,
        raw_json TEXT NOT NULL
    );
    CREATE INDEX IF NOT EXISTS idx_api_name ON events(api_name);
    CREATE INDEX IF NOT EXISTS idx_repository ON events(repository);
    CREATE INDEX IF NOT EXISTS idx_event_type ON events(event_type);
    CREATE INDEX IF NOT EXISTS idx_timestamp ON events(timestamp);
    """

    # Event dict keys copied into dedicated columns, in column order.
    # The "commit" key feeds the commit_sha column.
    _EVENT_KEYS = (
        "event_hash", "event_type", "api_name", "repository", "version",
        "timestamp", "commit", "actor", "spec_hash", "previous_hash",
        "policy_result", "complexity_score", "complexity_class",
    )

    def __init__(self, db_path: str):
        """Initialize SQLite index.

        Args:
            db_path: Path to the SQLite database file; parent
                directories are created on demand.
        """
        self._db_path = Path(db_path)
        self._db_path.parent.mkdir(parents=True, exist_ok=True)
        self._conn: Optional[sqlite3.Connection] = None

    def _connect(self) -> sqlite3.Connection:
        # Lazily open one shared connection and apply the schema once.
        if self._conn is not None:
            return self._conn
        conn = sqlite3.connect(str(self._db_path))
        conn.row_factory = sqlite3.Row
        conn.executescript(self.SCHEMA_SQL)
        self._conn = conn
        return conn

    def close(self):
        """Close the database connection (safe to call repeatedly)."""
        if self._conn is not None:
            self._conn.close()
            self._conn = None

    def index_event(self, event: Dict[str, Any]) -> bool:
        """Add a single event to the SQLite index.

        Duplicate event_hash values are silently ignored (INSERT OR
        IGNORE), so re-indexing the same event is idempotent.

        Returns True on success, False on failure (missing keys or a
        SQLite error).
        """
        try:
            row = tuple(event[key] for key in self._EVENT_KEYS)
            row += (json.dumps(event, sort_keys=True),)
            conn = self._connect()
            conn.execute(
                """INSERT OR IGNORE INTO events
                   (event_hash, event_type, api_name, repository, version,
                    timestamp, commit_sha, actor, spec_hash, previous_hash,
                    policy_result, complexity_score, complexity_class, raw_json)
                   VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                row,
            )
            conn.commit()
            return True
        except (sqlite3.Error, KeyError) as e:
            logger.warning("Failed to index event: %s", e)
            return False

    def rebuild_from_ledger(self, ledger: "ContractLedger") -> int:
        """Rebuild the entire SQLite index from the JSONL ledger.

        Returns the number of events indexed.
        """
        return sum(1 for event in ledger.read_events() if self.index_event(event))

    def _query(self, sql: str, params: tuple) -> List[Dict[str, Any]]:
        # Shared raw_json query path; returns [] on any SQLite/JSON error.
        try:
            cursor = self._connect().execute(sql, params)
            return [json.loads(row["raw_json"]) for row in cursor]
        except (sqlite3.Error, json.JSONDecodeError) as e:
            logger.warning("SQLite query failed: %s", e)
            return []

    def query_by_api(self, api_name: str) -> List[Dict[str, Any]]:
        """Query events by API name using the index."""
        return self._query(
            "SELECT raw_json FROM events WHERE api_name = ? ORDER BY timestamp",
            (api_name,),
        )

    def query_by_repository(self, repository: str) -> List[Dict[str, Any]]:
        """Query events by repository using the index."""
        return self._query(
            "SELECT raw_json FROM events WHERE repository = ? ORDER BY timestamp",
            (repository,),
        )

    def get_event_count(self) -> int:
        """Return total number of indexed events (0 on database error)."""
        try:
            cursor = self._connect().execute("SELECT COUNT(*) as cnt FROM events")
            return cursor.fetchone()["cnt"]
        except sqlite3.Error:
            return 0
@@ -0,0 +1,218 @@
1
+ """
2
+ Delimit Dependency Graph
3
+ Constructs a deterministic service dependency graph from manifests.
4
+
5
+ The graph maps each API/service to its downstream consumers,
6
+ enabling impact analysis when an API contract changes.
7
+
8
+ Per Jamsons Doctrine:
9
+ - Deterministic outputs (sorted, reproducible)
10
+ - No telemetry
11
+ - Graceful degradation when manifests are missing
12
+ """
13
+
14
+ import logging
15
+ from pathlib import Path
16
+ from typing import Any, Dict, List, Optional, Set, Union
17
+
18
+ from .dependency_manifest import discover_manifests, parse_manifest
19
+
20
+ logger = logging.getLogger("delimit.dependency_graph")
21
+
22
+
23
class DependencyGraph:
    """Service dependency graph for API impact analysis.

    Built from dependency manifests, the graph tracks:
    - consumers: api_name -> services that consume the API
    - producers: service_name -> APIs the service produces
    - consumes:  service_name -> APIs the service consumes

    Every query method returns sorted lists so output is deterministic.
    """

    def __init__(self):
        # api_name -> set of consuming service names
        self._consumers: Dict[str, Set[str]] = {}
        # service_name -> set of APIs it produces
        self._producers: Dict[str, Set[str]] = {}
        # service_name -> set of APIs it consumes
        self._consumes: Dict[str, Set[str]] = {}
        # every service name seen in a valid manifest
        self._services: Set[str] = set()

    def add_manifest(self, manifest: Dict[str, Any]) -> None:
        """Merge a single parsed manifest into the graph.

        A manifest with a missing or non-string service name is skipped
        with a warning; non-string or empty API names inside the
        consumes/produces lists are silently ignored.

        Args:
            manifest: Parsed and normalized manifest dictionary.
        """
        service = manifest.get("service")
        if not isinstance(service, str) or not service:
            logger.warning("Skipping manifest with invalid service: %r", service)
            return

        self._services.add(service)

        consumes = manifest.get("consumes", [])
        if not isinstance(consumes, list):
            logger.warning("Manifest %s has non-list consumes, skipping", service)
            consumes = []
        for api in consumes:
            if isinstance(api, str) and api:
                self._consumes.setdefault(service, set()).add(api)
                self._consumers.setdefault(api, set()).add(service)

        produces = manifest.get("produces", [])
        if not isinstance(produces, list):
            logger.warning("Manifest %s has non-list produces, skipping", service)
            produces = []
        for api in produces:
            if isinstance(api, str) and api:
                self._producers.setdefault(service, set()).add(api)

    def load_from_manifests(self, manifests: List[Dict[str, Any]]) -> int:
        """Merge several manifests into the graph.

        Args:
            manifests: List of parsed manifest dictionaries.

        Returns:
            Number of manifests processed (valid or not).
        """
        for entry in manifests:
            self.add_manifest(entry)
        return len(manifests)

    def load_from_directory(self, root_dir: Union[str, Path]) -> int:
        """Discover and load all manifests from a directory tree.

        Args:
            root_dir: Root directory to search for
                .delimit/dependencies.yaml files.

        Returns:
            Number of manifests loaded.
        """
        return self.load_from_manifests(discover_manifests(root_dir))

    def get_consumers(self, api_name: str) -> List[str]:
        """Sorted names of the services consuming api_name ([] if none)."""
        return sorted(self._consumers.get(api_name, ()))

    def get_all_consumers(self) -> Dict[str, List[str]]:
        """Full consumer map api -> sorted consumers, with sorted keys."""
        return {
            api: sorted(self._consumers[api])
            for api in sorted(self._consumers)
        }

    def get_produced_apis(self, service: str) -> List[str]:
        """Sorted APIs produced by the given service."""
        return sorted(self._producers.get(service, ()))

    def get_consumed_apis(self, service: str) -> List[str]:
        """Sorted APIs consumed by the given service."""
        return sorted(self._consumes.get(service, ()))

    def get_all_services(self) -> List[str]:
        """Sorted list of every known service name."""
        return sorted(self._services)

    def get_all_apis(self) -> List[str]:
        """Sorted list of every API that is consumed or produced."""
        known: Set[str] = set(self._consumers)
        for produced in self._producers.values():
            known |= produced
        return sorted(known)

    def get_service_count(self) -> int:
        """Total number of known services."""
        return len(self._services)

    def get_api_count(self) -> int:
        """Total number of known APIs."""
        return len(self.get_all_apis())

    def get_edge_count(self) -> int:
        """Total number of (api -> consumer) edges in the graph."""
        return sum(map(len, self._consumers.values()))

    def is_empty(self) -> bool:
        """True when no manifests have been loaded yet."""
        return not self._services

    def to_dict(self) -> Dict[str, Any]:
        """Export the graph as a deterministic (fully sorted) dictionary."""
        return {
            "services": self.get_all_services(),
            "apis": self.get_all_apis(),
            "consumers": self.get_all_consumers(),
            "service_count": self.get_service_count(),
            "api_count": self.get_api_count(),
            "edge_count": self.get_edge_count(),
        }
191
+
192
+
193
def build_graph(manifests: List[Dict[str, Any]]) -> DependencyGraph:
    """Build a DependencyGraph populated from already-parsed manifests.

    Args:
        manifests: List of parsed manifest dictionaries.

    Returns:
        Populated DependencyGraph instance.
    """
    result = DependencyGraph()
    result.load_from_manifests(manifests)
    return result
205
+
206
+
207
def build_graph_from_directory(root_dir: Union[str, Path]) -> DependencyGraph:
    """Build a DependencyGraph by discovering manifests under a directory.

    Args:
        root_dir: Root directory to search.

    Returns:
        Populated DependencyGraph instance. Empty graph if no manifests
        were found.
    """
    result = DependencyGraph()
    result.load_from_directory(root_dir)
    return result