delimit-cli 2.3.2 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.dockerignore +7 -0
- package/.github/workflows/ci.yml +22 -0
- package/CHANGELOG.md +33 -0
- package/CODE_OF_CONDUCT.md +48 -0
- package/CONTRIBUTING.md +67 -0
- package/Dockerfile +9 -0
- package/LICENSE +21 -0
- package/README.md +51 -130
- package/SECURITY.md +42 -0
- package/adapters/codex-forge.js +107 -0
- package/adapters/codex-jamsons.js +142 -0
- package/adapters/codex-security.js +94 -0
- package/adapters/gemini-forge.js +120 -0
- package/adapters/gemini-jamsons.js +152 -0
- package/bin/delimit-cli.js +52 -2
- package/bin/delimit-setup.js +258 -0
- package/gateway/ai/backends/__init__.py +0 -0
- package/gateway/ai/backends/async_utils.py +21 -0
- package/gateway/ai/backends/deploy_bridge.py +150 -0
- package/gateway/ai/backends/gateway_core.py +261 -0
- package/gateway/ai/backends/generate_bridge.py +38 -0
- package/gateway/ai/backends/governance_bridge.py +196 -0
- package/gateway/ai/backends/intel_bridge.py +59 -0
- package/gateway/ai/backends/memory_bridge.py +93 -0
- package/gateway/ai/backends/ops_bridge.py +137 -0
- package/gateway/ai/backends/os_bridge.py +82 -0
- package/gateway/ai/backends/repo_bridge.py +117 -0
- package/gateway/ai/backends/ui_bridge.py +118 -0
- package/gateway/ai/backends/vault_bridge.py +129 -0
- package/gateway/ai/server.py +1182 -0
- package/gateway/core/__init__.py +3 -0
- package/gateway/core/__pycache__/__init__.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/auto_baseline.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/ci_formatter.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/contract_ledger.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/dependency_graph.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/dependency_manifest.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/diff_engine_v2.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/event_backbone.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/event_schema.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/explainer.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/gateway.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/gateway_v2.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/gateway_v3.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/impact_analyzer.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/policy_engine.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/registry.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/registry_v2.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/registry_v3.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/semver_classifier.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/spec_detector.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/surface_bridge.cpython-310.pyc +0 -0
- package/gateway/core/auto_baseline.py +304 -0
- package/gateway/core/ci_formatter.py +283 -0
- package/gateway/core/complexity_analyzer.py +386 -0
- package/gateway/core/contract_ledger.py +345 -0
- package/gateway/core/dependency_graph.py +218 -0
- package/gateway/core/dependency_manifest.py +223 -0
- package/gateway/core/diff_engine_v2.py +477 -0
- package/gateway/core/diff_engine_v2.py.bak +426 -0
- package/gateway/core/event_backbone.py +268 -0
- package/gateway/core/event_schema.py +258 -0
- package/gateway/core/explainer.py +438 -0
- package/gateway/core/gateway.py +128 -0
- package/gateway/core/gateway_v2.py +154 -0
- package/gateway/core/gateway_v3.py +224 -0
- package/gateway/core/impact_analyzer.py +163 -0
- package/gateway/core/policies/default.yml +13 -0
- package/gateway/core/policies/relaxed.yml +48 -0
- package/gateway/core/policies/strict.yml +55 -0
- package/gateway/core/policy_engine.py +464 -0
- package/gateway/core/registry.py +52 -0
- package/gateway/core/registry_v2.py +132 -0
- package/gateway/core/registry_v3.py +134 -0
- package/gateway/core/semver_classifier.py +152 -0
- package/gateway/core/spec_detector.py +130 -0
- package/gateway/core/surface_bridge.py +307 -0
- package/gateway/core/zero_spec/__init__.py +4 -0
- package/gateway/core/zero_spec/__pycache__/__init__.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/detector.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/express_extractor.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/fastapi_extractor.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/nestjs_extractor.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/detector.py +353 -0
- package/gateway/core/zero_spec/express_extractor.py +483 -0
- package/gateway/core/zero_spec/fastapi_extractor.py +254 -0
- package/gateway/core/zero_spec/nestjs_extractor.py +369 -0
- package/gateway/tasks/__init__.py +1 -0
- package/gateway/tasks/__pycache__/__init__.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/check_policy.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/check_policy_v2.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/check_policy_v3.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/explain_diff.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/explain_diff_v2.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/validate_api.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/validate_api_v2.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/validate_api_v3.cpython-310.pyc +0 -0
- package/gateway/tasks/check_policy.py +177 -0
- package/gateway/tasks/check_policy_v2.py +255 -0
- package/gateway/tasks/check_policy_v3.py +255 -0
- package/gateway/tasks/explain_diff.py +305 -0
- package/gateway/tasks/explain_diff_v2.py +267 -0
- package/gateway/tasks/validate_api.py +131 -0
- package/gateway/tasks/validate_api_v2.py +208 -0
- package/gateway/tasks/validate_api_v3.py +163 -0
- package/package.json +3 -3
- package/adapters/codex-skill.js +0 -87
- package/adapters/cursor-extension.js +0 -190
- package/adapters/gemini-action.js +0 -93
- package/adapters/openai-function.js +0 -112
- package/adapters/xai-plugin.js +0 -151
- package/test-decision-engine.js +0 -181
- package/test-hook.js +0 -27
|
@@ -0,0 +1,426 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Enhanced OpenAPI diff engine with deep schema comparison.
|
|
3
|
+
Handles nested objects, response schemas, enums, and edge cases.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from typing import Dict, List, Any, Optional, Set, Tuple
|
|
7
|
+
from dataclasses import dataclass
|
|
8
|
+
from enum import Enum
|
|
9
|
+
|
|
10
|
+
class ChangeType(Enum):
    """Categories of OpenAPI contract changes.

    Members are grouped by compatibility impact: the first group breaks
    existing consumers, the second is additive/cosmetic. The string values
    are the stable identifiers emitted in reports and ledgers.
    """

    # --- Breaking: removals, tightened requirements, representation changes ---
    ENDPOINT_REMOVED = "endpoint_removed"        # whole path disappeared
    METHOD_REMOVED = "method_removed"            # HTTP verb dropped from a path
    REQUIRED_PARAM_ADDED = "required_param_added"  # old clients won't send it
    PARAM_REMOVED = "param_removed"
    RESPONSE_REMOVED = "response_removed"
    REQUIRED_FIELD_ADDED = "required_field_added"
    FIELD_REMOVED = "field_removed"
    TYPE_CHANGED = "type_changed"
    FORMAT_CHANGED = "format_changed"            # e.g. int32 -> int64
    ENUM_VALUE_REMOVED = "enum_value_removed"

    # --- Non-breaking: purely additive or documentation-only ---
    ENDPOINT_ADDED = "endpoint_added"
    METHOD_ADDED = "method_added"
    OPTIONAL_PARAM_ADDED = "optional_param_added"
    RESPONSE_ADDED = "response_added"
    OPTIONAL_FIELD_ADDED = "optional_field_added"
    ENUM_VALUE_ADDED = "enum_value_added"
    DESCRIPTION_CHANGED = "description_changed"
|
|
31
|
+
|
|
32
|
+
@dataclass
class Change:
    """One detected difference between two OpenAPI specifications."""

    type: ChangeType         # category of the change
    path: str                # location in the spec ("/users:GET", "#/components/...")
    details: Dict[str, Any]  # structured context for downstream tooling
    severity: str            # one of: high, medium, low
    message: str             # human-readable summary

    @property
    def is_breaking(self) -> bool:
        """True when this change can break existing API consumers."""
        breaking_kinds = {
            ChangeType.ENDPOINT_REMOVED,
            ChangeType.METHOD_REMOVED,
            ChangeType.REQUIRED_PARAM_ADDED,
            ChangeType.PARAM_REMOVED,
            ChangeType.RESPONSE_REMOVED,
            ChangeType.REQUIRED_FIELD_ADDED,
            ChangeType.FIELD_REMOVED,
            ChangeType.TYPE_CHANGED,
            ChangeType.FORMAT_CHANGED,
            ChangeType.ENUM_VALUE_REMOVED,
        }
        return self.type in breaking_kinds
|
|
54
|
+
|
|
55
|
+
class OpenAPIDiffEngine:
    """Advanced diff engine for OpenAPI specifications.

    Walks two parsed OpenAPI documents (plain dicts) and records every
    detected difference as a ``Change``. Results accumulate on
    ``self.changes`` and are returned from :meth:`compare`.

    Known limitation: ``$ref`` schemas are not resolved (see
    ``_compare_schema_deep``), so changes hidden behind references are only
    caught by the component-schema comparison.
    """

    # Recognized HTTP verbs; other keys under a path item (parameters,
    # summary, servers, ...) are ignored by the method comparison.
    _HTTP_METHODS = frozenset({"get", "post", "put", "delete", "patch", "head", "options"})

    def __init__(self):
        # Changes accumulated by the most recent compare() run.
        self.changes: List[Change] = []

    def compare(self, old_spec: Dict, new_spec: Dict) -> List[Change]:
        """Compare two OpenAPI specifications and return all changes."""
        self.changes = []

        # Endpoints and their operations.
        self._compare_paths(old_spec.get("paths", {}), new_spec.get("paths", {}))

        # Reusable component schemas.
        self._compare_schemas(
            old_spec.get("components", {}).get("schemas", {}),
            new_spec.get("components", {}).get("schemas", {})
        )

        # Security schemes (removal is treated as breaking).
        self._compare_security(
            old_spec.get("components", {}).get("securitySchemes", {}),
            new_spec.get("components", {}).get("securitySchemes", {})
        )

        return self.changes

    def _compare_paths(self, old_paths: Dict, new_paths: Dict):
        """Compare API paths/endpoints."""
        old_set = set(old_paths.keys())
        new_set = set(new_paths.keys())

        # Removed endpoints are breaking.
        for path in old_set - new_set:
            self.changes.append(Change(
                type=ChangeType.ENDPOINT_REMOVED,
                path=path,
                details={"endpoint": path},
                severity="high",
                message=f"Endpoint removed: {path}"
            ))

        # Added endpoints are additive.
        for path in new_set - old_set:
            self.changes.append(Change(
                type=ChangeType.ENDPOINT_ADDED,
                path=path,
                details={"endpoint": path},
                severity="low",
                message=f"New endpoint added: {path}"
            ))

        # Endpoints present in both: drill into their HTTP methods.
        for path in old_set & new_set:
            self._compare_methods(path, old_paths[path], new_paths[path])

    def _compare_methods(self, path: str, old_methods: Dict, new_methods: Dict):
        """Compare HTTP methods for an endpoint.

        NOTE(review): METHOD_ADDED exists in ChangeType but additions are not
        reported here — only removals and modifications. TODO confirm intent.
        """
        old_set = {m for m in old_methods.keys() if m in self._HTTP_METHODS}
        new_set = {m for m in new_methods.keys() if m in self._HTTP_METHODS}

        # Removed methods are breaking.
        for method in old_set - new_set:
            self.changes.append(Change(
                type=ChangeType.METHOD_REMOVED,
                path=f"{path}:{method.upper()}",
                details={"endpoint": path, "method": method.upper()},
                severity="high",
                message=f"Method removed: {method.upper()} {path}"
            ))

        # Methods present in both: compare the operation objects.
        for method in old_set & new_set:
            self._compare_operation(
                f"{path}:{method.upper()}",
                old_methods[method],
                new_methods[method]
            )

    def _compare_operation(self, operation_id: str, old_op: Dict, new_op: Dict):
        """Compare operation details: parameters, request body, responses."""

        # Index parameters by (location, name) so renames/moves register as
        # a removal plus an addition.
        old_params = {self._param_key(p): p for p in old_op.get("parameters", [])}
        new_params = {self._param_key(p): p for p in new_op.get("parameters", [])}

        # Removed parameters are breaking.
        for param_key in set(old_params.keys()) - set(new_params.keys()):
            param = old_params[param_key]
            self.changes.append(Change(
                type=ChangeType.PARAM_REMOVED,
                path=operation_id,
                details={"parameter": param["name"], "in": param["in"]},
                severity="high",
                message=f"Parameter removed: {param['name']} from {operation_id}"
            ))

        # New parameters only break clients when required.
        for param_key in set(new_params.keys()) - set(old_params.keys()):
            param = new_params[param_key]
            if param.get("required", False):
                self.changes.append(Change(
                    type=ChangeType.REQUIRED_PARAM_ADDED,
                    path=operation_id,
                    details={"parameter": param["name"], "in": param["in"]},
                    severity="high",
                    message=f"Required parameter added: {param['name']} to {operation_id}"
                ))

        # Shared parameters: inspect their schemas for type/format/enum drift.
        for param_key in set(old_params.keys()) & set(new_params.keys()):
            self._compare_parameter_schemas(
                operation_id,
                old_params[param_key],
                new_params[param_key]
            )

        # Request body presence and content schemas.
        if "requestBody" in old_op or "requestBody" in new_op:
            self._compare_request_body(
                operation_id,
                old_op.get("requestBody"),
                new_op.get("requestBody")
            )

        # Response codes and content schemas.
        self._compare_responses(
            operation_id,
            old_op.get("responses", {}),
            new_op.get("responses", {})
        )

    def _compare_parameter_schemas(self, operation_id: str, old_param: Dict, new_param: Dict):
        """Compare parameter schemas for type, format, and enum changes."""
        old_schema = old_param.get("schema", {})
        new_schema = new_param.get("schema", {})

        # Type changes are breaking.
        if old_schema.get("type") != new_schema.get("type"):
            self.changes.append(Change(
                type=ChangeType.TYPE_CHANGED,
                path=operation_id,
                details={
                    "parameter": old_param["name"],
                    "old_type": old_schema.get("type"),
                    "new_type": new_schema.get("type")
                },
                severity="high",
                message=f"Parameter type changed: {old_param['name']} from {old_schema.get('type')} to {new_schema.get('type')}"
            ))

        # FIX: FORMAT_CHANGED was declared in ChangeType (and classified as
        # breaking by Change.is_breaking) but never emitted anywhere. Per
        # OpenAPI, "format" refines a type (int32 vs int64, date vs
        # date-time), so a changed or dropped format is observable by clients.
        old_format = old_schema.get("format")
        new_format = new_schema.get("format")
        if old_format is not None and old_format != new_format:
            self.changes.append(Change(
                type=ChangeType.FORMAT_CHANGED,
                path=operation_id,
                details={
                    "parameter": old_param["name"],
                    "old_format": old_format,
                    "new_format": new_format
                },
                severity="high",
                message=f"Parameter format changed: {old_param['name']} from {old_format} to {new_format}"
            ))

        # Enum membership changes.
        if "enum" in old_schema or "enum" in new_schema:
            self._compare_enums(
                f"{operation_id}:{old_param['name']}",
                old_schema.get("enum", []),
                new_schema.get("enum", [])
            )

    def _compare_request_body(self, operation_id: str, old_body: Optional[Dict], new_body: Optional[Dict]):
        """Compare request body presence and content schemas."""
        if old_body and not new_body:
            # Body dropped entirely — breaking for clients that send one.
            self.changes.append(Change(
                type=ChangeType.FIELD_REMOVED,
                path=operation_id,
                details={"field": "request_body"},
                severity="high",
                message=f"Request body removed from {operation_id}"
            ))
        elif not old_body and new_body and new_body.get("required", False):
            # A newly mandatory body breaks old clients; an optional one does not.
            self.changes.append(Change(
                type=ChangeType.REQUIRED_FIELD_ADDED,
                path=operation_id,
                details={"field": "request_body"},
                severity="high",
                message=f"Required request body added to {operation_id}"
            ))
        elif old_body and new_body:
            # Deep-compare schemas for content types present in both versions.
            old_content = old_body.get("content", {})
            new_content = new_body.get("content", {})

            for content_type in old_content.keys() & new_content.keys():
                self._compare_schema_deep(
                    f"{operation_id}:request",
                    old_content[content_type].get("schema", {}),
                    new_content[content_type].get("schema", {})
                )

    def _compare_responses(self, operation_id: str, old_responses: Dict, new_responses: Dict):
        """Compare response definitions."""
        old_codes = set(old_responses.keys())
        new_codes = set(new_responses.keys())

        # Removed responses: only losing a success (2xx) response is breaking.
        for code in old_codes - new_codes:
            if code.startswith("2"):
                self.changes.append(Change(
                    type=ChangeType.RESPONSE_REMOVED,
                    path=operation_id,
                    details={"response_code": code},
                    severity="high",
                    message=f"Success response {code} removed from {operation_id}"
                ))

        # Shared responses: deep-compare their content schemas.
        for code in old_codes & new_codes:
            old_resp = old_responses[code]
            new_resp = new_responses[code]

            if "content" in old_resp or "content" in new_resp:
                old_content = old_resp.get("content", {})
                new_content = new_resp.get("content", {})

                for content_type in old_content.keys() & new_content.keys():
                    self._compare_schema_deep(
                        f"{operation_id}:{code}",
                        old_content[content_type].get("schema", {}),
                        new_content[content_type].get("schema", {})
                    )

    def _compare_schema_deep(self, path: str, old_schema: Dict, new_schema: Dict, required_fields: Optional[Set[str]] = None):
        """Deep comparison of schemas including nested objects.

        Args:
            path: Dotted location string used in emitted messages.
            old_schema: Schema object from the old spec.
            new_schema: Schema object from the new spec.
            required_fields: Accepted for backward compatibility; currently
                unused (each level re-reads its own ``required`` list).
        """
        # Handle references
        if "$ref" in old_schema or "$ref" in new_schema:
            # TODO: Resolve references properly
            return

        # Type changes are breaking; no point diffing deeper after one.
        old_type = old_schema.get("type")
        new_type = new_schema.get("type")

        if old_type != new_type and old_type is not None:
            self.changes.append(Change(
                type=ChangeType.TYPE_CHANGED,
                path=path,
                details={"old_type": old_type, "new_type": new_type},
                severity="high",
                message=f"Type changed from {old_type} to {new_type} at {path}"
            ))
            return

        # FIX: emit FORMAT_CHANGED (previously declared but never produced).
        # Same guard shape as the type check: only flag when the old spec
        # committed to a format and the new one differs or drops it.
        old_format = old_schema.get("format")
        new_format = new_schema.get("format")
        if old_format is not None and old_format != new_format:
            self.changes.append(Change(
                type=ChangeType.FORMAT_CHANGED,
                path=path,
                details={"old_format": old_format, "new_format": new_format},
                severity="high",
                message=f"Format changed from {old_format} to {new_format} at {path}"
            ))

        # Object schemas: compare property sets and required lists.
        if old_type == "object":
            old_props = old_schema.get("properties", {})
            new_props = new_schema.get("properties", {})
            old_required = set(old_schema.get("required", []))
            new_required = set(new_schema.get("required", []))

            # Removing a *required* field is breaking; optional removals are
            # intentionally not reported here.
            for prop in set(old_props.keys()) - set(new_props.keys()):
                if prop in old_required:
                    self.changes.append(Change(
                        type=ChangeType.FIELD_REMOVED,
                        path=f"{path}.{prop}",
                        details={"field": prop},
                        severity="high",
                        message=f"Required field '{prop}' removed at {path}"
                    ))

            # Fields that became required AND are brand new break old payloads.
            for prop in new_required - old_required:
                if prop not in old_props:
                    self.changes.append(Change(
                        type=ChangeType.REQUIRED_FIELD_ADDED,
                        path=f"{path}.{prop}",
                        details={"field": prop},
                        severity="high",
                        message=f"New required field '{prop}' added at {path}"
                    ))

            # Recurse into properties present in both versions.
            for prop in set(old_props.keys()) & set(new_props.keys()):
                self._compare_schema_deep(
                    f"{path}.{prop}",
                    old_props[prop],
                    new_props[prop],
                    old_required if prop in old_required else None
                )

        # Array schemas: recurse into the item schema.
        elif old_type == "array":
            if "items" in old_schema and "items" in new_schema:
                self._compare_schema_deep(
                    f"{path}[]",
                    old_schema["items"],
                    new_schema["items"]
                )

        # Enum membership changes at this level.
        if "enum" in old_schema or "enum" in new_schema:
            self._compare_enums(path, old_schema.get("enum", []), new_schema.get("enum", []))

    def _compare_enums(self, path: str, old_enum: List, new_enum: List):
        """Compare enum values."""
        old_set = set(old_enum)
        new_set = set(new_enum)

        # Removed enum values are breaking
        for value in old_set - new_set:
            self.changes.append(Change(
                type=ChangeType.ENUM_VALUE_REMOVED,
                path=path,
                details={"value": value},
                severity="high",
                message=f"Enum value '{value}' removed at {path}"
            ))

        # Added enum values are non-breaking
        for value in new_set - old_set:
            self.changes.append(Change(
                type=ChangeType.ENUM_VALUE_ADDED,
                path=path,
                details={"value": value},
                severity="low",
                message=f"Enum value '{value}' added at {path}"
            ))

    def _compare_schemas(self, old_schemas: Dict, new_schemas: Dict):
        """Compare component schemas."""
        # Removal is only medium severity: without $ref resolution we cannot
        # tell whether the schema was actually referenced.
        for schema_name in set(old_schemas.keys()) - set(new_schemas.keys()):
            self.changes.append(Change(
                type=ChangeType.FIELD_REMOVED,
                path=f"#/components/schemas/{schema_name}",
                details={"schema": schema_name},
                severity="medium",
                message=f"Schema '{schema_name}' removed"
            ))

        # Deep-compare schemas present in both versions.
        for schema_name in set(old_schemas.keys()) & set(new_schemas.keys()):
            self._compare_schema_deep(
                f"#/components/schemas/{schema_name}",
                old_schemas[schema_name],
                new_schemas[schema_name]
            )

    def _compare_security(self, old_security: Dict, new_security: Dict):
        """Compare security schemes; removals are treated as breaking."""
        for scheme in set(old_security.keys()) - set(new_security.keys()):
            self.changes.append(Change(
                type=ChangeType.FIELD_REMOVED,
                path=f"#/components/securitySchemes/{scheme}",
                details={"scheme": scheme},
                severity="high",
                message=f"Security scheme '{scheme}' removed"
            ))

    def _param_key(self, param: Dict) -> str:
        """Generate a unique (location, name) key for a parameter."""
        return f"{param.get('in', 'query')}:{param.get('name', '')}"

    def get_breaking_changes(self) -> List[Change]:
        """Get only breaking changes from the last compare() run."""
        return [c for c in self.changes if c.is_breaking]

    def get_summary(self) -> Dict[str, Any]:
        """Get an aggregate summary of all changes from the last run."""
        breaking = self.get_breaking_changes()
        return {
            "total_changes": len(self.changes),
            "breaking_changes": len(breaking),
            "endpoints_removed": len([c for c in breaking if c.type == ChangeType.ENDPOINT_REMOVED]),
            "methods_removed": len([c for c in breaking if c.type == ChangeType.METHOD_REMOVED]),
            "parameters_changed": len([c for c in breaking if c.type in [ChangeType.PARAM_REMOVED, ChangeType.REQUIRED_PARAM_ADDED]]),
            "schemas_changed": len([c for c in breaking if c.type in [ChangeType.FIELD_REMOVED, ChangeType.REQUIRED_FIELD_ADDED, ChangeType.TYPE_CHANGED]]),
            "is_breaking": len(breaking) > 0
        }
|
|
@@ -0,0 +1,268 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Delimit Event Backbone
|
|
3
|
+
Constructs ledger events, generates SHA-256 hashes, links hash chains,
|
|
4
|
+
and appends to the append-only JSONL ledger.
|
|
5
|
+
|
|
6
|
+
Per Jamsons Doctrine:
|
|
7
|
+
- Deterministic outputs
|
|
8
|
+
- Append-only artifacts
|
|
9
|
+
- Fail-closed CI behavior (ledger failures never affect CI)
|
|
10
|
+
- No telemetry collection
|
|
11
|
+
"""
|
|
12
|
+
|
|
13
|
+
import json
|
|
14
|
+
import logging
|
|
15
|
+
import os
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
from typing import Any, Dict, List, Optional
|
|
18
|
+
|
|
19
|
+
from .event_schema import (
|
|
20
|
+
canonicalize,
|
|
21
|
+
compute_event_hash,
|
|
22
|
+
create_event,
|
|
23
|
+
now_utc,
|
|
24
|
+
validate_event,
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
logger = logging.getLogger("delimit.event_backbone")
|
|
28
|
+
|
|
29
|
+
# Default ledger location relative to repository root
|
|
30
|
+
DEFAULT_LEDGER_DIR = ".delimit/ledger"
|
|
31
|
+
DEFAULT_LEDGER_FILE = "events.jsonl"
|
|
32
|
+
|
|
33
|
+
# Genesis sentinel for first event in chain
|
|
34
|
+
GENESIS_HASH = "GENESIS"
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class EventBackbone:
    """Constructs and appends ledger events with SHA-256 hash chain.

    Each event records the hash of its predecessor, forming a tamper-evident
    chain in an append-only JSONL file. All disk operations are best-effort:
    failures are logged and reported via return values, never raised, so the
    ledger can never affect CI pass/fail outcome.
    """

    def __init__(self, ledger_dir: Optional[str] = None):
        """Initialize the backbone with a ledger directory.

        Args:
            ledger_dir: Path to the ledger directory; falls back to the
                default ``.delimit/ledger/`` when not given.
        """
        base = Path(ledger_dir) if ledger_dir else Path(DEFAULT_LEDGER_DIR)
        self._ledger_dir = base
        self._ledger_file = base / DEFAULT_LEDGER_FILE

    @property
    def ledger_path(self) -> Path:
        """Full path to the JSONL ledger file."""
        return self._ledger_file

    def _ensure_ledger_dir(self) -> bool:
        """Create the ledger directory if missing; True on success."""
        try:
            self._ledger_dir.mkdir(parents=True, exist_ok=True)
        except OSError as exc:
            logger.warning("Failed to create ledger directory %s: %s", self._ledger_dir, exc)
            return False
        return True

    def get_last_event_hash(self) -> str:
        """Return the hash of the most recent ledger event for chain linking.

        Falls back to the GENESIS sentinel when the ledger is missing,
        empty, or unreadable.
        """
        if not self._ledger_file.exists():
            return GENESIS_HASH

        try:
            # Scan for the last non-blank line; ledgers are small enough
            # that a linear pass is acceptable.
            tail = ""
            with open(self._ledger_file, "r", encoding="utf-8") as handle:
                for raw in handle:
                    candidate = raw.strip()
                    if candidate:
                        tail = candidate

            if not tail:
                return GENESIS_HASH

            return json.loads(tail).get("event_hash", GENESIS_HASH)
        except (json.JSONDecodeError, OSError, KeyError) as exc:
            logger.warning("Failed to read last event hash: %s", exc)
            return GENESIS_HASH

    def construct_event(
        self,
        event_type: str,
        api_name: str,
        repository: str,
        version: str,
        commit: str,
        actor: str,
        spec_hash: str,
        diff_summary: List[Any],
        policy_result: str,
        complexity_score: int,
        complexity_class: str,
        timestamp: Optional[str] = None,
        previous_hash: Optional[str] = None,
    ) -> Dict[str, Any]:
        """Construct a ledger event with a computed hash chain link.

        Args:
            event_type: Type of event (e.g. "contract_change").
            api_name: Name of the API.
            repository: Repository identifier.
            version: API version string.
            commit: Git commit SHA.
            actor: Who triggered the event.
            spec_hash: SHA-256 hash of the API spec.
            diff_summary: List of change summaries.
            policy_result: Result of policy evaluation.
            complexity_score: Complexity score 0-100.
            complexity_class: Complexity classification.
            timestamp: ISO 8601 UTC timestamp; generated when omitted.
            previous_hash: Previous event hash; read from the ledger when omitted.

        Returns:
            Validated event dictionary with computed event_hash.

        Raises:
            ValueError: If the event fails schema validation.
        """
        # Both fall-backs are lazy: only computed when the caller omits them.
        resolved_ts = timestamp if timestamp is not None else now_utc()
        resolved_prev = (
            previous_hash if previous_hash is not None else self.get_last_event_hash()
        )

        return create_event(
            event_type=event_type,
            api_name=api_name,
            repository=repository,
            version=version,
            timestamp=resolved_ts,
            commit=commit,
            actor=actor,
            spec_hash=spec_hash,
            previous_hash=resolved_prev,
            diff_summary=diff_summary,
            policy_result=policy_result,
            complexity_score=complexity_score,
            complexity_class=complexity_class,
        )

    def append_event(self, event: Dict[str, Any]) -> bool:
        """Append a validated event to the JSONL ledger.

        Serialization uses deterministic key ordering via canonicalize().
        Best-effort: failures are logged, never raised.

        Args:
            event: Validated event dictionary.

        Returns:
            True when the event was persisted, False otherwise.
        """
        # Refuse to persist anything that fails schema validation.
        problems = validate_event(event)
        if problems:
            logger.warning("Event validation failed, not appending: %s", problems)
            return False

        if not self._ensure_ledger_dir():
            return False

        try:
            with open(self._ledger_file, "a", encoding="utf-8") as handle:
                handle.write(canonicalize(event) + "\n")
        except OSError as exc:
            logger.warning("Failed to append event to ledger: %s", exc)
            return False
        return True

    def emit(
        self,
        event_type: str,
        api_name: str,
        repository: str,
        version: str,
        commit: str,
        actor: str,
        spec_hash: str,
        diff_summary: List[Any],
        policy_result: str,
        complexity_score: int,
        complexity_class: str,
        timestamp: Optional[str] = None,
    ) -> Optional[Dict[str, Any]]:
        """Construct an event and append it to the ledger in one step.

        Primary API for event generation. Best-effort: when the ledger write
        fails the event is still returned, just not persisted.

        CRITICAL: never raises. Per Jamsons Doctrine, ledger failures must
        not affect CI pass/fail outcome.

        Returns:
            The event dictionary, or None if construction failed.
        """
        try:
            event = self.construct_event(
                event_type=event_type,
                api_name=api_name,
                repository=repository,
                version=version,
                commit=commit,
                actor=actor,
                spec_hash=spec_hash,
                diff_summary=diff_summary,
                policy_result=policy_result,
                complexity_score=complexity_score,
                complexity_class=complexity_class,
                timestamp=timestamp,
            )
        except ValueError as exc:
            logger.warning("Event construction failed: %s", exc)
            return None

        # Persist if possible; a failed write is logged and ignored.
        if not self.append_event(event):
            logger.warning("Ledger append failed for event %s — CI continues normally",
                           event.get("event_hash", "unknown"))

        return event
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
def emit_pipeline_event(
    ledger_dir: Optional[str] = None,
    event_type: str = "contract_change",
    api_name: str = "",
    repository: str = "",
    version: str = "",
    commit: str = "",
    actor: str = "",
    spec_hash: str = "",
    diff_summary: Optional[List[Any]] = None,
    policy_result: str = "passed",
    complexity_score: int = 0,
    complexity_class: str = "simple",
) -> Optional[Dict[str, Any]]:
    """Convenience wrapper for CI pipeline integration.

    Invoked after diff_engine → policy_engine → complexity_analyzer.
    Best-effort: never raises, never affects CI outcome.
    """
    # Normalize the optional list before delegating to the backbone.
    summary = [] if diff_summary is None else diff_summary
    return EventBackbone(ledger_dir=ledger_dir).emit(
        event_type=event_type,
        api_name=api_name,
        repository=repository,
        version=version,
        commit=commit,
        actor=actor,
        spec_hash=spec_hash,
        diff_summary=summary,
        policy_result=policy_result,
        complexity_score=complexity_score,
        complexity_class=complexity_class,
    )
|