delimit-cli 2.3.2 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113) hide show
  1. package/.dockerignore +7 -0
  2. package/.github/workflows/ci.yml +22 -0
  3. package/CHANGELOG.md +33 -0
  4. package/CODE_OF_CONDUCT.md +48 -0
  5. package/CONTRIBUTING.md +67 -0
  6. package/Dockerfile +9 -0
  7. package/LICENSE +21 -0
  8. package/README.md +51 -130
  9. package/SECURITY.md +42 -0
  10. package/adapters/codex-forge.js +107 -0
  11. package/adapters/codex-jamsons.js +142 -0
  12. package/adapters/codex-security.js +94 -0
  13. package/adapters/gemini-forge.js +120 -0
  14. package/adapters/gemini-jamsons.js +152 -0
  15. package/bin/delimit-cli.js +52 -2
  16. package/bin/delimit-setup.js +258 -0
  17. package/gateway/ai/backends/__init__.py +0 -0
  18. package/gateway/ai/backends/async_utils.py +21 -0
  19. package/gateway/ai/backends/deploy_bridge.py +150 -0
  20. package/gateway/ai/backends/gateway_core.py +261 -0
  21. package/gateway/ai/backends/generate_bridge.py +38 -0
  22. package/gateway/ai/backends/governance_bridge.py +196 -0
  23. package/gateway/ai/backends/intel_bridge.py +59 -0
  24. package/gateway/ai/backends/memory_bridge.py +93 -0
  25. package/gateway/ai/backends/ops_bridge.py +137 -0
  26. package/gateway/ai/backends/os_bridge.py +82 -0
  27. package/gateway/ai/backends/repo_bridge.py +117 -0
  28. package/gateway/ai/backends/ui_bridge.py +118 -0
  29. package/gateway/ai/backends/vault_bridge.py +129 -0
  30. package/gateway/ai/server.py +1182 -0
  31. package/gateway/core/__init__.py +3 -0
  32. package/gateway/core/__pycache__/__init__.cpython-310.pyc +0 -0
  33. package/gateway/core/__pycache__/auto_baseline.cpython-310.pyc +0 -0
  34. package/gateway/core/__pycache__/ci_formatter.cpython-310.pyc +0 -0
  35. package/gateway/core/__pycache__/contract_ledger.cpython-310.pyc +0 -0
  36. package/gateway/core/__pycache__/dependency_graph.cpython-310.pyc +0 -0
  37. package/gateway/core/__pycache__/dependency_manifest.cpython-310.pyc +0 -0
  38. package/gateway/core/__pycache__/diff_engine_v2.cpython-310.pyc +0 -0
  39. package/gateway/core/__pycache__/event_backbone.cpython-310.pyc +0 -0
  40. package/gateway/core/__pycache__/event_schema.cpython-310.pyc +0 -0
  41. package/gateway/core/__pycache__/explainer.cpython-310.pyc +0 -0
  42. package/gateway/core/__pycache__/gateway.cpython-310.pyc +0 -0
  43. package/gateway/core/__pycache__/gateway_v2.cpython-310.pyc +0 -0
  44. package/gateway/core/__pycache__/gateway_v3.cpython-310.pyc +0 -0
  45. package/gateway/core/__pycache__/impact_analyzer.cpython-310.pyc +0 -0
  46. package/gateway/core/__pycache__/policy_engine.cpython-310.pyc +0 -0
  47. package/gateway/core/__pycache__/registry.cpython-310.pyc +0 -0
  48. package/gateway/core/__pycache__/registry_v2.cpython-310.pyc +0 -0
  49. package/gateway/core/__pycache__/registry_v3.cpython-310.pyc +0 -0
  50. package/gateway/core/__pycache__/semver_classifier.cpython-310.pyc +0 -0
  51. package/gateway/core/__pycache__/spec_detector.cpython-310.pyc +0 -0
  52. package/gateway/core/__pycache__/surface_bridge.cpython-310.pyc +0 -0
  53. package/gateway/core/auto_baseline.py +304 -0
  54. package/gateway/core/ci_formatter.py +283 -0
  55. package/gateway/core/complexity_analyzer.py +386 -0
  56. package/gateway/core/contract_ledger.py +345 -0
  57. package/gateway/core/dependency_graph.py +218 -0
  58. package/gateway/core/dependency_manifest.py +223 -0
  59. package/gateway/core/diff_engine_v2.py +477 -0
  60. package/gateway/core/diff_engine_v2.py.bak +426 -0
  61. package/gateway/core/event_backbone.py +268 -0
  62. package/gateway/core/event_schema.py +258 -0
  63. package/gateway/core/explainer.py +438 -0
  64. package/gateway/core/gateway.py +128 -0
  65. package/gateway/core/gateway_v2.py +154 -0
  66. package/gateway/core/gateway_v3.py +224 -0
  67. package/gateway/core/impact_analyzer.py +163 -0
  68. package/gateway/core/policies/default.yml +13 -0
  69. package/gateway/core/policies/relaxed.yml +48 -0
  70. package/gateway/core/policies/strict.yml +55 -0
  71. package/gateway/core/policy_engine.py +464 -0
  72. package/gateway/core/registry.py +52 -0
  73. package/gateway/core/registry_v2.py +132 -0
  74. package/gateway/core/registry_v3.py +134 -0
  75. package/gateway/core/semver_classifier.py +152 -0
  76. package/gateway/core/spec_detector.py +130 -0
  77. package/gateway/core/surface_bridge.py +307 -0
  78. package/gateway/core/zero_spec/__init__.py +4 -0
  79. package/gateway/core/zero_spec/__pycache__/__init__.cpython-310.pyc +0 -0
  80. package/gateway/core/zero_spec/__pycache__/detector.cpython-310.pyc +0 -0
  81. package/gateway/core/zero_spec/__pycache__/express_extractor.cpython-310.pyc +0 -0
  82. package/gateway/core/zero_spec/__pycache__/fastapi_extractor.cpython-310.pyc +0 -0
  83. package/gateway/core/zero_spec/__pycache__/nestjs_extractor.cpython-310.pyc +0 -0
  84. package/gateway/core/zero_spec/detector.py +353 -0
  85. package/gateway/core/zero_spec/express_extractor.py +483 -0
  86. package/gateway/core/zero_spec/fastapi_extractor.py +254 -0
  87. package/gateway/core/zero_spec/nestjs_extractor.py +369 -0
  88. package/gateway/tasks/__init__.py +1 -0
  89. package/gateway/tasks/__pycache__/__init__.cpython-310.pyc +0 -0
  90. package/gateway/tasks/__pycache__/check_policy.cpython-310.pyc +0 -0
  91. package/gateway/tasks/__pycache__/check_policy_v2.cpython-310.pyc +0 -0
  92. package/gateway/tasks/__pycache__/check_policy_v3.cpython-310.pyc +0 -0
  93. package/gateway/tasks/__pycache__/explain_diff.cpython-310.pyc +0 -0
  94. package/gateway/tasks/__pycache__/explain_diff_v2.cpython-310.pyc +0 -0
  95. package/gateway/tasks/__pycache__/validate_api.cpython-310.pyc +0 -0
  96. package/gateway/tasks/__pycache__/validate_api_v2.cpython-310.pyc +0 -0
  97. package/gateway/tasks/__pycache__/validate_api_v3.cpython-310.pyc +0 -0
  98. package/gateway/tasks/check_policy.py +177 -0
  99. package/gateway/tasks/check_policy_v2.py +255 -0
  100. package/gateway/tasks/check_policy_v3.py +255 -0
  101. package/gateway/tasks/explain_diff.py +305 -0
  102. package/gateway/tasks/explain_diff_v2.py +267 -0
  103. package/gateway/tasks/validate_api.py +131 -0
  104. package/gateway/tasks/validate_api_v2.py +208 -0
  105. package/gateway/tasks/validate_api_v3.py +163 -0
  106. package/package.json +3 -3
  107. package/adapters/codex-skill.js +0 -87
  108. package/adapters/cursor-extension.js +0 -190
  109. package/adapters/gemini-action.js +0 -93
  110. package/adapters/openai-function.js +0 -112
  111. package/adapters/xai-plugin.js +0 -151
  112. package/test-decision-engine.js +0 -181
  113. package/test-hook.js +0 -27
@@ -0,0 +1,305 @@
1
+ import yaml
2
+ import json
3
+ from typing import Dict, List, Any
4
+ from core.registry import task_registry
5
+ from schemas.base import TaskRequest
6
+
7
+ register_task = task_registry.register
8
+
9
@register_task("explain-diff", version="v1", description="Explain differences between API versions")
def explain_diff_handler(request: TaskRequest) -> Dict[str, Any]:
    """Produce a human-readable report describing how two API specs differ.

    Expects exactly two input files on the request (old spec, then new spec).
    Returns a dict with a one-line summary, the raw change set, detailed
    explanation strings, a migration guide, and an impact assessment.

    Raises:
        ValueError: if the request does not carry exactly two files.
    """
    if len(request.files) != 2:
        raise ValueError("explain-diff requires exactly 2 files: old and new API spec")

    old_path, new_path = request.files
    changes = analyze_changes(load_spec(old_path), load_spec(new_path))
    explanation = generate_explanation(changes)

    return {
        "summary": explanation["summary"],
        "changes": changes,
        "explanation": explanation["details"],
        "migration_guide": generate_migration_guide(changes),
        "impact_assessment": assess_impact(changes),
    }
31
+
32
def load_spec(file_path: str) -> Dict:
    """Read an API specification from disk.

    Files ending in .yaml/.yml are parsed as YAML; everything else is
    treated as JSON.
    """
    is_yaml = file_path.endswith(('.yaml', '.yml'))
    with open(file_path, 'r') as handle:
        return yaml.safe_load(handle) if is_yaml else json.load(handle)
39
+
40
def analyze_changes(old_spec: Dict, new_spec: Dict) -> Dict[str, Any]:
    """Build a structured change set describing every difference between
    two API specs: endpoints, models, security (reserved), and metadata.
    """
    def _bucket():
        # Each category gets its own independent added/removed/modified lists.
        return {"added": [], "removed": [], "modified": []}

    changes = {
        "endpoints": _bucket(),
        "models": _bucket(),
        "security": _bucket(),
        "metadata": {},
    }

    # --- Endpoint-level changes -------------------------------------------
    old_paths = set(old_spec.get("paths", {}).keys())
    new_paths = set(new_spec.get("paths", {}).keys())

    changes["endpoints"]["added"] = list(new_paths - old_paths)
    changes["endpoints"]["removed"] = list(old_paths - new_paths)

    for path in old_paths & new_paths:
        old_entry = old_spec["paths"][path]
        new_entry = new_spec["paths"][path]
        old_methods = set(old_entry.keys())
        new_methods = set(new_entry.keys())

        # Unchanged method set and no parameter drift -> not modified.
        if old_methods == new_methods and not has_parameter_changes(old_entry, new_entry):
            continue

        changes["endpoints"]["modified"].append({
            "path": path,
            "methods_added": list(new_methods - old_methods),
            "methods_removed": list(old_methods - new_methods),
            "parameter_changes": get_parameter_changes(old_entry, new_entry),
        })

    # --- Model/schema changes ---------------------------------------------
    old_schemas = get_schemas(old_spec)
    new_schemas = get_schemas(new_spec)
    old_names = set(old_schemas.keys())
    new_names = set(new_schemas.keys())

    changes["models"]["added"] = list(new_names - old_names)
    changes["models"]["removed"] = list(old_names - new_names)

    for model_name in old_names & new_names:
        if old_schemas[model_name] == new_schemas[model_name]:
            continue
        changes["models"]["modified"].append({
            "name": model_name,
            "changes": compare_schemas(old_schemas[model_name], new_schemas[model_name]),
        })

    # --- Metadata (info block) changes ------------------------------------
    if old_spec.get("info") != new_spec.get("info"):
        changes["metadata"]["version_change"] = {
            "old": old_spec.get("info", {}).get("version"),
            "new": new_spec.get("info", {}).get("version"),
        }

    return changes
108
+
109
def has_parameter_changes(old_endpoint: Dict, new_endpoint: Dict) -> bool:
    """Return True when any method present in both endpoint entries has a
    different parameter count or a different set of parameter names."""
    shared_methods = set(old_endpoint.keys()) & set(new_endpoint.keys())

    for method in shared_methods:
        before = old_endpoint[method].get("parameters", [])
        after = new_endpoint[method].get("parameters", [])

        if len(before) != len(after):
            return True
        if {p.get("name") for p in before} != {p.get("name") for p in after}:
            return True

    return False
125
+
126
def get_parameter_changes(old_endpoint: Dict, new_endpoint: Dict) -> List[Dict]:
    """List per-method parameter additions and removals between two endpoint
    entries.

    Only methods present in both entries are compared.  Methods and parameter
    names are sorted so that repeated runs over the same specs produce
    byte-identical reports (the previous set-iteration ordering was
    nondeterministic, which made diffs of the tool's own output noisy).
    """
    changes = []

    for method in sorted(set(old_endpoint.keys()) & set(new_endpoint.keys())):
        old_params = {p.get("name"): p for p in old_endpoint[method].get("parameters", [])}
        new_params = {p.get("name"): p for p in new_endpoint[method].get("parameters", [])}

        added = set(new_params.keys()) - set(old_params.keys())
        removed = set(old_params.keys()) - set(new_params.keys())

        if added or removed:
            changes.append({
                "method": method.upper(),
                # key=str keeps the sort total even if a parameter lacks "name"
                "added_params": sorted(added, key=str),
                "removed_params": sorted(removed, key=str),
            })

    return changes
145
+
146
def get_schemas(spec: Dict) -> Dict:
    """Return the spec's model definitions.

    OpenAPI 3 stores them under components.schemas; Swagger 2 under
    top-level definitions.  Returns an empty dict when neither exists.
    """
    components = spec["components"] if "components" in spec else None
    if components is not None and "schemas" in components:
        return components["schemas"]
    return spec["definitions"] if "definitions" in spec else {}
153
+
154
def compare_schemas(old_schema: Dict, new_schema: Dict) -> List[str]:
    """Describe property-level differences between two schemas.

    Reports added fields, removed fields, and "type" changes on shared
    fields.  Field names are sorted so the message text is deterministic
    across runs (previously the set iteration order made the joined field
    lists and the order of type-change messages unstable).
    """
    changes = []

    old_props = set(old_schema.get("properties", {}).keys())
    new_props = set(new_schema.get("properties", {}).keys())

    added = sorted(new_props - old_props)
    removed = sorted(old_props - new_props)

    if added:
        changes.append(f"Added fields: {', '.join(added)}")
    if removed:
        changes.append(f"Removed fields: {', '.join(removed)}")

    # Shared fields: flag any change to the declared "type"
    for prop in sorted(old_props & new_props):
        old_type = old_schema["properties"][prop].get("type")
        new_type = new_schema["properties"][prop].get("type")
        if old_type != new_type:
            changes.append(f"Field '{prop}' type changed from {old_type} to {new_type}")

    return changes
177
+
178
def generate_explanation(changes: Dict) -> Dict[str, Any]:
    """Turn a structured change set into a one-line summary plus a list of
    detail strings suitable for direct display."""
    endpoints = changes["endpoints"]
    models = changes["models"]

    # Total across both categories and all three buckets.
    total_changes = sum(
        len(bucket[key])
        for bucket in (endpoints, models)
        for key in ("added", "removed", "modified")
    )
    # Removals are the breaking subset.
    breaking = len(endpoints["removed"]) + len(models["removed"])

    if total_changes == 0:
        summary = "No significant changes detected between API versions"
    elif breaking > 0:
        summary = f"⚠️ {breaking} breaking changes detected among {total_changes} total changes"
    else:
        summary = f"✅ {total_changes} non-breaking changes detected"

    details = []
    if endpoints["added"]:
        details.append(
            f"New endpoints added: {', '.join(endpoints['added'])}"
        )
    if endpoints["removed"]:
        details.append(
            f"⚠️ Endpoints removed (BREAKING): {', '.join(endpoints['removed'])}"
        )
    if endpoints["modified"]:
        details.append(
            f"Endpoints modified: {len(endpoints['modified'])} endpoints have changes"
        )
    if models["added"]:
        details.append(
            f"New models added: {', '.join(models['added'])}"
        )
    if models["removed"]:
        details.append(
            f"⚠️ Models removed (BREAKING): {', '.join(models['removed'])}"
        )

    return {"summary": summary, "details": details}
231
+
232
def generate_migration_guide(changes: Dict) -> List[str]:
    """Build a markdown-style migration checklist from a structured change
    set; a single 'no migration' line when nothing breaks compatibility."""
    guide: List[str] = []
    endpoints = changes["endpoints"]

    if endpoints["removed"]:
        guide.append("## Breaking Changes - Action Required")
        guide.append("The following endpoints have been removed:")
        guide.extend(
            f"  - {endpoint}: Find alternative endpoint or update implementation"
            for endpoint in endpoints["removed"]
        )

    if endpoints["modified"]:
        guide.append("## Modified Endpoints")
        for mod in endpoints["modified"]:
            if mod["methods_removed"]:
                guide.append(f"  - {mod['path']}: Methods {', '.join(mod['methods_removed'])} removed")
            for change in (mod["parameter_changes"] or []):
                if change["removed_params"]:
                    guide.append(f"    - {change['method']}: Parameters {', '.join(change['removed_params'])} removed")
                if change["added_params"]:
                    guide.append(f"    - {change['method']}: New parameters {', '.join(change['added_params'])} added")

    if changes["models"]["removed"]:
        guide.append("## Removed Models")
        guide.extend(
            f"  - {model}: Update code to use alternative model"
            for model in changes["models"]["removed"]
        )

    # Empty guide means nothing above fired: all changes were additive.
    return guide or ["No migration required - all changes are backwards compatible"]
263
+
264
def assess_impact(changes: Dict) -> Dict[str, Any]:
    """Score a change set: breaking-change count, touched surface area, and
    a version-bump recommendation."""
    endpoints = changes["endpoints"]
    models = changes["models"]

    # Removals (endpoints, models, individual methods) are the breaking set.
    breaking = (
        len(endpoints["removed"])
        + len(models["removed"])
        + sum(len(mod["methods_removed"]) for mod in endpoints["modified"])
    )
    touched_endpoints = (
        len(endpoints["added"]) + len(endpoints["removed"]) + len(endpoints["modified"])
    )
    touched_models = (
        len(models["added"]) + len(models["removed"]) + len(models["modified"])
    )

    # Severity ladder: any breakage is "high"; otherwise scale by endpoint count.
    if breaking > 0:
        level, advice = "high", "Major version bump required. Notify all API consumers."
    elif touched_endpoints > 5:
        level, advice = "medium", "Minor version bump recommended. Review with team."
    elif touched_endpoints > 0:
        level, advice = "low", "Patch version bump. Standard deployment process."
    else:
        level, advice = "none", "No action required."

    return {
        "level": level,
        "breaking_changes": breaking,
        "affected_endpoints": touched_endpoints,
        "affected_models": touched_models,
        "recommendation": advice,
    }
@@ -0,0 +1,267 @@
1
+ """
2
+ Explain Diff task with Evidence Contract
3
+ V12 Core Hardening - Complete Implementation
4
+ """
5
+
6
+ import yaml
7
+ import json
8
+ from typing import Dict, List, Set
9
+ from pathlib import Path
10
+
11
+ from core.registry_v3 import task_registry
12
+ from schemas.requests_v2 import ExplainDiffRequest
13
+ from schemas.evidence import (
14
+ DiffExplanationEvidence, Decision, Violation, ViolationSeverity,
15
+ Evidence, Remediation
16
+ )
17
+
18
+
19
@task_registry.register("explain-diff", task_version="1.0", description="Explain API differences")
def explain_diff_handler(request: ExplainDiffRequest) -> DiffExplanationEvidence:
    """Generate a human-readable explanation of API changes with an evidence
    contract.

    Compares `request.old_spec` against `request.new_spec` and records every
    removal (endpoint, method, model) as a HIGH-severity Violation plus a
    failed Evidence entry; additions and modifications only produce Evidence.
    The count of violations drives the decision ladder (PASS / WARN / FAIL),
    the exit code, the impact level, and whether Remediation guidance is
    attached.

    Args:
        request: carries old/new spec file paths, a `detail_level` flag
            ("summary" trims metrics), and a `correlation_id` echoed back.

    Returns:
        DiffExplanationEvidence bundling decision, violations, evidence,
        optional remediation, metrics, and a per-category change summary.
    """

    # Load specifications (extension-based YAML/JSON dispatch in load_spec)
    old_spec = load_spec(request.old_spec)
    new_spec = load_spec(request.new_spec)

    # Accumulators: violations gate the decision; evidence is the audit trail.
    violations = []
    evidence_list = []
    changes_summary = {
        "endpoints_added": [],
        "endpoints_removed": [],
        "endpoints_modified": [],
        "models_added": [],
        "models_removed": [],
        "models_modified": []
    }

    # Check endpoint changes via path-set difference
    old_paths = set(old_spec.get("paths", {}).keys())
    new_paths = set(new_spec.get("paths", {}).keys())

    # Removed endpoints (breaking): one Violation + one failed Evidence each
    removed_paths = old_paths - new_paths
    for path in removed_paths:
        changes_summary["endpoints_removed"].append(path)
        violations.append(Violation(
            rule="no_removed_endpoint",
            severity=ViolationSeverity.HIGH,
            path=path,
            message=f"Breaking: Endpoint removed - {path}",
            details={"change_type": "endpoint_removed"}
        ))
        evidence_list.append(Evidence(
            rule="endpoint_tracking",
            passed=False,
            details={"path": path, "change": "removed"}
        ))

    # Added endpoints (non-breaking): evidence only, no violation
    added_paths = new_paths - old_paths
    for path in added_paths:
        changes_summary["endpoints_added"].append(path)
        evidence_list.append(Evidence(
            rule="endpoint_tracking",
            passed=True,
            details={"path": path, "change": "added"}
        ))

    # Modified endpoints: only the HTTP-method set is compared here.
    # NOTE(review): path-level keys like "parameters"/"summary" would be
    # treated as methods by this comparison — confirm specs are method-only.
    for path in old_paths & new_paths:
        old_methods = set(old_spec["paths"][path].keys())
        new_methods = set(new_spec["paths"][path].keys())

        if old_methods != new_methods:
            changes_summary["endpoints_modified"].append(path)

            # Removed methods (breaking); added methods produce no record here
            removed_methods = old_methods - new_methods
            for method in removed_methods:
                violations.append(Violation(
                    rule="no_removed_method",
                    severity=ViolationSeverity.HIGH,
                    path=f"{path}:{method.upper()}",
                    message=f"Breaking: Method removed - {method.upper()} {path}",
                    details={"change_type": "method_removed", "method": method}
                ))
                evidence_list.append(Evidence(
                    rule="method_tracking",
                    passed=False,
                    details={"path": path, "method": method, "change": "removed"}
                ))

    # Check model/schema changes (components.schemas or definitions)
    old_schemas = extract_schemas(old_spec)
    new_schemas = extract_schemas(new_spec)

    # Removed models (breaking)
    removed_models = set(old_schemas.keys()) - set(new_schemas.keys())
    for model in removed_models:
        changes_summary["models_removed"].append(model)
        violations.append(Violation(
            rule="no_removed_model",
            severity=ViolationSeverity.HIGH,
            message=f"Breaking: Model removed - {model}",
            details={"change_type": "model_removed", "model": model}
        ))
        evidence_list.append(Evidence(
            rule="model_tracking",
            passed=False,
            details={"model": model, "change": "removed"}
        ))

    # Added models (non-breaking)
    added_models = set(new_schemas.keys()) - set(old_schemas.keys())
    for model in added_models:
        changes_summary["models_added"].append(model)
        evidence_list.append(Evidence(
            rule="model_tracking",
            passed=True,
            details={"model": model, "change": "added"}
        ))

    # Check for modified models (shallow property/type diff in schemas_differ)
    for model in set(old_schemas.keys()) & set(new_schemas.keys()):
        if schemas_differ(old_schemas[model], new_schemas[model]):
            changes_summary["models_modified"].append(model)
            evidence_list.append(Evidence(
                rule="model_tracking",
                passed=True,  # Modifications are warnings, not failures
                details={"model": model, "change": "modified"}
            ))

    # Determine impact and migration requirements.
    # Every violation appended above represents one breaking change, so the
    # list length is the breaking-change count.
    breaking_changes = len(violations)
    migration_required = breaking_changes > 0

    # Decision ladder: 0 -> PASS, 1-5 -> WARN (exit 0), >5 -> FAIL (exit 1)
    if breaking_changes == 0:
        impact_level = "none"
        decision = Decision.PASS
        exit_code = 0
        summary = "No breaking changes detected"
    elif breaking_changes <= 2:
        impact_level = "low"
        decision = Decision.WARN
        exit_code = 0
        summary = f"{breaking_changes} breaking changes detected (low impact)"
    elif breaking_changes <= 5:
        impact_level = "medium"
        decision = Decision.WARN
        exit_code = 0
        summary = f"{breaking_changes} breaking changes detected (medium impact)"
    else:
        impact_level = "high"
        decision = Decision.FAIL
        exit_code = 1
        summary = f"{breaking_changes} breaking changes detected (high impact)"

    # Build remediation guidance (only when something broke)
    remediation = None
    if violations:
        steps = []
        if changes_summary["endpoints_removed"]:
            steps.append("Restore removed endpoints or provide migration path")
        if changes_summary["models_removed"]:
            steps.append("Restore removed models or update dependent code")
        if any(v.details.get("change_type") == "method_removed" for v in violations):
            steps.append("Restore removed methods or document alternatives")

        # Generic best-practice steps always appended after specifics
        steps.extend([
            "Consider versioning the API (e.g., /v2/) for breaking changes",
            "Add deprecation notices before removing features",
            "Document migration guide for consumers"
        ])

        remediation = Remediation(
            summary="Breaking changes require migration planning",
            steps=steps,
            examples=[
                "Add version prefix: /v2/api/endpoints",
                "Keep old endpoints with deprecation headers",
                "Provide transformation utilities for model changes"
            ],
            documentation="https://docs.delimit.ai/api-migration"
        )

    # Add detail based on request level
    if request.detail_level == "summary":
        # Minimal details: just totals
        metrics = {
            "total_changes": sum(len(v) for v in changes_summary.values()),
            "breaking_changes": breaking_changes
        }
    else:
        # Full metrics: per-category counts plus totals
        metrics = {
            "endpoints_added": len(changes_summary["endpoints_added"]),
            "endpoints_removed": len(changes_summary["endpoints_removed"]),
            "endpoints_modified": len(changes_summary["endpoints_modified"]),
            "models_added": len(changes_summary["models_added"]),
            "models_removed": len(changes_summary["models_removed"]),
            "models_modified": len(changes_summary["models_modified"]),
            "breaking_changes": breaking_changes,
            "total_changes": sum(len(v) for v in changes_summary.values())
        }

    return DiffExplanationEvidence(
        task="explain-diff",
        task_version="1.0",
        decision=decision,
        exit_code=exit_code,
        violations=violations,
        evidence=evidence_list,
        remediation=remediation,
        summary=summary,
        correlation_id=request.correlation_id,
        metrics=metrics,
        changes_summary=changes_summary,
        migration_required=migration_required,
        impact_level=impact_level
    )
222
+
223
+
224
def load_spec(file_path: str) -> Dict:
    """Load an API specification from file.

    Dispatches on extension: .yaml/.yml -> YAML, .json -> JSON.  Any other
    extension is parsed as YAML first with a JSON fallback.

    Raises:
        OSError: if the file cannot be opened.
        yaml.YAMLError / json.JSONDecodeError: if parsing fails for the
            chosen format (or both, for an unrecognized extension).
    """
    path = Path(file_path)
    with path.open('r') as f:
        if path.suffix in ['.yaml', '.yml']:
            return yaml.safe_load(f)
        elif path.suffix == '.json':
            return json.load(f)
        else:
            # Unknown extension: try YAML first, then JSON.  Catch only
            # yaml.YAMLError -- the previous bare `except:` also swallowed
            # KeyboardInterrupt/SystemExit and masked unrelated bugs.
            content = f.read()
            try:
                return yaml.safe_load(content)
            except yaml.YAMLError:
                return json.loads(content)
239
+
240
+
241
def extract_schemas(spec: Dict) -> Dict:
    """Pull the model definitions out of a spec document.

    Checks OpenAPI 3's components.schemas first, then falls back to
    Swagger 2's top-level definitions; returns an empty dict otherwise.
    """
    components = spec.get("components", {})
    if "components" in spec and "schemas" in components:
        return components["schemas"]
    if "definitions" in spec:
        return spec["definitions"]
    return {}
248
+
249
+
250
def schemas_differ(old_schema: Dict, new_schema: Dict) -> bool:
    """Return True when two schemas have different property names, or when
    any shared property declares a different "type".

    Intentionally shallow: changes to formats, enums, required lists, or
    nested structures are not detected.
    """
    old_props = old_schema.get("properties", {})
    new_props = new_schema.get("properties", {})

    # Different property name sets -> definitely changed.
    if set(old_props.keys()) != set(new_props.keys()):
        return True

    # Same names: flag any per-property "type" mismatch.
    return any(
        old_props.get(name, {}).get("type") != new_props.get(name, {}).get("type")
        for name in old_props.keys()
    )