delimit-cli 2.3.2 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.dockerignore +7 -0
- package/.github/workflows/ci.yml +22 -0
- package/CHANGELOG.md +33 -0
- package/CODE_OF_CONDUCT.md +48 -0
- package/CONTRIBUTING.md +67 -0
- package/Dockerfile +9 -0
- package/LICENSE +21 -0
- package/README.md +51 -130
- package/SECURITY.md +42 -0
- package/adapters/codex-forge.js +107 -0
- package/adapters/codex-jamsons.js +142 -0
- package/adapters/codex-security.js +94 -0
- package/adapters/gemini-forge.js +120 -0
- package/adapters/gemini-jamsons.js +152 -0
- package/bin/delimit-cli.js +52 -2
- package/bin/delimit-setup.js +258 -0
- package/gateway/ai/backends/__init__.py +0 -0
- package/gateway/ai/backends/async_utils.py +21 -0
- package/gateway/ai/backends/deploy_bridge.py +150 -0
- package/gateway/ai/backends/gateway_core.py +261 -0
- package/gateway/ai/backends/generate_bridge.py +38 -0
- package/gateway/ai/backends/governance_bridge.py +196 -0
- package/gateway/ai/backends/intel_bridge.py +59 -0
- package/gateway/ai/backends/memory_bridge.py +93 -0
- package/gateway/ai/backends/ops_bridge.py +137 -0
- package/gateway/ai/backends/os_bridge.py +82 -0
- package/gateway/ai/backends/repo_bridge.py +117 -0
- package/gateway/ai/backends/ui_bridge.py +118 -0
- package/gateway/ai/backends/vault_bridge.py +129 -0
- package/gateway/ai/server.py +1182 -0
- package/gateway/core/__init__.py +3 -0
- package/gateway/core/__pycache__/__init__.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/auto_baseline.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/ci_formatter.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/contract_ledger.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/dependency_graph.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/dependency_manifest.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/diff_engine_v2.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/event_backbone.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/event_schema.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/explainer.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/gateway.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/gateway_v2.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/gateway_v3.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/impact_analyzer.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/policy_engine.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/registry.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/registry_v2.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/registry_v3.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/semver_classifier.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/spec_detector.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/surface_bridge.cpython-310.pyc +0 -0
- package/gateway/core/auto_baseline.py +304 -0
- package/gateway/core/ci_formatter.py +283 -0
- package/gateway/core/complexity_analyzer.py +386 -0
- package/gateway/core/contract_ledger.py +345 -0
- package/gateway/core/dependency_graph.py +218 -0
- package/gateway/core/dependency_manifest.py +223 -0
- package/gateway/core/diff_engine_v2.py +477 -0
- package/gateway/core/diff_engine_v2.py.bak +426 -0
- package/gateway/core/event_backbone.py +268 -0
- package/gateway/core/event_schema.py +258 -0
- package/gateway/core/explainer.py +438 -0
- package/gateway/core/gateway.py +128 -0
- package/gateway/core/gateway_v2.py +154 -0
- package/gateway/core/gateway_v3.py +224 -0
- package/gateway/core/impact_analyzer.py +163 -0
- package/gateway/core/policies/default.yml +13 -0
- package/gateway/core/policies/relaxed.yml +48 -0
- package/gateway/core/policies/strict.yml +55 -0
- package/gateway/core/policy_engine.py +464 -0
- package/gateway/core/registry.py +52 -0
- package/gateway/core/registry_v2.py +132 -0
- package/gateway/core/registry_v3.py +134 -0
- package/gateway/core/semver_classifier.py +152 -0
- package/gateway/core/spec_detector.py +130 -0
- package/gateway/core/surface_bridge.py +307 -0
- package/gateway/core/zero_spec/__init__.py +4 -0
- package/gateway/core/zero_spec/__pycache__/__init__.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/detector.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/express_extractor.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/fastapi_extractor.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/nestjs_extractor.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/detector.py +353 -0
- package/gateway/core/zero_spec/express_extractor.py +483 -0
- package/gateway/core/zero_spec/fastapi_extractor.py +254 -0
- package/gateway/core/zero_spec/nestjs_extractor.py +369 -0
- package/gateway/tasks/__init__.py +1 -0
- package/gateway/tasks/__pycache__/__init__.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/check_policy.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/check_policy_v2.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/check_policy_v3.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/explain_diff.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/explain_diff_v2.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/validate_api.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/validate_api_v2.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/validate_api_v3.cpython-310.pyc +0 -0
- package/gateway/tasks/check_policy.py +177 -0
- package/gateway/tasks/check_policy_v2.py +255 -0
- package/gateway/tasks/check_policy_v3.py +255 -0
- package/gateway/tasks/explain_diff.py +305 -0
- package/gateway/tasks/explain_diff_v2.py +267 -0
- package/gateway/tasks/validate_api.py +131 -0
- package/gateway/tasks/validate_api_v2.py +208 -0
- package/gateway/tasks/validate_api_v3.py +163 -0
- package/package.json +3 -3
- package/adapters/codex-skill.js +0 -87
- package/adapters/cursor-extension.js +0 -190
- package/adapters/gemini-action.js +0 -93
- package/adapters/openai-function.js +0 -112
- package/adapters/xai-plugin.js +0 -151
- package/test-decision-engine.js +0 -181
- package/test-hook.js +0 -27
|
@@ -0,0 +1,223 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Delimit Dependency Manifest
|
|
3
|
+
Parses and validates .delimit/dependencies.yaml service dependency declarations.
|
|
4
|
+
|
|
5
|
+
Per Jamsons Doctrine:
|
|
6
|
+
- Deterministic outputs
|
|
7
|
+
- No credential discovery
|
|
8
|
+
- No telemetry
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import logging
|
|
12
|
+
import re
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Any, Dict, List, Optional, Union
|
|
15
|
+
|
|
16
|
+
# Module-level logger; name follows the package's "delimit.<module>" convention.
logger = logging.getLogger("delimit.dependency_manifest")

# Manifest filename convention: manifests live at <service>/.delimit/dependencies.yaml.
MANIFEST_FILENAME = "dependencies.yaml"
MANIFEST_DIR = ".delimit"

# Valid characters for service identifiers: alphanumeric, hyphens, underscores, slashes
# (the pattern also permits dots; the first character must be alphanumeric).
_SERVICE_ID_PATTERN = re.compile(r"^[a-zA-Z0-9][a-zA-Z0-9._/-]*$")

# Required fields in a manifest; everything else ('consumes', 'produces',
# 'owner', 'repository') is optional.
REQUIRED_FIELDS = frozenset(["service"])
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def _load_yaml():
|
|
30
|
+
"""Lazy import yaml to avoid hard dependency."""
|
|
31
|
+
try:
|
|
32
|
+
import yaml
|
|
33
|
+
return yaml
|
|
34
|
+
except ImportError:
|
|
35
|
+
return None
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def normalize_service_id(service_id: str) -> str:
    """Canonicalize a service identifier.

    Surrounding whitespace is dropped and the identifier is folded to
    lowercase so equivalent spellings compare equal.

    Args:
        service_id: Raw service identifier string.

    Returns:
        Normalized lowercase identifier.
    """
    trimmed = service_id.strip()
    return trimmed.lower()
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def validate_service_id(service_id: str) -> Optional[str]:
    """Validate a service identifier.

    Returns None if valid, or an error message string if invalid.
    """
    # Reject non-strings and empty/falsy values up front.
    if not isinstance(service_id, str) or not service_id:
        return "Service identifier must be a non-empty string"

    normalized = normalize_service_id(service_id)
    if not normalized:
        return "Service identifier is empty after normalization"

    if _SERVICE_ID_PATTERN.match(normalized):
        return None

    return (
        f"Invalid service identifier: {service_id!r}. "
        "Must start with alphanumeric and contain only "
        "alphanumeric, hyphens, underscores, dots, or slashes."
    )
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def _validate_id_list(field_name: str, items: Any) -> List[str]:
    """Validate that *items* is a list of valid service-identifier strings.

    Shared by the 'consumes' and 'produces' fields, whose validation rules
    are identical (previously this logic was duplicated verbatim).

    Args:
        field_name: Manifest field name, used to prefix error messages.
        items: The raw field value; ``None`` means the field was absent.

    Returns:
        List of error strings; empty when the field is valid or absent.
    """
    if items is None:
        return []
    if not isinstance(items, list):
        return [f"Field '{field_name}' must be a list"]

    errors: List[str] = []
    for i, item in enumerate(items):
        if not isinstance(item, str):
            errors.append(f"{field_name}[{i}] must be a string, got {type(item).__name__}")
        else:
            err = validate_service_id(item)
            if err:
                errors.append(f"{field_name}[{i}]: {err}")
    return errors


def validate_manifest(data: Dict[str, Any]) -> List[str]:
    """Validate a parsed dependency manifest.

    Checks the required 'service' field, the optional 'consumes' and
    'produces' identifier lists, and the optional 'owner'/'repository'
    string fields.

    Returns a list of error strings. Empty list means valid.
    """
    if not isinstance(data, dict):
        return ["Manifest must be a YAML mapping"]

    errors: List[str] = []

    # Check required fields
    if "service" not in data:
        errors.append("Missing required field: 'service'")
    else:
        err = validate_service_id(data["service"])
        if err:
            errors.append(f"Invalid 'service' field: {err}")

    # Both dependency lists share identical validation rules.
    errors.extend(_validate_id_list("consumes", data.get("consumes")))
    errors.extend(_validate_id_list("produces", data.get("produces")))

    # Validate optional string fields
    for field in ("owner", "repository"):
        val = data.get(field)
        if val is not None and not isinstance(val, str):
            errors.append(f"Field '{field}' must be a string")

    return errors
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def parse_manifest(data: Dict[str, Any]) -> Dict[str, Any]:
    """Parse and normalize a dependency manifest.

    Args:
        data: Raw parsed YAML dictionary.

    Returns:
        Normalized manifest dictionary with deterministic structure
        (lowercased identifiers, de-duplicated and sorted lists).

    Raises:
        ValueError: If validation fails.
    """
    errors = validate_manifest(data)
    if errors:
        raise ValueError(f"Manifest validation failed: {'; '.join(errors)}")

    def _normalized_list(field: str) -> List[str]:
        # De-duplicate and sort for deterministic output.
        return sorted({normalize_service_id(s) for s in data.get(field, [])})

    manifest: Dict[str, Any] = {
        "service": normalize_service_id(data["service"]),
        "consumes": _normalized_list("consumes"),
        "produces": _normalized_list("produces"),
    }

    # Optional free-form string fields are carried over whitespace-trimmed.
    for optional_field in ("owner", "repository"):
        value = data.get(optional_field)
        if isinstance(value, str):
            manifest[optional_field] = value.strip()

    return manifest
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
def load_manifest_file(path: Union[str, Path]) -> Optional[Dict[str, Any]]:
    """Load and parse a dependency manifest from a YAML file.

    Args:
        path: Path to the dependencies.yaml file.

    Returns:
        Parsed manifest dictionary, or None if the file doesn't exist
        or can't be parsed.
    """
    yaml = _load_yaml()
    if yaml is None:
        logger.warning("PyYAML not installed — cannot parse dependency manifests")
        return None

    manifest_file = Path(path)
    if not manifest_file.exists():
        return None

    # Read failures and malformed YAML are logged, never raised: a broken
    # manifest in one service must not abort discovery of the others.
    try:
        with open(manifest_file, "r", encoding="utf-8") as handle:
            raw = yaml.safe_load(handle)
    except Exception as e:
        logger.warning("Failed to load manifest %s: %s", manifest_file, e)
        return None

    # An empty file parses to None — treat it as "no manifest".
    if raw is None:
        return None

    try:
        return parse_manifest(raw)
    except (ValueError, TypeError, AttributeError) as e:
        logger.warning("Invalid manifest %s: %s", manifest_file, e)
        return None
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def discover_manifests(root_dir: Union[str, Path]) -> List[Dict[str, Any]]:
    """Discover and load all dependency manifests under a root directory.

    Searches for .delimit/dependencies.yaml files recursively.

    Args:
        root_dir: Root directory to search.

    Returns:
        List of parsed manifest dictionaries, sorted by service name.
    """
    search_root = Path(root_dir)
    found: List[Dict[str, Any]] = []

    if not search_root.exists():
        return found

    pattern = f"{MANIFEST_DIR}/{MANIFEST_FILENAME}"
    for candidate in search_root.rglob(pattern):
        parsed = load_manifest_file(candidate)
        if parsed is None:
            continue
        # Record where the manifest came from for diagnostics.
        parsed["_source_path"] = str(candidate)
        found.append(parsed)

    # Deterministic ordering
    found.sort(key=lambda entry: entry["service"])
    return found
|
|
@@ -0,0 +1,477 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Enhanced OpenAPI diff engine with deep schema comparison.
|
|
3
|
+
Handles nested objects, response schemas, enums, and edge cases.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from typing import Dict, List, Any, Optional, Set, Tuple
|
|
7
|
+
from dataclasses import dataclass
|
|
8
|
+
from enum import Enum
|
|
9
|
+
|
|
10
|
+
class ChangeType(Enum):
    """Categories of differences between two OpenAPI specifications.

    The first group is treated as breaking for existing consumers (see
    ``Change.is_breaking``); the second group is additive or informational.
    """

    # Breaking changes
    ENDPOINT_REMOVED = "endpoint_removed"
    METHOD_REMOVED = "method_removed"
    REQUIRED_PARAM_ADDED = "required_param_added"
    PARAM_REMOVED = "param_removed"
    RESPONSE_REMOVED = "response_removed"
    REQUIRED_FIELD_ADDED = "required_field_added"
    FIELD_REMOVED = "field_removed"
    TYPE_CHANGED = "type_changed"
    FORMAT_CHANGED = "format_changed"
    ENUM_VALUE_REMOVED = "enum_value_removed"

    # Non-breaking changes
    ENDPOINT_ADDED = "endpoint_added"
    METHOD_ADDED = "method_added"
    OPTIONAL_PARAM_ADDED = "optional_param_added"
    RESPONSE_ADDED = "response_added"
    OPTIONAL_FIELD_ADDED = "optional_field_added"
    ENUM_VALUE_ADDED = "enum_value_added"
    DESCRIPTION_CHANGED = "description_changed"
|
|
31
|
+
|
|
32
|
+
@dataclass
class Change:
    """A single detected difference between two OpenAPI specifications."""

    type: ChangeType
    path: str
    details: Dict[str, Any]
    severity: str  # high, medium, low
    message: str

    @property
    def is_breaking(self) -> bool:
        """True when this change can break existing API consumers."""
        breaking_types = {
            ChangeType.ENDPOINT_REMOVED,
            ChangeType.METHOD_REMOVED,
            ChangeType.REQUIRED_PARAM_ADDED,
            ChangeType.PARAM_REMOVED,
            ChangeType.RESPONSE_REMOVED,
            ChangeType.REQUIRED_FIELD_ADDED,
            ChangeType.FIELD_REMOVED,
            ChangeType.TYPE_CHANGED,
            ChangeType.FORMAT_CHANGED,
            ChangeType.ENUM_VALUE_REMOVED,
        }
        return self.type in breaking_types
|
|
54
|
+
|
|
55
|
+
class OpenAPIDiffEngine:
    """Advanced diff engine for OpenAPI specifications.

    Walks two parsed OpenAPI documents (dicts) and records every detected
    difference as a ``Change``.  Removals, new required inputs, and
    type/format changes are reported with high severity (breaking);
    additive changes are reported with low severity.
    """

    # HTTP verbs recognised inside a path item; other keys (summary,
    # parameters, servers, ...) are not operations.
    _HTTP_METHODS = frozenset(["get", "post", "put", "delete", "patch", "head", "options"])

    def __init__(self):
        self.changes: List[Change] = []
        self._old_spec: Dict = {}
        self._new_spec: Dict = {}
        # ($ref_old, $ref_new) pairs already under comparison; guards
        # against infinite recursion on circular schema references.
        self._ref_trail: Set[Tuple[str, str]] = set()

    def compare(self, old_spec: Dict, new_spec: Dict) -> List[Change]:
        """Compare two OpenAPI specifications and return all changes.

        Args:
            old_spec: Previous specification (parsed dict); falsy treated as {}.
            new_spec: Current specification (parsed dict); falsy treated as {}.

        Returns:
            List of Change records, in discovery order.
        """
        self.changes = []
        self._old_spec = old_spec or {}
        self._new_spec = new_spec or {}
        self._ref_trail = set()

        # Compare paths
        self._compare_paths(self._old_spec.get("paths", {}), self._new_spec.get("paths", {}))

        # Compare components/schemas
        self._compare_schemas(
            self._old_spec.get("components", {}).get("schemas", {}),
            self._new_spec.get("components", {}).get("schemas", {})
        )

        # Compare security schemes
        self._compare_security(
            self._old_spec.get("components", {}).get("securitySchemes", {}),
            self._new_spec.get("components", {}).get("securitySchemes", {})
        )

        return self.changes

    def _compare_paths(self, old_paths: Dict, new_paths: Dict):
        """Compare API paths/endpoints."""
        old_set = set(old_paths.keys())
        new_set = set(new_paths.keys())

        # Check removed endpoints (breaking)
        for path in old_set - new_set:
            self.changes.append(Change(
                type=ChangeType.ENDPOINT_REMOVED,
                path=path,
                details={"endpoint": path},
                severity="high",
                message=f"Endpoint removed: {path}"
            ))

        # Check added endpoints (non-breaking)
        for path in new_set - old_set:
            self.changes.append(Change(
                type=ChangeType.ENDPOINT_ADDED,
                path=path,
                details={"endpoint": path},
                severity="low",
                message=f"New endpoint added: {path}"
            ))

        # Check modified endpoints
        for path in old_set & new_set:
            self._compare_methods(path, old_paths[path], new_paths[path])

    def _compare_methods(self, path: str, old_methods: Dict, new_methods: Dict):
        """Compare HTTP methods for an endpoint."""
        old_set = set(m for m in old_methods.keys() if m in self._HTTP_METHODS)
        new_set = set(m for m in new_methods.keys() if m in self._HTTP_METHODS)

        # Check removed methods (breaking)
        for method in old_set - new_set:
            self.changes.append(Change(
                type=ChangeType.METHOD_REMOVED,
                path=f"{path}:{method.upper()}",
                details={"endpoint": path, "method": method.upper()},
                severity="high",
                message=f"Method removed: {method.upper()} {path}"
            ))

        # Check added methods (non-breaking).  Fix: METHOD_ADDED was declared
        # in ChangeType but never emitted, inconsistent with ENDPOINT_ADDED.
        for method in new_set - old_set:
            self.changes.append(Change(
                type=ChangeType.METHOD_ADDED,
                path=f"{path}:{method.upper()}",
                details={"endpoint": path, "method": method.upper()},
                severity="low",
                message=f"New method added: {method.upper()} {path}"
            ))

        # Check modified methods
        for method in old_set & new_set:
            self._compare_operation(
                f"{path}:{method.upper()}",
                old_methods[method],
                new_methods[method]
            )

    def _compare_operation(self, operation_id: str, old_op: Dict, new_op: Dict):
        """Compare operation details (parameters, request body, responses)."""

        # Parameters are keyed by (location, name) so a query parameter and
        # a header with the same name are treated as distinct.
        old_params = {self._param_key(p): p for p in old_op.get("parameters", [])}
        new_params = {self._param_key(p): p for p in new_op.get("parameters", [])}

        # Check removed parameters (breaking)
        for param_key in set(old_params.keys()) - set(new_params.keys()):
            param = old_params[param_key]
            self.changes.append(Change(
                type=ChangeType.PARAM_REMOVED,
                path=operation_id,
                details={"parameter": param["name"], "in": param["in"]},
                severity="high",
                message=f"Parameter removed: {param['name']} from {operation_id}"
            ))

        # Check added parameters: required ones are breaking, optional ones
        # additive.  Fix: OPTIONAL_PARAM_ADDED was never emitted before.
        for param_key in set(new_params.keys()) - set(old_params.keys()):
            param = new_params[param_key]
            if param.get("required", False):
                self.changes.append(Change(
                    type=ChangeType.REQUIRED_PARAM_ADDED,
                    path=operation_id,
                    details={"parameter": param["name"], "in": param["in"]},
                    severity="high",
                    message=f"Required parameter added: {param['name']} to {operation_id}"
                ))
            else:
                self.changes.append(Change(
                    type=ChangeType.OPTIONAL_PARAM_ADDED,
                    path=operation_id,
                    details={"parameter": param["name"], "in": param["in"]},
                    severity="low",
                    message=f"Optional parameter added: {param['name']} to {operation_id}"
                ))

        # Check parameter schema changes
        for param_key in set(old_params.keys()) & set(new_params.keys()):
            self._compare_parameter_schemas(
                operation_id,
                old_params[param_key],
                new_params[param_key]
            )

        # Compare request body
        if "requestBody" in old_op or "requestBody" in new_op:
            self._compare_request_body(
                operation_id,
                old_op.get("requestBody"),
                new_op.get("requestBody")
            )

        # Compare responses
        self._compare_responses(
            operation_id,
            old_op.get("responses", {}),
            new_op.get("responses", {})
        )

    def _compare_parameter_schemas(self, operation_id: str, old_param: Dict, new_param: Dict):
        """Compare parameter schemas for type and enum changes."""
        old_schema = old_param.get("schema", {})
        new_schema = new_param.get("schema", {})

        # Resolve $ref in parameter schemas
        if "$ref" in old_schema:
            old_schema = self._resolve_schema(old_schema, self._old_spec)
        if "$ref" in new_schema:
            new_schema = self._resolve_schema(new_schema, self._new_spec)

        # Check type changes (breaking)
        if old_schema.get("type") != new_schema.get("type"):
            self.changes.append(Change(
                type=ChangeType.TYPE_CHANGED,
                path=operation_id,
                details={
                    "parameter": old_param["name"],
                    "old_type": old_schema.get("type"),
                    "new_type": new_schema.get("type")
                },
                severity="high",
                message=f"Parameter type changed: {old_param['name']} from {old_schema.get('type')} to {new_schema.get('type')}"
            ))

        # Check enum changes
        if "enum" in old_schema or "enum" in new_schema:
            self._compare_enums(
                f"{operation_id}:{old_param['name']}",
                old_schema.get("enum", []),
                new_schema.get("enum", [])
            )

    def _compare_request_body(self, operation_id: str, old_body: Optional[Dict], new_body: Optional[Dict]):
        """Compare request body schemas."""
        if old_body and not new_body:
            self.changes.append(Change(
                type=ChangeType.FIELD_REMOVED,
                path=operation_id,
                details={"field": "request_body"},
                severity="high",
                message=f"Request body removed from {operation_id}"
            ))
        elif not old_body and new_body and new_body.get("required", False):
            self.changes.append(Change(
                type=ChangeType.REQUIRED_FIELD_ADDED,
                path=operation_id,
                details={"field": "request_body"},
                severity="high",
                message=f"Required request body added to {operation_id}"
            ))
        elif old_body and new_body:
            # Compare schemas only for content types present in both versions.
            old_content = old_body.get("content", {})
            new_content = new_body.get("content", {})

            for content_type in old_content.keys() & new_content.keys():
                self._compare_schema_deep(
                    f"{operation_id}:request",
                    old_content[content_type].get("schema", {}),
                    new_content[content_type].get("schema", {})
                )

    def _compare_responses(self, operation_id: str, old_responses: Dict, new_responses: Dict):
        """Compare response definitions."""
        old_codes = set(old_responses.keys())
        new_codes = set(new_responses.keys())

        # Check removed responses; only 2xx removals are breaking.
        # Fix: coerce with str() — YAML parsers may deliver unquoted status
        # codes as ints, which have no .startswith().
        for code in old_codes - new_codes:
            if str(code).startswith("2"):
                self.changes.append(Change(
                    type=ChangeType.RESPONSE_REMOVED,
                    path=operation_id,
                    details={"response_code": code},
                    severity="high",
                    message=f"Success response {code} removed from {operation_id}"
                ))

        # Check added responses (non-breaking).  Fix: RESPONSE_ADDED was
        # declared in ChangeType but never emitted.
        for code in new_codes - old_codes:
            self.changes.append(Change(
                type=ChangeType.RESPONSE_ADDED,
                path=operation_id,
                details={"response_code": code},
                severity="low",
                message=f"Response {code} added to {operation_id}"
            ))

        # Compare response schemas
        for code in old_codes & new_codes:
            old_resp = old_responses[code]
            new_resp = new_responses[code]

            if "content" in old_resp or "content" in new_resp:
                old_content = old_resp.get("content", {})
                new_content = new_resp.get("content", {})

                for content_type in old_content.keys() & new_content.keys():
                    self._compare_schema_deep(
                        f"{operation_id}:{code}",
                        old_content[content_type].get("schema", {}),
                        new_content[content_type].get("schema", {})
                    )

    def _resolve_ref(self, ref_string: str, spec: Dict) -> Optional[Dict]:
        """Resolve a JSON $ref pointer like #/components/schemas/User.

        Returns the referenced mapping, or None when the pointer is
        external, malformed, or points at a non-mapping node.
        """
        if not ref_string.startswith('#/'):
            return None
        parts = ref_string[2:].split('/')
        current = spec
        for part in parts:
            # RFC 6901 escaping: ~1 -> '/', ~0 -> '~' (in that order).
            part = part.replace('~1', '/').replace('~0', '~')
            if isinstance(current, dict) and part in current:
                current = current[part]
            else:
                return None
        return current if isinstance(current, dict) else None

    def _resolve_schema(self, schema: Dict, spec: Dict, visited: Optional[Set[str]] = None) -> Dict:
        """Follow $ref chains, detecting circular references."""
        if visited is None:
            visited = set()
        if '$ref' not in schema:
            return schema
        ref = schema['$ref']
        if ref in visited:
            return schema  # circular — return as-is
        visited.add(ref)
        resolved = self._resolve_ref(ref, spec)
        if resolved is None:
            return schema  # unresolvable — return as-is
        if '$ref' in resolved:
            return self._resolve_schema(resolved, spec, visited)
        return resolved

    def _compare_schema_deep(self, path: str, old_schema: Dict, new_schema: Dict, required_fields: Optional[Set[str]] = None):
        """Deep comparison of schemas including nested objects.

        Note: *required_fields* is accepted for interface compatibility but
        is not consulted; requiredness is read from each schema directly.
        """

        # Handle references — resolve before comparing
        if "$ref" in old_schema or "$ref" in new_schema:
            old_resolved = self._resolve_schema(old_schema, self._old_spec) if "$ref" in old_schema else old_schema
            new_resolved = self._resolve_schema(new_schema, self._new_spec) if "$ref" in new_schema else new_schema
            # Track ref pairs to avoid infinite loops on circular schemas
            ref_key = (old_schema.get("$ref", ""), new_schema.get("$ref", ""))
            if ref_key in self._ref_trail:
                return
            self._ref_trail.add(ref_key)
            # Compare the resolved schemas
            self._compare_schema_deep(path, old_resolved, new_resolved, required_fields)
            return

        # Compare types (breaking when the old type differs or disappears)
        old_type = old_schema.get("type")
        new_type = new_schema.get("type")

        if old_type != new_type and old_type is not None:
            self.changes.append(Change(
                type=ChangeType.TYPE_CHANGED,
                path=path,
                details={"old_type": old_type, "new_type": new_type},
                severity="high",
                message=f"Type changed from {old_type} to {new_type} at {path}"
            ))
            return

        # Compare formats (e.g. int32 -> int64, date -> date-time).
        # Fix: FORMAT_CHANGED was declared as breaking but never detected.
        old_format = old_schema.get("format")
        new_format = new_schema.get("format")
        if old_format is not None and new_format is not None and old_format != new_format:
            self.changes.append(Change(
                type=ChangeType.FORMAT_CHANGED,
                path=path,
                details={"old_format": old_format, "new_format": new_format},
                severity="high",
                message=f"Format changed from {old_format} to {new_format} at {path}"
            ))

        # Compare object properties.  Fix: schemas that omit an explicit
        # "type: object" but declare "properties" (legal OpenAPI) were
        # previously skipped entirely.
        if old_type == "object" or ("properties" in old_schema and "properties" in new_schema):
            old_props = old_schema.get("properties", {})
            new_props = new_schema.get("properties", {})
            old_required = set(old_schema.get("required", []))
            new_required = set(new_schema.get("required", []))

            # Removed fields are breaking when they were required.
            for prop in set(old_props.keys()) - set(new_props.keys()):
                if prop in old_required:
                    self.changes.append(Change(
                        type=ChangeType.FIELD_REMOVED,
                        path=f"{path}.{prop}",
                        details={"field": prop},
                        severity="high",
                        message=f"Required field '{prop}' removed at {path}"
                    ))

            # Newly-required fields are breaking for writers.
            for prop in new_required - old_required:
                if prop not in old_props:
                    self.changes.append(Change(
                        type=ChangeType.REQUIRED_FIELD_ADDED,
                        path=f"{path}.{prop}",
                        details={"field": prop},
                        severity="high",
                        message=f"New required field '{prop}' added at {path}"
                    ))

            # New optional fields are additive.  Fix: OPTIONAL_FIELD_ADDED
            # was declared in ChangeType but never emitted.
            for prop in set(new_props.keys()) - set(old_props.keys()):
                if prop not in new_required:
                    self.changes.append(Change(
                        type=ChangeType.OPTIONAL_FIELD_ADDED,
                        path=f"{path}.{prop}",
                        details={"field": prop},
                        severity="low",
                        message=f"Optional field '{prop}' added at {path}"
                    ))

            # Recursively compare nested properties
            for prop in set(old_props.keys()) & set(new_props.keys()):
                self._compare_schema_deep(
                    f"{path}.{prop}",
                    old_props[prop],
                    new_props[prop],
                    old_required if prop in old_required else None
                )

        # Compare arrays
        elif old_type == "array":
            if "items" in old_schema and "items" in new_schema:
                self._compare_schema_deep(
                    f"{path}[]",
                    old_schema["items"],
                    new_schema["items"]
                )

        # Compare enums
        if "enum" in old_schema or "enum" in new_schema:
            self._compare_enums(path, old_schema.get("enum", []), new_schema.get("enum", []))

    def _compare_enums(self, path: str, old_enum: List, new_enum: List):
        """Compare enum values."""
        old_set = set(old_enum)
        new_set = set(new_enum)

        # Removed enum values are breaking
        for value in old_set - new_set:
            self.changes.append(Change(
                type=ChangeType.ENUM_VALUE_REMOVED,
                path=path,
                details={"value": value},
                severity="high",
                message=f"Enum value '{value}' removed at {path}"
            ))

        # Added enum values are non-breaking
        for value in new_set - old_set:
            self.changes.append(Change(
                type=ChangeType.ENUM_VALUE_ADDED,
                path=path,
                details={"value": value},
                severity="low",
                message=f"Enum value '{value}' added at {path}"
            ))

    def _compare_schemas(self, old_schemas: Dict, new_schemas: Dict):
        """Compare component schemas."""
        # Schema removal is breaking if referenced
        for schema_name in set(old_schemas.keys()) - set(new_schemas.keys()):
            self.changes.append(Change(
                type=ChangeType.FIELD_REMOVED,
                path=f"#/components/schemas/{schema_name}",
                details={"schema": schema_name},
                severity="medium",
                message=f"Schema '{schema_name}' removed"
            ))

        # Compare existing schemas
        for schema_name in set(old_schemas.keys()) & set(new_schemas.keys()):
            self._compare_schema_deep(
                f"#/components/schemas/{schema_name}",
                old_schemas[schema_name],
                new_schemas[schema_name]
            )

    def _compare_security(self, old_security: Dict, new_security: Dict):
        """Compare security schemes."""
        # Security scheme changes are usually breaking
        for scheme in set(old_security.keys()) - set(new_security.keys()):
            self.changes.append(Change(
                type=ChangeType.FIELD_REMOVED,
                path=f"#/components/securitySchemes/{scheme}",
                details={"scheme": scheme},
                severity="high",
                message=f"Security scheme '{scheme}' removed"
            ))

    def _param_key(self, param: Dict) -> str:
        """Generate unique key for parameter: '<location>:<name>'."""
        return f"{param.get('in', 'query')}:{param.get('name', '')}"

    def get_breaking_changes(self) -> List[Change]:
        """Get only breaking changes."""
        return [c for c in self.changes if c.is_breaking]

    def get_summary(self) -> Dict[str, Any]:
        """Get summary of all changes."""
        breaking = self.get_breaking_changes()
        return {
            "total_changes": len(self.changes),
            "breaking_changes": len(breaking),
            "endpoints_removed": len([c for c in breaking if c.type == ChangeType.ENDPOINT_REMOVED]),
            "methods_removed": len([c for c in breaking if c.type == ChangeType.METHOD_REMOVED]),
            "parameters_changed": len([c for c in breaking if c.type in [ChangeType.PARAM_REMOVED, ChangeType.REQUIRED_PARAM_ADDED]]),
            "schemas_changed": len([c for c in breaking if c.type in [ChangeType.FIELD_REMOVED, ChangeType.REQUIRED_FIELD_ADDED, ChangeType.TYPE_CHANGED]]),
            "is_breaking": len(breaking) > 0
        }
|