delimit-cli 2.4.0 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.dockerignore +7 -0
- package/.github/workflows/ci.yml +22 -0
- package/CODE_OF_CONDUCT.md +48 -0
- package/CONTRIBUTING.md +67 -0
- package/Dockerfile +9 -0
- package/LICENSE +21 -0
- package/README.md +18 -69
- package/SECURITY.md +42 -0
- package/adapters/gemini-forge.js +11 -0
- package/adapters/gemini-jamsons.js +152 -0
- package/bin/delimit-cli.js +8 -0
- package/bin/delimit-setup.js +258 -0
- package/gateway/ai/backends/__init__.py +0 -0
- package/gateway/ai/backends/async_utils.py +21 -0
- package/gateway/ai/backends/deploy_bridge.py +150 -0
- package/gateway/ai/backends/gateway_core.py +261 -0
- package/gateway/ai/backends/generate_bridge.py +38 -0
- package/gateway/ai/backends/governance_bridge.py +196 -0
- package/gateway/ai/backends/intel_bridge.py +59 -0
- package/gateway/ai/backends/memory_bridge.py +93 -0
- package/gateway/ai/backends/ops_bridge.py +137 -0
- package/gateway/ai/backends/os_bridge.py +82 -0
- package/gateway/ai/backends/repo_bridge.py +117 -0
- package/gateway/ai/backends/ui_bridge.py +118 -0
- package/gateway/ai/backends/vault_bridge.py +129 -0
- package/gateway/ai/server.py +1182 -0
- package/gateway/core/__init__.py +3 -0
- package/gateway/core/__pycache__/__init__.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/auto_baseline.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/ci_formatter.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/contract_ledger.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/dependency_graph.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/dependency_manifest.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/diff_engine_v2.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/event_backbone.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/event_schema.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/explainer.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/gateway.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/gateway_v2.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/gateway_v3.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/impact_analyzer.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/policy_engine.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/registry.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/registry_v2.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/registry_v3.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/semver_classifier.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/spec_detector.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/surface_bridge.cpython-310.pyc +0 -0
- package/gateway/core/auto_baseline.py +304 -0
- package/gateway/core/ci_formatter.py +283 -0
- package/gateway/core/complexity_analyzer.py +386 -0
- package/gateway/core/contract_ledger.py +345 -0
- package/gateway/core/dependency_graph.py +218 -0
- package/gateway/core/dependency_manifest.py +223 -0
- package/gateway/core/diff_engine_v2.py +477 -0
- package/gateway/core/diff_engine_v2.py.bak +426 -0
- package/gateway/core/event_backbone.py +268 -0
- package/gateway/core/event_schema.py +258 -0
- package/gateway/core/explainer.py +438 -0
- package/gateway/core/gateway.py +128 -0
- package/gateway/core/gateway_v2.py +154 -0
- package/gateway/core/gateway_v3.py +224 -0
- package/gateway/core/impact_analyzer.py +163 -0
- package/gateway/core/policies/default.yml +13 -0
- package/gateway/core/policies/relaxed.yml +48 -0
- package/gateway/core/policies/strict.yml +55 -0
- package/gateway/core/policy_engine.py +464 -0
- package/gateway/core/registry.py +52 -0
- package/gateway/core/registry_v2.py +132 -0
- package/gateway/core/registry_v3.py +134 -0
- package/gateway/core/semver_classifier.py +152 -0
- package/gateway/core/spec_detector.py +130 -0
- package/gateway/core/surface_bridge.py +307 -0
- package/gateway/core/zero_spec/__init__.py +4 -0
- package/gateway/core/zero_spec/__pycache__/__init__.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/detector.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/express_extractor.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/fastapi_extractor.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/nestjs_extractor.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/detector.py +353 -0
- package/gateway/core/zero_spec/express_extractor.py +483 -0
- package/gateway/core/zero_spec/fastapi_extractor.py +254 -0
- package/gateway/core/zero_spec/nestjs_extractor.py +369 -0
- package/gateway/tasks/__init__.py +1 -0
- package/gateway/tasks/__pycache__/__init__.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/check_policy.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/check_policy_v2.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/check_policy_v3.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/explain_diff.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/explain_diff_v2.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/validate_api.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/validate_api_v2.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/validate_api_v3.cpython-310.pyc +0 -0
- package/gateway/tasks/check_policy.py +177 -0
- package/gateway/tasks/check_policy_v2.py +255 -0
- package/gateway/tasks/check_policy_v3.py +255 -0
- package/gateway/tasks/explain_diff.py +305 -0
- package/gateway/tasks/explain_diff_v2.py +267 -0
- package/gateway/tasks/validate_api.py +131 -0
- package/gateway/tasks/validate_api_v2.py +208 -0
- package/gateway/tasks/validate_api_v3.py +163 -0
- package/package.json +2 -2
- package/adapters/codex-skill.js +0 -87
- package/adapters/cursor-extension.js +0 -190
- package/adapters/gemini-action.js +0 -93
- package/adapters/openai-function.js +0 -112
- package/adapters/xai-plugin.js +0 -151
- package/test-decision-engine.js +0 -181
- package/test-hook.js +0 -27
- package/tests/cli.test.js +0 -359
- package/tests/fixtures/openapi-changed.yaml +0 -56
- package/tests/fixtures/openapi.yaml +0 -87
|
@@ -0,0 +1,258 @@
|
|
|
1
|
+
"""
Delimit Event Schema
Canonical event schema for API contract evolution tracking.
Deterministic validation and serialization per Jamsons Doctrine.
"""

import hashlib
import json
import re
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional


# Schema version for forward compatibility
SCHEMA_VERSION = "1.0.0"

# Valid event types
VALID_EVENT_TYPES = frozenset([
    "contract_change",
    "contract_added",
    "contract_removed",
    "policy_evaluation",
    "complexity_assessment",
    "baseline_established",
])

# Required top-level fields
REQUIRED_FIELDS = frozenset([
    "event_type",
    "api_name",
    "repository",
    "version",
    "timestamp",
    "commit",
    "actor",
    "spec_hash",
    "previous_hash",
    "diff_summary",
    "policy_result",
    "complexity_score",
    "complexity_class",
    "event_hash",
])

# Valid complexity classes
VALID_COMPLEXITY_CLASSES = frozenset([
    "simple",
    "moderate",
    "complex",
    "enterprise",
])

# Valid policy results
VALID_POLICY_RESULTS = frozenset([
    "passed",
    "failed",
    "warning",
    "skipped",
])

# SHA-256 hex pattern (lowercase digits only)
_SHA256_PATTERN = re.compile(r"^[a-f0-9]{64}$")

# ISO 8601 UTC pattern (optional fractional seconds, mandatory 'Z' suffix)
_ISO8601_PATTERN = re.compile(
    r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?Z$"
)


def validate_event(event: Dict[str, Any]) -> List[str]:
    """Validate an event payload against the canonical schema.

    Each field is checked independently so that all problems are
    reported in one pass rather than failing on the first.

    Args:
        event: Candidate event payload.

    Returns:
        A list of validation error strings. Empty list means valid.
    """
    errors: List[str] = []

    # Check required fields
    missing = REQUIRED_FIELDS - set(event.keys())
    if missing:
        errors.append(f"Missing required fields: {sorted(missing)}")

    # Validate event_type
    event_type = event.get("event_type")
    if event_type is not None and event_type not in VALID_EVENT_TYPES:
        errors.append(
            f"Invalid event_type: {event_type!r}. "
            f"Must be one of: {sorted(VALID_EVENT_TYPES)}"
        )

    # Validate string fields are non-empty strings
    string_fields = [
        "api_name", "repository", "version", "commit", "actor",
        "spec_hash", "previous_hash", "event_hash",
    ]
    for field in string_fields:
        val = event.get(field)
        if val is not None and (not isinstance(val, str) or not val.strip()):
            errors.append(f"Field {field!r} must be a non-empty string")

    # Validate timestamp format (ISO 8601 UTC)
    ts = event.get("timestamp")
    if ts is not None:
        if not isinstance(ts, str) or not _ISO8601_PATTERN.match(ts):
            errors.append(
                f"Field 'timestamp' must be ISO 8601 UTC format "
                f"(YYYY-MM-DDTHH:MM:SSZ), got: {ts!r}"
            )

    # Validate spec_hash format ("GENESIS" is accepted for chain roots)
    spec_hash = event.get("spec_hash")
    if spec_hash is not None and isinstance(spec_hash, str):
        if spec_hash != "GENESIS" and not _SHA256_PATTERN.match(spec_hash):
            errors.append(
                f"Field 'spec_hash' must be a SHA-256 hex string, "
                f"got: {spec_hash!r}"
            )

    # Validate previous_hash format ("GENESIS" marks the first event)
    prev_hash = event.get("previous_hash")
    if prev_hash is not None and isinstance(prev_hash, str):
        if prev_hash != "GENESIS" and not _SHA256_PATTERN.match(prev_hash):
            errors.append(
                f"Field 'previous_hash' must be 'GENESIS' or SHA-256 hex, "
                f"got: {prev_hash!r}"
            )

    # Validate event_hash format
    event_hash = event.get("event_hash")
    if event_hash is not None and isinstance(event_hash, str):
        if not _SHA256_PATTERN.match(event_hash):
            errors.append(
                f"Field 'event_hash' must be a SHA-256 hex string, "
                f"got: {event_hash!r}"
            )

    # Validate diff_summary is a list
    diff_summary = event.get("diff_summary")
    if diff_summary is not None and not isinstance(diff_summary, list):
        errors.append("Field 'diff_summary' must be a list")

    # Validate policy_result
    policy_result = event.get("policy_result")
    if policy_result is not None and policy_result not in VALID_POLICY_RESULTS:
        errors.append(
            f"Invalid policy_result: {policy_result!r}. "
            f"Must be one of: {sorted(VALID_POLICY_RESULTS)}"
        )

    # Validate complexity_score is an integer 0-100.
    # bool is a subclass of int, so reject it explicitly: previously
    # `True` slipped through as score 1 and `False` as score 0.
    score = event.get("complexity_score")
    if score is not None:
        if isinstance(score, bool) or not isinstance(score, int) or score < 0 or score > 100:
            errors.append(
                f"Field 'complexity_score' must be an integer 0-100, "
                f"got: {score!r}"
            )

    # Validate complexity_class
    cclass = event.get("complexity_class")
    if cclass is not None and cclass not in VALID_COMPLEXITY_CLASSES:
        errors.append(
            f"Invalid complexity_class: {cclass!r}. "
            f"Must be one of: {sorted(VALID_COMPLEXITY_CLASSES)}"
        )

    return errors
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
def canonicalize(event: Dict[str, Any]) -> str:
    """Serialize *event* to canonical JSON.

    Keys are sorted and separators carry no whitespace, so two equal
    events always serialize to the exact same byte sequence.
    """
    canonical_form = json.dumps(
        event,
        sort_keys=True,
        separators=(",", ":"),
    )
    return canonical_form
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def compute_event_hash(
    previous_hash: str,
    spec_hash: str,
    diff_summary: List[Any],
    commit: str,
    timestamp: str,
) -> str:
    """Compute the deterministic SHA-256 hash for an event.

    The digest input is the fixed-order concatenation:
    previous_hash + spec_hash + canonical(diff_summary) + commit + timestamp

    Returns:
        Lowercase hex digest (64 characters).
    """
    # Canonical JSON keeps the diff portion byte-stable across runs.
    canonical_diff = json.dumps(diff_summary, sort_keys=True, separators=(",", ":"))
    pieces = (previous_hash, spec_hash, canonical_diff, commit, timestamp)
    digest = hashlib.sha256("".join(pieces).encode("utf-8"))
    return digest.hexdigest()
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
def create_event(
    event_type: str,
    api_name: str,
    repository: str,
    version: str,
    timestamp: str,
    commit: str,
    actor: str,
    spec_hash: str,
    previous_hash: str,
    diff_summary: List[Any],
    policy_result: str,
    complexity_score: int,
    complexity_class: str,
    schema_version: str = SCHEMA_VERSION,
) -> Dict[str, Any]:
    """Assemble a fully-populated event dict with its computed hash.

    The event hash is derived only from the chain-relevant fields
    (previous_hash, spec_hash, diff_summary, commit, timestamp).

    Raises:
        ValueError: if the assembled event fails schema validation.
    """
    computed_hash = compute_event_hash(
        previous_hash=previous_hash,
        spec_hash=spec_hash,
        diff_summary=diff_summary,
        commit=commit,
        timestamp=timestamp,
    )

    candidate: Dict[str, Any] = {
        "schema_version": schema_version,
        "event_type": event_type,
        "api_name": api_name,
        "repository": repository,
        "version": version,
        "timestamp": timestamp,
        "commit": commit,
        "actor": actor,
        "spec_hash": spec_hash,
        "previous_hash": previous_hash,
        "diff_summary": diff_summary,
        "policy_result": policy_result,
        "complexity_score": complexity_score,
        "complexity_class": complexity_class,
        "event_hash": computed_hash,
    }

    problems = validate_event(candidate)
    if problems:
        raise ValueError(f"Event validation failed: {'; '.join(problems)}")

    return candidate
|
|
254
|
+
|
|
255
|
+
|
|
256
|
+
def now_utc() -> str:
    """Return the current UTC time as ISO 8601 (YYYY-MM-DDTHH:MM:SSZ)."""
    current_moment = datetime.now(timezone.utc)
    return current_moment.strftime("%Y-%m-%dT%H:%M:%SZ")
|
|
@@ -0,0 +1,438 @@
|
|
|
1
|
+
"""
Delimit API Change Explainer

7 templates that transform raw diff/semver data into human-readable
explanations for different audiences and delivery channels.

Templates:
    1. developer  - Technical, code-focused detail
    2. team_lead  - Executive summary for tech leads
    3. product    - Business-impact focus for PMs
    4. migration  - Step-by-step migration guide
    5. changelog  - CHANGELOG.md entry
    6. pr_comment - GitHub PR comment (compact markdown)
    7. slack      - Slack notification (mrkdwn)
"""

from typing import Any, Dict, List, Optional

# Sibling modules supply the diff model and semver classification.
from .diff_engine_v2 import Change, ChangeType
from .semver_classifier import SemverBump, classify, classify_detailed


# -- Public API --------------------------------------------------------

# Canonical template names, in presentation order. Must stay in sync
# with the keys of _RENDERERS (defined at the bottom of this module).
TEMPLATES = [
    "developer",
    "team_lead",
    "product",
    "migration",
    "changelog",
    "pr_comment",
    "slack",
]
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def explain(
    changes: List[Change],
    template: str = "developer",
    old_version: Optional[str] = None,
    new_version: Optional[str] = None,
    api_name: Optional[str] = None,
) -> str:
    """Render a human-readable explanation of *changes*.

    Args:
        changes: Change objects produced by the diff engine.
        template: One of the 7 names in ``TEMPLATES``.
        old_version: Previous API version (e.g. "1.0.0").
        new_version: New API version (e.g. "2.0.0").
        api_name: Optional API/service name for context.

    Returns:
        Formatted explanation string; an error message listing the
        valid template names when *template* is unknown.
    """
    classified = classify_detailed(changes)
    context = _build_context(classified, changes, old_version, new_version, api_name)

    render = _RENDERERS.get(template)
    if render is None:
        return f"Unknown template '{template}'. Available: {', '.join(TEMPLATES)}"
    return render(context)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def explain_all(
    changes: List[Change],
    old_version: Optional[str] = None,
    new_version: Optional[str] = None,
    api_name: Optional[str] = None,
) -> Dict[str, str]:
    """Render every template once; returns {template_name: output}."""
    classified = classify_detailed(changes)
    context = _build_context(classified, changes, old_version, new_version, api_name)
    outputs: Dict[str, str] = {}
    for template_name in TEMPLATES:
        outputs[template_name] = _RENDERERS[template_name](context)
    return outputs
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
# โโ Internal context builder โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
77
|
+
|
|
78
|
+
def _build_context(
    detail: Dict[str, Any],
    changes: List[Change],
    old_version: Optional[str],
    new_version: Optional[str],
    api_name: Optional[str],
) -> Dict[str, Any]:
    """Merge classifier output with display metadata into one render context.

    Missing versions fall back to "unknown"; a missing name to "API".
    """
    context: Dict[str, Any] = dict(detail)
    context["changes"] = changes
    context["old_version"] = old_version or "unknown"
    context["new_version"] = new_version or "unknown"
    context["api_name"] = api_name or "API"
    context["version_label"] = _version_label(old_version, new_version)
    return context
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def _version_label(old: Optional[str], new: Optional[str]) -> str:
|
|
96
|
+
if old and new:
|
|
97
|
+
return f"{old} -> {new}"
|
|
98
|
+
return ""
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
# โโ Renderers โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
102
|
+
|
|
103
|
+
def _render_developer(ctx: Dict) -> str:
|
|
104
|
+
lines: List[str] = []
|
|
105
|
+
bump = ctx["bump"]
|
|
106
|
+
api = ctx["api_name"]
|
|
107
|
+
ver = ctx["version_label"]
|
|
108
|
+
|
|
109
|
+
lines.append(f"# {api} โ Semver: {bump.upper()}" + (f" ({ver})" if ver else ""))
|
|
110
|
+
lines.append("")
|
|
111
|
+
|
|
112
|
+
if ctx["counts"]["breaking"] > 0:
|
|
113
|
+
lines.append(f"## Breaking Changes ({ctx['counts']['breaking']})")
|
|
114
|
+
lines.append("")
|
|
115
|
+
for c in ctx["breaking_changes"]:
|
|
116
|
+
lines.append(f" - [{c['type']}] {c['message']}")
|
|
117
|
+
lines.append("")
|
|
118
|
+
|
|
119
|
+
if ctx["counts"]["additive"] > 0:
|
|
120
|
+
lines.append(f"## Additions ({ctx['counts']['additive']})")
|
|
121
|
+
lines.append("")
|
|
122
|
+
for c in ctx["additive_changes"]:
|
|
123
|
+
lines.append(f" - [{c['type']}] {c['message']}")
|
|
124
|
+
lines.append("")
|
|
125
|
+
|
|
126
|
+
if ctx["counts"]["patch"] > 0:
|
|
127
|
+
lines.append(f"## Patches ({ctx['counts']['patch']})")
|
|
128
|
+
lines.append("")
|
|
129
|
+
for c in ctx["patch_changes"]:
|
|
130
|
+
lines.append(f" - [{c['type']}] {c['message']}")
|
|
131
|
+
lines.append("")
|
|
132
|
+
|
|
133
|
+
lines.append(f"Total changes: {ctx['counts']['total']}")
|
|
134
|
+
return "\n".join(lines)
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def _render_team_lead(ctx: Dict) -> str:
|
|
138
|
+
lines: List[str] = []
|
|
139
|
+
bump = ctx["bump"]
|
|
140
|
+
api = ctx["api_name"]
|
|
141
|
+
ver = ctx["version_label"]
|
|
142
|
+
bc = ctx["counts"]["breaking"]
|
|
143
|
+
|
|
144
|
+
lines.append(f"## {api} Change Summary" + (f" ({ver})" if ver else ""))
|
|
145
|
+
lines.append("")
|
|
146
|
+
lines.append(f"**Recommended bump**: `{bump}`")
|
|
147
|
+
lines.append(f"**Total changes**: {ctx['counts']['total']}")
|
|
148
|
+
lines.append(f"**Breaking**: {bc}")
|
|
149
|
+
lines.append(f"**Additive**: {ctx['counts']['additive']}")
|
|
150
|
+
lines.append("")
|
|
151
|
+
|
|
152
|
+
if bc > 0:
|
|
153
|
+
lines.append("### Action required")
|
|
154
|
+
lines.append("")
|
|
155
|
+
lines.append("Breaking changes detected. Consumer teams must be notified before release.")
|
|
156
|
+
lines.append("")
|
|
157
|
+
for c in ctx["breaking_changes"]:
|
|
158
|
+
lines.append(f"- {c['message']}")
|
|
159
|
+
else:
|
|
160
|
+
lines.append("No breaking changes. Safe to release without consumer coordination.")
|
|
161
|
+
|
|
162
|
+
return "\n".join(lines)
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
def _render_product(ctx: Dict) -> str:
|
|
166
|
+
lines: List[str] = []
|
|
167
|
+
api = ctx["api_name"]
|
|
168
|
+
bump = ctx["bump"]
|
|
169
|
+
bc = ctx["counts"]["breaking"]
|
|
170
|
+
add = ctx["counts"]["additive"]
|
|
171
|
+
|
|
172
|
+
lines.append(f"## {api} โ Impact Assessment")
|
|
173
|
+
lines.append("")
|
|
174
|
+
|
|
175
|
+
if bc > 0:
|
|
176
|
+
lines.append(f"**Risk level**: HIGH โ {bc} breaking change(s) detected.")
|
|
177
|
+
lines.append("")
|
|
178
|
+
lines.append("**What this means**: Existing integrations will break if these changes ship")
|
|
179
|
+
lines.append("without a coordinated migration. Downstream partners and client teams")
|
|
180
|
+
lines.append("need advance notice.")
|
|
181
|
+
lines.append("")
|
|
182
|
+
lines.append("**Breaking changes**:")
|
|
183
|
+
for c in ctx["breaking_changes"]:
|
|
184
|
+
lines.append(f" - {c['message']}")
|
|
185
|
+
elif add > 0:
|
|
186
|
+
lines.append("**Risk level**: LOW โ New capabilities added, no existing behavior changed.")
|
|
187
|
+
lines.append("")
|
|
188
|
+
lines.append("**What this means**: New features available. Existing integrations unaffected.")
|
|
189
|
+
else:
|
|
190
|
+
lines.append("**Risk level**: NONE โ Documentation or cosmetic changes only.")
|
|
191
|
+
|
|
192
|
+
lines.append("")
|
|
193
|
+
lines.append(f"**Recommended version bump**: `{bump}`")
|
|
194
|
+
return "\n".join(lines)
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def _render_migration(ctx: Dict) -> str:
|
|
198
|
+
lines: List[str] = []
|
|
199
|
+
api = ctx["api_name"]
|
|
200
|
+
ver = ctx["version_label"]
|
|
201
|
+
|
|
202
|
+
lines.append(f"# Migration Guide: {api}" + (f" ({ver})" if ver else ""))
|
|
203
|
+
lines.append("")
|
|
204
|
+
|
|
205
|
+
breaking: List[Dict] = ctx["breaking_changes"]
|
|
206
|
+
if not breaking:
|
|
207
|
+
lines.append("No breaking changes. No migration needed.")
|
|
208
|
+
return "\n".join(lines)
|
|
209
|
+
|
|
210
|
+
lines.append(f"This release contains **{len(breaking)} breaking change(s)**.")
|
|
211
|
+
lines.append("Follow the steps below to update your integration.")
|
|
212
|
+
lines.append("")
|
|
213
|
+
|
|
214
|
+
for i, c in enumerate(breaking, 1):
|
|
215
|
+
lines.append(f"### Step {i}: {c['type'].replace('_', ' ').title()}")
|
|
216
|
+
lines.append("")
|
|
217
|
+
lines.append(f"**Change**: {c['message']}")
|
|
218
|
+
lines.append(f"**Location**: `{c['path']}`")
|
|
219
|
+
lines.append("")
|
|
220
|
+
lines.append(_migration_advice(c["type"]))
|
|
221
|
+
lines.append("")
|
|
222
|
+
|
|
223
|
+
lines.append("---")
|
|
224
|
+
lines.append("After completing all steps, run your integration tests to verify.")
|
|
225
|
+
return "\n".join(lines)
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
def _render_changelog(ctx: Dict) -> str:
|
|
229
|
+
lines: List[str] = []
|
|
230
|
+
ver = ctx.get("new_version") or "Unreleased"
|
|
231
|
+
|
|
232
|
+
lines.append(f"## [{ver}]")
|
|
233
|
+
lines.append("")
|
|
234
|
+
|
|
235
|
+
if ctx["counts"]["breaking"] > 0:
|
|
236
|
+
lines.append("### Breaking Changes")
|
|
237
|
+
lines.append("")
|
|
238
|
+
for c in ctx["breaking_changes"]:
|
|
239
|
+
lines.append(f"- {c['message']}")
|
|
240
|
+
lines.append("")
|
|
241
|
+
|
|
242
|
+
if ctx["counts"]["additive"] > 0:
|
|
243
|
+
lines.append("### Added")
|
|
244
|
+
lines.append("")
|
|
245
|
+
for c in ctx["additive_changes"]:
|
|
246
|
+
lines.append(f"- {c['message']}")
|
|
247
|
+
lines.append("")
|
|
248
|
+
|
|
249
|
+
if ctx["counts"]["patch"] > 0:
|
|
250
|
+
lines.append("### Changed")
|
|
251
|
+
lines.append("")
|
|
252
|
+
for c in ctx["patch_changes"]:
|
|
253
|
+
lines.append(f"- {c['message']}")
|
|
254
|
+
lines.append("")
|
|
255
|
+
|
|
256
|
+
return "\n".join(lines)
|
|
257
|
+
|
|
258
|
+
|
|
259
|
+
def _render_pr_comment(ctx: Dict) -> str:
    """Compact GitHub PR comment (markdown) summarizing the API diff.

    Layout: semver badge header, one-line counts, then — when anything
    broke — a breaking-change table with a collapsible migration guide,
    a collapsible list of additions, and a footer attribution link.
    """
    lines: List[str] = []
    bump = ctx["bump"]
    bc = ctx["counts"]["breaking"]
    total = ctx["counts"]["total"]
    additive_count = ctx["counts"]["additive"]

    # Header with semver badge
    # NOTE(review): the badge glyphs are emoji rendered as mojibake in
    # this copy of the source — confirm against the original encoding.
    badge = {"major": "๐ด MAJOR", "minor": "๐ก MINOR", "patch": "๐ข PATCH", "none": "โช NONE"}
    badge_text = badge.get(bump, bump.upper())

    if bc > 0:
        lines.append(f"## {badge_text} โ Breaking Changes Detected")
    else:
        lines.append(f"## {badge_text} โ API Changes Look Good")
    lines.append("")

    # Summary line: total plus per-category counts, dot-separated.
    parts = [f"**{total}** change{'s' if total != 1 else ''}"]
    if bc > 0:
        parts.append(f"**{bc}** breaking")
    if additive_count > 0:
        parts.append(f"**{additive_count}** additive")
    lines.append(" ยท ".join(parts))
    lines.append("")

    # Breaking changes table
    if bc > 0:
        lines.append("### Breaking Changes")
        lines.append("")
        lines.append("| Change | Location | Severity |")
        lines.append("|--------|----------|----------|")
        for c in ctx["breaking_changes"]:
            change_type = c.get("type", "breaking")
            severity = _pr_severity(change_type)
            lines.append(f"| {c['message']} | `{c['path']}` | {severity} |")
        lines.append("")

        # Migration guidance
        # NOTE(review): indentation reconstructed from a mangled source;
        # this collapsible block iterates breaking_changes, so it is
        # assumed to belong inside the `if bc > 0` branch — confirm.
        lines.append("<details>")
        lines.append("<summary>๐ Migration guide</summary>")
        lines.append("")
        for i, c in enumerate(ctx["breaking_changes"], 1):
            lines.append(f"**{i}. {c['path']}**")
            lines.append(f"- {_pr_migration_hint(c)}")
        lines.append("")
        lines.append("</details>")
        lines.append("")

    # Additive changes (collapsible, only when present)
    additive = ctx["additive_changes"]
    if additive:
        lines.append("<details>")
        # NOTE(review): the summary glyph is a mojibake emoji in this copy
        lines.append(f"<summary>โ New additions ({len(additive)})</summary>")
        lines.append("")
        for c in additive:
            lines.append(f"- `{c['path']}` โ {c['message']}")
        lines.append("")
        lines.append("</details>")
        lines.append("")

    lines.append("---")
    lines.append("*[Delimit](https://github.com/delimit-ai/delimit) ยท API governance for CI/CD*")
    return "\n".join(lines)
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
def _pr_severity(change_type: str) -> str:
|
|
326
|
+
"""Map change type to severity emoji for PR comments."""
|
|
327
|
+
critical = {"endpoint_removed", "method_removed", "field_removed"}
|
|
328
|
+
high = {"required_param_added", "type_changed", "enum_value_removed"}
|
|
329
|
+
if change_type in critical:
|
|
330
|
+
return "๐ด Critical"
|
|
331
|
+
if change_type in high:
|
|
332
|
+
return "๐ High"
|
|
333
|
+
return "๐ก Medium"
|
|
334
|
+
|
|
335
|
+
|
|
336
|
+
def _pr_migration_hint(change: Dict) -> str:
|
|
337
|
+
"""Generate a migration hint for a breaking change."""
|
|
338
|
+
ct = change.get("type", "")
|
|
339
|
+
if ct == "endpoint_removed":
|
|
340
|
+
return "Consumers must stop calling this endpoint. Consider a deprecation period."
|
|
341
|
+
if ct == "method_removed":
|
|
342
|
+
return "Consumers using this HTTP method must migrate to an alternative."
|
|
343
|
+
if ct == "required_param_added":
|
|
344
|
+
return "All existing consumers must include this parameter. Consider making it optional with a default."
|
|
345
|
+
if ct == "field_removed":
|
|
346
|
+
return "Consumers reading this field will break. Add it back or provide a migration path."
|
|
347
|
+
if ct == "type_changed":
|
|
348
|
+
return "Consumers expecting the old type will fail to parse. Coordinate the type migration."
|
|
349
|
+
if ct == "enum_value_removed":
|
|
350
|
+
return "Consumers using this value must update. Consider keeping it as deprecated."
|
|
351
|
+
return "Review this change and update consumers accordingly."
|
|
352
|
+
|
|
353
|
+
|
|
354
|
+
def _render_slack(ctx: Dict) -> str:
|
|
355
|
+
bump = ctx["bump"]
|
|
356
|
+
api = ctx["api_name"]
|
|
357
|
+
bc = ctx["counts"]["breaking"]
|
|
358
|
+
total = ctx["counts"]["total"]
|
|
359
|
+
ver = ctx["version_label"]
|
|
360
|
+
|
|
361
|
+
icon = ":red_circle:" if bc > 0 else ":large_green_circle:"
|
|
362
|
+
|
|
363
|
+
lines: List[str] = []
|
|
364
|
+
lines.append(f"{icon} *{api} API Change* โ `{bump}` bump" + (f" ({ver})" if ver else ""))
|
|
365
|
+
lines.append("")
|
|
366
|
+
lines.append(f"Changes: {total} total, {bc} breaking, {ctx['counts']['additive']} additive")
|
|
367
|
+
|
|
368
|
+
if bc > 0:
|
|
369
|
+
lines.append("")
|
|
370
|
+
lines.append("*Breaking:*")
|
|
371
|
+
for c in ctx["breaking_changes"][:5]: # cap at 5 for Slack
|
|
372
|
+
lines.append(f" > {c['message']}")
|
|
373
|
+
if bc > 5:
|
|
374
|
+
lines.append(f" > ...and {bc - 5} more")
|
|
375
|
+
|
|
376
|
+
return "\n".join(lines)
|
|
377
|
+
|
|
378
|
+
|
|
379
|
+
# โโ Migration advice per change type โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
380
|
+
|
|
381
|
+
def _migration_advice(change_type: str) -> str:
|
|
382
|
+
advice = {
|
|
383
|
+
"endpoint_removed": (
|
|
384
|
+
"**Action**: Update all clients to stop calling this endpoint. "
|
|
385
|
+
"If you control the consumers, search for references and remove them. "
|
|
386
|
+
"Consider using the new endpoint (if applicable) as a replacement."
|
|
387
|
+
),
|
|
388
|
+
"method_removed": (
|
|
389
|
+
"**Action**: Update clients using this HTTP method. "
|
|
390
|
+
"Check if an alternative method is available on the same path."
|
|
391
|
+
),
|
|
392
|
+
"required_param_added": (
|
|
393
|
+
"**Action**: All existing requests must now include this parameter. "
|
|
394
|
+
"Update every call site to pass the new required value."
|
|
395
|
+
),
|
|
396
|
+
"param_removed": (
|
|
397
|
+
"**Action**: Remove this parameter from all requests. "
|
|
398
|
+
"Sending it may cause errors or be silently ignored."
|
|
399
|
+
),
|
|
400
|
+
"response_removed": (
|
|
401
|
+
"**Action**: Update any client logic that depends on this response code. "
|
|
402
|
+
"Check what the new expected response is."
|
|
403
|
+
),
|
|
404
|
+
"required_field_added": (
|
|
405
|
+
"**Action**: If this is a request body field, include it in all requests. "
|
|
406
|
+
"If this is a response field, update parsers to handle the new field."
|
|
407
|
+
),
|
|
408
|
+
"field_removed": (
|
|
409
|
+
"**Action**: Remove any references to this field in your response parsers. "
|
|
410
|
+
"Accessing it will return undefined/null."
|
|
411
|
+
),
|
|
412
|
+
"type_changed": (
|
|
413
|
+
"**Action**: Update serialization/deserialization logic for the new type. "
|
|
414
|
+
"Check all type assertions, validators, and database column types."
|
|
415
|
+
),
|
|
416
|
+
"format_changed": (
|
|
417
|
+
"**Action**: Update parsing logic for the new format. "
|
|
418
|
+
"For example, if a date field changed from 'date' to 'date-time'."
|
|
419
|
+
),
|
|
420
|
+
"enum_value_removed": (
|
|
421
|
+
"**Action**: Stop sending the removed enum value. "
|
|
422
|
+
"Update any switch/case or if/else blocks that handle it."
|
|
423
|
+
),
|
|
424
|
+
}
|
|
425
|
+
return advice.get(change_type, "**Action**: Review the change and update your integration accordingly.")
|
|
426
|
+
|
|
427
|
+
|
|
428
|
+
# โโ Renderer registry โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
|
|
429
|
+
|
|
430
|
+
# Maps template name -> renderer callable. Keys must stay in sync with
# TEMPLATES above; `explain` and `explain_all` dispatch through this
# table, and `explain` reports an unknown-template error for any name
# missing here.
_RENDERERS = {
    "developer": _render_developer,
    "team_lead": _render_team_lead,
    "product": _render_product,
    "migration": _render_migration,
    "changelog": _render_changelog,
    "pr_comment": _render_pr_comment,
    "slack": _render_slack,
}
|