delimit-cli 2.4.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112)
  1. package/.dockerignore +7 -0
  2. package/.github/workflows/ci.yml +22 -0
  3. package/CODE_OF_CONDUCT.md +48 -0
  4. package/CONTRIBUTING.md +67 -0
  5. package/Dockerfile +9 -0
  6. package/LICENSE +21 -0
  7. package/README.md +18 -69
  8. package/SECURITY.md +42 -0
  9. package/adapters/gemini-forge.js +11 -0
  10. package/adapters/gemini-jamsons.js +152 -0
  11. package/bin/delimit-cli.js +8 -0
  12. package/bin/delimit-setup.js +258 -0
  13. package/gateway/ai/backends/__init__.py +0 -0
  14. package/gateway/ai/backends/async_utils.py +21 -0
  15. package/gateway/ai/backends/deploy_bridge.py +150 -0
  16. package/gateway/ai/backends/gateway_core.py +261 -0
  17. package/gateway/ai/backends/generate_bridge.py +38 -0
  18. package/gateway/ai/backends/governance_bridge.py +196 -0
  19. package/gateway/ai/backends/intel_bridge.py +59 -0
  20. package/gateway/ai/backends/memory_bridge.py +93 -0
  21. package/gateway/ai/backends/ops_bridge.py +137 -0
  22. package/gateway/ai/backends/os_bridge.py +82 -0
  23. package/gateway/ai/backends/repo_bridge.py +117 -0
  24. package/gateway/ai/backends/ui_bridge.py +118 -0
  25. package/gateway/ai/backends/vault_bridge.py +129 -0
  26. package/gateway/ai/server.py +1182 -0
  27. package/gateway/core/__init__.py +3 -0
  28. package/gateway/core/__pycache__/__init__.cpython-310.pyc +0 -0
  29. package/gateway/core/__pycache__/auto_baseline.cpython-310.pyc +0 -0
  30. package/gateway/core/__pycache__/ci_formatter.cpython-310.pyc +0 -0
  31. package/gateway/core/__pycache__/contract_ledger.cpython-310.pyc +0 -0
  32. package/gateway/core/__pycache__/dependency_graph.cpython-310.pyc +0 -0
  33. package/gateway/core/__pycache__/dependency_manifest.cpython-310.pyc +0 -0
  34. package/gateway/core/__pycache__/diff_engine_v2.cpython-310.pyc +0 -0
  35. package/gateway/core/__pycache__/event_backbone.cpython-310.pyc +0 -0
  36. package/gateway/core/__pycache__/event_schema.cpython-310.pyc +0 -0
  37. package/gateway/core/__pycache__/explainer.cpython-310.pyc +0 -0
  38. package/gateway/core/__pycache__/gateway.cpython-310.pyc +0 -0
  39. package/gateway/core/__pycache__/gateway_v2.cpython-310.pyc +0 -0
  40. package/gateway/core/__pycache__/gateway_v3.cpython-310.pyc +0 -0
  41. package/gateway/core/__pycache__/impact_analyzer.cpython-310.pyc +0 -0
  42. package/gateway/core/__pycache__/policy_engine.cpython-310.pyc +0 -0
  43. package/gateway/core/__pycache__/registry.cpython-310.pyc +0 -0
  44. package/gateway/core/__pycache__/registry_v2.cpython-310.pyc +0 -0
  45. package/gateway/core/__pycache__/registry_v3.cpython-310.pyc +0 -0
  46. package/gateway/core/__pycache__/semver_classifier.cpython-310.pyc +0 -0
  47. package/gateway/core/__pycache__/spec_detector.cpython-310.pyc +0 -0
  48. package/gateway/core/__pycache__/surface_bridge.cpython-310.pyc +0 -0
  49. package/gateway/core/auto_baseline.py +304 -0
  50. package/gateway/core/ci_formatter.py +283 -0
  51. package/gateway/core/complexity_analyzer.py +386 -0
  52. package/gateway/core/contract_ledger.py +345 -0
  53. package/gateway/core/dependency_graph.py +218 -0
  54. package/gateway/core/dependency_manifest.py +223 -0
  55. package/gateway/core/diff_engine_v2.py +477 -0
  56. package/gateway/core/diff_engine_v2.py.bak +426 -0
  57. package/gateway/core/event_backbone.py +268 -0
  58. package/gateway/core/event_schema.py +258 -0
  59. package/gateway/core/explainer.py +438 -0
  60. package/gateway/core/gateway.py +128 -0
  61. package/gateway/core/gateway_v2.py +154 -0
  62. package/gateway/core/gateway_v3.py +224 -0
  63. package/gateway/core/impact_analyzer.py +163 -0
  64. package/gateway/core/policies/default.yml +13 -0
  65. package/gateway/core/policies/relaxed.yml +48 -0
  66. package/gateway/core/policies/strict.yml +55 -0
  67. package/gateway/core/policy_engine.py +464 -0
  68. package/gateway/core/registry.py +52 -0
  69. package/gateway/core/registry_v2.py +132 -0
  70. package/gateway/core/registry_v3.py +134 -0
  71. package/gateway/core/semver_classifier.py +152 -0
  72. package/gateway/core/spec_detector.py +130 -0
  73. package/gateway/core/surface_bridge.py +307 -0
  74. package/gateway/core/zero_spec/__init__.py +4 -0
  75. package/gateway/core/zero_spec/__pycache__/__init__.cpython-310.pyc +0 -0
  76. package/gateway/core/zero_spec/__pycache__/detector.cpython-310.pyc +0 -0
  77. package/gateway/core/zero_spec/__pycache__/express_extractor.cpython-310.pyc +0 -0
  78. package/gateway/core/zero_spec/__pycache__/fastapi_extractor.cpython-310.pyc +0 -0
  79. package/gateway/core/zero_spec/__pycache__/nestjs_extractor.cpython-310.pyc +0 -0
  80. package/gateway/core/zero_spec/detector.py +353 -0
  81. package/gateway/core/zero_spec/express_extractor.py +483 -0
  82. package/gateway/core/zero_spec/fastapi_extractor.py +254 -0
  83. package/gateway/core/zero_spec/nestjs_extractor.py +369 -0
  84. package/gateway/tasks/__init__.py +1 -0
  85. package/gateway/tasks/__pycache__/__init__.cpython-310.pyc +0 -0
  86. package/gateway/tasks/__pycache__/check_policy.cpython-310.pyc +0 -0
  87. package/gateway/tasks/__pycache__/check_policy_v2.cpython-310.pyc +0 -0
  88. package/gateway/tasks/__pycache__/check_policy_v3.cpython-310.pyc +0 -0
  89. package/gateway/tasks/__pycache__/explain_diff.cpython-310.pyc +0 -0
  90. package/gateway/tasks/__pycache__/explain_diff_v2.cpython-310.pyc +0 -0
  91. package/gateway/tasks/__pycache__/validate_api.cpython-310.pyc +0 -0
  92. package/gateway/tasks/__pycache__/validate_api_v2.cpython-310.pyc +0 -0
  93. package/gateway/tasks/__pycache__/validate_api_v3.cpython-310.pyc +0 -0
  94. package/gateway/tasks/check_policy.py +177 -0
  95. package/gateway/tasks/check_policy_v2.py +255 -0
  96. package/gateway/tasks/check_policy_v3.py +255 -0
  97. package/gateway/tasks/explain_diff.py +305 -0
  98. package/gateway/tasks/explain_diff_v2.py +267 -0
  99. package/gateway/tasks/validate_api.py +131 -0
  100. package/gateway/tasks/validate_api_v2.py +208 -0
  101. package/gateway/tasks/validate_api_v3.py +163 -0
  102. package/package.json +2 -2
  103. package/adapters/codex-skill.js +0 -87
  104. package/adapters/cursor-extension.js +0 -190
  105. package/adapters/gemini-action.js +0 -93
  106. package/adapters/openai-function.js +0 -112
  107. package/adapters/xai-plugin.js +0 -151
  108. package/test-decision-engine.js +0 -181
  109. package/test-hook.js +0 -27
  110. package/tests/cli.test.js +0 -359
  111. package/tests/fixtures/openapi-changed.yaml +0 -56
  112. package/tests/fixtures/openapi.yaml +0 -87
@@ -0,0 +1,131 @@
1
+ import yaml
2
+ import json
3
+ from typing import Dict, List, Any
4
+ from core.registry import task_registry
5
+ from schemas.base import TaskRequest
6
+
7
+ register_task = task_registry.register
8
+
9
+ @register_task("validate-api", version="v1", description="Validate API for breaking changes")
10
+ def validate_api_handler(request: TaskRequest) -> Dict[str, Any]:
11
+ """Check API specifications for breaking changes"""
12
+
13
+ files = request.files
14
+ if len(files) != 2:
15
+ raise ValueError("validate-api requires exactly 2 files: old and new API spec")
16
+
17
+ old_spec = load_spec(files[0])
18
+ new_spec = load_spec(files[1])
19
+
20
+ breaking_changes = []
21
+ warnings = []
22
+
23
+ # Check removed endpoints
24
+ old_paths = set(old_spec.get("paths", {}).keys())
25
+ new_paths = set(new_spec.get("paths", {}).keys())
26
+
27
+ removed = old_paths - new_paths
28
+ if removed:
29
+ for path in removed:
30
+ breaking_changes.append({
31
+ "type": "endpoint_removed",
32
+ "path": path,
33
+ "severity": "high"
34
+ })
35
+
36
+ # Check modified endpoints
37
+ for path in old_paths & new_paths:
38
+ old_methods = set(old_spec["paths"][path].keys())
39
+ new_methods = set(new_spec["paths"][path].keys())
40
+
41
+ removed_methods = old_methods - new_methods
42
+ if removed_methods:
43
+ for method in removed_methods:
44
+ breaking_changes.append({
45
+ "type": "method_removed",
46
+ "path": path,
47
+ "method": method.upper(),
48
+ "severity": "high"
49
+ })
50
+
51
+ # Check parameter changes
52
+ for method in old_methods & new_methods:
53
+ old_params = old_spec["paths"][path][method].get("parameters", [])
54
+ new_params = new_spec["paths"][path][method].get("parameters", [])
55
+
56
+ old_required = {p["name"] for p in old_params if p.get("required", False)}
57
+ new_required = {p["name"] for p in new_params if p.get("required", False)}
58
+
59
+ new_required_params = new_required - old_required
60
+ if new_required_params:
61
+ for param in new_required_params:
62
+ breaking_changes.append({
63
+ "type": "required_parameter_added",
64
+ "path": path,
65
+ "method": method.upper(),
66
+ "parameter": param,
67
+ "severity": "high"
68
+ })
69
+
70
+ # Check for new optional endpoints (non-breaking)
71
+ added = new_paths - old_paths
72
+ if added:
73
+ for path in added:
74
+ warnings.append(f"New endpoint added: {path}")
75
+
76
+ risk_score = calculate_risk_score(breaking_changes)
77
+
78
+ return {
79
+ "breaking_changes": breaking_changes,
80
+ "warnings": warnings,
81
+ "risk_score": risk_score,
82
+ "risk_level": get_risk_level(risk_score),
83
+ "summary": {
84
+ "total_breaking_changes": len(breaking_changes),
85
+ "endpoints_removed": len([c for c in breaking_changes if c["type"] == "endpoint_removed"]),
86
+ "methods_removed": len([c for c in breaking_changes if c["type"] == "method_removed"]),
87
+ "required_params_added": len([c for c in breaking_changes if c["type"] == "required_parameter_added"])
88
+ }
89
+ }
90
+
91
+ def load_spec(file_path: str) -> Dict:
92
+ """Load API specification from YAML or JSON"""
93
+ with open(file_path, 'r') as f:
94
+ if file_path.endswith('.yaml') or file_path.endswith('.yml'):
95
+ return yaml.safe_load(f)
96
+ elif file_path.endswith('.json'):
97
+ return json.load(f)
98
+ else:
99
+ # Try YAML first, then JSON
100
+ content = f.read()
101
+ try:
102
+ return yaml.safe_load(content)
103
+ except:
104
+ return json.loads(content)
105
+
106
+ def calculate_risk_score(breaking_changes: List[Dict]) -> int:
107
+ """Calculate risk score based on breaking changes"""
108
+ if not breaking_changes:
109
+ return 0
110
+
111
+ score = 0
112
+ for change in breaking_changes:
113
+ if change["severity"] == "high":
114
+ score += 10
115
+ elif change["severity"] == "medium":
116
+ score += 5
117
+ else:
118
+ score += 1
119
+
120
+ return min(score, 100)
121
+
122
+ def get_risk_level(score: int) -> str:
123
+ """Convert risk score to level"""
124
+ if score == 0:
125
+ return "none"
126
+ elif score < 20:
127
+ return "low"
128
+ elif score < 50:
129
+ return "medium"
130
+ else:
131
+ return "high"
@@ -0,0 +1,208 @@
1
+ """
2
+ Validate API task with Evidence Contract
3
+ V12 Core Hardening
4
+ """
5
+
6
+ import yaml
7
+ import json
8
+ from typing import Dict, List, Set
9
+ from pathlib import Path
10
+
11
+ from core.registry_v2 import task_registry
12
+ from schemas.requests import ValidateAPIRequest
13
+ from schemas.evidence import (
14
+ APIChangeEvidence, Decision, Violation, ViolationSeverity,
15
+ Evidence, Remediation
16
+ )
17
+
18
+
19
+ @task_registry.register("validate-api", version="1.0", description="Check API for breaking changes")
20
+ def validate_api_handler(request: ValidateAPIRequest) -> APIChangeEvidence:
21
+ """Check API specifications for breaking changes with evidence contract"""
22
+
23
+ # Load specifications
24
+ old_spec = load_spec(request.old_spec)
25
+ new_spec = load_spec(request.new_spec)
26
+
27
+ # Analyze changes
28
+ violations = []
29
+ evidence_list = []
30
+ breaking_changes = []
31
+ non_breaking_changes = []
32
+
33
+ # Check removed endpoints
34
+ old_paths = set(old_spec.get("paths", {}).keys())
35
+ new_paths = set(new_spec.get("paths", {}).keys())
36
+
37
+ removed_paths = old_paths - new_paths
38
+ for path in removed_paths:
39
+ violations.append(Violation(
40
+ rule="no_removed_endpoint",
41
+ severity=ViolationSeverity.HIGH,
42
+ path=path,
43
+ message=f"Endpoint removed: {path}",
44
+ details={"type": "endpoint_removed"}
45
+ ))
46
+ breaking_changes.append({
47
+ "type": "endpoint_removed",
48
+ "path": path
49
+ })
50
+ evidence_list.append(Evidence(
51
+ rule="no_removed_endpoint",
52
+ passed=False,
53
+ details={"path": path, "status": "removed"}
54
+ ))
55
+
56
+ # Check for removed methods
57
+ for path in old_paths & new_paths:
58
+ old_methods = set(old_spec["paths"][path].keys())
59
+ new_methods = set(new_spec["paths"][path].keys())
60
+
61
+ removed_methods = old_methods - new_methods
62
+ for method in removed_methods:
63
+ violations.append(Violation(
64
+ rule="no_removed_method",
65
+ severity=ViolationSeverity.HIGH,
66
+ path=f"{path}:{method.upper()}",
67
+ message=f"Method removed: {method.upper()} {path}",
68
+ details={"type": "method_removed", "method": method}
69
+ ))
70
+ breaking_changes.append({
71
+ "type": "method_removed",
72
+ "path": path,
73
+ "method": method.upper()
74
+ })
75
+ evidence_list.append(Evidence(
76
+ rule="no_removed_method",
77
+ passed=False,
78
+ details={"path": path, "method": method, "status": "removed"}
79
+ ))
80
+
81
+ # Check for new required parameters (breaking)
82
+ for method in old_methods & new_methods:
83
+ old_params = extract_parameters(old_spec["paths"][path][method])
84
+ new_params = extract_parameters(new_spec["paths"][path][method])
85
+
86
+ old_required = {p["name"] for p in old_params if p.get("required", False)}
87
+ new_required = {p["name"] for p in new_params if p.get("required", False)}
88
+
89
+ newly_required = new_required - old_required
90
+ for param in newly_required:
91
+ violations.append(Violation(
92
+ rule="no_new_required_param",
93
+ severity=ViolationSeverity.HIGH,
94
+ path=f"{path}:{method.upper()}",
95
+ message=f"New required parameter: {param}",
96
+ details={"type": "required_param_added", "parameter": param}
97
+ ))
98
+ breaking_changes.append({
99
+ "type": "required_param_added",
100
+ "path": path,
101
+ "method": method.upper(),
102
+ "parameter": param
103
+ })
104
+ evidence_list.append(Evidence(
105
+ rule="no_new_required_param",
106
+ passed=False,
107
+ details={"path": path, "method": method, "parameter": param}
108
+ ))
109
+
110
+ # Check for added endpoints (non-breaking)
111
+ added_paths = new_paths - old_paths
112
+ for path in added_paths:
113
+ non_breaking_changes.append({
114
+ "type": "endpoint_added",
115
+ "path": path
116
+ })
117
+ evidence_list.append(Evidence(
118
+ rule="backward_compatible_additions",
119
+ passed=True,
120
+ details={"path": path, "status": "added"}
121
+ ))
122
+
123
+ # Determine decision and exit code
124
+ if violations:
125
+ decision = Decision.FAIL
126
+ exit_code = 1
127
+ summary = f"API validation failed: {len(violations)} breaking changes detected"
128
+ else:
129
+ decision = Decision.PASS
130
+ exit_code = 0
131
+ summary = "API validation passed: No breaking changes detected"
132
+
133
+ # Calculate risk score
134
+ risk_score = min(len(violations) * 10, 100)
135
+
136
+ # Build remediation if needed
137
+ remediation = None
138
+ if violations:
139
+ remediation = Remediation(
140
+ summary="Breaking changes detected in API specification",
141
+ steps=[
142
+ "Option 1: Restore removed endpoints/methods to maintain compatibility",
143
+ "Option 2: Create a new API version (e.g., v2) for breaking changes",
144
+ "Option 3: Implement deprecation warnings before removal",
145
+ "Option 4: Document migration path for API consumers"
146
+ ],
147
+ examples=[
148
+ "Keep old endpoint with deprecation notice",
149
+ "Add version prefix: /v2/api/..."
150
+ ],
151
+ documentation="https://docs.delimit.ai/api-versioning"
152
+ )
153
+
154
+ # Return evidence contract
155
+ return APIChangeEvidence(
156
+ task="validate-api",
157
+ task_version="1.0",
158
+ decision=decision,
159
+ exit_code=exit_code,
160
+ violations=violations,
161
+ evidence=evidence_list,
162
+ remediation=remediation,
163
+ summary=summary,
164
+ correlation_id=request.correlation_id,
165
+ metrics={
166
+ "endpoints_checked": len(old_paths | new_paths),
167
+ "breaking_changes": len(breaking_changes),
168
+ "non_breaking_changes": len(non_breaking_changes)
169
+ },
170
+ breaking_changes=breaking_changes,
171
+ non_breaking_changes=non_breaking_changes,
172
+ risk_score=risk_score
173
+ )
174
+
175
+
176
+ def load_spec(file_path: str) -> Dict:
177
+ """Load API specification from file"""
178
+ path = Path(file_path)
179
+ if not path.exists():
180
+ raise FileNotFoundError(
181
+ f"Spec file not found: {file_path}\n"
182
+ f"If the spec was deleted, ensure both old and new spec paths exist before running validation."
183
+ )
184
+ with path.open('r') as f:
185
+ if path.suffix in ['.yaml', '.yml']:
186
+ return yaml.safe_load(f)
187
+ elif path.suffix == '.json':
188
+ return json.load(f)
189
+ else:
190
+ # Try YAML first, then JSON
191
+ content = f.read()
192
+ try:
193
+ return yaml.safe_load(content)
194
+ except:
195
+ return json.loads(content)
196
+
197
+
198
+ def extract_parameters(operation: Dict) -> List[Dict]:
199
+ """Extract parameters from an operation"""
200
+ params = operation.get("parameters", [])
201
+ # Also check requestBody for required fields
202
+ if "requestBody" in operation and operation["requestBody"].get("required", False):
203
+ params.append({
204
+ "name": "requestBody",
205
+ "required": True,
206
+ "in": "body"
207
+ })
208
+ return params
@@ -0,0 +1,163 @@
1
+ """
2
+ Validate API task with Evidence Contract - V12 Final
3
+ Complete implementation with Pydantic v2
4
+ """
5
+
6
+ import yaml
7
+ import json
8
+ from typing import Dict, List, Set
9
+ from pathlib import Path
10
+
11
+ from core.registry_v3 import task_registry
12
+ from core.diff_engine_v2 import OpenAPIDiffEngine, ChangeType
13
+ from schemas.requests_v2 import ValidateAPIRequest
14
+ from schemas.evidence import (
15
+ APIChangeEvidence, Decision, Violation, ViolationSeverity,
16
+ Evidence, Remediation
17
+ )
18
+
19
+
20
+ @task_registry.register("validate-api", task_version="1.0", description="Check API for breaking changes")
21
+ def validate_api_handler(request: ValidateAPIRequest) -> APIChangeEvidence:
22
+ """Check API specifications for breaking changes with evidence contract"""
23
+
24
+ # Load specifications
25
+ old_spec = load_spec(request.old_spec)
26
+ new_spec = load_spec(request.new_spec)
27
+
28
+ # Use diff engine for comprehensive change detection
29
+ diff_engine = OpenAPIDiffEngine()
30
+ all_changes = diff_engine.compare(old_spec, new_spec)
31
+
32
+ # Process changes from diff engine
33
+ violations = []
34
+ evidence_list = []
35
+ breaking_changes = []
36
+ non_breaking_changes = []
37
+
38
+ for change in all_changes:
39
+ if change.is_breaking:
40
+ # Convert to violation
41
+ violations.append(Violation(
42
+ rule=f"no_{change.type.value}",
43
+ severity=ViolationSeverity.HIGH if change.severity == "high" else ViolationSeverity.MEDIUM,
44
+ path=change.path,
45
+ message=change.message,
46
+ details=change.details
47
+ ))
48
+ breaking_changes.append({
49
+ "type": change.type.value,
50
+ "path": change.path,
51
+ **change.details
52
+ })
53
+ evidence_list.append(Evidence(
54
+ rule=f"no_{change.type.value}",
55
+ passed=False,
56
+ details={"path": change.path, **change.details}
57
+ ))
58
+ else:
59
+ # Non-breaking change
60
+ non_breaking_changes.append({
61
+ "type": change.type.value,
62
+ "path": change.path,
63
+ **change.details
64
+ })
65
+ evidence_list.append(Evidence(
66
+ rule="backward_compatible_additions",
67
+ passed=True,
68
+ details={"path": change.path, **change.details}
69
+ ))
70
+
71
+ # Also keep legacy endpoint checks for backward compatibility
72
+ # Legacy checks are now handled by diff engine above
73
+
74
+ # Get path counts for metrics
75
+ old_paths = set(old_spec.get("paths", {}).keys())
76
+ new_paths = set(new_spec.get("paths", {}).keys())
77
+
78
+ # Determine decision and exit code
79
+ if violations:
80
+ decision = Decision.FAIL
81
+ exit_code = 1
82
+ summary = f"API validation failed: {len(violations)} breaking changes detected"
83
+ else:
84
+ decision = Decision.PASS
85
+ exit_code = 0
86
+ summary = "API validation passed: No breaking changes detected"
87
+
88
+ # Calculate risk score
89
+ risk_score = min(len(violations) * 10, 100)
90
+
91
+ # Build remediation if needed
92
+ remediation = None
93
+ if violations:
94
+ remediation = Remediation(
95
+ summary="Breaking changes detected in API specification",
96
+ steps=[
97
+ "Option 1: Restore removed endpoints/methods to maintain compatibility",
98
+ "Option 2: Create a new API version (e.g., v2) for breaking changes",
99
+ "Option 3: Implement deprecation warnings before removal",
100
+ "Option 4: Document migration path for API consumers"
101
+ ],
102
+ examples=[
103
+ "Keep old endpoint with deprecation notice",
104
+ "Add version prefix: /v2/api/..."
105
+ ],
106
+ documentation="https://docs.delimit.ai/api-versioning"
107
+ )
108
+
109
+ # Return evidence contract
110
+ return APIChangeEvidence(
111
+ task="validate-api",
112
+ task_version="1.0",
113
+ decision=decision,
114
+ exit_code=exit_code,
115
+ violations=violations,
116
+ evidence=evidence_list,
117
+ remediation=remediation,
118
+ summary=summary,
119
+ correlation_id=request.correlation_id,
120
+ metrics={
121
+ "endpoints_checked": len(old_paths | new_paths),
122
+ "breaking_changes": len(breaking_changes),
123
+ "non_breaking_changes": len(non_breaking_changes)
124
+ },
125
+ breaking_changes=breaking_changes,
126
+ non_breaking_changes=non_breaking_changes,
127
+ risk_score=risk_score
128
+ )
129
+
130
+
131
+ def load_spec(file_path: str) -> Dict:
132
+ """Load API specification from file"""
133
+ path = Path(file_path)
134
+ if not path.exists():
135
+ raise FileNotFoundError(
136
+ f"Spec file not found: {file_path}\n"
137
+ f"If the spec was deleted, ensure both old and new spec paths exist before running validation."
138
+ )
139
+ with path.open('r') as f:
140
+ if path.suffix in ['.yaml', '.yml']:
141
+ return yaml.safe_load(f)
142
+ elif path.suffix == '.json':
143
+ return json.load(f)
144
+ else:
145
+ # Try YAML first, then JSON
146
+ content = f.read()
147
+ try:
148
+ return yaml.safe_load(content)
149
+ except:
150
+ return json.loads(content)
151
+
152
+
153
+ def extract_parameters(operation: Dict) -> List[Dict]:
154
+ """Extract parameters from an operation"""
155
+ params = operation.get("parameters", [])
156
+ # Also check requestBody for required fields
157
+ if "requestBody" in operation and operation["requestBody"].get("required", False):
158
+ params.append({
159
+ "name": "requestBody",
160
+ "required": True,
161
+ "in": "body"
162
+ })
163
+ return params
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "delimit-cli",
3
- "version": "2.4.0",
4
- "description": "Prevent breaking API changes before they reach production. Deterministic diff engine + policy enforcement + semver classification.",
3
+ "version": "3.0.0",
4
+ "description": "AI agent guardrails for developers. Install governance tools into Claude Code with one command.",
5
5
  "main": "index.js",
6
6
  "bin": {
7
7
  "delimit": "./bin/delimit-cli.js"
@@ -1,87 +0,0 @@
1
- #!/usr/bin/env node
2
- /**
3
- * Delimit™ Codex Skill Adapter
4
- * Implements GitHub Codex "Skills" interface
5
- */
6
-
7
- const axios = require('axios');
8
- const AGENT_URL = `http://127.0.0.1:${process.env.DELIMIT_AGENT_PORT || 7823}`;
9
-
10
- class DelimitCodexSkill {
11
- constructor() {
12
- this.name = 'delimit-governance';
13
- this.version = '2.0.0';
14
- }
15
-
16
- /**
17
- * Codex Skills use onBeforeSuggestion and onAfterAccept events
18
- */
19
- async onBeforeSuggestion(context) {
20
- console.log('[DELIMIT CODEX] Validating code suggestion...');
21
-
22
- try {
23
- const { code, language, file } = context;
24
-
25
- // Check governance rules
26
- const response = await axios.post(`${AGENT_URL}/evaluate`, {
27
- action: 'codex_suggestion',
28
- code: code,
29
- language: language,
30
- file: file,
31
- tool: 'codex'
32
- });
33
-
34
- if (response.data.action === 'block') {
35
- return {
36
- allow: false,
37
- message: `[DELIMIT] Code blocked: ${response.data.reason}`
38
- };
39
- }
40
-
41
- if (response.data.action === 'prompt') {
42
- return {
43
- allow: true,
44
- warning: response.data.message
45
- };
46
- }
47
-
48
- return { allow: true };
49
- } catch (error) {
50
- console.warn('[DELIMIT CODEX] Governance check failed:', error.message);
51
- return { allow: true }; // Fail open
52
- }
53
- }
54
-
55
- async onAfterAccept(context) {
56
- console.log('[DELIMIT CODEX] Recording accepted suggestion...');
57
-
58
- try {
59
- // Collect evidence
60
- await axios.post(`${AGENT_URL}/audit`, {
61
- action: 'codex_accept',
62
- context: context,
63
- timestamp: new Date().toISOString()
64
- });
65
- } catch (error) {
66
- // Silent fail for audit
67
- }
68
- }
69
-
70
- // Codex-specific command handler
71
- async handleCommand(command, args) {
72
- if (command === 'governance') {
73
- const { execSync } = require('child_process');
74
- return execSync('delimit status --verbose').toString();
75
- }
76
- }
77
- }
78
-
79
- // Export for Codex
80
- if (typeof module !== 'undefined' && module.exports) {
81
- module.exports = new DelimitCodexSkill();
82
- }
83
-
84
- // Codex registration
85
- if (typeof registerSkill === 'function') {
86
- registerSkill(new DelimitCodexSkill());
87
- }