delimit-cli 2.4.0 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.dockerignore +7 -0
- package/.github/workflows/ci.yml +22 -0
- package/CODE_OF_CONDUCT.md +48 -0
- package/CONTRIBUTING.md +67 -0
- package/Dockerfile +9 -0
- package/LICENSE +21 -0
- package/README.md +18 -69
- package/SECURITY.md +42 -0
- package/adapters/gemini-forge.js +11 -0
- package/adapters/gemini-jamsons.js +152 -0
- package/bin/delimit-cli.js +8 -0
- package/bin/delimit-setup.js +258 -0
- package/gateway/ai/backends/__init__.py +0 -0
- package/gateway/ai/backends/async_utils.py +21 -0
- package/gateway/ai/backends/deploy_bridge.py +150 -0
- package/gateway/ai/backends/gateway_core.py +261 -0
- package/gateway/ai/backends/generate_bridge.py +38 -0
- package/gateway/ai/backends/governance_bridge.py +196 -0
- package/gateway/ai/backends/intel_bridge.py +59 -0
- package/gateway/ai/backends/memory_bridge.py +93 -0
- package/gateway/ai/backends/ops_bridge.py +137 -0
- package/gateway/ai/backends/os_bridge.py +82 -0
- package/gateway/ai/backends/repo_bridge.py +117 -0
- package/gateway/ai/backends/ui_bridge.py +118 -0
- package/gateway/ai/backends/vault_bridge.py +129 -0
- package/gateway/ai/server.py +1182 -0
- package/gateway/core/__init__.py +3 -0
- package/gateway/core/__pycache__/__init__.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/auto_baseline.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/ci_formatter.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/contract_ledger.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/dependency_graph.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/dependency_manifest.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/diff_engine_v2.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/event_backbone.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/event_schema.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/explainer.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/gateway.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/gateway_v2.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/gateway_v3.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/impact_analyzer.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/policy_engine.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/registry.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/registry_v2.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/registry_v3.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/semver_classifier.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/spec_detector.cpython-310.pyc +0 -0
- package/gateway/core/__pycache__/surface_bridge.cpython-310.pyc +0 -0
- package/gateway/core/auto_baseline.py +304 -0
- package/gateway/core/ci_formatter.py +283 -0
- package/gateway/core/complexity_analyzer.py +386 -0
- package/gateway/core/contract_ledger.py +345 -0
- package/gateway/core/dependency_graph.py +218 -0
- package/gateway/core/dependency_manifest.py +223 -0
- package/gateway/core/diff_engine_v2.py +477 -0
- package/gateway/core/diff_engine_v2.py.bak +426 -0
- package/gateway/core/event_backbone.py +268 -0
- package/gateway/core/event_schema.py +258 -0
- package/gateway/core/explainer.py +438 -0
- package/gateway/core/gateway.py +128 -0
- package/gateway/core/gateway_v2.py +154 -0
- package/gateway/core/gateway_v3.py +224 -0
- package/gateway/core/impact_analyzer.py +163 -0
- package/gateway/core/policies/default.yml +13 -0
- package/gateway/core/policies/relaxed.yml +48 -0
- package/gateway/core/policies/strict.yml +55 -0
- package/gateway/core/policy_engine.py +464 -0
- package/gateway/core/registry.py +52 -0
- package/gateway/core/registry_v2.py +132 -0
- package/gateway/core/registry_v3.py +134 -0
- package/gateway/core/semver_classifier.py +152 -0
- package/gateway/core/spec_detector.py +130 -0
- package/gateway/core/surface_bridge.py +307 -0
- package/gateway/core/zero_spec/__init__.py +4 -0
- package/gateway/core/zero_spec/__pycache__/__init__.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/detector.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/express_extractor.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/fastapi_extractor.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/__pycache__/nestjs_extractor.cpython-310.pyc +0 -0
- package/gateway/core/zero_spec/detector.py +353 -0
- package/gateway/core/zero_spec/express_extractor.py +483 -0
- package/gateway/core/zero_spec/fastapi_extractor.py +254 -0
- package/gateway/core/zero_spec/nestjs_extractor.py +369 -0
- package/gateway/tasks/__init__.py +1 -0
- package/gateway/tasks/__pycache__/__init__.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/check_policy.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/check_policy_v2.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/check_policy_v3.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/explain_diff.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/explain_diff_v2.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/validate_api.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/validate_api_v2.cpython-310.pyc +0 -0
- package/gateway/tasks/__pycache__/validate_api_v3.cpython-310.pyc +0 -0
- package/gateway/tasks/check_policy.py +177 -0
- package/gateway/tasks/check_policy_v2.py +255 -0
- package/gateway/tasks/check_policy_v3.py +255 -0
- package/gateway/tasks/explain_diff.py +305 -0
- package/gateway/tasks/explain_diff_v2.py +267 -0
- package/gateway/tasks/validate_api.py +131 -0
- package/gateway/tasks/validate_api_v2.py +208 -0
- package/gateway/tasks/validate_api_v3.py +163 -0
- package/package.json +2 -2
- package/adapters/codex-skill.js +0 -87
- package/adapters/cursor-extension.js +0 -190
- package/adapters/gemini-action.js +0 -93
- package/adapters/openai-function.js +0 -112
- package/adapters/xai-plugin.js +0 -151
- package/test-decision-engine.js +0 -181
- package/test-hook.js +0 -27
- package/tests/cli.test.js +0 -359
- package/tests/fixtures/openapi-changed.yaml +0 -56
- package/tests/fixtures/openapi.yaml +0 -87
|
@@ -0,0 +1,258 @@
|
|
|
1
|
+
#!/usr/bin/env node
/**
 * delimit setup — Install Delimit MCP governance tools into Claude Code.
 *
 * What it does:
 * 1. Creates ~/.delimit/ directory with the MCP server + core engine
 * 2. Adds "delimit" to Claude Code's MCP config (~/.mcp.json or .mcp.json)
 * 3. Installs default agents into ~/.claude/agents/
 * 4. Prints next steps
 */

const fs = require('fs');
const path = require('path');
const { execSync } = require('child_process');
const os = require('os');

// Install root for the bundled gateway engine plus deploy/ledger/evidence state.
const DELIMIT_HOME = path.join(os.homedir(), '.delimit');
// Claude Code's user-level MCP server registry file.
const MCP_CONFIG = path.join(os.homedir(), '.mcp.json');
const CLAUDE_DIR = path.join(os.homedir(), '.claude');
// Destination for the default agent definitions (markdown with YAML frontmatter).
const AGENTS_DIR = path.join(CLAUDE_DIR, 'agents');

// Colors — minimal ANSI escape helpers for terminal output.
const green = (s) => `\x1b[32m${s}\x1b[0m`;
const yellow = (s) => `\x1b[33m${s}\x1b[0m`;
const blue = (s) => `\x1b[34m${s}\x1b[0m`;
const dim = (s) => `\x1b[2m${s}\x1b[0m`;
const bold = (s) => `\x1b[1m${s}\x1b[0m`;

// Logging helpers: a plain line, and a numbered step banner.
function log(msg) { console.log(msg); }
function step(n, msg) { log(`\n${blue(`[${n}]`)} ${msg}`); }
|
|
31
|
+
|
|
32
|
+
/**
 * Run the full installer: prerequisite checks, engine install, MCP config,
 * agent installation, and a summary of next steps.
 *
 * Exits the process with code 1 when no suitable Python is found; all other
 * failures are reported as warnings and setup continues best-effort.
 */
async function main() {
  log('');
  log(bold(' Delimit Setup'));
  log(dim(' AI agent guardrails for developers'));
  log('');

  // Step 1: Check prerequisites
  step(1, 'Checking prerequisites...');

  // Python 3.9+.
  // FIX: the previous check required `minor >= 9` regardless of major
  // version, which would wrongly reject e.g. Python 4.0. Compare the
  // (major, minor) pair instead.
  let python = null;
  for (const cmd of ['python3', 'python']) {
    try {
      const ver = execSync(`${cmd} --version 2>&1`, { encoding: 'utf-8' }).trim();
      const match = ver.match(/(\d+)\.(\d+)/);
      if (match) {
        const major = parseInt(match[1], 10);
        const minor = parseInt(match[2], 10);
        if (major > 3 || (major === 3 && minor >= 9)) {
          python = cmd;
          log(` ${green('✓')} ${ver}`);
          break;
        }
      }
    } catch {}
  }
  if (!python) {
    log(` ${yellow('✗')} Python 3.9+ not found. Install Python first.`);
    process.exit(1);
  }

  // Check if Claude Code is available (informational only — setup proceeds
  // either way, so no result flag is kept).
  try {
    execSync('claude --version 2>/dev/null', { encoding: 'utf-8' });
    log(` ${green('✓')} Claude Code detected`);
  } catch {
    log(` ${yellow('!')} Claude Code not detected — MCP config will still be created`);
  }

  // Step 2: Install Delimit MCP server
  step(2, 'Installing Delimit MCP server...');

  // Create ~/.delimit directory layout up front so later copies/writes succeed.
  fs.mkdirSync(path.join(DELIMIT_HOME, 'server', 'core', 'zero_spec'), { recursive: true });
  fs.mkdirSync(path.join(DELIMIT_HOME, 'server', 'tasks'), { recursive: true });
  fs.mkdirSync(path.join(DELIMIT_HOME, 'deploys'), { recursive: true });
  fs.mkdirSync(path.join(DELIMIT_HOME, 'ledger'), { recursive: true });
  fs.mkdirSync(path.join(DELIMIT_HOME, 'evidence'), { recursive: true });

  // Copy the gateway core from our bundled copy
  const gatewaySource = path.join(__dirname, '..', 'gateway');
  if (fs.existsSync(gatewaySource)) {
    copyDir(gatewaySource, path.join(DELIMIT_HOME, 'server'));
    log(` ${green('✓')} Core engine installed`);
  } else {
    // Fallback: try to clone from GitHub
    log(` ${dim(' Downloading from GitHub...')}`);
    try {
      execSync(`git clone --depth 1 https://github.com/delimit-ai/delimit-gateway.git "${path.join(DELIMIT_HOME, 'server')}" 2>/dev/null`, { stdio: 'pipe' });
      log(` ${green('✓')} Core engine cloned`);
    } catch {
      log(` ${yellow('!')} Could not download. Clone manually: git clone https://github.com/delimit-ai/delimit-gateway.git ~/.delimit/server`);
    }
  }

  // Copy the MCP server file (bundled next to bin/; optional).
  const serverSource = path.join(__dirname, '..', 'mcp-server.py');
  if (fs.existsSync(serverSource)) {
    fs.copyFileSync(serverSource, path.join(DELIMIT_HOME, 'server', 'mcp-server.py'));
  }

  // Install Python deps (best-effort; a failure only prints guidance).
  log(` ${dim(' Installing Python dependencies...')}`);
  try {
    execSync(`${python} -m pip install --quiet fastmcp pyyaml pydantic packaging 2>/dev/null`, { stdio: 'pipe' });
    log(` ${green('✓')} Python dependencies installed`);
  } catch {
    log(` ${yellow('!')} pip install failed — run manually: pip install fastmcp pyyaml pydantic packaging`);
  }

  // Step 3: Configure Claude Code MCP
  step(3, 'Configuring Claude Code MCP...');

  // Merge into any existing config; a corrupt file is treated as empty.
  let mcpConfig = {};
  if (fs.existsSync(MCP_CONFIG)) {
    try {
      mcpConfig = JSON.parse(fs.readFileSync(MCP_CONFIG, 'utf-8'));
    } catch {}
  }
  if (!mcpConfig.mcpServers) mcpConfig.mcpServers = {};

  // Prefer the full gateway server; fall back to the single-file bundle.
  const serverPath = path.join(DELIMIT_HOME, 'server', 'ai', 'server.py');
  const serverPathAlt = path.join(DELIMIT_HOME, 'server', 'mcp-server.py');
  const actualServer = fs.existsSync(serverPath) ? serverPath : serverPathAlt;

  if (mcpConfig.mcpServers.delimit) {
    log(` ${green('✓')} Delimit MCP already configured`);
  } else {
    mcpConfig.mcpServers.delimit = {
      type: 'stdio',
      command: python,
      args: [actualServer],
      cwd: path.join(DELIMIT_HOME, 'server'),
      env: {
        PYTHONPATH: path.join(DELIMIT_HOME, 'server')
      },
      description: 'Delimit — AI agent guardrails'
    };
    fs.writeFileSync(MCP_CONFIG, JSON.stringify(mcpConfig, null, 2));
    log(` ${green('✓')} Added delimit to ${MCP_CONFIG}`);
  }

  // Step 4: Install default agents
  step(4, 'Installing governance agents...');

  fs.mkdirSync(AGENTS_DIR, { recursive: true });

  // Agent definitions: markdown files with YAML frontmatter that declares
  // which tools (including delimit MCP tools) each agent may use.
  const agents = {
    'lint.md': `---
name: lint
description: Lint OpenAPI specs for breaking changes and policy violations
tools:
- Read
- Grep
- Glob
- Bash
- mcp__delimit__delimit_lint
- mcp__delimit__delimit_diff
- mcp__delimit__delimit_policy
- mcp__delimit__delimit_impact
- mcp__delimit__delimit_ledger
---

# Lint Agent

Run API governance checks. Use delimit_lint to compare specs, delimit_policy to check rules, delimit_impact for downstream analysis.
`,
    'engineering.md': `---
name: engineering
description: Build features, fix bugs, write tests, refactor code
tools:
- Read
- Glob
- Grep
- Bash
- Edit
- Write
- mcp__delimit__delimit_lint
- mcp__delimit__delimit_diff
- mcp__delimit__delimit_test_generate
- mcp__delimit__delimit_test_coverage
---

# Engineering Agent

Execute code directives. Use delimit_test_coverage to verify coverage targets. Use delimit_lint to check API compatibility after changes.
`,
    'governance.md': `---
name: governance
description: Run governance checks on the current project
tools:
- Read
- Grep
- Glob
- Bash
- mcp__delimit__delimit_gov_health
- mcp__delimit__delimit_gov_status
- mcp__delimit__delimit_gov_policy
- mcp__delimit__delimit_security_scan
- mcp__delimit__delimit_security_audit
- mcp__delimit__delimit_evidence_collect
- mcp__delimit__delimit_repo_analyze
- mcp__delimit__delimit_repo_config_validate
---

# Governance Agent

Run full governance compliance checks. Verify security, policy compliance, evidence collection, and repo health.
`
  };

  // Only write agents that don't exist yet — never clobber user edits.
  let installed = 0;
  for (const [filename, content] of Object.entries(agents)) {
    const agentPath = path.join(AGENTS_DIR, filename);
    if (!fs.existsSync(agentPath)) {
      fs.writeFileSync(agentPath, content);
      installed++;
    }
  }
  log(` ${green('✓')} ${installed} agents installed (${Object.keys(agents).length - installed} already existed)`);

  // Step 5: Summary
  step(5, 'Done!');
  log('');
  log(` ${green('Delimit is installed.')} Your AI agents are now monitored.`);
  log('');
  log(' What happens next:');
  log(` ${dim('1.')} Start Claude Code in any project`);
  log(` ${dim('2.')} The delimit MCP tools load automatically`);
  log(` ${dim('3.')} Use agents: ${blue('/lint')}, ${blue('/governance')}, ${blue('/engineering')}`);
  log(` ${dim('4.')} Or ask: "check governance health" / "run test coverage"`);
  log('');
  log(` ${dim('Config:')} ${MCP_CONFIG}`);
  log(` ${dim('Server:')} ${actualServer}`);
  log(` ${dim('Agents:')} ${AGENTS_DIR}`);
  log('');
  log(` ${dim('Docs:')} https://delimit.ai/docs`);
  log(` ${dim('GitHub:')} https://github.com/delimit-ai/delimit`);
  log('');
}
|
|
240
|
+
|
|
241
|
+
/**
 * Recursively copy a directory tree from `src` into `dest`.
 *
 * Creates `dest` (and parents) if needed. Entries named `__pycache__`,
 * `node_modules`, or `.git` are skipped at every depth.
 */
function copyDir(src, dest) {
  const SKIP = new Set(['__pycache__', 'node_modules', '.git']);
  fs.mkdirSync(dest, { recursive: true });
  const entries = fs.readdirSync(src, { withFileTypes: true });
  for (const item of entries) {
    if (SKIP.has(item.name)) continue;
    const from = path.join(src, item.name);
    const to = path.join(dest, item.name);
    if (item.isDirectory()) {
      copyDir(from, to);
    } else {
      fs.copyFileSync(from, to);
    }
  }
}
|
|
254
|
+
|
|
255
|
+
// Entry point: run the installer and fail loudly (non-zero exit) on any
// unhandled error.
main().catch(err => {
  console.error('Setup failed:', err.message);
  process.exit(1);
});
|
|
File without changes
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"""Shared async utilities for gateway bridge modules."""
|
|
2
|
+
import asyncio
|
|
3
|
+
import concurrent.futures
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def run_async(coro, timeout: float = 30):
    """Run an async coroutine from sync code, handling nested event loops.

    When called from inside an already-running event loop (FastMCP, AnyIO),
    offloads to a ThreadPoolExecutor. Otherwise uses asyncio.run() directly.

    Args:
        coro: The coroutine object to execute.
        timeout: Maximum seconds to wait for the result when offloading to a
            worker thread (previously a hard-coded 30; the default preserves
            the old behavior). Ignored on the direct asyncio.run() path.

    Returns:
        Whatever the coroutine returns.

    Raises:
        concurrent.futures.TimeoutError: If the offloaded coroutine does not
            finish within ``timeout`` seconds.
    """
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError:
        # No running loop in this thread — safe to use asyncio.run() below.
        loop = None

    if loop and loop.is_running():
        # asyncio.run() would raise inside a running loop, so execute the
        # coroutine on a fresh loop in a dedicated worker thread instead.
        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as pool:
            return pool.submit(asyncio.run, coro).result(timeout=timeout)
    else:
        return asyncio.run(coro)
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Bridge to deploy tracking — file-based deploy plan management.
|
|
3
|
+
Tier 3 Extended — tracks deploy plans, builds, and rollbacks locally.
|
|
4
|
+
|
|
5
|
+
No external server required. Plans stored at ~/.delimit/deploys/.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import json
|
|
9
|
+
import logging
|
|
10
|
+
import uuid
|
|
11
|
+
from datetime import datetime, timezone
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
from typing import Any, Dict, List, Optional
|
|
14
|
+
|
|
15
|
+
# Module-scoped logger for deploy-bridge diagnostics.
logger = logging.getLogger("delimit.ai.deploy_bridge")

# Each deploy plan is persisted as one JSON file ("PLAN-*.json") here.
DEPLOY_DIR = Path.home() / ".delimit" / "deploys"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _ensure_dir():
    # Idempotently create ~/.delimit/deploys (and parents) so subsequent
    # reads/writes never fail on a fresh install.
    DEPLOY_DIR.mkdir(parents=True, exist_ok=True)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _list_plans(app: Optional[str] = None, env: Optional[str] = None) -> List[Dict]:
    """Return all stored deploy plans, newest plan file first.

    Optionally filter by ``app`` and/or ``env``. Unreadable, corrupt, or
    malformed plan files are skipped silently — listing is best-effort.
    """
    _ensure_dir()
    matched: List[Dict] = []
    for plan_file in sorted(DEPLOY_DIR.glob("PLAN-*.json"), reverse=True):
        try:
            record = json.loads(plan_file.read_text())
            # Filters live inside the try so a non-dict payload (which would
            # blow up on .get) is skipped like any other corrupt file.
            keep = (not app or record.get("app") == app) and (
                not env or record.get("env") == env
            )
        except Exception:
            continue
        if keep:
            matched.append(record)
    return matched
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def plan(app: str, env: str, git_ref: Optional[str] = None) -> Dict[str, Any]:
    """Create a new deploy plan, persist it to disk, and return the record.

    The plan starts in status "planned" with a single-entry history; its id
    is a random 8-hex-digit token prefixed with "PLAN-".
    """
    _ensure_dir()
    timestamp = datetime.now(timezone.utc).isoformat()
    plan_id = "PLAN-" + uuid.uuid4().hex[:8].upper()
    record = {
        "plan_id": plan_id,
        "app": app,
        "env": env,
        "git_ref": git_ref or "HEAD",
        "status": "planned",
        "created_at": timestamp,
        "updated_at": timestamp,
        "history": [{"status": "planned", "at": timestamp}],
    }
    (DEPLOY_DIR / f"{plan_id}.json").write_text(json.dumps(record, indent=2))
    return record
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def status(app: str, env: str) -> Dict[str, Any]:
    """Report the most recent deploy plan for an app+env pair.

    Returns a "no_deploys" marker dict when nothing has been planned yet;
    otherwise summarizes the newest plan plus the total plan count.
    """
    matching = _list_plans(app=app, env=env)
    if not matching:
        return {
            "app": app,
            "env": env,
            "status": "no_deploys",
            "message": f"No deploy plans found for {app} in {env}.",
        }
    head = matching[0]
    summary = {
        "app": app,
        "env": env,
        "latest_plan": head["plan_id"],
        "status": head["status"],
        "git_ref": head.get("git_ref"),
        "updated_at": head.get("updated_at"),
        "total_plans": len(matching),
    }
    return summary
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def build(app: str, git_ref: Optional[str] = None) -> Dict[str, Any]:
    """Locate a Dockerfile for *app* and report build readiness.

    Checks the current working directory first, then two app-specific
    fallback locations; the first existing Dockerfile wins.
    """
    ref = git_ref or "HEAD"
    candidates = [
        Path.cwd() / "Dockerfile",
        Path(f"/home/delimit/{app}/Dockerfile"),
        Path(f"./{app}/Dockerfile"),
    ]
    dockerfile = next((c for c in candidates if c.exists()), None)

    if dockerfile is None:
        return {
            "app": app,
            "git_ref": ref,
            "status": "no_dockerfile",
            "message": f"No Dockerfile found for {app}. Create one to enable Docker builds.",
        }
    return {
        "app": app,
        "git_ref": ref,
        "dockerfile": str(dockerfile),
        "status": "ready",
        "message": f"Dockerfile found at {dockerfile}. Ready to build.",
    }
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def publish(app: str, git_ref: Optional[str] = None) -> Dict[str, Any]:
    """Mark the latest deploy plan for *app* as published and persist it.

    Args:
        app: Application name whose newest plan should be published.
        git_ref: Accepted for interface parity with the other deploy tools;
            not consulted here (the plan keeps its original git_ref).

    Returns:
        The updated plan record, or ``{"error": ...}`` when no plan exists.
    """
    plans = _list_plans(app=app)
    if not plans:
        return {"error": f"No deploy plans found for {app}"}
    latest = plans[0]
    now = datetime.now(timezone.utc).isoformat()
    latest["status"] = "published"
    latest["updated_at"] = now
    # setdefault tolerates legacy/hand-edited plan files that lack a
    # "history" list (a plain append would raise KeyError).
    latest.setdefault("history", []).append({"status": "published", "at": now})
    (DEPLOY_DIR / f"{latest['plan_id']}.json").write_text(json.dumps(latest, indent=2))
    return latest
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def verify(app: str, env: str, git_ref: Optional[str] = None) -> Dict[str, Any]:
    """Verify deployment health (stub — returns plan status).

    A plan counts as "healthy" when its status is "published" or "planned";
    no real endpoint is contacted. ``git_ref`` is accepted but unused.
    """
    matching = _list_plans(app=app, env=env)
    if not matching:
        return {"app": app, "env": env, "status": "no_deploys", "healthy": False}
    head = matching[0]
    is_healthy = head["status"] in ("published", "planned")
    return {
        "app": app,
        "env": env,
        "plan_id": head["plan_id"],
        "status": head["status"],
        "healthy": is_healthy,
        "message": "Health check is a stub — no real endpoint verification yet.",
    }
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def rollback(app: str, env: str, to_sha: Optional[str] = None) -> Dict[str, Any]:
    """Mark the latest deploy plan for app+env as rolled back and persist it.

    Args:
        app: Application name.
        env: Environment name.
        to_sha: Optional commit sha the rollback targets; recorded on the
            plan and in its history entry (may be None).

    Returns:
        The updated plan record, or ``{"error": ...}`` when no plan exists.
    """
    plans = _list_plans(app=app, env=env)
    if not plans:
        return {"error": f"No deploy plans found for {app} in {env}"}
    latest = plans[0]
    now = datetime.now(timezone.utc).isoformat()
    latest["status"] = "rolled_back"
    latest["updated_at"] = now
    latest["rolled_back_to"] = to_sha
    # setdefault tolerates legacy/hand-edited plan files that lack a
    # "history" list (a plain append would raise KeyError).
    latest.setdefault("history", []).append(
        {"status": "rolled_back", "at": now, "to_sha": to_sha}
    )
    (DEPLOY_DIR / f"{latest['plan_id']}.json").write_text(json.dumps(latest, indent=2))
    return latest
|
|
@@ -0,0 +1,261 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Backend bridge to delimit-gateway core engine.
|
|
3
|
+
|
|
4
|
+
Adapter Boundary Contract v1.0:
|
|
5
|
+
- Pure translation layer: no governance logic here
|
|
6
|
+
- Deterministic error on failure (never swallow)
|
|
7
|
+
- Zero state (stateless between calls)
|
|
8
|
+
- No schema forking (gateway types are canonical)
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import sys
|
|
12
|
+
import logging
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import Any, Dict, List, Optional
|
|
15
|
+
|
|
16
|
+
# Module-scoped logger for gateway-core bridge diagnostics.
logger = logging.getLogger("delimit.ai.gateway_core")

# Add gateway root to path so we can import core modules
# (three .parent hops because this file lives under <root>/ai/backends/).
GATEWAY_ROOT = Path(__file__).resolve().parent.parent.parent
if str(GATEWAY_ROOT) not in sys.path:
    sys.path.insert(0, str(GATEWAY_ROOT))
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _load_specs(spec_path: str) -> Dict[str, Any]:
|
|
25
|
+
"""Load an OpenAPI spec from a file path."""
|
|
26
|
+
import json
|
|
27
|
+
import yaml
|
|
28
|
+
|
|
29
|
+
p = Path(spec_path)
|
|
30
|
+
if not p.exists():
|
|
31
|
+
raise FileNotFoundError(f"Spec file not found: {spec_path}")
|
|
32
|
+
|
|
33
|
+
content = p.read_text(encoding="utf-8")
|
|
34
|
+
if p.suffix in (".yaml", ".yml"):
|
|
35
|
+
return yaml.safe_load(content)
|
|
36
|
+
return json.loads(content)
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def run_lint(old_spec: str, new_spec: str, policy_file: Optional[str] = None) -> Dict[str, Any]:
    """Run the full lint pipeline: diff + policy evaluation.

    This is the Tier 1 primary tool — it combines diff detection with
    policy enforcement into a single pass/fail decision.
    """
    from core.policy_engine import evaluate_with_policy

    baseline = _load_specs(old_spec)
    candidate = _load_specs(new_spec)
    return evaluate_with_policy(baseline, candidate, policy_file)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def run_diff(old_spec: str, new_spec: str) -> Dict[str, Any]:
    """Run the diff engine only — no policy evaluation.

    Returns total/breaking change counts plus a serialized list of every
    detected change.
    """
    from core.diff_engine_v2 import OpenAPIDiffEngine

    changes = OpenAPIDiffEngine().compare(_load_specs(old_spec), _load_specs(new_spec))

    serialized = [
        {
            "type": change.type.value,
            "path": change.path,
            "message": change.message,
            "is_breaking": change.is_breaking,
            "details": change.details,
        }
        for change in changes
    ]
    breaking_count = sum(1 for change in changes if change.is_breaking)

    return {
        "total_changes": len(changes),
        "breaking_changes": breaking_count,
        "changes": serialized,
    }
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def run_policy(spec_files: List[str], policy_file: Optional[str] = None) -> Dict[str, Any]:
    """Evaluate specs against governance policy without diffing.

    Summarizes the loaded policy: rule counts, the policy file used, and —
    when no policy file was given — a generated policy template.

    NOTE(review): ``spec_files`` is accepted but not consulted in the
    visible implementation; kept for interface compatibility.
    """
    from core.policy_engine import PolicyEngine

    engine = PolicyEngine(policy_file)
    template = None if policy_file else engine.create_policy_template()
    return {
        "rules_loaded": len(engine.rules),
        "custom_rules": len(engine.custom_rules),
        "policy_file": policy_file,
        "template": template,
    }
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def query_ledger(
    ledger_path: str,
    api_name: Optional[str] = None,
    repository: Optional[str] = None,
    validate_chain: bool = False,
) -> Dict[str, Any]:
    """Query the contract ledger.

    Optionally validates the hash chain, then returns either the timeline
    for ``api_name``, the events for ``repository``, or (when neither is
    given) just the latest event. ``api_name`` wins if both are passed.
    """
    from core.contract_ledger import ContractLedger

    ledger = ContractLedger(ledger_path)

    if not ledger.exists():
        return {"error": "Ledger not found", "path": ledger_path}

    out: Dict[str, Any] = {
        "path": ledger_path,
        "event_count": ledger.get_event_count(),
    }

    if validate_chain:
        try:
            ledger.validate_chain()
        except Exception as exc:
            out["chain_valid"] = False
            out["chain_error"] = str(exc)
        else:
            out["chain_valid"] = True

    if api_name:
        out["events"] = ledger.get_api_timeline(api_name)
    elif repository:
        out["events"] = ledger.get_events_by_repository(repository)
    else:
        out["latest_event"] = ledger.get_latest_event()

    return out
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def run_impact(api_name: str, dependency_file: Optional[str] = None) -> Dict[str, Any]:
    """Analyze downstream impact of an API change.

    Builds a dependency graph (optionally loaded from ``dependency_file``)
    and asks the impact analyzer about ``api_name``.
    """
    from core.dependency_graph import DependencyGraph
    from core.impact_analyzer import ImpactAnalyzer

    dep_graph = DependencyGraph()
    if dependency_file:
        dep_graph.load_from_file(dependency_file)
    return ImpactAnalyzer(dep_graph).analyze(api_name)
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def run_semver(
    old_spec: str,
    new_spec: str,
    current_version: Optional[str] = None,
) -> Dict[str, Any]:
    """Classify the semver bump for a spec change.

    Returns detailed breakdown: bump level, per-category counts,
    and — when ``current_version`` is given — the bumped version string.
    """
    from core.diff_engine_v2 import OpenAPIDiffEngine
    from core.semver_classifier import bump_version, classify, classify_detailed

    changes = OpenAPIDiffEngine().compare(_load_specs(old_spec), _load_specs(new_spec))
    verdict = classify_detailed(changes)

    if current_version:
        verdict["current_version"] = current_version
        verdict["next_version"] = bump_version(current_version, classify(changes))

    return verdict
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def run_explain(
    old_spec: str,
    new_spec: str,
    template: str = "developer",
    old_version: Optional[str] = None,
    new_version: Optional[str] = None,
    api_name: Optional[str] = None,
) -> Dict[str, Any]:
    """Generate a human-readable explanation of API changes.

    Supports 7 templates: developer, team_lead, product, migration,
    changelog, pr_comment, slack.
    """
    from core.diff_engine_v2 import OpenAPIDiffEngine
    from core.explainer import TEMPLATES, explain

    diff_changes = OpenAPIDiffEngine().compare(
        _load_specs(old_spec), _load_specs(new_spec)
    )

    rendered = explain(
        diff_changes,
        template=template,
        old_version=old_version,
        new_version=new_version,
        api_name=api_name,
    )

    return {
        "template": template,
        "available_templates": TEMPLATES,
        "output": rendered,
    }
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
def run_zero_spec(
    project_dir: str = ".",
    python_bin: Optional[str] = None,
) -> Dict[str, Any]:
    """Detect framework and extract OpenAPI spec from source code.

    Supports FastAPI, NestJS, and Express (dispatch below). Returns the
    extracted spec or an error with guidance on how to fix it.

    Args:
        project_dir: Directory to scan for a supported framework.
        python_bin: Python interpreter to use for FastAPI extraction.

    Returns:
        Dict with detection info (framework, confidence, message) merged
        with the extractor's result; on a successful extraction the first
        detected app location is exposed as app_file/app_variable/app_line.
    """
    from core.zero_spec.detector import detect_framework, Framework
    from core.zero_spec.express_extractor import extract_express_spec
    from core.zero_spec.fastapi_extractor import extract_fastapi_spec
    from core.zero_spec.nestjs_extractor import extract_nestjs_spec

    info = detect_framework(project_dir)

    result: Dict[str, Any] = {
        "framework": info.framework.value,
        "confidence": info.confidence,
        "message": info.message,
    }

    # Dispatch to the framework-specific extractor; only FastAPI needs a
    # Python interpreter to import the app.
    if info.framework == Framework.FASTAPI:
        extraction = extract_fastapi_spec(info, project_dir, python_bin=python_bin)
    elif info.framework == Framework.NESTJS:
        extraction = extract_nestjs_spec(info, project_dir)
    elif info.framework == Framework.EXPRESS:
        extraction = extract_express_spec(info, project_dir)
    else:
        result["success"] = False
        result["error"] = "No supported API framework found. Provide an OpenAPI spec file."
        result["error_type"] = "no_framework"
        return result

    result.update(extraction)
    # Shared post-processing (previously duplicated verbatim in all three
    # branches): point the caller at the first detected app object.
    if extraction["success"] and info.app_locations:
        loc = info.app_locations[0]
        result["app_file"] = loc.file
        result["app_variable"] = loc.variable
        result["app_line"] = loc.line

    return result
|