delimit-cli 4.1.48 → 4.1.50
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +39 -0
- package/bin/delimit-setup.js +30 -13
- package/gateway/ai/backends/gateway_core.py +13 -222
- package/gateway/ai/backends/repo_bridge.py +16 -80
- package/gateway/core/spec_detector.py +7 -47
- package/lib/cross-model-hooks.js +30 -6
- package/lib/hooks-installer.js +19 -11
- package/package.json +2 -2
- package/gateway/core/generator_drift.py +0 -242
- package/gateway/core/json_schema_diff.py +0 -375
package/lib/cross-model-hooks.js
CHANGED
|
@@ -577,14 +577,38 @@ echo ""
|
|
|
577
577
|
const configJson = JSON.stringify(config, null, 2);
|
|
578
578
|
for (const target of writeTargets) {
|
|
579
579
|
try {
|
|
580
|
-
|
|
581
|
-
|
|
582
|
-
const existing = JSON.parse(fs.readFileSync(target, 'utf-8'));
|
|
583
|
-
existing.hooks = config.hooks;
|
|
584
|
-
fs.writeFileSync(target, JSON.stringify(existing, null, 2));
|
|
585
|
-
} else {
|
|
580
|
+
if (target === configPath) {
|
|
581
|
+
// Global ~/.claude/settings.json: write the merged config we built
|
|
586
582
|
fs.writeFileSync(target, configJson);
|
|
583
|
+
continue;
|
|
584
|
+
}
|
|
585
|
+
|
|
586
|
+
// Project settings (.claude/settings.json in cwd): merge ONLY the
|
|
587
|
+
// Delimit-added hook entries into existing project hooks. Never
|
|
588
|
+
// overwrite the project's own hook entries with global ones.
|
|
589
|
+
// Previous behavior (`existing.hooks = config.hooks`) propagated
|
|
590
|
+
// every global hook into project files, wiping project-local hooks
|
|
591
|
+
// and leaking unrelated user customizations across repos.
|
|
592
|
+
let existing = {};
|
|
593
|
+
if (fs.existsSync(target)) {
|
|
594
|
+
try { existing = JSON.parse(fs.readFileSync(target, 'utf-8')); } catch { existing = {}; }
|
|
595
|
+
}
|
|
596
|
+
if (!existing.hooks) existing.hooks = {};
|
|
597
|
+
|
|
598
|
+
for (const [event, groups] of Object.entries(config.hooks || {})) {
|
|
599
|
+
if (!Array.isArray(groups)) continue;
|
|
600
|
+
if (!existing.hooks[event]) existing.hooks[event] = [];
|
|
601
|
+
for (const group of groups) {
|
|
602
|
+
const cmds = (group.hooks || []).map(h => h.command || '');
|
|
603
|
+
// Only propagate Delimit-owned hook groups to project files
|
|
604
|
+
if (!cmds.some(c => c.includes('delimit'))) continue;
|
|
605
|
+
const alreadyHas = existing.hooks[event].some(eg =>
|
|
606
|
+
(eg.hooks || []).some(h => cmds.includes(h.command))
|
|
607
|
+
);
|
|
608
|
+
if (!alreadyHas) existing.hooks[event].push(group);
|
|
609
|
+
}
|
|
587
610
|
}
|
|
611
|
+
fs.writeFileSync(target, JSON.stringify(existing, null, 2));
|
|
588
612
|
} catch {}
|
|
589
613
|
}
|
|
590
614
|
return changes;
|
package/lib/hooks-installer.js
CHANGED
|
@@ -185,29 +185,37 @@ class DelimitHooksInstaller {
|
|
|
185
185
|
|
|
186
186
|
async configureClaudeCode() {
|
|
187
187
|
const claudeConfigPath = path.join(process.env.HOME, '.claude.json');
|
|
188
|
-
|
|
188
|
+
|
|
189
189
|
if (fs.existsSync(claudeConfigPath)) {
|
|
190
190
|
try {
|
|
191
191
|
const config = JSON.parse(fs.readFileSync(claudeConfigPath, 'utf8'));
|
|
192
|
-
|
|
193
|
-
//
|
|
192
|
+
|
|
193
|
+
// Preserve any existing hooks the user has set. Only fill in
|
|
194
|
+
// Delimit MCP hooks if those specific keys are missing —
|
|
195
|
+
// never overwrite a user-chosen preCommand/postCommand.
|
|
194
196
|
if (!config.hooks) {
|
|
195
197
|
config.hooks = {};
|
|
196
198
|
}
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
199
|
+
const delimitHooks = {
|
|
200
|
+
preCommand: path.join(this.mcpHooksDir, 'pre-mcp-call'),
|
|
201
|
+
postCommand: path.join(this.mcpHooksDir, 'post-mcp-call'),
|
|
202
|
+
authentication: path.join(this.mcpHooksDir, 'mcp-auth'),
|
|
203
|
+
audit: path.join(this.mcpHooksDir, 'mcp-audit'),
|
|
204
|
+
};
|
|
205
|
+
for (const [key, value] of Object.entries(delimitHooks)) {
|
|
206
|
+
if (!config.hooks[key]) {
|
|
207
|
+
config.hooks[key] = value;
|
|
208
|
+
}
|
|
209
|
+
}
|
|
210
|
+
|
|
211
|
+
// Add Delimit governance settings (own namespace, safe to set)
|
|
204
212
|
config.delimitGovernance = {
|
|
205
213
|
enabled: true,
|
|
206
214
|
agent: 'http://127.0.0.1:7823',
|
|
207
215
|
mode: 'auto',
|
|
208
216
|
hooks: this.mcpHooks.map(h => path.join(this.mcpHooksDir, h))
|
|
209
217
|
};
|
|
210
|
-
|
|
218
|
+
|
|
211
219
|
fs.writeFileSync(claudeConfigPath, JSON.stringify(config, null, 2));
|
|
212
220
|
console.log(chalk.green(' ✓ Claude Code configuration updated'));
|
|
213
221
|
} catch (e) {
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "delimit-cli",
|
|
3
3
|
"mcpName": "io.github.delimit-ai/delimit-mcp-server",
|
|
4
|
-
"version": "4.1.48",
|
|
4
|
+
"version": "4.1.50",
|
|
5
5
|
"description": "Unify Claude Code, Codex, Cursor, and Gemini CLI with persistent context, governance, and multi-model debate.",
|
|
6
6
|
"main": "index.js",
|
|
7
7
|
"files": [
|
|
@@ -35,7 +35,7 @@
|
|
|
35
35
|
"postinstall": "node scripts/postinstall.js",
|
|
36
36
|
"sync-gateway": "bash scripts/sync-gateway.sh",
|
|
37
37
|
"prepublishOnly": "bash scripts/publish-ci-guard.sh && npm run sync-gateway && bash scripts/security-check.sh",
|
|
38
|
-
"test": "node --test tests/setup-onboarding.test.js tests/setup-matrix.test.js tests/config-export-import.test.js tests/cross-model-hooks.test.js tests/golden-path.test.js tests/v420-features.test.js"
|
|
38
|
+
"test": "node --test tests/setup-onboarding.test.js tests/setup-matrix.test.js tests/setup-no-clobber.test.js tests/config-export-import.test.js tests/cross-model-hooks.test.js tests/golden-path.test.js tests/v420-features.test.js"
|
|
39
39
|
},
|
|
40
40
|
"keywords": [
|
|
41
41
|
"openapi",
|
|
@@ -1,242 +0,0 @@
|
|
|
1
|
-
"""Generator drift detection (LED-713).
|
|
2
|
-
|
|
3
|
-
Detects when a committed generated artifact (e.g. agentspec's
|
|
4
|
-
schemas/v1/agent.schema.json regenerated from a Zod source) has drifted
|
|
5
|
-
from what its generator script would produce today.
|
|
6
|
-
|
|
7
|
-
Use case: a maintainer changes the source of truth (Zod schema, OpenAPI
|
|
8
|
-
generator, protobuf, etc.) but forgets to regenerate and commit the
|
|
9
|
-
artifact. CI catches the drift before the stale generated file ships.
|
|
10
|
-
|
|
11
|
-
Generic over generators — caller supplies the regen command and the
|
|
12
|
-
artifact path. Returns a structured drift report that can be merged into
|
|
13
|
-
the standard delimit-action PR comment.
|
|
14
|
-
"""
|
|
15
|
-
|
|
16
|
-
from __future__ import annotations
|
|
17
|
-
|
|
18
|
-
import json
|
|
19
|
-
import os
|
|
20
|
-
import shlex
|
|
21
|
-
import shutil
|
|
22
|
-
import subprocess
|
|
23
|
-
import tempfile
|
|
24
|
-
from dataclasses import dataclass, field
|
|
25
|
-
from pathlib import Path
|
|
26
|
-
from typing import Any, Dict, List, Optional
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
@dataclass
|
|
30
|
-
class DriftResult:
|
|
31
|
-
drifted: bool
|
|
32
|
-
artifact_path: str
|
|
33
|
-
regen_command: str
|
|
34
|
-
changes: List[Any] = field(default_factory=list) # JSONSchemaChange list when drift detected
|
|
35
|
-
error: Optional[str] = None
|
|
36
|
-
runtime_seconds: float = 0.0
|
|
37
|
-
|
|
38
|
-
def to_dict(self) -> Dict[str, Any]:
|
|
39
|
-
return {
|
|
40
|
-
"drifted": self.drifted,
|
|
41
|
-
"artifact_path": self.artifact_path,
|
|
42
|
-
"regen_command": self.regen_command,
|
|
43
|
-
"change_count": len(self.changes),
|
|
44
|
-
"changes": [
|
|
45
|
-
{
|
|
46
|
-
"type": c.type.value,
|
|
47
|
-
"path": c.path,
|
|
48
|
-
"message": c.message,
|
|
49
|
-
"is_breaking": c.is_breaking,
|
|
50
|
-
}
|
|
51
|
-
for c in self.changes
|
|
52
|
-
],
|
|
53
|
-
"error": self.error,
|
|
54
|
-
"runtime_seconds": round(self.runtime_seconds, 3),
|
|
55
|
-
}
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
def detect_drift(
|
|
59
|
-
repo_root: str,
|
|
60
|
-
artifact_path: str,
|
|
61
|
-
regen_command: str,
|
|
62
|
-
timeout_seconds: int = 60,
|
|
63
|
-
) -> DriftResult:
|
|
64
|
-
"""Check whether the committed artifact matches its generator output.
|
|
65
|
-
|
|
66
|
-
Args:
|
|
67
|
-
repo_root: Absolute path to the repo checkout.
|
|
68
|
-
artifact_path: Path to the generated artifact, relative to repo_root.
|
|
69
|
-
regen_command: Shell command that regenerates the artifact in place.
|
|
70
|
-
Example: "pnpm -r run build" or "node packages/sdk/dist/scripts/export-schema.js"
|
|
71
|
-
timeout_seconds: Hard timeout for the generator (default 60).
|
|
72
|
-
|
|
73
|
-
Returns:
|
|
74
|
-
DriftResult with drift status, classified changes, and runtime.
|
|
75
|
-
"""
|
|
76
|
-
import time
|
|
77
|
-
|
|
78
|
-
repo_root_p = Path(repo_root).resolve()
|
|
79
|
-
artifact_p = (repo_root_p / artifact_path).resolve()
|
|
80
|
-
|
|
81
|
-
if not artifact_p.exists():
|
|
82
|
-
return DriftResult(
|
|
83
|
-
drifted=False,
|
|
84
|
-
artifact_path=artifact_path,
|
|
85
|
-
regen_command=regen_command,
|
|
86
|
-
error=f"Artifact not found: {artifact_path}",
|
|
87
|
-
)
|
|
88
|
-
|
|
89
|
-
# Snapshot the committed artifact before regen
|
|
90
|
-
try:
|
|
91
|
-
committed_text = artifact_p.read_text()
|
|
92
|
-
committed_doc = json.loads(committed_text)
|
|
93
|
-
except (OSError, json.JSONDecodeError) as e:
|
|
94
|
-
return DriftResult(
|
|
95
|
-
drifted=False,
|
|
96
|
-
artifact_path=artifact_path,
|
|
97
|
-
regen_command=regen_command,
|
|
98
|
-
error=f"Failed to read committed artifact: {e}",
|
|
99
|
-
)
|
|
100
|
-
|
|
101
|
-
# Parse the command safely — shell=False to avoid command injection.
|
|
102
|
-
# Users needing shell features (&&, |, env vars, etc.) should point
|
|
103
|
-
# generator_command at a script file instead of an inline chain.
|
|
104
|
-
try:
|
|
105
|
-
argv = shlex.split(regen_command)
|
|
106
|
-
except ValueError as e:
|
|
107
|
-
return DriftResult(
|
|
108
|
-
drifted=False,
|
|
109
|
-
artifact_path=artifact_path,
|
|
110
|
-
regen_command=regen_command,
|
|
111
|
-
error=f"Could not parse generator_command: {e}",
|
|
112
|
-
)
|
|
113
|
-
if not argv:
|
|
114
|
-
return DriftResult(
|
|
115
|
-
drifted=False,
|
|
116
|
-
artifact_path=artifact_path,
|
|
117
|
-
regen_command=regen_command,
|
|
118
|
-
error="generator_command is empty",
|
|
119
|
-
)
|
|
120
|
-
# Reject obvious shell metacharacters — force users to use a script
|
|
121
|
-
# file if they need chaining or redirection.
|
|
122
|
-
SHELL_META = set("&|;><`$")
|
|
123
|
-
if any(ch in token for token in argv for ch in SHELL_META):
|
|
124
|
-
return DriftResult(
|
|
125
|
-
drifted=False,
|
|
126
|
-
artifact_path=artifact_path,
|
|
127
|
-
regen_command=regen_command,
|
|
128
|
-
error="generator_command contains shell metacharacters (&|;><`$). Point it at a script file instead of chaining inline.",
|
|
129
|
-
)
|
|
130
|
-
|
|
131
|
-
# Run the regenerator
|
|
132
|
-
start = time.time()
|
|
133
|
-
try:
|
|
134
|
-
result = subprocess.run(
|
|
135
|
-
argv,
|
|
136
|
-
shell=False,
|
|
137
|
-
cwd=str(repo_root_p),
|
|
138
|
-
capture_output=True,
|
|
139
|
-
text=True,
|
|
140
|
-
timeout=timeout_seconds,
|
|
141
|
-
)
|
|
142
|
-
except subprocess.TimeoutExpired:
|
|
143
|
-
return DriftResult(
|
|
144
|
-
drifted=False,
|
|
145
|
-
artifact_path=artifact_path,
|
|
146
|
-
regen_command=regen_command,
|
|
147
|
-
error=f"Generator timed out after {timeout_seconds}s",
|
|
148
|
-
runtime_seconds=time.time() - start,
|
|
149
|
-
)
|
|
150
|
-
except FileNotFoundError as e:
|
|
151
|
-
return DriftResult(
|
|
152
|
-
drifted=False,
|
|
153
|
-
artifact_path=artifact_path,
|
|
154
|
-
regen_command=regen_command,
|
|
155
|
-
error=f"Generator executable not found: {e}",
|
|
156
|
-
runtime_seconds=time.time() - start,
|
|
157
|
-
)
|
|
158
|
-
|
|
159
|
-
runtime = time.time() - start
|
|
160
|
-
|
|
161
|
-
if result.returncode != 0:
|
|
162
|
-
return DriftResult(
|
|
163
|
-
drifted=False,
|
|
164
|
-
artifact_path=artifact_path,
|
|
165
|
-
regen_command=regen_command,
|
|
166
|
-
error=f"Generator exited {result.returncode}: {result.stderr.strip()[:500]}",
|
|
167
|
-
runtime_seconds=runtime,
|
|
168
|
-
)
|
|
169
|
-
|
|
170
|
-
# Read the regenerated artifact
|
|
171
|
-
try:
|
|
172
|
-
regen_text = artifact_p.read_text()
|
|
173
|
-
regen_doc = json.loads(regen_text)
|
|
174
|
-
except (OSError, json.JSONDecodeError) as e:
|
|
175
|
-
# Restore committed version so we don't leave the workspace dirty
|
|
176
|
-
artifact_p.write_text(committed_text)
|
|
177
|
-
return DriftResult(
|
|
178
|
-
drifted=False,
|
|
179
|
-
artifact_path=artifact_path,
|
|
180
|
-
regen_command=regen_command,
|
|
181
|
-
error=f"Failed to read regenerated artifact: {e}",
|
|
182
|
-
runtime_seconds=runtime,
|
|
183
|
-
)
|
|
184
|
-
|
|
185
|
-
# Restore the committed file before diffing — leave the workspace clean
|
|
186
|
-
artifact_p.write_text(committed_text)
|
|
187
|
-
|
|
188
|
-
# Quick equality check first
|
|
189
|
-
if committed_doc == regen_doc:
|
|
190
|
-
return DriftResult(
|
|
191
|
-
drifted=False,
|
|
192
|
-
artifact_path=artifact_path,
|
|
193
|
-
regen_command=regen_command,
|
|
194
|
-
runtime_seconds=runtime,
|
|
195
|
-
)
|
|
196
|
-
|
|
197
|
-
# Drift detected — classify the changes via the JSON Schema diff engine
|
|
198
|
-
from .json_schema_diff import JSONSchemaDiffEngine
|
|
199
|
-
|
|
200
|
-
engine = JSONSchemaDiffEngine()
|
|
201
|
-
changes = engine.compare(committed_doc, regen_doc)
|
|
202
|
-
return DriftResult(
|
|
203
|
-
drifted=True,
|
|
204
|
-
artifact_path=artifact_path,
|
|
205
|
-
regen_command=regen_command,
|
|
206
|
-
changes=changes,
|
|
207
|
-
runtime_seconds=runtime,
|
|
208
|
-
)
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
def format_drift_report(result: DriftResult) -> str:
|
|
212
|
-
"""Render a drift report as a markdown block for PR comments."""
|
|
213
|
-
if result.error:
|
|
214
|
-
return (
|
|
215
|
-
f"### Generator drift check\n\n"
|
|
216
|
-
f"Artifact: `{result.artifact_path}` \n"
|
|
217
|
-
f"Status: error \n"
|
|
218
|
-
f"Detail: {result.error}\n"
|
|
219
|
-
)
|
|
220
|
-
if not result.drifted:
|
|
221
|
-
return (
|
|
222
|
-
f"### Generator drift check\n\n"
|
|
223
|
-
f"Artifact: `{result.artifact_path}` \n"
|
|
224
|
-
f"Status: clean (committed artifact matches generator output) \n"
|
|
225
|
-
f"Generator runtime: {result.runtime_seconds:.2f}s\n"
|
|
226
|
-
)
|
|
227
|
-
breaking = sum(1 for c in result.changes if c.is_breaking)
|
|
228
|
-
non_breaking = len(result.changes) - breaking
|
|
229
|
-
lines = [
|
|
230
|
-
"### Generator drift check",
|
|
231
|
-
"",
|
|
232
|
-
f"Artifact: `{result.artifact_path}` ",
|
|
233
|
-
f"Status: drifted ({len(result.changes)} change(s) — {breaking} breaking, {non_breaking} non-breaking) ",
|
|
234
|
-
f"Generator runtime: {result.runtime_seconds:.2f}s ",
|
|
235
|
-
"",
|
|
236
|
-
"The committed artifact does not match what the generator produces today. Re-run the generator and commit the result, or revert the source change.",
|
|
237
|
-
"",
|
|
238
|
-
]
|
|
239
|
-
for c in result.changes:
|
|
240
|
-
marker = "breaking" if c.is_breaking else "ok"
|
|
241
|
-
lines.append(f"- [{marker}] {c.type.value} at `{c.path}` — {c.message}")
|
|
242
|
-
return "\n".join(lines) + "\n"
|