delimit-cli 4.1.43 → 4.1.47

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/CHANGELOG.md +33 -0
  2. package/README.md +46 -5
  3. package/bin/delimit-cli.js +1987 -337
  4. package/bin/delimit-setup.js +108 -66
  5. package/gateway/ai/activate_helpers.py +253 -7
  6. package/gateway/ai/agent_dispatch.py +34 -2
  7. package/gateway/ai/backends/deploy_bridge.py +167 -12
  8. package/gateway/ai/backends/gateway_core.py +236 -13
  9. package/gateway/ai/backends/repo_bridge.py +80 -16
  10. package/gateway/ai/backends/tools_infra.py +49 -32
  11. package/gateway/ai/checksums.sha256 +6 -0
  12. package/gateway/ai/content_engine.py +1276 -2
  13. package/gateway/ai/continuity.py +462 -0
  14. package/gateway/ai/deliberation.pyi +53 -0
  15. package/gateway/ai/github_scanner.py +1 -1
  16. package/gateway/ai/governance.py +58 -0
  17. package/gateway/ai/governance.pyi +32 -0
  18. package/gateway/ai/governance_hardening.py +569 -0
  19. package/gateway/ai/inbox_daemon_runner.py +217 -0
  20. package/gateway/ai/key_resolver.py +95 -2
  21. package/gateway/ai/ledger_manager.py +53 -3
  22. package/gateway/ai/license.py +104 -3
  23. package/gateway/ai/license_core.py +177 -36
  24. package/gateway/ai/license_core.pyi +50 -0
  25. package/gateway/ai/loop_engine.py +929 -294
  26. package/gateway/ai/notify.py +1786 -2
  27. package/gateway/ai/reddit_scanner.py +190 -1
  28. package/gateway/ai/screen_record.py +1 -1
  29. package/gateway/ai/secrets_broker.py +5 -1
  30. package/gateway/ai/server.py +254 -19
  31. package/gateway/ai/social_cache.py +341 -0
  32. package/gateway/ai/social_daemon.py +41 -10
  33. package/gateway/ai/supabase_sync.py +190 -2
  34. package/gateway/ai/swarm.py +86 -0
  35. package/gateway/ai/swarm_infra.py +656 -0
  36. package/gateway/ai/tui.py +594 -36
  37. package/gateway/ai/tweet_corpus_schema.sql +76 -0
  38. package/gateway/core/diff_engine_v2.py +6 -2
  39. package/gateway/core/generator_drift.py +242 -0
  40. package/gateway/core/json_schema_diff.py +375 -0
  41. package/gateway/core/openapi_version.py +124 -0
  42. package/gateway/core/spec_detector.py +47 -7
  43. package/gateway/core/spec_health.py +5 -2
  44. package/gateway/core/zero_spec/express_extractor.py +2 -2
  45. package/gateway/core/zero_spec/nestjs_extractor.py +40 -9
  46. package/gateway/requirements.txt +3 -6
  47. package/lib/cross-model-hooks.js +4 -12
  48. package/package.json +11 -3
  49. package/scripts/demo-v420-clean.sh +267 -0
  50. package/scripts/demo-v420-deliberation.sh +217 -0
  51. package/scripts/demo-v420.sh +55 -0
  52. package/scripts/postinstall.js +4 -3
  53. package/scripts/publish-ci-guard.sh +30 -0
  54. package/scripts/record-and-upload.sh +132 -0
  55. package/scripts/release.sh +126 -0
  56. package/scripts/sync-gateway.sh +112 -0
  57. package/scripts/youtube-upload.py +141 -0
@@ -154,20 +154,175 @@ def publish(app: str, git_ref: Optional[str] = None) -> Dict[str, Any]:
154
154
  return latest
155
155
 
156
156
 
157
+ DEPLOY_TARGETS = [
158
+ {"name": "delimit.ai", "url": "https://delimit.ai", "kind": "vercel"},
159
+ {"name": "electricgrill.com", "url": "https://electricgrill.com", "kind": "vercel"},
160
+ {"name": "robotax.com", "url": "https://robotax.com", "kind": "vercel"},
161
+ {"name": "npm:delimit-cli", "url": "https://www.npmjs.com/package/delimit-cli", "kind": "npm"},
162
+ {"name": "github:delimit-mcp-server", "url": "https://github.com/delimit-ai/delimit-mcp-server", "kind": "github"},
163
+ ]
164
+
165
+
166
+ def _check_http_health(url: str, timeout: int = 10) -> Dict[str, Any]:
167
+ """Check HTTP health for a single URL. Returns status, response time, headers."""
168
+ import ssl
169
+ import time
170
+ import urllib.request
171
+
172
+ result: Dict[str, Any] = {"url": url, "healthy": False}
173
+ try:
174
+ ctx = ssl.create_default_context()
175
+ req = urllib.request.Request(url, method="GET", headers={"User-Agent": "delimit-deploy-verify/1.0"})
176
+ start = time.monotonic()
177
+ with urllib.request.urlopen(req, timeout=timeout, context=ctx) as resp:
178
+ elapsed_ms = round((time.monotonic() - start) * 1000)
179
+ result["status_code"] = resp.status
180
+ result["response_time_ms"] = elapsed_ms
181
+ result["healthy"] = 200 <= resp.status < 400
182
+ except Exception as exc:
183
+ result["error"] = str(exc)
184
+ result["status_code"] = None
185
+ result["response_time_ms"] = None
186
+ return result
187
+
188
+
189
+ def _check_ssl_cert(hostname: str, port: int = 443, warn_days: int = 30) -> Dict[str, Any]:
190
+ """Validate SSL certificate for a hostname. Checks expiry within warn_days."""
191
+ import socket
192
+ import ssl
193
+
194
+ result: Dict[str, Any] = {"hostname": hostname, "ssl_valid": False}
195
+ try:
196
+ ctx = ssl.create_default_context()
197
+ with socket.create_connection((hostname, port), timeout=10) as sock:
198
+ with ctx.wrap_socket(sock, server_hostname=hostname) as ssock:
199
+ cert = ssock.getpeercert()
200
+ if not cert:
201
+ result["error"] = "No certificate returned"
202
+ return result
203
+ not_after_str = cert.get("notAfter", "")
204
+ # Python ssl cert dates: 'Mon DD HH:MM:SS YYYY GMT'
205
+ not_after = datetime.strptime(not_after_str, "%b %d %H:%M:%S %Y %Z").replace(tzinfo=timezone.utc)
206
+ now = datetime.now(timezone.utc)
207
+ days_remaining = (not_after - now).days
208
+ result["ssl_valid"] = True
209
+ result["expires"] = not_after.isoformat()
210
+ result["days_remaining"] = days_remaining
211
+ result["expiry_warning"] = days_remaining < warn_days
212
+ if days_remaining < warn_days:
213
+ result["warning"] = f"SSL certificate expires in {days_remaining} days (threshold: {warn_days})"
214
+ # Extract issuer for diagnostics
215
+ issuer = dict(x[0] for x in cert.get("issuer", ()))
216
+ result["issuer"] = issuer.get("organizationName", issuer.get("commonName", "unknown"))
217
+ except Exception as exc:
218
+ result["error"] = str(exc)
219
+ return result
220
+
221
+
222
+ def _check_npm_version(expected_version: Optional[str] = None) -> Dict[str, Any]:
223
+ """Check the published npm version of delimit-cli."""
224
+ import subprocess
225
+
226
+ result: Dict[str, Any] = {"package": "delimit-cli", "healthy": False}
227
+ try:
228
+ proc = subprocess.run(
229
+ ["npm", "view", "delimit-cli", "version"],
230
+ capture_output=True, text=True, timeout=15,
231
+ )
232
+ if proc.returncode == 0:
233
+ published = proc.stdout.strip()
234
+ result["published_version"] = published
235
+ result["healthy"] = True
236
+ if expected_version:
237
+ result["expected_version"] = expected_version
238
+ result["version_match"] = published == expected_version
239
+ if published != expected_version:
240
+ result["warning"] = f"Version mismatch: published={published}, expected={expected_version}"
241
+ else:
242
+ result["error"] = proc.stderr.strip() or "npm view returned non-zero"
243
+ except FileNotFoundError:
244
+ result["error"] = "npm not found on PATH"
245
+ except subprocess.TimeoutExpired:
246
+ result["error"] = "npm view timed out after 15s"
247
+ except Exception as exc:
248
+ result["error"] = str(exc)
249
+ return result
250
+
251
+
252
+ def _extract_hostname(url: str) -> str:
253
+ """Extract hostname from a URL."""
254
+ from urllib.parse import urlparse
255
+ return urlparse(url).hostname or ""
256
+
257
+
157
258
  def verify(app: str, env: str, git_ref: Optional[str] = None) -> Dict[str, Any]:
158
- """Verify deployment health (stub returns plan status)."""
159
- plans = _list_plans(app=app, env=env)
160
- if not plans:
161
- return {"app": app, "env": env, "status": "no_deploys", "healthy": False}
162
- latest = plans[0]
163
- return {
164
- "app": app,
165
- "env": env,
166
- "plan_id": latest["plan_id"],
167
- "status": latest["status"],
168
- "healthy": latest["status"] in ("published", "planned"),
169
- "message": "Health check is a stub — no real endpoint verification yet.",
259
+ """Verify deployment health with real HTTP checks, SSL validation, and npm version.
260
+
261
+ Checks every deployment target for:
262
+ - HTTP 2xx reachability and response time
263
+ - SSL certificate validity (warns if expiring within 30 days)
264
+ - npm published version (for npm targets)
265
+
266
+ Also cross-references local deploy plan status when available.
267
+ """
268
+ now = datetime.now(timezone.utc).isoformat()
269
+ checks: List[Dict[str, Any]] = []
270
+ all_healthy = True
271
+ warnings: List[str] = []
272
+
273
+ for target in DEPLOY_TARGETS:
274
+ entry: Dict[str, Any] = {"name": target["name"], "kind": target["kind"]}
275
+
276
+ # HTTP health
277
+ http = _check_http_health(target["url"])
278
+ entry["http"] = http
279
+ if not http.get("healthy"):
280
+ all_healthy = False
281
+
282
+ # SSL cert check
283
+ hostname = _extract_hostname(target["url"])
284
+ if hostname:
285
+ ssl_result = _check_ssl_cert(hostname)
286
+ entry["ssl"] = ssl_result
287
+ if ssl_result.get("expiry_warning"):
288
+ warnings.append(ssl_result.get("warning", f"SSL expiry warning for {hostname}"))
289
+ if not ssl_result.get("ssl_valid"):
290
+ all_healthy = False
291
+
292
+ # npm version check (only for npm targets)
293
+ if target["kind"] == "npm":
294
+ npm_result = _check_npm_version()
295
+ entry["npm"] = npm_result
296
+ if not npm_result.get("healthy"):
297
+ all_healthy = False
298
+
299
+ checks.append(entry)
300
+
301
+ # Cross-reference deploy plan if one exists
302
+ plan_info: Optional[Dict[str, Any]] = None
303
+ plans = _list_plans(app=app or None, env=env or None)
304
+ if plans:
305
+ latest = plans[0]
306
+ plan_info = {
307
+ "plan_id": latest["plan_id"],
308
+ "plan_status": latest["status"],
309
+ "updated_at": latest.get("updated_at"),
310
+ }
311
+
312
+ result: Dict[str, Any] = {
313
+ "app": app or "all",
314
+ "env": env or "production",
315
+ "verified_at": now,
316
+ "healthy": all_healthy,
317
+ "targets_checked": len(checks),
318
+ "targets_healthy": sum(1 for c in checks if c.get("http", {}).get("healthy")),
319
+ "checks": checks,
170
320
  }
321
+ if warnings:
322
+ result["warnings"] = warnings
323
+ if plan_info:
324
+ result["deploy_plan"] = plan_info
325
+ return result
171
326
 
172
327
 
173
328
  def rollback(app: str, env: str, to_sha: Optional[str] = None) -> Dict[str, Any]:
@@ -23,7 +23,12 @@ if str(GATEWAY_ROOT) not in sys.path:
23
23
 
24
24
 
25
25
  def _load_specs(spec_path: str) -> Dict[str, Any]:
26
- """Load an OpenAPI spec from a file path."""
26
+ """Load an API spec (OpenAPI or JSON Schema) from a file path.
27
+
28
+ Performs a non-fatal version compatibility check (LED-290) so that
29
+ unknown OpenAPI versions log a warning instead of silently parsing.
30
+ JSON Schema documents skip the OpenAPI version assert.
31
+ """
27
32
  import yaml
28
33
 
29
34
  p = Path(spec_path)
@@ -32,8 +37,149 @@ def _load_specs(spec_path: str) -> Dict[str, Any]:
32
37
 
33
38
  content = p.read_text(encoding="utf-8")
34
39
  if p.suffix in (".yaml", ".yml"):
35
- return yaml.safe_load(content)
36
- return json.loads(content)
40
+ spec = yaml.safe_load(content)
41
+ else:
42
+ spec = json.loads(content)
43
+
44
+ # LED-290: warn (non-fatal) if version is outside the validated set.
45
+ # Only applies to OpenAPI/Swagger documents — bare JSON Schema files
46
+ # have no "openapi"/"swagger" key and would otherwise trip the assert.
47
+ try:
48
+ if isinstance(spec, dict) and ("openapi" in spec or "swagger" in spec):
49
+ from core.openapi_version import assert_supported
50
+ assert_supported(spec, strict=False)
51
+ except Exception as exc: # pragma: no cover -- defensive only
52
+ logger.debug("openapi version check skipped: %s", exc)
53
+
54
+ return spec
55
+
56
+
57
+ # ---------------------------------------------------------------------------
58
+ # LED-713: JSON Schema spec-type dispatch helpers
59
+ # ---------------------------------------------------------------------------
60
+
61
+
62
+ def _spec_type(doc: Any) -> str:
63
+ """Classify a loaded spec doc. 'openapi' or 'json_schema'."""
64
+ from core.spec_detector import detect_spec_type
65
+ t = detect_spec_type(doc)
66
+ # Fallback to openapi for unknown so we never break existing flows.
67
+ return "json_schema" if t == "json_schema" else "openapi"
68
+
69
+
70
+ def _json_schema_changes_to_dicts(changes: List[Any]) -> List[Dict[str, Any]]:
71
+ return [
72
+ {
73
+ "type": c.type.value,
74
+ "path": c.path,
75
+ "message": c.message,
76
+ "is_breaking": c.is_breaking,
77
+ "details": c.details,
78
+ }
79
+ for c in changes
80
+ ]
81
+
82
+
83
+ def _json_schema_semver(changes: List[Any]) -> Dict[str, Any]:
84
+ """Build an OpenAPI-compatible semver result from JSON Schema changes.
85
+
86
+ Mirrors core.semver_classifier.classify_detailed shape so downstream
87
+ consumers (PR comment, CI formatter, ledger) don't need to branch.
88
+ """
89
+ breaking = [c for c in changes if c.is_breaking]
90
+ non_breaking = [c for c in changes if not c.is_breaking]
91
+ if breaking:
92
+ bump = "major"
93
+ elif non_breaking:
94
+ bump = "minor"
95
+ else:
96
+ bump = "none"
97
+ return {
98
+ "bump": bump,
99
+ "is_breaking": bool(breaking),
100
+ "counts": {
101
+ "breaking": len(breaking),
102
+ "non_breaking": len(non_breaking),
103
+ "total": len(changes),
104
+ },
105
+ }
106
+
107
+
108
+ def _bump_semver_version(current: str, bump: str) -> Optional[str]:
109
+ """Minimal semver bump for JSON Schema path (core.semver_classifier
110
+ only understands OpenAPI ChangeType enums)."""
111
+ if not current:
112
+ return None
113
+ try:
114
+ parts = current.lstrip("v").split(".")
115
+ major, minor, patch = (int(parts[0]), int(parts[1]), int(parts[2]))
116
+ except Exception:
117
+ return None
118
+ if bump == "major":
119
+ return f"{major + 1}.0.0"
120
+ if bump == "minor":
121
+ return f"{major}.{minor + 1}.0"
122
+ if bump == "patch":
123
+ return f"{major}.{minor}.{patch + 1}"
124
+ return current
125
+
126
+
127
+ def _run_json_schema_lint(
128
+ old_doc: Dict[str, Any],
129
+ new_doc: Dict[str, Any],
130
+ current_version: Optional[str] = None,
131
+ api_name: Optional[str] = None,
132
+ ) -> Dict[str, Any]:
133
+ """Build an evaluate_with_policy-compatible result for JSON Schema.
134
+
135
+ Policy rules in Delimit are defined against OpenAPI ChangeType values,
136
+ so they do not apply here. We return zero violations and rely on the
137
+ breaking-change count + semver bump to drive the governance gate.
138
+ """
139
+ from core.json_schema_diff import JSONSchemaDiffEngine
140
+
141
+ engine = JSONSchemaDiffEngine()
142
+ changes = engine.compare(old_doc, new_doc)
143
+ semver = _json_schema_semver(changes)
144
+
145
+ if current_version:
146
+ semver["current_version"] = current_version
147
+ semver["next_version"] = _bump_semver_version(current_version, semver["bump"])
148
+
149
+ breaking_count = semver["counts"]["breaking"]
150
+ total = semver["counts"]["total"]
151
+
152
+ decision = "pass"
153
+ exit_code = 0
154
+ # No policy rules apply to JSON Schema, but breaking changes still
155
+ # flag MAJOR semver and the downstream gate uses that to block.
156
+ # Mirror the shape of evaluate_with_policy so the action/CLI renderers
157
+ # need no JSON Schema-specific branch.
158
+ result: Dict[str, Any] = {
159
+ "spec_type": "json_schema",
160
+ "api_name": api_name or new_doc.get("title") or old_doc.get("title") or "JSON Schema",
161
+ "decision": decision,
162
+ "exit_code": exit_code,
163
+ "violations": [],
164
+ "summary": {
165
+ "total_changes": total,
166
+ "breaking_changes": breaking_count,
167
+ "violations": 0,
168
+ "errors": 0,
169
+ "warnings": 0,
170
+ },
171
+ "all_changes": [
172
+ {
173
+ "type": c.type.value,
174
+ "path": c.path,
175
+ "message": c.message,
176
+ "is_breaking": c.is_breaking,
177
+ }
178
+ for c in changes
179
+ ],
180
+ "semver": semver,
181
+ }
182
+ return result
37
183
 
38
184
 
39
185
  def _read_jsonl(path: Path) -> List[Dict[str, Any]]:
@@ -101,29 +247,51 @@ def run_lint(old_spec: str, new_spec: str, policy_file: Optional[str] = None) ->
101
247
  """Run the full lint pipeline: diff + policy evaluation.
102
248
 
103
249
  This is the Tier 1 primary tool — combines diff detection with
104
- policy enforcement into a single pass/fail decision.
250
+ policy enforcement into a single pass/fail decision. Auto-detects
251
+ spec type (OpenAPI vs JSON Schema, LED-713) and dispatches to the
252
+ matching engine.
105
253
  """
106
254
  from core.policy_engine import evaluate_with_policy
107
255
 
108
256
  old = _load_specs(old_spec)
109
257
  new = _load_specs(new_spec)
110
258
 
259
+ # LED-713: JSON Schema dispatch. Policy rules are OpenAPI-specific,
260
+ # so JSON Schema takes the no-policy (breaking-count + semver) path.
261
+ if _spec_type(new) == "json_schema" or _spec_type(old) == "json_schema":
262
+ return _run_json_schema_lint(old, new)
263
+
111
264
  return evaluate_with_policy(old, new, policy_file)
112
265
 
113
266
 
114
267
  def run_diff(old_spec: str, new_spec: str) -> Dict[str, Any]:
115
- """Run diff engine only — no policy evaluation."""
116
- from core.diff_engine_v2 import OpenAPIDiffEngine
268
+ """Run diff engine only — no policy evaluation.
117
269
 
270
+ Auto-detects OpenAPI vs JSON Schema and dispatches (LED-713).
271
+ """
118
272
  old = _load_specs(old_spec)
119
273
  new = _load_specs(new_spec)
120
274
 
275
+ if _spec_type(new) == "json_schema" or _spec_type(old) == "json_schema":
276
+ from core.json_schema_diff import JSONSchemaDiffEngine
277
+ engine = JSONSchemaDiffEngine()
278
+ changes = engine.compare(old, new)
279
+ breaking = [c for c in changes if c.is_breaking]
280
+ return {
281
+ "spec_type": "json_schema",
282
+ "total_changes": len(changes),
283
+ "breaking_changes": len(breaking),
284
+ "changes": _json_schema_changes_to_dicts(changes),
285
+ }
286
+
287
+ from core.diff_engine_v2 import OpenAPIDiffEngine
121
288
  engine = OpenAPIDiffEngine()
122
289
  changes = engine.compare(old, new)
123
290
 
124
291
  breaking = [c for c in changes if c.is_breaking]
125
292
 
126
293
  return {
294
+ "spec_type": "openapi",
127
295
  "total_changes": len(changes),
128
296
  "breaking_changes": len(breaking),
129
297
  "changes": [
@@ -150,13 +318,20 @@ def run_changelog(
150
318
  Uses the diff engine to detect changes, then formats them into
151
319
  a human-readable changelog grouped by category.
152
320
  """
153
- from core.diff_engine_v2 import OpenAPIDiffEngine
154
321
  from datetime import datetime, timezone
155
322
 
156
323
  old = _load_specs(old_spec)
157
324
  new = _load_specs(new_spec)
158
325
 
159
- engine = OpenAPIDiffEngine()
326
+ # LED-713: dispatch on spec type. JSONSchemaChange / Change share the
327
+ # (.type.value, .path, .message, .is_breaking) duck type.
328
+ if _spec_type(new) == "json_schema" or _spec_type(old) == "json_schema":
329
+ from core.json_schema_diff import JSONSchemaDiffEngine
330
+ engine = JSONSchemaDiffEngine()
331
+ else:
332
+ from core.diff_engine_v2 import OpenAPIDiffEngine
333
+ engine = OpenAPIDiffEngine()
334
+
160
335
  changes = engine.compare(old, new)
161
336
 
162
337
  # Categorize changes
@@ -794,14 +969,26 @@ def run_semver(
794
969
  """Classify the semver bump for a spec change.
795
970
 
796
971
  Returns detailed breakdown: bump level, per-category counts,
797
- and optionally the bumped version string.
972
+ and optionally the bumped version string. Auto-detects OpenAPI vs
973
+ JSON Schema (LED-713).
798
974
  """
799
- from core.diff_engine_v2 import OpenAPIDiffEngine
800
- from core.semver_classifier import classify_detailed, bump_version, classify
801
-
802
975
  old = _load_specs(old_spec)
803
976
  new = _load_specs(new_spec)
804
977
 
978
+ # LED-713: JSON Schema path
979
+ if _spec_type(new) == "json_schema" or _spec_type(old) == "json_schema":
980
+ from core.json_schema_diff import JSONSchemaDiffEngine
981
+ engine = JSONSchemaDiffEngine()
982
+ changes = engine.compare(old, new)
983
+ result = _json_schema_semver(changes)
984
+ if current_version:
985
+ result["current_version"] = current_version
986
+ result["next_version"] = _bump_semver_version(current_version, result["bump"])
987
+ return result
988
+
989
+ from core.diff_engine_v2 import OpenAPIDiffEngine
990
+ from core.semver_classifier import classify_detailed, bump_version, classify
991
+
805
992
  engine = OpenAPIDiffEngine()
806
993
  changes = engine.compare(old, new)
807
994
  result = classify_detailed(changes)
@@ -932,7 +1119,6 @@ def run_diff_report(
932
1119
  """
933
1120
  from datetime import datetime, timezone
934
1121
 
935
- from core.diff_engine_v2 import OpenAPIDiffEngine
936
1122
  from core.policy_engine import PolicyEngine
937
1123
  from core.semver_classifier import classify_detailed, classify
938
1124
  from core.spec_health import score_spec
@@ -941,6 +1127,43 @@ def run_diff_report(
941
1127
  old = _load_specs(old_spec)
942
1128
  new = _load_specs(new_spec)
943
1129
 
1130
+ # LED-713: JSON Schema dispatch — short-circuit to a minimal report
1131
+ # shape compatible with the JSON renderer (HTML renderer remains
1132
+ # OpenAPI-only; JSON Schema callers should use fmt="json").
1133
+ if _spec_type(new) == "json_schema" or _spec_type(old) == "json_schema":
1134
+ from core.json_schema_diff import JSONSchemaDiffEngine
1135
+ js_engine = JSONSchemaDiffEngine()
1136
+ js_changes = js_engine.compare(old, new)
1137
+ js_breaking = [c for c in js_changes if c.is_breaking]
1138
+ js_semver = _json_schema_semver(js_changes)
1139
+ now_js = datetime.now(timezone.utc)
1140
+ return {
1141
+ "format": fmt,
1142
+ "spec_type": "json_schema",
1143
+ "generated_at": now_js.isoformat(),
1144
+ "old_spec": old_spec,
1145
+ "new_spec": new_spec,
1146
+ "old_title": old.get("title", "") if isinstance(old, dict) else "",
1147
+ "new_title": new.get("title", "") if isinstance(new, dict) else "",
1148
+ "semver": js_semver,
1149
+ "changes": _json_schema_changes_to_dicts(js_changes),
1150
+ "breaking_count": len(js_breaking),
1151
+ "non_breaking_count": len(js_changes) - len(js_breaking),
1152
+ "total_changes": len(js_changes),
1153
+ "policy": {
1154
+ "decision": "pass",
1155
+ "violations": [],
1156
+ "errors": 0,
1157
+ "warnings": 0,
1158
+ },
1159
+ "health": None,
1160
+ "migration": "",
1161
+ "output_file": output_file,
1162
+ "note": "JSON Schema report (policy rules and HTML report are OpenAPI-only in v1)",
1163
+ }
1164
+
1165
+ from core.diff_engine_v2 import OpenAPIDiffEngine
1166
+
944
1167
  # -- Diff --
945
1168
  engine = OpenAPIDiffEngine()
946
1169
  changes = engine.compare(old, new)
@@ -158,21 +158,80 @@ def config_audit(target: str = ".", options: Optional[Dict] = None) -> Dict[str,
158
158
  # ─── EvidencePack ───────────────────────────────────────────────────────
159
159
 
160
160
  def evidence_collect(target: str = ".", options: Optional[Dict] = None) -> Dict[str, Any]:
161
- """Collect project evidence: git log, test files, configs, governance data."""
162
- import subprocess, time as _time
163
- root = Path(target).resolve()
164
- evidence: Dict[str, Any] = {"collected_at": _time.time(), "target": str(root)}
165
- # Git log
166
- try:
167
- r = subprocess.run(["git", "-C", str(root), "log", "--oneline", "-10"], capture_output=True, text=True, timeout=10)
168
- evidence["git_log"] = r.stdout.strip().splitlines() if r.returncode == 0 else []
169
- except Exception:
161
+ """Collect project evidence: git log, test files, configs, governance data.
162
+
163
+ Accepts either a local filesystem path (repo directory) or a remote
164
+ reference (GitHub URL, owner/repo#N, or any non-filesystem string).
165
+ Remote targets skip the filesystem walk and store reference metadata.
166
+ """
167
+ import re
168
+ import subprocess
169
+ import time as _time
170
+
171
+ opts = options or {}
172
+ evidence_type = opts.get("evidence_type", "")
173
+
174
+ # Detect non-filesystem targets: URLs, owner/repo#N, bare issue refs, etc.
175
+ is_remote = (
176
+ "://" in target
177
+ or target.startswith("http")
178
+ or re.match(r"^[\w.-]+/[\w.-]+#\d+$", target) is not None
179
+ or "#" in target
180
+ )
181
+
182
+ evidence: Dict[str, Any] = {"collected_at": _time.time(), "target": target}
183
+ if evidence_type:
184
+ evidence["evidence_type"] = evidence_type
185
+
186
+ if is_remote:
187
+ # Remote/reference target — no filesystem walk, just record metadata.
188
+ evidence["target_type"] = "remote"
170
189
  evidence["git_log"] = []
171
- # Test files
172
- test_dirs = [d for d in ["tests", "test", "__tests__", "spec"] if (root / d).exists()]
173
- evidence["test_directories"] = test_dirs
174
- # Configs
175
- evidence["configs"] = [f.name for f in root.iterdir() if f.is_file() and (f.suffix in [".json", ".yaml", ".yml", ".toml"] or f.name.startswith("."))]
190
+ evidence["test_directories"] = []
191
+ evidence["configs"] = []
192
+ m = re.match(r"^([\w.-]+)/([\w.-]+)#(\d+)$", target)
193
+ if m:
194
+ evidence["repo"] = f"{m.group(1)}/{m.group(2)}"
195
+ evidence["issue_number"] = int(m.group(3))
196
+ else:
197
+ root = Path(target).resolve()
198
+ evidence["target"] = str(root)
199
+ evidence["target_type"] = "local"
200
+
201
+ if not root.exists():
202
+ return {
203
+ "tool": "evidence.collect",
204
+ "status": "error",
205
+ "error": "target_not_found",
206
+ "message": f"Path {root} does not exist. For remote targets, pass a URL or owner/repo#N.",
207
+ "target": target,
208
+ }
209
+
210
+ # Git log (safe for non-git dirs)
211
+ try:
212
+ r = subprocess.run(
213
+ ["git", "-C", str(root), "log", "--oneline", "-10"],
214
+ capture_output=True, text=True, timeout=10,
215
+ )
216
+ evidence["git_log"] = r.stdout.strip().splitlines() if r.returncode == 0 else []
217
+ except Exception:
218
+ evidence["git_log"] = []
219
+
220
+ # Test dirs + configs (only if target is a directory)
221
+ if root.is_dir():
222
+ test_dirs = [d for d in ["tests", "test", "__tests__", "spec"] if (root / d).exists()]
223
+ evidence["test_directories"] = test_dirs
224
+ try:
225
+ evidence["configs"] = [
226
+ f.name for f in root.iterdir()
227
+ if f.is_file() and (f.suffix in [".json", ".yaml", ".yml", ".toml"] or f.name.startswith("."))
228
+ ]
229
+ except (PermissionError, OSError):
230
+ evidence["configs"] = []
231
+ else:
232
+ evidence["test_directories"] = []
233
+ evidence["configs"] = []
234
+
176
235
  # Save bundle
177
236
  ev_dir = Path(os.environ.get("DELIMIT_HOME", str(Path.home() / ".delimit"))) / "evidence"
178
237
  ev_dir.mkdir(parents=True, exist_ok=True)
@@ -180,8 +239,13 @@ def evidence_collect(target: str = ".", options: Optional[Dict] = None) -> Dict[
180
239
  bundle_path = ev_dir / f"{bundle_id}.json"
181
240
  evidence["bundle_id"] = bundle_id
182
241
  bundle_path.write_text(json.dumps(evidence, indent=2))
183
- return {"tool": "evidence.collect", "status": "ok", "bundle_id": bundle_id,
184
- "bundle_path": str(bundle_path), "summary": {k: len(v) if isinstance(v, list) else v for k, v in evidence.items()}}
242
+ return {
243
+ "tool": "evidence.collect",
244
+ "status": "ok",
245
+ "bundle_id": bundle_id,
246
+ "bundle_path": str(bundle_path),
247
+ "summary": {k: len(v) if isinstance(v, list) else v for k, v in evidence.items()},
248
+ }
185
249
 
186
250
 
187
251
  def evidence_verify(bundle_id: Optional[str] = None, bundle_path: Optional[str] = None, options: Optional[Dict] = None) -> Dict[str, Any]: