delimit-cli 2.4.0 → 3.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (112)
  1. package/.dockerignore +7 -0
  2. package/.github/workflows/ci.yml +22 -0
  3. package/CODE_OF_CONDUCT.md +48 -0
  4. package/CONTRIBUTING.md +67 -0
  5. package/Dockerfile +9 -0
  6. package/LICENSE +21 -0
  7. package/README.md +18 -69
  8. package/SECURITY.md +42 -0
  9. package/adapters/gemini-forge.js +11 -0
  10. package/adapters/gemini-jamsons.js +152 -0
  11. package/bin/delimit-cli.js +8 -0
  12. package/bin/delimit-setup.js +258 -0
  13. package/gateway/ai/backends/__init__.py +0 -0
  14. package/gateway/ai/backends/async_utils.py +21 -0
  15. package/gateway/ai/backends/deploy_bridge.py +150 -0
  16. package/gateway/ai/backends/gateway_core.py +261 -0
  17. package/gateway/ai/backends/generate_bridge.py +38 -0
  18. package/gateway/ai/backends/governance_bridge.py +196 -0
  19. package/gateway/ai/backends/intel_bridge.py +59 -0
  20. package/gateway/ai/backends/memory_bridge.py +93 -0
  21. package/gateway/ai/backends/ops_bridge.py +137 -0
  22. package/gateway/ai/backends/os_bridge.py +82 -0
  23. package/gateway/ai/backends/repo_bridge.py +117 -0
  24. package/gateway/ai/backends/ui_bridge.py +118 -0
  25. package/gateway/ai/backends/vault_bridge.py +129 -0
  26. package/gateway/ai/server.py +1182 -0
  27. package/gateway/core/__init__.py +3 -0
  28. package/gateway/core/__pycache__/__init__.cpython-310.pyc +0 -0
  29. package/gateway/core/__pycache__/auto_baseline.cpython-310.pyc +0 -0
  30. package/gateway/core/__pycache__/ci_formatter.cpython-310.pyc +0 -0
  31. package/gateway/core/__pycache__/contract_ledger.cpython-310.pyc +0 -0
  32. package/gateway/core/__pycache__/dependency_graph.cpython-310.pyc +0 -0
  33. package/gateway/core/__pycache__/dependency_manifest.cpython-310.pyc +0 -0
  34. package/gateway/core/__pycache__/diff_engine_v2.cpython-310.pyc +0 -0
  35. package/gateway/core/__pycache__/event_backbone.cpython-310.pyc +0 -0
  36. package/gateway/core/__pycache__/event_schema.cpython-310.pyc +0 -0
  37. package/gateway/core/__pycache__/explainer.cpython-310.pyc +0 -0
  38. package/gateway/core/__pycache__/gateway.cpython-310.pyc +0 -0
  39. package/gateway/core/__pycache__/gateway_v2.cpython-310.pyc +0 -0
  40. package/gateway/core/__pycache__/gateway_v3.cpython-310.pyc +0 -0
  41. package/gateway/core/__pycache__/impact_analyzer.cpython-310.pyc +0 -0
  42. package/gateway/core/__pycache__/policy_engine.cpython-310.pyc +0 -0
  43. package/gateway/core/__pycache__/registry.cpython-310.pyc +0 -0
  44. package/gateway/core/__pycache__/registry_v2.cpython-310.pyc +0 -0
  45. package/gateway/core/__pycache__/registry_v3.cpython-310.pyc +0 -0
  46. package/gateway/core/__pycache__/semver_classifier.cpython-310.pyc +0 -0
  47. package/gateway/core/__pycache__/spec_detector.cpython-310.pyc +0 -0
  48. package/gateway/core/__pycache__/surface_bridge.cpython-310.pyc +0 -0
  49. package/gateway/core/auto_baseline.py +304 -0
  50. package/gateway/core/ci_formatter.py +283 -0
  51. package/gateway/core/complexity_analyzer.py +386 -0
  52. package/gateway/core/contract_ledger.py +345 -0
  53. package/gateway/core/dependency_graph.py +218 -0
  54. package/gateway/core/dependency_manifest.py +223 -0
  55. package/gateway/core/diff_engine_v2.py +477 -0
  56. package/gateway/core/diff_engine_v2.py.bak +426 -0
  57. package/gateway/core/event_backbone.py +268 -0
  58. package/gateway/core/event_schema.py +258 -0
  59. package/gateway/core/explainer.py +438 -0
  60. package/gateway/core/gateway.py +128 -0
  61. package/gateway/core/gateway_v2.py +154 -0
  62. package/gateway/core/gateway_v3.py +224 -0
  63. package/gateway/core/impact_analyzer.py +163 -0
  64. package/gateway/core/policies/default.yml +13 -0
  65. package/gateway/core/policies/relaxed.yml +48 -0
  66. package/gateway/core/policies/strict.yml +55 -0
  67. package/gateway/core/policy_engine.py +464 -0
  68. package/gateway/core/registry.py +52 -0
  69. package/gateway/core/registry_v2.py +132 -0
  70. package/gateway/core/registry_v3.py +134 -0
  71. package/gateway/core/semver_classifier.py +152 -0
  72. package/gateway/core/spec_detector.py +130 -0
  73. package/gateway/core/surface_bridge.py +307 -0
  74. package/gateway/core/zero_spec/__init__.py +4 -0
  75. package/gateway/core/zero_spec/__pycache__/__init__.cpython-310.pyc +0 -0
  76. package/gateway/core/zero_spec/__pycache__/detector.cpython-310.pyc +0 -0
  77. package/gateway/core/zero_spec/__pycache__/express_extractor.cpython-310.pyc +0 -0
  78. package/gateway/core/zero_spec/__pycache__/fastapi_extractor.cpython-310.pyc +0 -0
  79. package/gateway/core/zero_spec/__pycache__/nestjs_extractor.cpython-310.pyc +0 -0
  80. package/gateway/core/zero_spec/detector.py +353 -0
  81. package/gateway/core/zero_spec/express_extractor.py +483 -0
  82. package/gateway/core/zero_spec/fastapi_extractor.py +254 -0
  83. package/gateway/core/zero_spec/nestjs_extractor.py +369 -0
  84. package/gateway/tasks/__init__.py +1 -0
  85. package/gateway/tasks/__pycache__/__init__.cpython-310.pyc +0 -0
  86. package/gateway/tasks/__pycache__/check_policy.cpython-310.pyc +0 -0
  87. package/gateway/tasks/__pycache__/check_policy_v2.cpython-310.pyc +0 -0
  88. package/gateway/tasks/__pycache__/check_policy_v3.cpython-310.pyc +0 -0
  89. package/gateway/tasks/__pycache__/explain_diff.cpython-310.pyc +0 -0
  90. package/gateway/tasks/__pycache__/explain_diff_v2.cpython-310.pyc +0 -0
  91. package/gateway/tasks/__pycache__/validate_api.cpython-310.pyc +0 -0
  92. package/gateway/tasks/__pycache__/validate_api_v2.cpython-310.pyc +0 -0
  93. package/gateway/tasks/__pycache__/validate_api_v3.cpython-310.pyc +0 -0
  94. package/gateway/tasks/check_policy.py +177 -0
  95. package/gateway/tasks/check_policy_v2.py +255 -0
  96. package/gateway/tasks/check_policy_v3.py +255 -0
  97. package/gateway/tasks/explain_diff.py +305 -0
  98. package/gateway/tasks/explain_diff_v2.py +267 -0
  99. package/gateway/tasks/validate_api.py +131 -0
  100. package/gateway/tasks/validate_api_v2.py +208 -0
  101. package/gateway/tasks/validate_api_v3.py +163 -0
  102. package/package.json +4 -3
  103. package/adapters/codex-skill.js +0 -87
  104. package/adapters/cursor-extension.js +0 -190
  105. package/adapters/gemini-action.js +0 -93
  106. package/adapters/openai-function.js +0 -112
  107. package/adapters/xai-plugin.js +0 -151
  108. package/test-decision-engine.js +0 -181
  109. package/test-hook.js +0 -27
  110. package/tests/cli.test.js +0 -359
  111. package/tests/fixtures/openapi-changed.yaml +0 -56
  112. package/tests/fixtures/openapi.yaml +0 -87
@@ -0,0 +1,38 @@
1
+ """
2
+ Bridge to delimit-generator MCP server.
3
+ Tier 3 Extended — code generation and project scaffolding.
4
+ """
5
+
6
+ import sys
7
+ import logging
8
+ from pathlib import Path
9
+ from typing import Any, Dict, List, Optional
10
+
11
+ logger = logging.getLogger("delimit.ai.generate_bridge")
12
+
13
GEN_PACKAGE = Path("/home/delimit/.delimit_suite/packages/delimit-generator")


def _ensure_gen_path():
    """Make the delimit-generator package importable (idempotent)."""
    pkg = str(GEN_PACKAGE)
    if pkg not in sys.path:
        sys.path.insert(0, pkg)


def template(template_type: str, name: str, framework: str = "nextjs", features: Optional[List[str]] = None) -> Dict[str, Any]:
    """Generate code template.

    Delegates to delimit-generator's ``generate_template``. When the package
    (or its entry point) is missing, returns an echo payload whose ``note``
    field carries the import/attribute error instead of raising.
    """
    _ensure_gen_path()
    feats = features or []
    try:
        from run_mcp import generate_template
        return generate_template(template_type=template_type, name=name, framework=framework, features=feats)
    except (ImportError, AttributeError) as exc:
        return {
            "tool": "gen.template",
            "template_type": template_type,
            "name": name,
            "framework": framework,
            "features": feats,
            "note": str(exc),
        }


def scaffold(project_type: str, name: str, packages: Optional[List[str]] = None) -> Dict[str, Any]:
    """Scaffold new project structure.

    Same contract as :func:`template`: forwards to ``scaffold_project`` when
    available, otherwise echoes the request with the failure in ``note``.
    """
    _ensure_gen_path()
    pkgs = packages or []
    try:
        from run_mcp import scaffold_project
        return scaffold_project(project_type=project_type, name=name, packages=pkgs)
    except (ImportError, AttributeError) as exc:
        return {
            "tool": "gen.scaffold",
            "project_type": project_type,
            "name": name,
            "packages": pkgs,
            "note": str(exc),
        }
@@ -0,0 +1,196 @@
1
+ """
2
+ Bridge to governance tools.
3
+ Tier 2 Platform — governance policy enforcement and task management.
4
+
5
+ health/status/policy: implemented with real filesystem checks.
6
+ evaluate/new_task/run/verify: require governancegate package (honest error if missing).
7
+ """
8
+
9
+ import json
10
+ import subprocess
11
+ import sys
12
+ import logging
13
+ import yaml
14
+ from pathlib import Path
15
+ from typing import Any, Dict, Optional
16
+
17
+ logger = logging.getLogger("delimit.ai.governance_bridge")
18
+
19
+
20
def health(repo: str = ".") -> Dict[str, Any]:
    """Check governance system health with real filesystem checks.

    Reports the presence of ``.delimit/``, ``policies.yml`` and the event
    ledger, plus best-effort git working-tree cleanliness. Overall status is
    ``not_initialized`` (no ``.delimit/``), ``degraded`` (no policies file)
    or ``healthy``.
    """
    root = Path(repo).resolve()
    delimit_dir = root / ".delimit"
    policies = delimit_dir / "policies.yml"
    ledger = delimit_dir / "ledger" / "events.jsonl"

    # Count non-blank ledger lines; an unreadable ledger counts as zero.
    entries = 0
    if ledger.is_file():
        try:
            entries = sum(1 for ln in ledger.read_text().splitlines() if ln.strip())
        except Exception:
            pass

    # Git cleanliness is best-effort: None when git is absent, times out,
    # or the directory is not a repository (non-zero exit).
    clean = None
    try:
        proc = subprocess.run(
            ["git", "status", "--porcelain"],
            capture_output=True, text=True, timeout=5, cwd=str(root),
        )
        if proc.returncode == 0:
            clean = not proc.stdout.strip()
    except Exception:
        pass

    checks = {
        "delimit_dir": delimit_dir.is_dir(),
        "policies_file": policies.is_file(),
        "ledger_exists": ledger.is_file(),
        "ledger_entries": entries,
        "git_clean": clean,
    }

    if not checks["delimit_dir"]:
        overall = "not_initialized"
    elif not checks["policies_file"]:
        overall = "degraded"
    else:
        overall = "healthy"

    return {
        "tool": "gov.health",
        "repo": str(root),
        "status": overall,
        "checks": checks,
    }
72
+
73
+
74
def status(repo: str = ".") -> Dict[str, Any]:
    """Get governance status by reading actual policy files.

    Summarises the active rules parsed from ``.delimit/policies.yml`` and
    the number of ledger entries; missing or unreadable files degrade
    gracefully to empty results.
    """
    root = Path(repo).resolve()
    policies = root / ".delimit" / "policies.yml"
    ledger = root / ".delimit" / "ledger" / "events.jsonl"

    rules = []
    if policies.is_file():
        try:
            doc = yaml.safe_load(policies.read_text())
            if isinstance(doc, dict):
                for entry in doc.get("rules", []):
                    rules.append({
                        "id": entry.get("id", "unknown"),
                        "name": entry.get("name", ""),
                        "severity": entry.get("severity", "warning"),
                        "action": entry.get("action", "warn"),
                    })
        except Exception as exc:
            logger.warning("Failed to parse policies.yml: %s", exc)

    entries = 0
    if ledger.is_file():
        try:
            entries = sum(1 for ln in ledger.read_text().splitlines() if ln.strip())
        except Exception:
            pass

    return {
        "tool": "gov.status",
        "repo": str(root),
        "policies_file": str(policies) if policies.is_file() else None,
        "active_rules": len(rules),
        "rules": rules,
        "ledger_entries": entries,
    }
110
+
111
+
112
def policy(repo: str = ".") -> Dict[str, Any]:
    """Load and return the actual policies.yml content.

    Returns the parsed policy plus the raw YAML text; a missing file yields
    ``status: no_policy`` and a parse failure yields ``status: parse_error``.
    """
    root = Path(repo).resolve()
    policies = root / ".delimit" / "policies.yml"

    if not policies.is_file():
        return {
            "tool": "gov.policy",
            "repo": str(root),
            "status": "no_policy",
            "error": f"No policies.yml found at {policies}. Run: delimit init",
        }

    try:
        raw = policies.read_text()
        return {
            "tool": "gov.policy",
            "repo": str(root),
            "policy": yaml.safe_load(raw),
            "raw": raw,
        }
    except Exception as exc:
        return {
            "tool": "gov.policy",
            "repo": str(root),
            "status": "parse_error",
            "error": f"Failed to parse policies.yml: {exc}",
        }
141
+
142
+
143
def _unavailable(tool: str, engine: str, **context: Any) -> Dict[str, Any]:
    """Build the standard 'governancegate missing' payload.

    All governance engine tools share the same unavailable response; the
    caller's identifying arguments are echoed via *context* so clients can
    correlate the error with the request (matching ``gov.evaluate``'s
    existing behavior of echoing ``action``/``repo``).
    """
    payload: Dict[str, Any] = {
        "tool": tool,
        "status": "not_available",
        "error": f"Governance {engine} engine not running. This tool requires the governancegate package.",
    }
    payload.update(context)
    return payload


def evaluate_trigger(action: str, context: Optional[Dict] = None, repo: str = ".") -> Dict[str, Any]:
    """Evaluate if governance is required for an action."""
    return _unavailable("gov.evaluate", "evaluation", action=action, repo=repo)


def new_task(title: str, scope: str, risk_level: str = "medium", repo: str = ".") -> Dict[str, Any]:
    """Create a new governance task."""
    return _unavailable("gov.new_task", "task", title=title, scope=scope, risk_level=risk_level, repo=repo)


def run_task(task_id: str, repo: str = ".") -> Dict[str, Any]:
    """Run a governance task."""
    return _unavailable("gov.run", "task", task_id=task_id, repo=repo)


def verify(task_id: str, repo: str = ".") -> Dict[str, Any]:
    """Verify a governance task."""
    return _unavailable("gov.verify", "task", task_id=task_id, repo=repo)


def evidence_index(task_id: str, repo: str = ".") -> Dict[str, Any]:
    """Get evidence index for a task."""
    return _unavailable("gov.evidence_index", "evidence", task_id=task_id, repo=repo)


def require_owner_approval(context: str, repo: str = ".") -> Dict[str, Any]:
    """Check if owner approval is required."""
    return _unavailable("gov.require_owner_approval", "approval", context=context, repo=repo)
@@ -0,0 +1,59 @@
1
+ """
2
+ Bridge to delimit-intel (wireintel) MCP server.
3
+ Tier 3 Extended — data intelligence and versioned datasets.
4
+ """
5
+
6
+ import sys
7
+ import logging
8
+ from pathlib import Path
9
+ from typing import Any, Dict, List, Optional
10
+
11
+ logger = logging.getLogger("delimit.ai.intel_bridge")
12
+
13
INTEL_PACKAGE = Path("/home/delimit/.delimit_suite/packages/wireintel")


def _ensure_intel_path():
    """Put wireintel's package roots on sys.path (idempotent).

    NOTE(review): none of the stubs below currently import wireintel; this
    helper is presumably kept for the real pass-through implementation —
    confirm before removing.
    """
    for entry in (str(INTEL_PACKAGE), str(INTEL_PACKAGE / "wireintel")):
        if entry not in sys.path:
            sys.path.insert(0, entry)


# The tools below are echo stubs: each returns its tool label plus the
# arguments it received, so callers can see exactly what would be forwarded.

def dataset_register(name: str, schema: Dict[str, Any], description: Optional[str] = None) -> Dict[str, Any]:
    """Register a new dataset with schema."""
    return {"tool": "wireintel.dataset.register", "name": name, "schema": schema, "description": description}


def dataset_list() -> Dict[str, Any]:
    """List registered datasets."""
    return {"tool": "wireintel.dataset.list"}


def dataset_freeze(dataset_id: str) -> Dict[str, Any]:
    """Mark dataset as immutable."""
    return {"tool": "wireintel.dataset.freeze", "dataset_id": dataset_id}


def dataset_version_create(dataset_id: str, data: Any) -> Dict[str, Any]:
    """Create new version of dataset.

    Fix: *data* is now echoed in the payload like every sibling stub echoes
    its arguments (it was previously dropped silently).
    """
    return {"tool": "wireintel.dataset.version_create", "dataset_id": dataset_id, "data": data}


def dataset_get_version(dataset_id: str, version: Optional[str] = None) -> Dict[str, Any]:
    """Get specific dataset version."""
    return {"tool": "wireintel.dataset.get_version", "dataset_id": dataset_id, "version": version}


def snapshot_ingest(data: Dict[str, Any], provenance: Optional[Dict] = None) -> Dict[str, Any]:
    """Store research snapshot with provenance."""
    return {"tool": "wireintel.snapshot.ingest", "data": data, "provenance": provenance}


def snapshot_get(snapshot_id: str) -> Dict[str, Any]:
    """Retrieve snapshot by ID."""
    return {"tool": "wireintel.snapshot.get", "snapshot_id": snapshot_id}


def query_run(dataset_id: str, query: str, parameters: Optional[Dict] = None) -> Dict[str, Any]:
    """Execute deterministic query on dataset."""
    return {"tool": "wireintel.query.run", "dataset_id": dataset_id, "query": query, "parameters": parameters}
@@ -0,0 +1,93 @@
1
+ """
2
+ Bridge to delimit-memory package.
3
+ Tier 2 Platform tools — semantic memory search and store.
4
+ """
5
+
6
+ import sys
7
+ import json
8
+ import asyncio
9
+ import logging
10
+ from pathlib import Path
11
+ from typing import Any, Dict, Optional
12
+
13
logger = logging.getLogger("delimit.ai.memory_bridge")

MEM_PACKAGE = Path("/home/delimit/.delimit_suite/packages/delimit-memory")

# Lazily-constructed DelimitMemoryServer singleton (None until first success).
_server = None


def _run_async(coro):
    """Drive *coro* to completion from synchronous code.

    Outside any event loop this is plain ``asyncio.run``. Inside a running
    loop (e.g. under FastMCP) the coroutine is executed on a dedicated
    worker thread with a 30-second cap, since ``asyncio.run`` cannot be
    nested.
    """
    try:
        current = asyncio.get_running_loop()
    except RuntimeError:
        current = None

    if current is not None and current.is_running():
        import concurrent.futures
        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as pool:
            return pool.submit(asyncio.run, coro).result(timeout=30)
    return asyncio.run(coro)


def _get_server():
    """Return the shared memory server, building it on first use.

    Puts the delimit-memory package on sys.path, instantiates the server
    and initializes its engine. Returns None when anything fails.
    """
    global _server
    if _server is not None:
        return _server

    # Insert package dir first, then package root, matching import priority.
    for entry in (str(MEM_PACKAGE / "delimit_memory"), str(MEM_PACKAGE)):
        if entry not in sys.path:
            sys.path.insert(0, entry)

    try:
        from delimit_memory.server import DelimitMemoryServer
        _server = DelimitMemoryServer()
        _run_async(_server._initialize_engine())
        return _server
    except Exception as e:
        logger.warning(f"Failed to init memory server: {e}")
        return None


def search(query: str, limit: int = 10) -> Dict[str, Any]:
    """Semantic search across conversation memory."""
    srv = _get_server()
    if srv is None:
        return {"error": "Memory server unavailable", "results": []}
    try:
        raw = _run_async(srv._handle_search({"query": query, "limit": limit}))
        return json.loads(raw) if isinstance(raw, str) else raw
    except Exception as e:
        return {"error": f"Memory search failed: {e}", "results": []}


def store(content: str, tags: Optional[list] = None, context: Optional[str] = None) -> Dict[str, Any]:
    """Store a memory entry."""
    srv = _get_server()
    if srv is None:
        return {"error": "Memory server unavailable"}
    try:
        payload = {"content": content}
        if tags:
            payload["tags"] = tags
        if context:
            payload["context"] = context
        raw = _run_async(srv._handle_store(payload))
        return json.loads(raw) if isinstance(raw, str) else raw
    except Exception as e:
        return {"error": f"Memory store failed: {e}"}


def get_recent(limit: int = 5) -> Dict[str, Any]:
    """Get recent work summary."""
    srv = _get_server()
    if srv is None:
        return {"error": "Memory server unavailable", "results": []}
    try:
        raw = _run_async(srv._handle_get_recent_work({"limit": limit}))
        return json.loads(raw) if isinstance(raw, str) else raw
    except Exception as e:
        return {"error": f"Recent work failed: {e}", "results": []}
@@ -0,0 +1,137 @@
1
+ """
2
+ Bridge to operational tools: releasepilot, costguard, datasteward, observabilityops.
3
+ Governance primitives + internal OS layer.
4
+ """
5
+
6
+ import sys
7
+ import json
8
+ import asyncio
9
+ import logging
10
+ import importlib
11
+ from pathlib import Path
12
+ from typing import Any, Dict, List, Optional
13
+ from .async_utils import run_async
14
+
15
+ logger = logging.getLogger("delimit.ai.ops_bridge")
16
+
17
PACKAGES = Path("/home/delimit/.delimit_suite/packages")

# Add PACKAGES dir so `from shared.base_server import BaseMCPServer` resolves
_packages = str(PACKAGES)
if _packages not in sys.path:
    sys.path.insert(0, _packages)

# Cache of lazily-created MCP server instances, keyed by package name.
_servers = {}


def _call(pkg: str, factory_name: str, method: str, args: Dict, tool_label: str) -> Dict[str, Any]:
    """Instantiate (once) the MCP server for *pkg* and invoke *method*.

    Every failure mode — package missing, factory missing, handler raising —
    is converted into ``{"tool": tool_label, "error": ...}`` so callers never
    see an exception.
    """
    try:
        srv = _servers.get(pkg)
        if srv is None:
            module = importlib.import_module(f"{pkg}.server")
            srv = getattr(module, factory_name)()
            if pkg == "observabilityops" and hasattr(srv, "require_dsn_validation"):
                # Bridge context has no DSN; relax validation for observabilityops.
                srv.require_dsn_validation = False
            _servers[pkg] = srv
        handler = getattr(srv, method, None)
        if handler is None:
            return {"tool": tool_label, "status": "not_implemented", "error": f"Method {method} not found"}
        out = run_async(handler(args, None))
        return json.loads(out) if isinstance(out, str) else out
    except Exception as e:
        return {"tool": tool_label, "error": str(e)}
45
+
46
+
47
# ─── ReleasePilot (Governance Primitive) ────────────────────────────────

def release_plan(environment: str, version: str, repository: str, services: Optional[List[str]] = None) -> Dict[str, Any]:
    """Plan a release for *environment*/*version* via releasepilot."""
    payload = {
        "environment": environment,
        "version": version,
        "repository": repository,
        "services": services or [],
    }
    return _call("releasepilot", "create_releasepilot_server", "_tool_plan", payload, "release.plan")


def release_validate(environment: str, version: str) -> Dict[str, Any]:
    """Validate a planned release."""
    payload = {"environment": environment, "version": version}
    return _call("releasepilot", "create_releasepilot_server", "_tool_validate", payload, "release.validate")


def release_status(environment: str) -> Dict[str, Any]:
    """Report release state for an environment."""
    return _call("releasepilot", "create_releasepilot_server", "_tool_status",
                 {"environment": environment}, "release.status")


def release_rollback(environment: str, version: str, to_version: str) -> Dict[str, Any]:
    """Roll an environment back from *version* to *to_version*."""
    payload = {"environment": environment, "version": version, "to_version": to_version}
    return _call("releasepilot", "create_releasepilot_server", "_tool_rollback", payload, "release.rollback")


def release_history(environment: str, limit: int = 10) -> Dict[str, Any]:
    """List recent releases for an environment."""
    payload = {"environment": environment, "limit": limit}
    return _call("releasepilot", "create_releasepilot_server", "_tool_history", payload, "release.history")


# ─── CostGuard (Governance Primitive) ──────────────────────────────────

def cost_analyze(target: str = ".", options: Optional[Dict] = None) -> Dict[str, Any]:
    """Analyze cloud spend; rejects the stub backend's placeholder output."""
    result = _call("costguard", "create_costguard_server", "_tool_analyze",
                   {"target": target, **(options or {})}, "cost.analyze")
    # The stub implementation fabricates a fixed AWS bill; report that as
    # "not configured" rather than presenting fake numbers as real.
    if result.get("total_cost") in (1247.83, "1247.83"):
        return {"tool": "cost.analyze", "status": "not_configured",
                "error": "No cloud provider configured. Cost analyzer returned placeholder data. Set cloud credentials to enable real cost analysis."}
    return result


def cost_optimize(target: str = ".", options: Optional[Dict] = None) -> Dict[str, Any]:
    """Suggest cost optimizations."""
    payload = {"target": target, **(options or {})}
    return _call("costguard", "create_costguard_server", "_tool_optimize", payload, "cost.optimize")


def cost_alert(action: str = "list", options: Optional[Dict] = None) -> Dict[str, Any]:
    """Manage cost alerts."""
    payload = {"action": action, **(options or {})}
    return _call("costguard", "create_costguard_server", "_tool_alerts", payload, "cost.alert")


# ─── DataSteward (Governance Primitive) ────────────────────────────────

def data_validate(target: str = ".", options: Optional[Dict] = None) -> Dict[str, Any]:
    """Run an integrity check; rejects the stub's vacuous 'passed' verdict."""
    result = _call("datasteward", "create_datasteward_server", "_tool_integrity_check",
                   {"database_url": target, **(options or {})}, "data.validate")
    # A "passed" verdict over zero tables means nothing was actually checked.
    if result.get("tables_checked", -1) == 0 and result.get("integrity_status") == "passed":
        return {"tool": "data.validate", "status": "not_configured",
                "error": "No database configured for validation. Provide a database_url or configure a data source."}
    return result


def data_migrate(target: str = ".", options: Optional[Dict] = None) -> Dict[str, Any]:
    """Report migration status."""
    payload = {"database_url": target, **(options or {})}
    return _call("datasteward", "create_datasteward_server", "_tool_migration_status", payload, "data.migrate")


def data_backup(target: str = ".", options: Optional[Dict] = None) -> Dict[str, Any]:
    """Produce a backup plan."""
    payload = {"database_url": target, **(options or {})}
    return _call("datasteward", "create_datasteward_server", "_tool_backup_plan", payload, "data.backup")


# ─── ObservabilityOps (Internal OS) ────────────────────────────────────

def obs_metrics(query: str, time_range: str = "1h", source: Optional[str] = None) -> Dict[str, Any]:
    """Query metrics over *time_range*."""
    payload = {"query": query, "time_range": time_range, "source": source}
    return _call("observabilityops", "create_observabilityops_server", "_tool_metrics", payload, "obs.metrics")


def obs_logs(query: str, time_range: str = "1h", source: Optional[str] = None) -> Dict[str, Any]:
    """Query logs over *time_range*."""
    payload = {"query": query, "time_range": time_range, "source": source}
    return _call("observabilityops", "create_observabilityops_server", "_tool_logs", payload, "obs.logs")


def obs_alerts(action: str, alert_rule: Optional[Dict] = None, rule_id: Optional[str] = None) -> Dict[str, Any]:
    """Manage alert rules."""
    payload = {"action": action, "alert_rule": alert_rule, "rule_id": rule_id}
    return _call("observabilityops", "create_observabilityops_server", "_tool_alerts", payload, "obs.alerts")


def obs_status() -> Dict[str, Any]:
    """Report observability stack status."""
    return _call("observabilityops", "create_observabilityops_server", "_tool_status", {}, "obs.status")
@@ -0,0 +1,82 @@
1
+ """
2
+ Bridge to delimit-os MCP server.
3
+ Tier 2 Platform tools — pass-through to the OS orchestration layer.
4
+
5
+ These do NOT re-implement OS logic. They translate requests
6
+ and forward to the running delimit-os server via direct import.
7
+ """
8
+
9
+ import sys
10
+ import logging
11
+ from pathlib import Path
12
+ from typing import Any, Dict, List, Optional
13
+
14
+ logger = logging.getLogger("delimit.ai.os_bridge")
15
+
16
OS_PACKAGE = Path("/home/delimit/.delimit_suite/packages/delimit-os")


def _ensure_os_path():
    """Make the delimit-os package importable (idempotent)."""
    entry = str(OS_PACKAGE)
    if entry not in sys.path:
        sys.path.insert(0, entry)


def create_plan(operation: str, target: str, parameters: Optional[Dict] = None, require_approval: bool = True) -> Dict[str, Any]:
    """Create an execution plan via delimit-os.

    Risk is classified from keywords in *operation* (HIGH for destructive/
    prod terms, MEDIUM for deploy-like terms, else LOW) and the plan is
    registered in delimit-os's in-memory PLANS store.
    """
    _ensure_os_path()
    try:
        from server import PLANS
        import time
        import uuid

        op = operation.lower()
        if any(word in op for word in ("prod", "delete", "drop", "rm")):
            risk = "HIGH"
        elif any(word in op for word in ("deploy", "restart", "update")):
            risk = "MEDIUM"
        else:
            risk = "LOW"

        plan = {
            "plan_id": f"PLAN-{str(uuid.uuid4())[:8].upper()}",
            "operation": operation,
            "target": target,
            "parameters": parameters or {},
            "risk_level": risk,
            "status": "PENDING_APPROVAL" if require_approval else "READY",
            "created_at": time.time(),
        }
        PLANS[plan["plan_id"]] = plan
        return plan
    except ImportError:
        return {"error": "delimit-os not available", "fallback": True}


def get_status() -> Dict[str, Any]:
    """Get current OS status (counts of plans, tasks and tokens)."""
    _ensure_os_path()
    try:
        from server import PLANS, TASKS, TOKENS
    except ImportError:
        return {"status": "unavailable", "error": "delimit-os not loaded"}
    return {
        "status": "operational",
        "plans": len(PLANS),
        "tasks": len(TASKS),
        "tokens": len(TOKENS),
    }


def check_gates(plan_id: str) -> Dict[str, Any]:
    """Check governance gates for a plan.

    Gates count as passed when the plan's status is READY or APPROVED.
    """
    _ensure_os_path()
    try:
        from server import PLANS
    except ImportError:
        return {"error": "delimit-os not available"}
    plan = PLANS.get(plan_id)
    if not plan:
        return {"error": f"Plan {plan_id} not found"}
    return {
        "plan_id": plan_id,
        "gates_passed": plan.get("status") in ("READY", "APPROVED"),
        "status": plan.get("status"),
    }