kalibr 1.0.21__py3-none-any.whl → 1.0.23__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kalibr/__main__.py +79 -550
- kalibr/deployment.py +36 -21
- kalibr/kalibr_app.py +32 -11
- kalibr/packager.py +43 -0
- kalibr/runtime_router.py +138 -0
- kalibr/schema_generators.py +21 -74
- kalibr/validator.py +70 -0
- kalibr-1.0.23.dist-info/METADATA +257 -0
- kalibr-1.0.23.dist-info/RECORD +19 -0
- kalibr-1.0.21.dist-info/METADATA +0 -302
- kalibr-1.0.21.dist-info/RECORD +0 -16
- {kalibr-1.0.21.data → kalibr-1.0.23.data}/data/examples/README.md +0 -0
- {kalibr-1.0.21.data → kalibr-1.0.23.data}/data/examples/basic_kalibr_example.py +0 -0
- {kalibr-1.0.21.data → kalibr-1.0.23.data}/data/examples/enhanced_kalibr_example.py +0 -0
- {kalibr-1.0.21.dist-info → kalibr-1.0.23.dist-info}/WHEEL +0 -0
- {kalibr-1.0.21.dist-info → kalibr-1.0.23.dist-info}/entry_points.txt +0 -0
- {kalibr-1.0.21.dist-info → kalibr-1.0.23.dist-info}/licenses/LICENSE +0 -0
- {kalibr-1.0.21.dist-info → kalibr-1.0.23.dist-info}/top_level.txt +0 -0
kalibr/deployment.py
CHANGED
@@ -1,26 +1,41 @@
 """
-Kalibr Deployment
-
-
-
-Current supported strategy:
- - Local (via Uvicorn)
-
-Planned:
- - Fly.io
- - Render
- - Railway
-
-No AWS dependencies are required or used.
+Kalibr Deployment
+-----------------
+Thin wrapper that forwards to the runtime router.
+Keeps a simple API surface for backwards-compat commands.
 """
 
-import
+from __future__ import annotations
+from dataclasses import dataclass, field
+from typing import Dict, Any
+from kalibr.runtime_router import deploy as router_deploy
+
+@dataclass
+class DeploymentConfig:
+    app_name: str
+    memory_mb: int = 512
+    timeout_seconds: int = 30
+    environment_vars: Dict[str, str] = field(default_factory=dict)
 
-def
-""
-
+def deploy_app(file_path: str, config: DeploymentConfig, platform: str = "local") -> Dict[str, Any]:
+    # Map older "platform" to runtime names used by router
+    runtime = {
+        "local": "local",
+        "fly": "fly",
+        "aws-lambda": "local",  # not supported; punt to local
+        "render": "render",
+    }.get(platform, platform)
 
-
-""
-
-
+    result = router_deploy(runtime=runtime, app_name=config.app_name, app_file=file_path)
+    if result.get("status") in ("success", "started"):
+        eps = result.get("endpoints", {})
+        return {
+            "status": "success",
+            "endpoints": {
+                "root": eps.get("root", ""),
+                "mcp": eps.get("mcp", ""),
+                "openapi": eps.get("openapi", ""),
+                "health": eps.get("health", ""),
+            }
+        }
+    return {"status": "error", "error": "unknown deploy outcome", "raw": result}
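For orientation, a minimal sketch of calling the new wrapper as it appears in this diff; the `my_tools.py` filename is hypothetical, and only `DeploymentConfig` and `deploy_app` are taken from the code above:

```python
# Sketch only: exercises the DeploymentConfig/deploy_app surface shown in the diff above.
# "my_tools.py" is a hypothetical Kalibr app file.
from kalibr.deployment import DeploymentConfig, deploy_app

config = DeploymentConfig(
    app_name="demo-tools",                   # forwarded to the runtime router as the app name
    environment_vars={"LOG_LEVEL": "info"},  # stored on the config; the router does not read it yet
)

# "aws-lambda" would be punted to the local runtime by the platform->runtime table above.
result = deploy_app("my_tools.py", config, platform="local")
print(result["status"], result.get("endpoints", {}).get("mcp", ""))
```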
kalibr/kalibr_app.py
CHANGED
@@ -9,7 +9,6 @@ import os
 
 from kalibr.types import FileUpload, Session, WorkflowState
 
-
 class KalibrApp:
     """
     Enhanced app-level Kalibr framework with advanced capabilities:
@@ -20,7 +19,7 @@ class KalibrApp:
     - Multi-model schema generation
     """
 
-    def __init__(self, title="Kalibr Enhanced API", version="2.0.0", base_url: Optional[str] = None):
+    def __init__(self, title="Kalibr Enhanced API", version="2.0.0", base_url: Optional[str] = None, models: Optional[List[str]] = None):
         """
         Initialize the Kalibr enhanced app.
         Automatically determines correct base URL for deployed environments.
@@ -30,6 +29,9 @@ class KalibrApp:
         2. Env var `KALIBR_BASE_URL`
         3. Env var `FLY_APP_NAME` -> https://<fly_app_name>.fly.dev
         4. Default localhost for dev
+
+        `models`: optional list like ["mcp"] or ["mcp","gpt-actions","gemini","copilot"]
+        controls which schema endpoints are advertised.
         """
         self.app = FastAPI(title=title, version=version)
 
@@ -42,6 +44,8 @@ class KalibrApp:
         else:
             self.base_url = "http://localhost:8000"
 
+        self.models_supported = models or ["mcp", "gpt-actions", "gemini", "copilot"]
+
         # Storage for different action types
         self.actions: Dict[str, Any] = {}
         self.file_handlers: Dict[str, Any] = {}
@@ -221,10 +225,10 @@ class KalibrApp:
                     import json
                     if inspect.isasyncgen(result):
                         async for item in result:
-                            yield json.dumps(item) + "
+                            yield json.dumps(item) + "\n"
                     elif inspect.isgenerator(result):
                         for item in result:
-                            yield json.dumps(item) + "
+                            yield json.dumps(item) + "\n"
 
                 return FastAPIStreamingResponse(generate(), media_type="application/x-ndjson")
             except Exception as e:
@@ -255,17 +259,25 @@ class KalibrApp:
 
         @self.app.get("/")
         def root():
+            schemas = {}
+            if "gpt-actions" in self.models_supported:
+                schemas["gpt_actions"] = f"{self.base_url}/gpt-actions.json"
+            schemas["claude_mcp"] = f"{self.base_url}/mcp.json"  # MCP is the default lingua franca
+            if "gemini" in self.models_supported:
+                schemas["gemini"] = f"{self.base_url}/schemas/gemini"
+            if "copilot" in self.models_supported:
+                schemas["copilot"] = f"{self.base_url}/schemas/copilot"
+
             return {
                 "message": "Kalibr Enhanced API is running",
                 "actions": list(self.actions.keys()),
-                "schemas":
-                    "gpt_actions": f"{self.base_url}/gpt-actions.json",
-                    "claude_mcp": f"{self.base_url}/mcp.json",
-                    "gemini": f"{self.base_url}/schemas/gemini",
-                    "copilot": f"{self.base_url}/schemas/copilot",
-                },
+                "schemas": schemas,
             }
 
+        @self.app.get("/models/supported")
+        def supported_models():
+            return {"models": self.models_supported}
+
         @self.app.get("/gpt-actions.json")
         def gpt_actions_schema():
             all_actions = {**self.actions, **self.file_handlers, **self.session_actions}
@@ -288,7 +300,16 @@ class KalibrApp:
 
         @self.app.get("/health")
         def health_check():
-            return {
+            return {
+                "status": "healthy",
+                "service": "Kalibr Enhanced API",
+                "version": self.app.version,
+                "features": {
+                    "file_handlers": bool(self.file_handlers),
+                    "sessions": True,
+                    "streams": bool(self.stream_actions),
+                },
+            }
 
     # -------------------------------------------------------------------------
     # Helpers
kalibr/packager.py
ADDED
@@ -0,0 +1,43 @@
+"""
+Packager
+--------
+Create a deployable MCP bundle (code + manifests + metadata).
+This does not host anything; it prepares artifacts for any runtime.
+"""
+
+from __future__ import annotations
+from pathlib import Path
+import shutil
+import json
+import tempfile
+from typing import Dict, Any, Optional
+
+DEFAULT_BUNDLE = "kalibr_bundle.zip"
+
+def package_app(app_dir: str = ".", output: str = DEFAULT_BUNDLE, models_supported: Optional[list] = None, kalibr_version: str = "unknown") -> str:
+    app_dir = Path(app_dir).resolve()
+    out_path = Path(output).resolve()
+
+    # Assemble temp dir with metadata
+    with tempfile.TemporaryDirectory() as tmp:
+        tmpdir = Path(tmp)
+        # Copy source tree
+        for item in app_dir.iterdir():
+            if item.name == out_path.name:
+                continue
+            dest = tmpdir / item.name
+            if item.is_dir():
+                shutil.copytree(item, dest)
+            else:
+                shutil.copy2(item, dest)
+
+        # Write bundle metadata
+        (tmpdir / "kalibr_manifest.json").write_text(json.dumps({
+            "kalibr_version": kalibr_version,
+            "models_supported": models_supported or ["mcp", "gpt-actions", "gemini", "copilot"],
+        }, indent=2))
+
+        # Zip
+        shutil.make_archive(out_path.with_suffix(""), "zip", tmpdir)
+
+    return str(out_path)
kalibr/runtime_router.py
ADDED
@@ -0,0 +1,138 @@
+"""
+Runtime Router
+--------------
+Abstraction over deployment targets without hosting them ourselves.
+Generates minimal configs and invokes the target's CLI/API where possible.
+
+Supported:
+ - local (uvicorn)
+ - fly (fly.io) -> generates fly.toml and basic Dockerfile
+ - render -> generates render.yaml
+
+Note: We do not ship vendor SDKs. We shell out to their CLIs if present.
+"""
+
+from __future__ import annotations
+from pathlib import Path
+import subprocess
+import shutil
+import os
+import json
+from typing import Dict, Any, Optional, Tuple
+
+HERE = Path(__file__).parent
+
+def which(cmd: str) -> Optional[str]:
+    return shutil.which(cmd)
+
+def ensure_file(path: Path, content: str):
+    path.parent.mkdir(parents=True, exist_ok=True)
+    if not path.exists():
+        path.write_text(content)
+
+def generate_fly_files(app_name: str) -> Tuple[Path, Path]:
+    fly_toml = Path("fly.toml")
+    dockerfile = Path("Dockerfile")
+    ensure_file(fly_toml, f"""# fly.toml
+app = "{app_name}"
+primary_region = "iad"
+
+[build]
+  dockerfile = "Dockerfile"
+
+[http_service]
+  internal_port = 8000
+  force_https = true
+  auto_stop_machines = "off"
+  auto_start_machines = true
+  min_machines_running = 1
+""")
+    ensure_file(dockerfile, """# Dockerfile
+FROM python:3.11-slim
+WORKDIR /app
+COPY . /app
+RUN pip install --no-cache-dir -U pip && \
+    pip install --no-cache-dir fastapi uvicorn typer pydantic requests
+EXPOSE 8000
+CMD ["python", "-m", "kalibr", "serve", "kalibr_app.py", "--host", "0.0.0.0", "--port", "8000", "--base-url", "http://0.0.0.0:8000"]
+""")
+    return fly_toml, dockerfile
+
+def generate_render_file(service_name: str) -> Path:
+    render_yaml = Path("render.yaml")
+    ensure_file(render_yaml, f"""# render.yaml
+services:
+  - type: web
+    name: {service_name}
+    env: docker
+    plan: free
+    dockerfilePath: ./Dockerfile
+    autoDeploy: true
+""")
+    return render_yaml
+
+def deploy_local(app_file: str, host: str = "0.0.0.0", port: int = 8000, base_url: str = "http://localhost:8000") -> Dict[str, Any]:
+    # Run uvicorn inline (non-blocking not handled here - CLI uses this to print guidance)
+    cmd = ["python", "-m", "kalibr", "serve", app_file, "--host", host, "--port", str(port), "--base-url", base_url]
+    print("▶︎", " ".join(cmd))
+    subprocess.run(cmd, check=False)
+    return {
+        "status": "started",
+        "endpoints": {
+            "root": f"{base_url}/",
+            "mcp": f"{base_url}/mcp.json",
+            "openapi": f"{base_url}/openapi.json",
+            "health": f"{base_url}/health"
+        }
+    }
+
+def deploy_fly(app_name: str) -> Dict[str, Any]:
+    if not which("flyctl"):
+        raise RuntimeError("flyctl is not installed. See https://fly.io/docs/flyctl/install/")
+    # Ensure files exist
+    generate_fly_files(app_name)
+    # Launch or deploy
+    print("▶︎ flyctl apps list")
+    subprocess.run(["flyctl", "apps", "list"], check=False)
+    print(f"▶︎ flyctl deploy --app {app_name}")
+    subprocess.run(["flyctl", "deploy", "--app", app_name], check=False)
+    url = f"https://{app_name}.fly.dev"
+    return {
+        "status": "success",
+        "endpoints": {
+            "root": f"{url}/",
+            "mcp": f"{url}/mcp.json",
+            "openapi": f"{url}/openapi.json",
+            "health": f"{url}/health"
+        }
+    }
+
+def deploy_render(service_name: str) -> Dict[str, Any]:
+    # We just generate render.yaml and Dockerfile. User connects repo in Render UI.
+    generate_render_file(service_name)
+    ensure_file(Path("Dockerfile"), """# Dockerfile for Render
+FROM python:3.11-slim
+WORKDIR /app
+COPY . /app
+RUN pip install --no-cache-dir -U pip && \
+    pip install --no-cache-dir fastapi uvicorn typer pydantic requests
+EXPOSE 8000
+CMD ["python", "-m", "kalibr", "serve", "kalibr_app.py", "--host", "0.0.0.0", "--port", "8000", "--base-url", "https://$RENDER_EXTERNAL_URL"]
+""")
+    print("📄 Generated render.yaml and Dockerfile. Connect your repo in Render.com and auto-deploy.")
+    return {
+        "status": "success",
+        "endpoints": {},
+        "note": "Connect this repository to Render; it will build from render.yaml."
+    }
+
+def deploy(runtime: str, app_name: str, app_file: str, host: str = "0.0.0.0", port: int = 8000, base_url: str = "http://localhost:8000") -> Dict[str, Any]:
+    runtime = runtime.lower()
+    if runtime in ("local", "dev"):
+        return deploy_local(app_file, host, port, base_url)
+    if runtime in ("fly", "flyio"):
+        return deploy_fly(app_name)
+    if runtime == "render":
+        return deploy_render(app_name)
+    raise ValueError(f"Unknown runtime: {runtime}")
+
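The router can also be driven directly, bypassing `deploy_app`; a minimal sketch, assuming a Kalibr app file named `my_tools.py` (hypothetical) in the working directory:

```python
# Sketch: call the runtime router directly.
# deploy_local shells out to `python -m kalibr serve ...` with subprocess.run,
# so it blocks until that server process exits; fly/render mostly emit config files.
from kalibr.runtime_router import deploy, generate_fly_files

# Write fly.toml and a Dockerfile without deploying (flyctl not required for this step).
generate_fly_files("demo-tools")

# Local run, equivalent to the "local" branch of deploy():
result = deploy(runtime="local", app_name="demo-tools", app_file="my_tools.py")
print(result["endpoints"]["mcp"])  # e.g. http://localhost:8000/mcp.json
```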
kalibr/schema_generators.py
CHANGED
@@ -4,29 +4,27 @@ Multi-model schema generators for different AI platforms
 from typing import Dict, Any, List
 from abc import ABC, abstractmethod
 
+KALIBR_TITLE = "Kalibr Enhanced API"
+KALIBR_VERSION = "2.0.0"
+MCP_SPEC_VERSION = "1.0"  # bump via validator when MCP evolves
+
 class BaseSchemaGenerator(ABC):
     """Base class for AI model schema generators"""
-
     @abstractmethod
     def generate_schema(self, actions: Dict, base_url: str) -> Dict[str, Any]:
-        """Generate schema for the specific AI model"""
         pass
 
 class MCPSchemaGenerator(BaseSchemaGenerator):
-    """Claude MCP schema generator"""
-
+    """Claude/GPT-AgentBuilder MCP schema generator"""
     def generate_schema(self, actions: Dict, base_url: str) -> Dict[str, Any]:
         tools = []
         for action_name, action_data in actions.items():
             properties = {}
             required = []
-
-            # Construct the input schema for the tool
             for param_name, param_info in action_data["params"].items():
                 properties[param_name] = {"type": param_info["type"]}
                 if param_info["required"]:
                     required.append(param_name)
-
             tools.append({
                 "name": action_name,
                 "description": action_data["description"],
@@ -35,32 +33,25 @@ class MCPSchemaGenerator(BaseSchemaGenerator):
                     "properties": properties,
                     "required": required
                 },
-                "server": {
-                    "url": f"{base_url}/proxy/{action_name}"
-                }
+                "server": {"url": f"{base_url}/proxy/{action_name}"}
             })
-
         return {
-            "mcp":
+            "mcp": MCP_SPEC_VERSION,
             "name": "kalibr-enhanced",
             "tools": tools
         }
 
 class OpenAPISchemaGenerator(BaseSchemaGenerator):
-    """GPT Actions OpenAPI schema generator"""
-
+    """GPT Actions OpenAPI schema generator (legacy but still useful)"""
     def generate_schema(self, actions: Dict, base_url: str) -> Dict[str, Any]:
         paths = {}
-
         for action_name, action_data in actions.items():
             properties = {}
             required = []
-
             for param_name, param_info in action_data["params"].items():
                 properties[param_name] = {"type": param_info["type"]}
                 if param_info["required"]:
                     required.append(param_name)
-
             paths[f"/proxy/{action_name}"] = {
                 "post": {
                     "summary": action_data["description"],
@@ -80,57 +71,34 @@ class OpenAPISchemaGenerator(BaseSchemaGenerator):
                     "responses": {
                         "200": {
                             "description": "Successful response",
-                            "content": {
-                                "application/json": {
-                                    "schema": {"type": "object"}
-                                }
-                            }
+                            "content": {"application/json": {"schema": {"type": "object"}}}
                         }
                     }
                 }
            }
-
         return {
             "openapi": "3.0.0",
-            "info": {
-                "title": "Kalibr Enhanced API",
-                "version": "2.0.0",
-                "description": "Enhanced Kalibr API with app-level capabilities"
-            },
+            "info": {"title": KALIBR_TITLE, "version": KALIBR_VERSION, "description": "Enhanced Kalibr API"},
             "servers": [{"url": base_url}],
             "paths": paths
         }
 
 class GeminiSchemaGenerator(BaseSchemaGenerator):
     """Google Gemini Extensions schema generator"""
-
     def generate_schema(self, actions: Dict, base_url: str) -> Dict[str, Any]:
         functions = []
-
         for action_name, action_data in actions.items():
-            parameters = {
-                "type": "object",
-                "properties": {},
-                "required": []
-            }
-
+            parameters = {"type": "object", "properties": {}, "required": []}
             for param_name, param_info in action_data["params"].items():
-                parameters["properties"][param_name] = {
-                    "type": param_info["type"],
-                    "description": f"Parameter {param_name}"
-                }
+                parameters["properties"][param_name] = {"type": param_info["type"], "description": f"Parameter {param_name}"}
                 if param_info["required"]:
                     parameters["required"].append(param_name)
-
             functions.append({
                 "name": action_name,
                 "description": action_data["description"],
                 "parameters": parameters,
-                "server": {
-                    "url": f"{base_url}/proxy/{action_name}"
-                }
+                "server": {"url": f"{base_url}/proxy/{action_name}"}
             })
-
         return {
             "gemini_extension": "1.0",
             "name": "kalibr_enhanced",
@@ -139,66 +107,45 @@ class GeminiSchemaGenerator(BaseSchemaGenerator):
         }
 
 class CopilotSchemaGenerator(BaseSchemaGenerator):
-    """Microsoft Copilot plugin schema generator"""
-
+    """Microsoft Copilot plugin schema generator (transitional)"""
     def generate_schema(self, actions: Dict, base_url: str) -> Dict[str, Any]:
         apis = []
-
         for action_name, action_data in actions.items():
-            request_schema = {
-                "type": "object",
-                "properties": {},
-                "required": []
-            }
-
+            request_schema = {"type": "object", "properties": {}, "required": []}
             for param_name, param_info in action_data["params"].items():
-                request_schema["properties"][param_name] = {
-                    "type": param_info["type"]
-                }
+                request_schema["properties"][param_name] = {"type": param_info["type"]}
                 if param_info["required"]:
                     request_schema["required"].append(param_name)
-
             apis.append({
                 "name": action_name,
                 "description": action_data["description"],
                 "url": f"{base_url}/proxy/{action_name}",
                 "method": "POST",
                 "request_schema": request_schema,
-                "response_schema": {
-                    "type": "object",
-                    "description": "API response"
-                }
+                "response_schema": {"type": "object", "description": "API response"}
             })
-
         return {
             "schema_version": "v1",
             "name_for_model": "kalibr_enhanced",
             "name_for_human": "Enhanced Kalibr API",
             "description_for_model": "Enhanced Kalibr API with advanced capabilities",
             "description_for_human": "API for advanced AI model integrations",
-            "auth": {
-
-            },
-            "api": {
-                "type": "openapi",
-                "url": f"{base_url}/openapi.json"
-            },
+            "auth": {"type": "none"},
+            "api": {"type": "openapi", "url": f"{base_url}/openapi.json"},
             "apis": apis
         }
 
 class CustomModelSchemaGenerator(BaseSchemaGenerator):
     """Extensible generator for future AI models"""
-
     def __init__(self, model_name: str, schema_format: str):
         self.model_name = model_name
         self.schema_format = schema_format
 
     def generate_schema(self, actions: Dict, base_url: str) -> Dict[str, Any]:
-        # Generic schema format that can be customized
         return {
             "model": self.model_name,
             "format": self.schema_format,
-            "version":
+            "version": KALIBR_VERSION,
             "base_url": base_url,
             "actions": [
                 {
@@ -209,4 +156,4 @@ class CustomModelSchemaGenerator(BaseSchemaGenerator):
                 }
                 for name, data in actions.items()
             ]
-        }
+        }
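To make the expected `actions` shape concrete, a small sketch of one hand-written action record run through `MCPSchemaGenerator`; the record layout (a description plus per-parameter type/required flags) is inferred from how the generators above read it:

```python
# Sketch: every generator consumes the same actions dict shape:
#   {action_name: {"description": str, "params": {param: {"type": str, "required": bool}}}}
from kalibr.schema_generators import MCPSchemaGenerator

actions = {
    "summarize": {
        "description": "Summarize a block of text",
        "params": {
            "text": {"type": "string", "required": True},
            "max_words": {"type": "integer", "required": False},
        },
    }
}

manifest = MCPSchemaGenerator().generate_schema(actions, base_url="http://localhost:8000")
assert manifest["mcp"] == "1.0"  # MCP_SPEC_VERSION
assert manifest["tools"][0]["server"]["url"].endswith("/proxy/summarize")
```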
kalibr/validator.py
ADDED
@@ -0,0 +1,70 @@
+"""
+Validator
+---------
+ - Validates MCP manifest structure (lightweight JSONSchema)
+ - Detects spec drift (compares manifest version vs. embedded known latest)
+ - Provides 'update-schemas' hook (stub here; can fetch from GH in future)
+
+Note: We keep it offline-safe. You can later wire network fetch if desired.
+"""
+
+from __future__ import annotations
+from typing import Dict, Any
+import json
+from jsonschema import validate, Draft7Validator
+
+# Minimal MCP JSON schema (aligned with your generator shape)
+MCP_SCHEMA = {
+    "type": "object",
+    "required": ["mcp", "name", "tools"],
+    "properties": {
+        "mcp": {"type": "string"},
+        "name": {"type": "string"},
+        "tools": {
+            "type": "array",
+            "items": {
+                "type": "object",
+                "required": ["name", "description", "input_schema", "server"],
+                "properties": {
+                    "name": {"type": "string"},
+                    "description": {"type": "string"},
+                    "input_schema": {
+                        "type": "object",
+                        "required": ["type", "properties"],
+                        "properties": {
+                            "type": {"type": "string"},
+                            "properties": {"type": "object"},
+                            "required": {"type": "array", "items": {"type": "string"}}
+                        }
+                    },
+                    "server": {
+                        "type": "object",
+                        "required": ["url"],
+                        "properties": {"url": {"type": "string"}}
+                    }
+                }
+            }
+        }
+    }
+}
+
+LATEST_MCP_SPEC_VERSION = "1.0"  # bump here as spec evolves
+
+def validate_mcp_manifest(manifest: Dict[str, Any]) -> None:
+    # Structural validation
+    validate(instance=manifest, schema=MCP_SCHEMA)
+    # Version guidance
+    version = str(manifest.get("mcp", ""))
+    if version != LATEST_MCP_SPEC_VERSION:
+        print(f"⚠️ MCP spec version in manifest is '{version}'. Latest known is '{LATEST_MCP_SPEC_VERSION}'.")
+        print("   Run `kalibr update-schemas` after updating the SDK, or regenerate the manifest.")
+
+def update_schemas() -> None:
+    """
+    Stub: In a connected environment, fetch updated schema templates.
+    Here, we simply print guidance so the CLI can expose the command now.
+    """
+    print("🔄 Update schemas (stub):")
+    print("   - Upgrade kalibr SDK to latest version: pip install -U kalibr")
+    print("   - Re-run manifest generation to pick up spec changes.")
+
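The validator is meant to sit right behind the MCP generator; a minimal sketch (assuming the `jsonschema` package is installed, since this module imports it), where a structural failure raises and a version mismatch only prints guidance:

```python
# Sketch: validate a freshly generated manifest.
# jsonschema.ValidationError signals a structural problem; the spec-version
# check in validate_mcp_manifest only prints a warning (see above).
from jsonschema import ValidationError

from kalibr.schema_generators import MCPSchemaGenerator
from kalibr.validator import validate_mcp_manifest

actions = {"ping": {"description": "Health probe", "params": {}}}
manifest = MCPSchemaGenerator().generate_schema(actions, "http://localhost:8000")

try:
    validate_mcp_manifest(manifest)
    print("manifest OK")
except ValidationError as exc:
    print("invalid MCP manifest:", exc.message)
```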