kailash 0.3.0__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- kailash/__init__.py +1 -1
- kailash/access_control.py +40 -39
- kailash/api/auth.py +26 -32
- kailash/api/custom_nodes.py +29 -29
- kailash/api/custom_nodes_secure.py +35 -35
- kailash/api/database.py +17 -17
- kailash/api/gateway.py +19 -19
- kailash/api/mcp_integration.py +24 -23
- kailash/api/studio.py +45 -45
- kailash/api/workflow_api.py +8 -8
- kailash/cli/commands.py +5 -8
- kailash/manifest.py +42 -42
- kailash/mcp/__init__.py +1 -1
- kailash/mcp/ai_registry_server.py +20 -20
- kailash/mcp/client.py +9 -11
- kailash/mcp/client_new.py +10 -10
- kailash/mcp/server.py +1 -2
- kailash/mcp/server_enhanced.py +449 -0
- kailash/mcp/servers/ai_registry.py +6 -6
- kailash/mcp/utils/__init__.py +31 -0
- kailash/mcp/utils/cache.py +267 -0
- kailash/mcp/utils/config.py +263 -0
- kailash/mcp/utils/formatters.py +293 -0
- kailash/mcp/utils/metrics.py +418 -0
- kailash/nodes/ai/agents.py +9 -9
- kailash/nodes/ai/ai_providers.py +33 -34
- kailash/nodes/ai/embedding_generator.py +31 -32
- kailash/nodes/ai/intelligent_agent_orchestrator.py +62 -66
- kailash/nodes/ai/iterative_llm_agent.py +48 -48
- kailash/nodes/ai/llm_agent.py +32 -33
- kailash/nodes/ai/models.py +13 -13
- kailash/nodes/ai/self_organizing.py +44 -44
- kailash/nodes/api/auth.py +11 -11
- kailash/nodes/api/graphql.py +13 -13
- kailash/nodes/api/http.py +19 -19
- kailash/nodes/api/monitoring.py +20 -20
- kailash/nodes/api/rate_limiting.py +9 -13
- kailash/nodes/api/rest.py +29 -29
- kailash/nodes/api/security.py +44 -47
- kailash/nodes/base.py +21 -23
- kailash/nodes/base_async.py +7 -7
- kailash/nodes/base_cycle_aware.py +12 -12
- kailash/nodes/base_with_acl.py +5 -5
- kailash/nodes/code/python.py +56 -55
- kailash/nodes/data/directory.py +6 -6
- kailash/nodes/data/event_generation.py +10 -10
- kailash/nodes/data/file_discovery.py +28 -31
- kailash/nodes/data/readers.py +8 -8
- kailash/nodes/data/retrieval.py +10 -10
- kailash/nodes/data/sharepoint_graph.py +17 -17
- kailash/nodes/data/sources.py +5 -5
- kailash/nodes/data/sql.py +13 -13
- kailash/nodes/data/streaming.py +25 -25
- kailash/nodes/data/vector_db.py +22 -22
- kailash/nodes/data/writers.py +7 -7
- kailash/nodes/logic/async_operations.py +17 -17
- kailash/nodes/logic/convergence.py +11 -11
- kailash/nodes/logic/loop.py +4 -4
- kailash/nodes/logic/operations.py +11 -11
- kailash/nodes/logic/workflow.py +8 -9
- kailash/nodes/mixins/mcp.py +17 -17
- kailash/nodes/mixins.py +8 -10
- kailash/nodes/transform/chunkers.py +3 -3
- kailash/nodes/transform/formatters.py +7 -7
- kailash/nodes/transform/processors.py +10 -10
- kailash/runtime/access_controlled.py +18 -18
- kailash/runtime/async_local.py +17 -19
- kailash/runtime/docker.py +20 -22
- kailash/runtime/local.py +16 -16
- kailash/runtime/parallel.py +23 -23
- kailash/runtime/parallel_cyclic.py +27 -27
- kailash/runtime/runner.py +6 -6
- kailash/runtime/testing.py +20 -20
- kailash/sdk_exceptions.py +0 -58
- kailash/security.py +14 -26
- kailash/tracking/manager.py +38 -38
- kailash/tracking/metrics_collector.py +15 -14
- kailash/tracking/models.py +53 -53
- kailash/tracking/storage/base.py +7 -17
- kailash/tracking/storage/database.py +22 -23
- kailash/tracking/storage/filesystem.py +38 -40
- kailash/utils/export.py +21 -21
- kailash/utils/templates.py +2 -3
- kailash/visualization/api.py +30 -34
- kailash/visualization/dashboard.py +17 -17
- kailash/visualization/performance.py +16 -16
- kailash/visualization/reports.py +25 -27
- kailash/workflow/builder.py +8 -8
- kailash/workflow/convergence.py +13 -12
- kailash/workflow/cycle_analyzer.py +30 -32
- kailash/workflow/cycle_builder.py +12 -12
- kailash/workflow/cycle_config.py +16 -15
- kailash/workflow/cycle_debugger.py +40 -40
- kailash/workflow/cycle_exceptions.py +29 -29
- kailash/workflow/cycle_profiler.py +21 -21
- kailash/workflow/cycle_state.py +20 -22
- kailash/workflow/cyclic_runner.py +44 -44
- kailash/workflow/graph.py +40 -40
- kailash/workflow/mermaid_visualizer.py +9 -11
- kailash/workflow/migration.py +22 -22
- kailash/workflow/mock_registry.py +6 -6
- kailash/workflow/runner.py +9 -9
- kailash/workflow/safety.py +12 -13
- kailash/workflow/state.py +8 -11
- kailash/workflow/templates.py +19 -19
- kailash/workflow/validation.py +14 -14
- kailash/workflow/visualization.py +22 -22
- {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/METADATA +53 -5
- kailash-0.3.1.dist-info/RECORD +136 -0
- kailash-0.3.0.dist-info/RECORD +0 -130
- {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/WHEEL +0 -0
- {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/entry_points.txt +0 -0
- {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/top_level.txt +0 -0
kailash/manifest.py
CHANGED
@@ -1,9 +1,9 @@
 """Workflow manifest generation for Kailash deployment."""
 
 import json
-from datetime import
+from datetime import UTC, datetime
 from pathlib import Path
-from typing import Any
+from typing import Any
 
 import yaml
 from pydantic import BaseModel, Field
@@ -17,13 +17,13 @@ class KailashManifest(BaseModel):
 
     model_config = {"arbitrary_types_allowed": True}
 
-    metadata:
-    workflow:
-    resources:
+    metadata: dict[str, Any] = Field(..., description="Manifest metadata")
+    workflow: Workflow | None = Field(None, description="Associated workflow")
+    resources: dict[str, Any] | None = Field(
         default_factory=dict, description="Additional deployment resources"
     )
 
-    def to_dict(self) ->
+    def to_dict(self) -> dict[str, Any]:
         """Convert manifest to dictionary.
 
         Returns:
@@ -55,7 +55,7 @@ class KailashManifest(BaseModel):
         """
         return json.dumps(self.to_dict(), indent=2)
 
-    def save(self, path:
+    def save(self, path: str | Path, format: str = "yaml") -> None:
         """Save manifest to file.
 
         Args:
@@ -95,7 +95,7 @@ class KailashManifest(BaseModel):
             "version": workflow.metadata.version,
             "author": workflow.metadata.author,
             "description": workflow.metadata.description,
-            "created_at": datetime.now(
+            "created_at": datetime.now(UTC).isoformat(),
         }
 
         # Override defaults with provided metadata
@@ -104,7 +104,7 @@ class KailashManifest(BaseModel):
         return cls(metadata=default_metadata, workflow=workflow)
 
     @classmethod
-    def from_dict(cls, data:
+    def from_dict(cls, data: dict[str, Any]) -> "KailashManifest":
         """Create manifest from dictionary.
 
         Args:
@@ -132,7 +132,7 @@ class KailashManifest(BaseModel):
             raise ManifestError(f"Failed to create manifest from data: {e}") from e
 
     @classmethod
-    def load(cls, path:
+    def load(cls, path: str | Path) -> "KailashManifest":
         """Load manifest from file.
 
         Args:
@@ -149,7 +149,7 @@ class KailashManifest(BaseModel):
         if not file_path.exists():
             raise FileNotFoundError(f"File not found: {file_path}")
 
-        with open(file_path
+        with open(file_path) as f:
             content = f.read()
 
         # Parse based on file extension
@@ -172,21 +172,21 @@ class DeploymentConfig(BaseModel):
     namespace: str = Field("default", description="Kubernetes namespace")
     replicas: int = Field(1, description="Number of replicas")
    strategy: str = Field("RollingUpdate", description="Deployment strategy")
-    labels:
+    labels: dict[str, str] = Field(
         default_factory=dict, description="Kubernetes labels"
     )
-    annotations:
+    annotations: dict[str, str] = Field(
         default_factory=dict, description="Kubernetes annotations"
     )
     image_pull_policy: str = Field("IfNotPresent", description="Image pull policy")
-    service_account:
-    node_selector:
+    service_account: str | None = Field(None, description="Service account name")
+    node_selector: dict[str, str] = Field(
         default_factory=dict, description="Node selector"
     )
-    tolerations:
+    tolerations: list[dict[str, Any]] = Field(
         default_factory=list, description="Pod tolerations"
     )
-    affinity:
+    affinity: dict[str, Any] | None = Field(None, description="Pod affinity rules")
 
 
 class ServiceConfig(BaseModel):
@@ -194,11 +194,11 @@ class ServiceConfig(BaseModel):
 
     name: str = Field(..., description="Service name")
     type: str = Field("ClusterIP", description="Service type")
-    ports:
+    ports: list[dict[str, Any]] = Field(
         default_factory=list, description="Service ports"
     )
-    selector:
-    labels:
+    selector: dict[str, str] = Field(default_factory=dict, description="Pod selector")
+    labels: dict[str, str] = Field(default_factory=dict, description="Service labels")
 
 
 class VolumeConfig(BaseModel):
@@ -209,7 +209,7 @@ class VolumeConfig(BaseModel):
     source: str = Field(..., description="Volume source")
     mount_path: str = Field(..., description="Mount path in container")
     read_only: bool = Field(True, description="Read-only mount")
-    sub_path:
+    sub_path: str | None = Field(None, description="Sub-path within volume")
 
 
 class ConfigMapConfig(BaseModel):
@@ -217,9 +217,9 @@ class ConfigMapConfig(BaseModel):
 
     name: str = Field(..., description="ConfigMap name")
     namespace: str = Field("default", description="Namespace")
-    data:
-    binary_data:
-    labels:
+    data: dict[str, str] = Field(default_factory=dict, description="ConfigMap data")
+    binary_data: dict[str, str] = Field(default_factory=dict, description="Binary data")
+    labels: dict[str, str] = Field(default_factory=dict, description="Labels")
 
 
 class SecretConfig(BaseModel):
@@ -228,9 +228,9 @@ class SecretConfig(BaseModel):
     name: str = Field(..., description="Secret name")
     namespace: str = Field("default", description="Namespace")
     type: str = Field("Opaque", description="Secret type")
-    data:
-    string_data:
-    labels:
+    data: dict[str, str] = Field(default_factory=dict, description="Secret data")
+    string_data: dict[str, str] = Field(default_factory=dict, description="String data")
+    labels: dict[str, str] = Field(default_factory=dict, description="Labels")
 
 
 class ManifestBuilder:
@@ -244,10 +244,10 @@ class ManifestBuilder:
         """
         self.workflow = workflow
         self.deployment_config = None
-        self.service_configs:
-        self.volume_configs:
-        self.configmap_configs:
-        self.secret_configs:
+        self.service_configs: list[ServiceConfig] = []
+        self.volume_configs: list[VolumeConfig] = []
+        self.configmap_configs: list[ConfigMapConfig] = []
+        self.secret_configs: list[SecretConfig] = []
 
     def with_deployment(self, config: DeploymentConfig) -> "ManifestBuilder":
         """Add deployment configuration.
@@ -309,7 +309,7 @@ class ManifestBuilder:
         self.secret_configs.append(config)
         return self
 
-    def build(self) ->
+    def build(self) -> dict[str, Any]:
         """Build the complete manifest.
 
         Returns:
@@ -340,7 +340,7 @@ class ManifestBuilder:
 
         return manifest
 
-    def _build_deployment(self) ->
+    def _build_deployment(self) -> dict[str, Any]:
         """Build deployment manifest."""
         config = self.deployment_config
 
@@ -446,7 +446,7 @@ class ManifestBuilder:
 
         return deployment
 
-    def _build_service(self, config: ServiceConfig) ->
+    def _build_service(self, config: ServiceConfig) -> dict[str, Any]:
         """Build service manifest."""
         service = {
             "apiVersion": "v1",
@@ -469,7 +469,7 @@ class ManifestBuilder:
 
         return service
 
-    def _build_configmap(self, config: ConfigMapConfig) ->
+    def _build_configmap(self, config: ConfigMapConfig) -> dict[str, Any]:
         """Build ConfigMap manifest."""
         configmap = {
             "apiVersion": "v1",
@@ -487,7 +487,7 @@ class ManifestBuilder:
 
         return configmap
 
-    def _build_secret(self, config: SecretConfig) ->
+    def _build_secret(self, config: SecretConfig) -> dict[str, Any]:
         """Build Secret manifest."""
         secret = {
             "apiVersion": "v1",
@@ -506,7 +506,7 @@ class ManifestBuilder:
 
         return secret
 
-    def _build_workflow_crd(self) ->
+    def _build_workflow_crd(self) -> dict[str, Any]:
         """Build workflow custom resource."""
         from kailash.utils.export import ExportConfig, WorkflowExporter
 
@@ -529,7 +529,7 @@ class ManifestGenerator:
     @staticmethod
     def generate_simple_manifest(
         workflow: Workflow, name: str, namespace: str = "default"
-    ) ->
+    ) -> dict[str, Any]:
         """Generate a simple deployment manifest.
 
         Args:
@@ -586,9 +586,9 @@ class ManifestGenerator:
         name: str,
         namespace: str = "default",
         replicas: int = 1,
-        resources:
+        resources: dict[str, Any] | None = None,
         **kwargs,
-    ) ->
+    ) -> dict[str, Any]:
         """Generate an advanced deployment manifest with custom configuration.
 
         Args:
@@ -713,7 +713,7 @@ class ManifestGenerator:
         return builder.build()
 
     @staticmethod
-    def save_manifest(manifest:
+    def save_manifest(manifest: dict[str, Any], path: str, format: str = "yaml"):
         """Save manifest to file.
 
         Args:
@@ -737,7 +737,7 @@ class ManifestGenerator:
 # Convenience functions
 def create_deployment_manifest(
     workflow: Workflow, deployment_name: str, **config
-) ->
+) -> dict[str, Any]:
     """Create a deployment manifest for a workflow.
 
     Args:
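
The recurring edit in this file (and in most files listed above) is a type-annotation cleanup: the removed lines are truncated in this rendering, but the added lines, together with the one untruncated removal in kailash/mcp/client.py below, indicate typing.Dict/List/Optional/Union annotations being replaced with built-in generics and PEP 604 unions, and naive timestamps being replaced with timezone-aware datetime.now(UTC). A minimal sketch of the new style follows, using a hypothetical model rather than code from the package (assumes Python 3.11+ for datetime.UTC and pydantic v2, which the model_config dict style above suggests):

from datetime import UTC, datetime
from typing import Any

from pydantic import BaseModel, Field


class ExampleManifest(BaseModel):  # hypothetical model, for illustration only
    # 0.3.0-style hints such as Dict[str, Any] / Optional[str] become:
    metadata: dict[str, Any] = Field(..., description="Manifest metadata")
    labels: dict[str, str] = Field(default_factory=dict, description="Labels")
    service_account: str | None = Field(None, description="Service account name")

    def to_dict(self) -> dict[str, Any]:
        data = self.model_dump()
        # Timezone-aware timestamp, matching the datetime.now(UTC) change above.
        data["created_at"] = datetime.now(UTC).isoformat()
        return data
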
kailash/mcp/ai_registry_server.py
CHANGED
@@ -11,7 +11,7 @@ Run as: python -m kailash.mcp.ai_registry_server
 import asyncio
 import json
 import os
-from typing import Any
+from typing import Any
 
 from mcp.server import Server
 from mcp.types import Resource, TextContent, Tool
@@ -32,7 +32,7 @@ class AIRegistryServer:
         self._setup_tools()
         self._setup_resources()
 
-    def _load_registry_data(self, registry_file: str) ->
+    def _load_registry_data(self, registry_file: str) -> dict[str, Any]:
         """Load AI Registry data from JSON file."""
         # Handle both absolute and relative paths
         if not os.path.isabs(registry_file):
@@ -48,7 +48,7 @@ class AIRegistryServer:
             registry_file = os.path.join(project_root, registry_file)
 
         try:
-            with open(registry_file,
+            with open(registry_file, encoding="utf-8") as f:
                 return json.load(f)
         except FileNotFoundError:
             # Return mock data if file not found
@@ -425,10 +425,10 @@ class AIRegistryServer:
     def _search_use_cases(
         self,
         query: str,
-        domains:
-        methods:
+        domains: list[str] | None = None,
+        methods: list[str] | None = None,
         limit: int = 10,
-    ) ->
+    ) -> dict[str, Any]:
         """Search use cases with filters."""
         use_cases = self.registry_data.get("use_cases", [])
         results = []
@@ -460,8 +460,8 @@ class AIRegistryServer:
         return {"results": results[:limit], "count": len(results), "query": query}
 
     def _filter_by_domain(
-        self, domain: str, status:
-    ) ->
+        self, domain: str, status: str | None = None, limit: int = 20
+    ) -> dict[str, Any]:
         """Filter use cases by domain."""
         use_cases = self.registry_data.get("use_cases", [])
         filtered = []
@@ -473,7 +473,7 @@ class AIRegistryServer:
 
         return {"domain": domain, "count": len(filtered), "use_cases": filtered[:limit]}
 
-    def _get_use_case_details(self, use_case_id: int) ->
+    def _get_use_case_details(self, use_case_id: int) -> dict[str, Any]:
         """Get detailed information for a specific use case."""
         use_case = self._get_use_case_by_id(use_case_id)
         if use_case:
@@ -486,7 +486,7 @@ class AIRegistryServer:
 
     def _analyze_domain_trends(
         self, domain: str, include_details: bool = False
-    ) ->
+    ) -> dict[str, Any]:
         """Analyze trends within a specific domain."""
         use_cases = [
             uc
@@ -518,9 +518,9 @@ class AIRegistryServer:
     def _recommend_similar(
         self,
         use_case_id: int,
-        similarity_factors:
+        similarity_factors: list[str] | None = None,
         limit: int = 5,
-    ) ->
+    ) -> dict[str, Any]:
         """Find similar use cases."""
         reference_case = self._get_use_case_by_id(use_case_id)
         if not reference_case:
@@ -542,8 +542,8 @@ class AIRegistryServer:
         }
 
     def _estimate_complexity(
-        self, use_case_id: int, organization_context:
-    ) ->
+        self, use_case_id: int, organization_context: dict | None = None
+    ) -> dict[str, Any]:
         """Estimate implementation complexity."""
         use_case = self._get_use_case_by_id(use_case_id)
         if not use_case:
@@ -587,8 +587,8 @@ class AIRegistryServer:
         }
 
     def _suggest_implementation_path(
-        self, use_case_id: int, organization_context:
-    ) ->
+        self, use_case_id: int, organization_context: dict | None = None
+    ) -> dict[str, Any]:
         """Suggest implementation roadmap."""
         use_case = self._get_use_case_by_id(use_case_id)
         if not use_case:
@@ -626,8 +626,8 @@ class AIRegistryServer:
         }
 
     def _filter_by_method(
-        self, method: str, min_maturity:
-    ) ->
+        self, method: str, min_maturity: str | None = None, limit: int = 15
+    ) -> dict[str, Any]:
         """Filter use cases by AI method."""
         use_cases = self.registry_data.get("use_cases", [])
         filtered = []
@@ -644,7 +644,7 @@ class AIRegistryServer:
 
     # Helper methods
 
-    def _get_use_case_by_id(self, use_case_id: int) ->
+    def _get_use_case_by_id(self, use_case_id: int) -> dict[str, Any] | None:
         """Get use case by ID."""
         for use_case in self.registry_data.get("use_cases", []):
             if use_case.get("use_case_id") == use_case_id:
@@ -652,7 +652,7 @@ class AIRegistryServer:
         return None
 
     def _calculate_similarity(
-        self, case1:
+        self, case1: dict[str, Any], case2: dict[str, Any]
     ) -> float:
         """Calculate similarity between two use cases."""
         score = 0.0
kailash/mcp/client.py
CHANGED
@@ -41,7 +41,7 @@ import json
 import logging
 import os
 from contextlib import AsyncExitStack
-from typing import Any
+from typing import Any
 
 logger = logging.getLogger(__name__)
 
@@ -72,8 +72,8 @@ class MCPClient:
         self._discovered_resources = {}  # Cache discovered resources
 
     async def discover_tools(
-        self, server_config:
-    ) ->
+        self, server_config: str | dict[str, Any]
+    ) -> list[dict[str, Any]]:
         """Discover available tools from an MCP server.
 
         Args:
@@ -169,9 +169,9 @@ class MCPClient:
 
     async def call_tool(
         self,
-        server_config:
+        server_config: str | dict[str, Any],
         tool_name: str,
-        arguments:
+        arguments: dict[str, Any],
     ) -> Any:
         """Call a tool on an MCP server.
 
@@ -260,8 +260,8 @@ class MCPClient:
             return {"error": str(e)}
 
     async def list_resources(
-        self, server_config:
-    ) ->
+        self, server_config: str | dict[str, Any]
+    ) -> list[dict[str, Any]]:
         """List available resources from an MCP server.
 
         Args:
@@ -350,9 +350,7 @@ class MCPClient:
             logger.error(f"Failed to list resources: {e}")
             return []
 
-    async def read_resource(
-        self, server_config: Union[str, Dict[str, Any]], uri: str
-    ) -> Any:
+    async def read_resource(self, server_config: str | dict[str, Any], uri: str) -> Any:
         """Read a resource from an MCP server.
 
         Args:
@@ -438,7 +436,7 @@ class MCPClient:
             logger.error(f"Failed to read resource: {e}")
             return {"error": str(e)}
 
-    def _get_server_key(self, server_config:
+    def _get_server_key(self, server_config: str | dict[str, Any]) -> str:
         """Generate a unique key for caching server data."""
         if isinstance(server_config, str):
             return server_config
kailash/mcp/client_new.py
CHANGED
@@ -6,7 +6,7 @@ This is NOT a node - it's a utility class used by LLM agents to interact with MC
 
 import logging
 import os
-from typing import Any
+from typing import Any
 
 # Will use official MCP SDK when available
 try:
@@ -47,7 +47,7 @@ class MCPClient:
         self.logger = logging.getLogger(__name__)
 
     async def connect_stdio(
-        self, command: str, args:
+        self, command: str, args: list[str], env: dict[str, str] | None = None
     ):
         """
         Connect to an MCP server via stdio transport.
@@ -88,7 +88,7 @@ class MCPClient:
             self.logger.error(f"Failed to connect to MCP server: {e}")
             raise RuntimeError(f"MCP connection failed: {e}")
 
-    async def discover_tools(self, session: "ClientSession") ->
+    async def discover_tools(self, session: "ClientSession") -> list[dict[str, Any]]:
         """
         Discover available tools from an MCP server.
 
@@ -118,7 +118,7 @@ class MCPClient:
             return []
 
     async def call_tool(
-        self, session: "ClientSession", name: str, arguments:
+        self, session: "ClientSession", name: str, arguments: dict[str, Any]
     ) -> Any:
         """
         Call a tool on the MCP server.
@@ -150,7 +150,7 @@ class MCPClient:
             self.logger.error(f"Failed to call tool '{name}': {e}")
             raise
 
-    async def list_resources(self, session: "ClientSession") ->
+    async def list_resources(self, session: "ClientSession") -> list[dict[str, Any]]:
         """
         List available resources from an MCP server.
 
@@ -212,7 +212,7 @@ class MCPClient:
             self.logger.error(f"Failed to read resource '{uri}': {e}")
             raise
 
-    async def list_prompts(self, session: "ClientSession") ->
+    async def list_prompts(self, session: "ClientSession") -> list[dict[str, Any]]:
         """
         List available prompts from an MCP server.
 
@@ -252,8 +252,8 @@ class MCPClient:
             return []
 
     async def get_prompt(
-        self, session: "ClientSession", name: str, arguments:
-    ) ->
+        self, session: "ClientSession", name: str, arguments: dict[str, Any]
+    ) -> dict[str, Any]:
         """
         Get a prompt from an MCP server.
 
@@ -292,8 +292,8 @@ class MCPClient:
 
 # Convenience functions for LLM agents
 async def discover_and_prepare_tools(
-    mcp_servers:
-) ->
+    mcp_servers: list[str | dict[str, Any]],
+) -> list[dict[str, Any]]:
     """
     Discover tools from multiple MCP servers and prepare them for LLM use.
 
kailash/mcp/server.py
CHANGED
@@ -33,7 +33,7 @@ Examples:
 
 import logging
 from abc import ABC, abstractmethod
-from
+from collections.abc import Callable
 
 logger = logging.getLogger(__name__)
 
@@ -84,7 +84,6 @@ class MCPServer(ABC):
         Use @self.add_tool(), @self.add_resource(uri), and
         @self.add_prompt(name) decorators to register capabilities.
         """
-        pass
 
     def add_tool(self):
         """Decorator to add a tool to the server.
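
The MCP client changes above apply the same modernization at the call-signature level: server configurations are now typed as str | dict[str, Any], so a server can be passed either as a plain identifier or as a full configuration mapping. A hypothetical usage sketch follows; the method name and parameter type come from the diff above, while the no-argument constructor and the config keys are assumptions, not taken from the package:

import asyncio

from kailash.mcp.client import MCPClient


async def main() -> None:
    client = MCPClient()  # assumed no-argument constructor

    # A server reference can be a plain string...
    tools = await client.discover_tools("ai-registry")

    # ...or a configuration mapping (these keys are illustrative assumptions).
    tools = await client.discover_tools(
        {
            "transport": "stdio",
            "command": "python",
            "args": ["-m", "kailash.mcp.ai_registry_server"],
        }
    )
    print(f"Discovered {len(tools)} tools")


asyncio.run(main())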