kailash 0.6.1__py3-none-any.whl → 0.6.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/core/actors/connection_actor.py +3 -3
- kailash/gateway/api.py +7 -5
- kailash/gateway/enhanced_gateway.py +1 -1
- kailash/{mcp → mcp_server}/__init__.py +12 -7
- kailash/{mcp → mcp_server}/ai_registry_server.py +2 -2
- kailash/{mcp/server_enhanced.py → mcp_server/server.py} +231 -48
- kailash/{mcp → mcp_server}/servers/ai_registry.py +2 -2
- kailash/{mcp → mcp_server}/utils/__init__.py +1 -6
- kailash/middleware/auth/access_control.py +5 -5
- kailash/middleware/gateway/checkpoint_manager.py +45 -8
- kailash/middleware/mcp/client_integration.py +1 -1
- kailash/middleware/mcp/enhanced_server.py +2 -2
- kailash/nodes/admin/permission_check.py +110 -30
- kailash/nodes/admin/schema.sql +387 -0
- kailash/nodes/admin/tenant_isolation.py +249 -0
- kailash/nodes/admin/transaction_utils.py +244 -0
- kailash/nodes/admin/user_management.py +37 -9
- kailash/nodes/ai/ai_providers.py +55 -3
- kailash/nodes/ai/iterative_llm_agent.py +1 -1
- kailash/nodes/ai/llm_agent.py +118 -16
- kailash/nodes/data/sql.py +24 -0
- kailash/resources/registry.py +6 -0
- kailash/runtime/async_local.py +7 -0
- kailash/utils/export.py +152 -0
- kailash/workflow/builder.py +42 -0
- kailash/workflow/graph.py +86 -17
- kailash/workflow/templates.py +4 -9
- {kailash-0.6.1.dist-info → kailash-0.6.3.dist-info}/METADATA +3 -2
- {kailash-0.6.1.dist-info → kailash-0.6.3.dist-info}/RECORD +40 -38
- kailash/mcp/server.py +0 -292
- /kailash/{mcp → mcp_server}/client.py +0 -0
- /kailash/{mcp → mcp_server}/client_new.py +0 -0
- /kailash/{mcp → mcp_server}/utils/cache.py +0 -0
- /kailash/{mcp → mcp_server}/utils/config.py +0 -0
- /kailash/{mcp → mcp_server}/utils/formatters.py +0 -0
- /kailash/{mcp → mcp_server}/utils/metrics.py +0 -0
- {kailash-0.6.1.dist-info → kailash-0.6.3.dist-info}/WHEEL +0 -0
- {kailash-0.6.1.dist-info → kailash-0.6.3.dist-info}/entry_points.txt +0 -0
- {kailash-0.6.1.dist-info → kailash-0.6.3.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.6.1.dist-info → kailash-0.6.3.dist-info}/top_level.txt +0 -0
kailash/nodes/ai/ai_providers.py
CHANGED
@@ -387,10 +387,16 @@ class OllamaProvider(UnifiedAIProvider):
             return self._available
 
         try:
+            import os
+
             import ollama
 
+            # Check with environment-configured host if available
+            host = os.getenv("OLLAMA_BASE_URL") or os.getenv("OLLAMA_HOST")
+            client = ollama.Client(host=host) if host else ollama.Client()
+
             # Check if Ollama is running
-            ollama.list()
+            client.list()
             self._available = True
         except Exception:
             self._available = False
@@ -409,6 +415,9 @@ class OllamaProvider(UnifiedAIProvider):
                 * temperature, max_tokens, top_p, top_k, repeat_penalty
                 * seed, stop, num_ctx, num_batch, num_thread
                 * tfs_z, typical_p, mirostat, mirostat_tau, mirostat_eta
+            backend_config (dict): Backend configuration including:
+                * host (str): Ollama host URL (default: from env or http://localhost:11434)
+                * port (int): Ollama port (if provided, will be appended to host)
 
         Returns:
             Dict containing the standardized response.
@@ -418,6 +427,28 @@ class OllamaProvider(UnifiedAIProvider):
 
         model = kwargs.get("model", "llama3.1:8b-instruct-q8_0")
         generation_config = kwargs.get("generation_config", {})
+        backend_config = kwargs.get("backend_config", {})
+
+        # Configure Ollama client with custom host if provided
+        if backend_config:
+            host = backend_config.get("host", "localhost")
+            port = backend_config.get("port")
+            if port:
+                # Construct full URL if port is provided
+                host = (
+                    f"http://{host}:{port}"
+                    if not host.startswith("http")
+                    else f"{host}:{port}"
+                )
+            elif backend_config.get("base_url"):
+                host = backend_config["base_url"]
+            self._client = ollama.Client(host=host)
+        elif self._client is None:
+            # Use default client
+            import os
+
+            host = os.getenv("OLLAMA_BASE_URL") or os.getenv("OLLAMA_HOST")
+            self._client = ollama.Client(host=host) if host else ollama.Client()
 
         # Map generation_config to Ollama options
         options = {
@@ -482,7 +513,7 @@ class OllamaProvider(UnifiedAIProvider):
                 processed_messages.append(msg)
 
             # Call Ollama
-            response = ollama.chat(
+            response = self._client.chat(
                 model=model, messages=processed_messages, options=options
             )
 
@@ -522,16 +553,37 @@ class OllamaProvider(UnifiedAIProvider):
         Supported kwargs:
             - model (str): Ollama model name (default: "snowflake-arctic-embed2")
             - normalize (bool): Normalize embeddings to unit length
+            - backend_config (dict): Backend configuration (host, port, base_url)
         """
         try:
             import ollama
 
             model = kwargs.get("model", "snowflake-arctic-embed2")
             normalize = kwargs.get("normalize", False)
+            backend_config = kwargs.get("backend_config", {})
+
+            # Configure Ollama client if not already configured
+            if backend_config and not hasattr(self, "_client"):
+                host = backend_config.get("host", "localhost")
+                port = backend_config.get("port")
+                if port:
+                    host = (
+                        f"http://{host}:{port}"
+                        if not host.startswith("http")
+                        else f"{host}:{port}"
+                    )
+                elif backend_config.get("base_url"):
+                    host = backend_config["base_url"]
+                self._client = ollama.Client(host=host)
+            elif not hasattr(self, "_client") or self._client is None:
+                import os
+
+                host = os.getenv("OLLAMA_BASE_URL") or os.getenv("OLLAMA_HOST")
+                self._client = ollama.Client(host=host) if host else ollama.Client()
 
             embeddings = []
             for text in texts:
-                response = ollama.embeddings(model=model, prompt=text)
+                response = self._client.embeddings(model=model, prompt=text)
                 embedding = response.get("embedding", [])
 
                 if normalize and embedding:
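
To make the new host-resolution rule concrete, here is a minimal standalone sketch of the branching these hunks introduce. The function name resolve_host is invented for illustration; the logic mirrors the diff, and the asserts cover the three configuration paths (host plus port, host that already carries a scheme, and base_url):

# Sketch of the backend_config host resolution added above (illustrative name).
def resolve_host(backend_config: dict) -> str:
    host = backend_config.get("host", "localhost")
    port = backend_config.get("port")
    if port:
        # A scheme is prepended only when the host does not already carry one.
        host = (
            f"http://{host}:{port}"
            if not host.startswith("http")
            else f"{host}:{port}"
        )
    elif backend_config.get("base_url"):
        host = backend_config["base_url"]
    return host

assert resolve_host({"host": "ollama.internal", "port": 11434}) == "http://ollama.internal:11434"
assert resolve_host({"host": "http://gpu-box", "port": 8080}) == "http://gpu-box:8080"
assert resolve_host({"base_url": "http://10.0.0.5:11434"}) == "http://10.0.0.5:11434"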
kailash/nodes/ai/iterative_llm_agent.py
CHANGED
@@ -457,7 +457,7 @@ class IterativeLLMAgentNode(LLMAgentNode):
         try:
             # Ensure MCP client is initialized
             if not hasattr(self, "_mcp_client"):
-                from kailash.mcp import MCPClient
+                from kailash.mcp_server import MCPClient
 
                 self._mcp_client = MCPClient()
 
kailash/nodes/ai/llm_agent.py
CHANGED
@@ -853,6 +853,62 @@ class LLMAgentNode(Node):
             "loaded_from": "mock_storage",
         }
 
+    def _run_async_in_sync_context(self, coro):
+        """
+        Run async coroutine in a synchronous context, handling existing event loops.
+
+        This helper method detects if an event loop is already running and handles
+        the execution appropriately to avoid "RuntimeError: This event loop is already running".
+
+        Args:
+            coro: The coroutine to execute
+
+        Returns:
+            The result of the coroutine execution
+
+        Raises:
+            TimeoutError: If the operation times out (30 seconds)
+            Exception: Any exception raised by the coroutine
+        """
+        import asyncio
+
+        try:
+            # Check if there's already a running event loop
+            loop = asyncio.get_running_loop()
+            # If we're here, there's a running loop - create a new thread
+            import threading
+
+            result = None
+            exception = None
+
+            def run_in_thread():
+                nonlocal result, exception
+                try:
+                    # Create new event loop in thread
+                    new_loop = asyncio.new_event_loop()
+                    asyncio.set_event_loop(new_loop)
+                    try:
+                        result = new_loop.run_until_complete(coro)
+                    finally:
+                        new_loop.close()
+                except Exception as e:
+                    exception = e
+
+            thread = threading.Thread(target=run_in_thread)
+            thread.start()
+            thread.join(timeout=30)  # 30 second timeout
+
+            if thread.is_alive():
+                raise TimeoutError("MCP operation timed out after 30 seconds")
+
+            if exception:
+                raise exception
+            return result
+
+        except RuntimeError:
+            # No running event loop, use asyncio.run()
+            return asyncio.run(coro)
+
     def _retrieve_mcp_context(
         self, mcp_servers: list[dict], mcp_context: list[str]
     ) -> list[dict[str, Any]]:
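
The helper above is this release's answer to calling async MCP APIs from synchronous node code. A condensed, self-contained sketch of the same detect-then-thread pattern (names here are illustrative, not the SDK's):

import asyncio
import threading

def run_sync(coro, timeout: float = 30.0):
    """Run `coro` from sync code, even if an event loop is already running."""
    try:
        asyncio.get_running_loop()  # raises RuntimeError when no loop is active
    except RuntimeError:
        return asyncio.run(coro)  # simple case: no loop, run directly

    # A loop is running: execute the coroutine on a fresh loop in a new thread.
    result, exc = None, None

    def worker():
        nonlocal result, exc
        try:
            result = asyncio.run(coro)  # asyncio.run creates its own loop
        except Exception as e:
            exc = e

    t = threading.Thread(target=worker)
    t.start()
    t.join(timeout)
    if t.is_alive():
        raise TimeoutError(f"coroutine did not finish within {timeout}s")
    if exc:
        raise exc
    return result

async def demo():
    await asyncio.sleep(0.01)
    return 42

print(run_sync(demo()))  # 42

One caveat applies to both the sketch and the SDK helper: if the timeout fires, the worker thread is left running, since threads cannot be cancelled from outside.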
@@ -929,7 +985,7 @@ class LLMAgentNode(Node):
             import asyncio
             from datetime import datetime
 
-            from kailash.mcp import MCPClient
+            from kailash.mcp_server import MCPClient
 
             # Initialize MCP client if not already done
             if not hasattr(self, "_mcp_client"):
@@ -939,14 +995,14 @@ class LLMAgentNode(Node):
             for server_config in mcp_servers:
                 try:
                     # List resources from server
-                    resources = asyncio.run(
+                    resources = self._run_async_in_sync_context(
                         self._mcp_client.list_resources(server_config)
                     )
 
                     # Read specific resources if requested
                     for uri in mcp_context:
                         try:
-                            resource_data = asyncio.run(
+                            resource_data = self._run_async_in_sync_context(
                                 self._mcp_client.read_resource(server_config, uri)
                             )
 
@@ -1014,17 +1070,48 @@ class LLMAgentNode(Node):
                             }
                         )
 
+                except TimeoutError as e:
+                    self.logger.warning(
+                        f"MCP server '{server_config.get('name', 'unknown')}' timed out after 30 seconds: {e}"
+                    )
+                    # Fall back to mock for this server
+                    context_data.append(
+                        {
+                            "uri": f"mcp://{server_config.get('name', 'unknown')}/fallback",
+                            "content": "MCP server timed out - using fallback content. Check if the server is running and accessible.",
+                            "source": server_config.get("name", "unknown"),
+                            "retrieved_at": datetime.now().isoformat(),
+                            "relevance_score": 0.5,
+                            "metadata": {
+                                "error": "timeout",
+                                "error_message": str(e),
+                            },
+                        }
+                    )
                 except Exception as e:
-
+                    error_type = type(e).__name__
+                    self.logger.error(
+                        f"MCP server '{server_config.get('name', 'unknown')}' connection failed ({error_type}): {e}"
+                    )
+
+                    # Provide helpful error messages based on exception type
+                    if "coroutine" in str(e).lower() and "await" in str(e).lower():
+                        self.logger.error(
+                            "This appears to be an async/await issue. Please report this bug to the Kailash SDK team."
+                        )
+
                     # Fall back to mock for this server
                     context_data.append(
                         {
                             "uri": f"mcp://{server_config.get('name', 'unknown')}/fallback",
-                            "content": "Connection failed - using fallback content",
+                            "content": f"Connection failed ({error_type}) - using fallback content. Error: {str(e)}",
                             "source": server_config.get("name", "unknown"),
                             "retrieved_at": datetime.now().isoformat(),
                             "relevance_score": 0.5,
-                            "metadata": {
+                            "metadata": {
+                                "error": error_type,
+                                "error_message": str(e),
+                            },
                         }
                     )
 
@@ -1032,11 +1119,17 @@ class LLMAgentNode(Node):
             if context_data:
                 return context_data
 
-        except ImportError:
+        except ImportError as e:
             # MCPClient not available, fall back to mock
+            self.logger.info(
+                "MCP client not available. Install the MCP SDK with 'pip install mcp' to use real MCP servers."
+            )
             pass
         except Exception as e:
-            self.logger.
+            self.logger.error(
+                f"Unexpected error in MCP retrieval: {type(e).__name__}: {e}"
+            )
+            self.logger.info("Falling back to mock MCP implementation.")
 
         # Fallback to mock implementation
         for uri in mcp_context:
@@ -1089,9 +1182,7 @@ class LLMAgentNode(Node):
 
         if use_real_mcp:
             try:
-                import asyncio
-
-                from kailash.mcp import MCPClient
+                from kailash.mcp_server import MCPClient
 
                 # Initialize MCP client if not already done
                 if not hasattr(self, "_mcp_client"):
@@ -1101,7 +1192,7 @@ class LLMAgentNode(Node):
             for server_config in mcp_servers:
                 try:
                     # Discover tools asynchronously
-                    tools = asyncio.run(
+                    tools = self._run_async_in_sync_context(
                         self._mcp_client.discover_tools(server_config)
                     )
 
@@ -1131,16 +1222,27 @@ class LLMAgentNode(Node):
                         {"type": "function", "function": function_def}
                     )
 
+                except TimeoutError as e:
+                    self.logger.warning(
+                        f"Tool discovery timed out for MCP server '{server_config.get('name', 'unknown')}': {e}"
+                    )
                 except Exception as e:
-
-
+                    error_type = type(e).__name__
+                    self.logger.error(
+                        f"Failed to discover tools from '{server_config.get('name', 'unknown')}' ({error_type}): {e}"
                     )
 
             except ImportError:
                 # MCPClient not available, use mock tools
+                self.logger.info(
+                    "MCP client not available for tool discovery. Install with 'pip install mcp' for real MCP tools."
+                )
                 pass
             except Exception as e:
-                self.logger.
+                self.logger.error(
+                    f"Unexpected error in MCP tool discovery: {type(e).__name__}: {e}"
+                )
+                self.logger.info("Using mock tools as fallback.")
 
             # If no real tools discovered, provide minimal generic tools
             if not discovered_tools:
@@ -1665,7 +1767,7 @@ class LLMAgentNode(Node):
         server_config = mcp_tool.get("function", {}).get("mcp_server_config", {})
 
         try:
-            from kailash.mcp import MCPClient
+            from kailash.mcp_server import MCPClient
 
             # Initialize MCP client if not already done
             if not hasattr(self, "_mcp_client"):
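
The recurring import change in these hunks is the package rename announced in the file list (kailash/mcp to kailash/mcp_server). Downstream code migrates the same way; a one-line sketch, assuming MCPClient remains re-exported at the package root as these hunks indicate:

# Before (kailash 0.6.1):
# from kailash.mcp import MCPClient

# After (kailash 0.6.3):
from kailash.mcp_server import MCPClient

client = MCPClient()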
kailash/nodes/data/sql.py
CHANGED
@@ -494,6 +494,30 @@ class SQLDatabaseNode(Node):
             "execution_time": execution_time,
         }
 
+    async def async_run(self, **kwargs) -> dict[str, Any]:
+        """
+        Async wrapper for the run method to maintain backward compatibility.
+
+        This method provides an async interface while maintaining the same
+        functionality as the synchronous run method. The underlying SQLAlchemy
+        operations are still synchronous but wrapped for async compatibility.
+
+        Args:
+            **kwargs: Same parameters as run()
+
+        Returns:
+            Same return format as run()
+
+        Note:
+            This is a compatibility method. The actual database operations
+            are still synchronous underneath.
+        """
+        import asyncio
+
+        # Run the synchronous method in a thread pool to avoid blocking
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(None, lambda: self.run(**kwargs))
+
     @classmethod
     def get_pool_status(cls) -> dict[str, Any]:
         """Get status of all shared connection pools."""
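
A hypothetical usage sketch of the new wrapper from async code. The constructor and query kwarg names are assumed for illustration; only async_run's signature and the "execution_time" result key come from the hunk above:

import asyncio

from kailash.nodes.data.sql import SQLDatabaseNode

async def main():
    # connection_string and query are assumed parameter names, for illustration
    node = SQLDatabaseNode(connection_string="sqlite:///example.db")
    result = await node.async_run(query="SELECT 1")  # same kwargs as run()
    print(result["execution_time"])

asyncio.run(main())

Because run_in_executor moves the synchronous SQLAlchemy call onto a worker thread, the event loop stays responsive while the query runs. Inside a coroutine, asyncio.get_running_loop() is the modern replacement for the get_event_loop() call used in the wrapper.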
kailash/resources/registry.py
CHANGED
@@ -266,6 +266,12 @@ class ResourceRegistry:
             )
 
         # Try generic cleanup methods
+        elif hasattr(resource, "aclose"):
+            # Use aclose for modern async resources (e.g., Redis)
+            if asyncio.iscoroutinefunction(resource.aclose):
+                await resource.aclose()
+            else:
+                resource.aclose()
         elif hasattr(resource, "close"):
             if asyncio.iscoroutinefunction(resource.close):
                 await resource.close()
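
The new branch prefers aclose() over close(), matching the direction async libraries have taken (redis-py, for example, deprecates close() in favor of aclose() on its async client). A standalone sketch of the resulting cleanup order, using a stub resource:

import asyncio

class FakeAsyncResource:
    """Stub standing in for something like redis.asyncio.Redis."""

    async def aclose(self):
        print("aclose() called")

    async def close(self):
        print("close() called")  # never reached: aclose takes precedence

async def cleanup(resource):
    # Mirrors the registry's new preference: aclose() first, then close().
    if hasattr(resource, "aclose"):
        if asyncio.iscoroutinefunction(resource.aclose):
            await resource.aclose()
        else:
            resource.aclose()
    elif hasattr(resource, "close"):
        if asyncio.iscoroutinefunction(resource.close):
            await resource.close()
        else:
            resource.close()

asyncio.run(cleanup(FakeAsyncResource()))  # prints "aclose() called"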
kailash/runtime/async_local.py
CHANGED
@@ -717,6 +717,13 @@ class AsyncLocalRuntime(LocalRuntime):
         # Navigate the path (e.g., "result.data")
         path_parts = source_path.split(".")
         current_data = source_data
+
+        # CRITICAL FIX: Handle paths starting with "result"
+        # When source_path is "result.field", the node output IS the result
+        if path_parts[0] == "result" and len(path_parts) > 1:
+            # Skip the "result" prefix and navigate from the actual data
+            path_parts = path_parts[1:]
+
         for part in path_parts:
             if (
                 isinstance(current_data, dict)
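
To see what the fix changes, here is a standalone sketch of the path navigation, simplified from the runtime logic above:

def navigate(source_data, source_path):
    # Simplified version of the runtime's path navigation shown above.
    path_parts = source_path.split(".")
    if path_parts[0] == "result" and len(path_parts) > 1:
        path_parts = path_parts[1:]  # the node output already IS the result
    current = source_data
    for part in path_parts:
        if isinstance(current, dict) and part in current:
            current = current[part]
        else:
            return None  # the real runtime has richer error handling
    return current

output = {"data": [1, 2, 3], "count": 3}
assert navigate(output, "result.data") == [1, 2, 3]  # failed before the fix
assert navigate(output, "count") == 3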
kailash/utils/export.py
CHANGED
@@ -4,6 +4,7 @@ import json
 import logging
 import re
 from copy import deepcopy
+from datetime import UTC, datetime
 from pathlib import Path
 from typing import Any
 
@@ -643,6 +644,157 @@ class WorkflowExporter:
         except Exception as e:
             raise ExportException(f"Failed to export workflow manifest: {e}") from e
 
+    def export_as_code(self, workflow: Workflow, output_path: str | None = None) -> str:
+        """Export workflow as executable Python code.
+
+        Args:
+            workflow: Workflow to export
+            output_path: Optional path to write Python file
+
+        Returns:
+            Python code string
+
+        Raises:
+            ExportException: If export fails
+        """
+        if not workflow:
+            raise ExportException("Workflow is required")
+
+        try:
+            if self.pre_export_hook:
+                self.pre_export_hook(workflow, "python")
+
+            # Generate Python code
+            metadata = workflow.metadata if hasattr(workflow, "metadata") else {}
+            if isinstance(metadata, dict):
+                name = metadata.get("name", "workflow")
+                description = metadata.get("description", "Generated workflow")
+            else:
+                name = getattr(metadata, "name", "workflow")
+                description = getattr(metadata, "description", "Generated workflow")
+
+            code_lines = [
+                "#!/usr/bin/env python3",
+                '"""',
+                f"Generated workflow: {name}",
+                f"Description: {description}",
+                f"Generated at: {datetime.now(UTC).isoformat()}",
+                '"""',
+                "",
+                "from kailash import WorkflowBuilder",
+                "from kailash.runtime.local import LocalRuntime",
+                "",
+                "",
+                "def build_workflow():",
+                '    """Build the workflow."""',
+                "    builder = WorkflowBuilder()",
+                "",
+            ]
+
+            # Add nodes
+            for node_id, node in workflow.nodes.items():
+                node_type = node.node_type
+                config = node.config
+
+                # Format config as Python dict
+                config_str = self._format_dict_for_code(config, indent=8)
+
+                code_lines.extend(
+                    [
+                        f"    # Add {node_type} node",
+                        f'    builder.add_node("{node_type}", "{node_id}", config={config_str})',
+                        "",
+                    ]
+                )
+
+            # Add connections
+            if workflow.connections:
+                code_lines.append("    # Add connections")
+                for conn in workflow.connections:
+                    code_lines.append(
+                        f'    builder.add_connection("{conn.source_node}", "{conn.source_output}", '
+                        f'"{conn.target_node}", "{conn.target_input}")'
+                    )
+                code_lines.append("")
+
+            # Build workflow
+            code_lines.extend(
+                [
+                    f'    return builder.build("{name}")',
+                    "",
+                    "",
+                    "def main():",
+                    '    """Execute the workflow."""',
+                    "    # Build workflow",
+                    "    workflow = build_workflow()",
+                    "    ",
+                    "    # Create runtime",
+                    "    runtime = LocalRuntime()",
+                    "    ",
+                    "    # Execute workflow",
+                    "    result = runtime.execute(workflow)",
+                    "    ",
+                    "    # Print results",
+                    '    print("Workflow execution completed!")',
+                    '    print(f"Result: {result}")',
+                    "",
+                    "",
+                    'if __name__ == "__main__":',
+                    "    main()",
+                    "",
+                ]
+            )
+
+            python_code = "\n".join(code_lines)
+
+            if output_path:
+                try:
+                    Path(output_path).parent.mkdir(parents=True, exist_ok=True)
+                    Path(output_path).write_text(python_code)
+                    # Make executable
+                    Path(output_path).chmod(0o755)
+                except Exception as e:
+                    raise ExportException(
+                        f"Failed to write Python code to '{output_path}': {e}"
+                    ) from e
+
+            if self.post_export_hook:
+                self.post_export_hook(workflow, "python", python_code)
+
+            return python_code
+
+        except ExportException:
+            raise
+        except Exception as e:
+            raise ExportException(f"Failed to export workflow as code: {e}") from e
+
+    def _format_dict_for_code(self, data: dict, indent: int = 0) -> str:
+        """Format dictionary for Python code generation."""
+        if not data:
+            return "{}"
+
+        lines = ["{"]
+        indent_str = " " * indent
+        inner_indent = " " * (indent + 4)
+
+        for i, (key, value) in enumerate(data.items()):
+            if isinstance(value, str):
+                value_str = f'"{value}"'
+            elif isinstance(value, dict):
+                value_str = self._format_dict_for_code(value, indent + 4)
+            elif isinstance(value, list):
+                value_str = str(value)
+            else:
+                value_str = str(value)
+
+            line = f'{inner_indent}"{key}": {value_str}'
+            if i < len(data) - 1:
+                line += ","
+            lines.append(line)
+
+        lines.append(indent_str + "}")
+        return "\n".join(lines)
+
     def export_with_templates(
         self, workflow: Workflow, template_name: str, output_dir: str
     ) -> dict[str, str]:
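
A hypothetical usage sketch of the new export path. The node type and config are illustrative and WorkflowExporter is assumed to take no constructor arguments; the builder calls and the generated file's first line follow from the code above:

from kailash import WorkflowBuilder
from kailash.utils.export import WorkflowExporter

# Build a trivial workflow to export (node type/config are illustrative).
builder = WorkflowBuilder()
builder.add_node("PythonCodeNode", "step1", config={"code": "result = 1"})
workflow = builder.build("demo")

exporter = WorkflowExporter()
code = exporter.export_as_code(workflow, output_path="generated_workflow.py")
print(code.splitlines()[0])  # "#!/usr/bin/env python3"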
kailash/workflow/builder.py
CHANGED
@@ -162,6 +162,48 @@ class WorkflowBuilder:
 
         logger.info(f"Connected '{from_node}.{from_output}' to '{to_node}.{to_input}'")
 
+    def connect(
+        self,
+        from_node: str,
+        to_node: str,
+        mapping: dict = None,
+        from_output: str = None,
+        to_input: str = None,
+    ) -> None:
+        """
+        Connect two nodes in the workflow with flexible parameter formats.
+
+        This method provides a more intuitive API for connecting nodes and supports
+        both simple connections and complex mapping-based connections.
+
+        Args:
+            from_node: Source node ID
+            to_node: Target node ID
+            mapping: Dict mapping from_output to to_input (e.g., {"data": "input"})
+            from_output: Single output field (alternative to mapping)
+            to_input: Single input field (alternative to mapping)
+
+        Examples:
+            # Simple connection
+            workflow.connect("node1", "node2", from_output="data", to_input="input")
+
+            # Mapping-based connection
+            workflow.connect("node1", "node2", mapping={"data": "input"})
+
+            # Default data flow
+            workflow.connect("node1", "node2")  # Uses "data" -> "data"
+        """
+        if mapping:
+            # Handle mapping-based connections
+            for from_out, to_in in mapping.items():
+                self.add_connection(from_node, from_out, to_node, to_in)
+        elif from_output and to_input:
+            # Handle explicit parameter connections
+            self.add_connection(from_node, from_output, to_node, to_input)
+        else:
+            # Default data flow
+            self.add_connection(from_node, "data", to_node, "data")
+
     def set_metadata(self, **kwargs) -> "WorkflowBuilder":
         """
         Set workflow metadata.
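
For reference, the three call forms accepted by the new connect(), side by side. The node type names are illustrative; the add_node(type, id, config=...) shape matches the code that export_as_code generates above:

from kailash import WorkflowBuilder

builder = WorkflowBuilder()
builder.add_node("CSVReaderNode", "reader", config={})
builder.add_node("PythonCodeNode", "transform", config={})

# Equivalent alternatives; in practice, pick one:
builder.connect("reader", "transform", from_output="data", to_input="input")
builder.connect("reader", "transform", mapping={"data": "input"})
builder.connect("reader", "transform")  # default: "data" -> "data"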
|