kailash 0.2.2__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/access_control.py +40 -39
- kailash/api/auth.py +26 -32
- kailash/api/custom_nodes.py +29 -29
- kailash/api/custom_nodes_secure.py +35 -35
- kailash/api/database.py +17 -17
- kailash/api/gateway.py +19 -19
- kailash/api/mcp_integration.py +24 -23
- kailash/api/studio.py +45 -45
- kailash/api/workflow_api.py +8 -8
- kailash/cli/commands.py +5 -8
- kailash/manifest.py +42 -42
- kailash/mcp/__init__.py +1 -1
- kailash/mcp/ai_registry_server.py +20 -20
- kailash/mcp/client.py +9 -11
- kailash/mcp/client_new.py +10 -10
- kailash/mcp/server.py +1 -2
- kailash/mcp/server_enhanced.py +449 -0
- kailash/mcp/servers/ai_registry.py +6 -6
- kailash/mcp/utils/__init__.py +31 -0
- kailash/mcp/utils/cache.py +267 -0
- kailash/mcp/utils/config.py +263 -0
- kailash/mcp/utils/formatters.py +293 -0
- kailash/mcp/utils/metrics.py +418 -0
- kailash/nodes/ai/agents.py +9 -9
- kailash/nodes/ai/ai_providers.py +33 -34
- kailash/nodes/ai/embedding_generator.py +31 -32
- kailash/nodes/ai/intelligent_agent_orchestrator.py +62 -66
- kailash/nodes/ai/iterative_llm_agent.py +48 -48
- kailash/nodes/ai/llm_agent.py +32 -33
- kailash/nodes/ai/models.py +13 -13
- kailash/nodes/ai/self_organizing.py +44 -44
- kailash/nodes/api/__init__.py +5 -0
- kailash/nodes/api/auth.py +11 -11
- kailash/nodes/api/graphql.py +13 -13
- kailash/nodes/api/http.py +19 -19
- kailash/nodes/api/monitoring.py +463 -0
- kailash/nodes/api/rate_limiting.py +9 -13
- kailash/nodes/api/rest.py +29 -29
- kailash/nodes/api/security.py +819 -0
- kailash/nodes/base.py +24 -26
- kailash/nodes/base_async.py +7 -7
- kailash/nodes/base_cycle_aware.py +12 -12
- kailash/nodes/base_with_acl.py +5 -5
- kailash/nodes/code/python.py +56 -55
- kailash/nodes/data/__init__.py +6 -0
- kailash/nodes/data/directory.py +6 -6
- kailash/nodes/data/event_generation.py +297 -0
- kailash/nodes/data/file_discovery.py +598 -0
- kailash/nodes/data/readers.py +8 -8
- kailash/nodes/data/retrieval.py +10 -10
- kailash/nodes/data/sharepoint_graph.py +17 -17
- kailash/nodes/data/sources.py +5 -5
- kailash/nodes/data/sql.py +13 -13
- kailash/nodes/data/streaming.py +25 -25
- kailash/nodes/data/vector_db.py +22 -22
- kailash/nodes/data/writers.py +7 -7
- kailash/nodes/logic/async_operations.py +17 -17
- kailash/nodes/logic/convergence.py +11 -11
- kailash/nodes/logic/loop.py +4 -4
- kailash/nodes/logic/operations.py +11 -11
- kailash/nodes/logic/workflow.py +8 -9
- kailash/nodes/mixins/mcp.py +17 -17
- kailash/nodes/mixins.py +8 -10
- kailash/nodes/transform/chunkers.py +3 -3
- kailash/nodes/transform/formatters.py +7 -7
- kailash/nodes/transform/processors.py +11 -11
- kailash/runtime/access_controlled.py +18 -18
- kailash/runtime/async_local.py +18 -20
- kailash/runtime/docker.py +24 -26
- kailash/runtime/local.py +55 -31
- kailash/runtime/parallel.py +25 -25
- kailash/runtime/parallel_cyclic.py +29 -29
- kailash/runtime/runner.py +6 -6
- kailash/runtime/testing.py +22 -22
- kailash/sdk_exceptions.py +0 -58
- kailash/security.py +14 -26
- kailash/tracking/manager.py +38 -38
- kailash/tracking/metrics_collector.py +15 -14
- kailash/tracking/models.py +53 -53
- kailash/tracking/storage/base.py +7 -17
- kailash/tracking/storage/database.py +22 -23
- kailash/tracking/storage/filesystem.py +38 -40
- kailash/utils/export.py +21 -21
- kailash/utils/templates.py +8 -9
- kailash/visualization/api.py +30 -34
- kailash/visualization/dashboard.py +17 -17
- kailash/visualization/performance.py +32 -19
- kailash/visualization/reports.py +30 -28
- kailash/workflow/builder.py +8 -8
- kailash/workflow/convergence.py +13 -12
- kailash/workflow/cycle_analyzer.py +38 -33
- kailash/workflow/cycle_builder.py +12 -12
- kailash/workflow/cycle_config.py +16 -15
- kailash/workflow/cycle_debugger.py +40 -40
- kailash/workflow/cycle_exceptions.py +29 -29
- kailash/workflow/cycle_profiler.py +21 -21
- kailash/workflow/cycle_state.py +20 -22
- kailash/workflow/cyclic_runner.py +45 -45
- kailash/workflow/graph.py +57 -45
- kailash/workflow/mermaid_visualizer.py +9 -11
- kailash/workflow/migration.py +22 -22
- kailash/workflow/mock_registry.py +6 -6
- kailash/workflow/runner.py +9 -9
- kailash/workflow/safety.py +12 -13
- kailash/workflow/state.py +8 -11
- kailash/workflow/templates.py +19 -19
- kailash/workflow/validation.py +14 -14
- kailash/workflow/visualization.py +32 -24
- kailash-0.3.1.dist-info/METADATA +476 -0
- kailash-0.3.1.dist-info/RECORD +136 -0
- kailash-0.2.2.dist-info/METADATA +0 -121
- kailash-0.2.2.dist-info/RECORD +0 -126
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/WHEEL +0 -0
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/entry_points.txt +0 -0
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.2.2.dist-info → kailash-0.3.1.dist-info}/top_level.txt +0 -0
kailash/nodes/data/vector_db.py
CHANGED

@@ -44,7 +44,7 @@ Example:
     ... })
     """

-from typing import Any
+from typing import Any

 import numpy as np

@@ -142,7 +142,7 @@ class EmbeddingNode(Node):
         self._model = None
         self._model_info = {}

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define parameters for the embedding node."""
         return {
             "model": NodeParameter(
@@ -188,7 +188,7 @@ class EmbeddingNode(Node):
             ),
         }

-    def configure(self, config:
+    def configure(self, config: dict[str, Any]) -> None:
         """Configure the embedding node with model settings.

         Validates configuration, initializes the embedding model, and
@@ -241,7 +241,7 @@ class EmbeddingNode(Node):
             "max_tokens": self.config.get("max_tokens", 8192),
         }

-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
         """Generate embeddings for input texts.

         Implementation of the abstract run method from the base Node class.
@@ -254,7 +254,7 @@ class EmbeddingNode(Node):
         """
         return self.execute(kwargs)

-    def execute(self, inputs:
+    def execute(self, inputs: dict[str, Any]) -> dict[str, Any]:
         """Generate embeddings for input texts.

         Processes the input texts through the configured embedding model,
@@ -295,7 +295,7 @@ class EmbeddingNode(Node):
         except Exception as e:
             raise NodeExecutionError(f"Failed to generate embeddings: {str(e)}")

-    def _generate_embeddings(self, texts:
+    def _generate_embeddings(self, texts: list[str]) -> list[list[float]]:
         """Generate embeddings for a batch of texts.

         This is a placeholder for actual embedding generation logic.
@@ -310,7 +310,7 @@ class EmbeddingNode(Node):
         dim = self._model_info.get("dimensions", 768)
         return [np.random.randn(dim).tolist() for _ in texts]

-    def _normalize_embeddings(self, embeddings:
+    def _normalize_embeddings(self, embeddings: list[list[float]]) -> list[list[float]]:
         """Normalize embedding vectors to unit length.

         Normalizes each embedding vector to have a magnitude of 1.0,
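The `_normalize_embeddings` docstring above describes scaling each vector to a magnitude of 1.0. A minimal sketch of what such a unit-length normalization typically looks like (an illustration only, not the package's actual implementation):

import numpy as np

def normalize_embeddings(embeddings: list[list[float]]) -> list[list[float]]:
    normalized = []
    for vec in embeddings:
        arr = np.asarray(vec, dtype=float)
        norm = float(np.linalg.norm(arr))
        # Zero vectors cannot be scaled to unit length; pass them through.
        normalized.append((arr / norm).tolist() if norm > 0 else arr.tolist())
    return normalized

print(normalize_embeddings([[3.0, 4.0]]))  # [[0.6, 0.8]], magnitude 1.0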
@@ -442,7 +442,7 @@ class VectorDatabaseNode(Node):
         self._client = None
         self._index = None

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define parameters for the vector database node."""
         return {
             "provider": NodeParameter(
@@ -484,7 +484,7 @@ class VectorDatabaseNode(Node):
             ),
         }

-    def configure(self, config:
+    def configure(self, config: dict[str, Any]) -> None:
         """Configure the vector database connection.

         Establishes connection to the vector database, validates the index,
@@ -529,7 +529,7 @@ class VectorDatabaseNode(Node):
         self._client = f"{provider}_client"
         self._index = self.config.get("index_name")

-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
         """Execute vector database operations.

         Implementation of the abstract run method from the base Node class.
@@ -542,7 +542,7 @@ class VectorDatabaseNode(Node):
         """
         return self.execute(kwargs)

-    def execute(self, inputs:
+    def execute(self, inputs: dict[str, Any]) -> dict[str, Any]:
         """Execute vector database operations.

         Performs the requested operation (upsert, query, delete, fetch)
@@ -573,7 +573,7 @@ class VectorDatabaseNode(Node):
         except Exception as e:
             raise NodeExecutionError(f"Vector operation failed: {str(e)}")

-    def _upsert_vectors(self, inputs:
+    def _upsert_vectors(self, inputs: dict[str, Any]) -> dict[str, Any]:
         """Insert or update vectors in the database.

         Args:
@@ -600,7 +600,7 @@ class VectorDatabaseNode(Node):
             "index": self._index,
         }

-    def _query_vectors(self, inputs:
+    def _query_vectors(self, inputs: dict[str, Any]) -> dict[str, Any]:
         """Query similar vectors from the database.

         Args:
@@ -631,7 +631,7 @@ class VectorDatabaseNode(Node):
             "count": min(k, 5),
         }

-    def _delete_vectors(self, inputs:
+    def _delete_vectors(self, inputs: dict[str, Any]) -> dict[str, Any]:
         """Delete vectors from the database.

         Args:
@@ -648,7 +648,7 @@ class VectorDatabaseNode(Node):
         # Placeholder for actual deletion
         return {"operation": "delete", "status": "success", "count": len(ids)}

-    def _fetch_vectors(self, inputs:
+    def _fetch_vectors(self, inputs: dict[str, Any]) -> dict[str, Any]:
         """Fetch specific vectors by ID.

         Args:
@@ -748,7 +748,7 @@ class TextSplitterNode(Node):
             tags={"text", "processing", "nlp"},
         )

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define parameters for the text splitter node."""
         return {
             "strategy": NodeParameter(
@@ -788,7 +788,7 @@ class TextSplitterNode(Node):
             ),
         }

-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
         """Split text into chunks using configured strategy.

         Implementation of the abstract run method from the base Node class.
@@ -801,7 +801,7 @@ class TextSplitterNode(Node):
         """
         return self.execute(kwargs)

-    def execute(self, inputs:
+    def execute(self, inputs: dict[str, Any]) -> dict[str, Any]:
         """Split text into chunks using configured strategy.

         Args:
@@ -854,7 +854,7 @@ class TextSplitterNode(Node):
         except Exception as e:
             raise NodeExecutionError(f"Text splitting failed: {str(e)}")

-    def _recursive_split(self, text: str) ->
+    def _recursive_split(self, text: str) -> list[str]:
         """Split text recursively using multiple separators.

         Args:
@@ -878,7 +878,7 @@ class TextSplitterNode(Node):

         return chunks

-    def _character_split(self, text: str) ->
+    def _character_split(self, text: str) -> list[str]:
         """Split text by character count.

         Args:
@@ -908,7 +908,7 @@ class TextSplitterNode(Node):

         return chunks

-    def _sentence_split(self, text: str) ->
+    def _sentence_split(self, text: str) -> list[str]:
         """Split text by sentences.

         Args:
@@ -936,7 +936,7 @@ class TextSplitterNode(Node):

         return chunks

-    def _token_split(self, text: str) ->
+    def _token_split(self, text: str) -> list[str]:
         """Split text by token count.

         Args:
kailash/nodes/data/writers.py
CHANGED

@@ -31,7 +31,7 @@ Downstream Consumers:

 import csv
 import json
-from typing import Any
+from typing import Any

 from kailash.nodes.base import Node, NodeParameter, register_node
 from kailash.security import safe_open, validate_file_path
@@ -96,7 +96,7 @@ class CSVWriterNode(Node):
     >>> # result = {'rows_written': 2, 'file_path': 'output.csv'}
     """

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define input parameters for CSV writing.

         Provides comprehensive parameters for flexible CSV output,
@@ -144,7 +144,7 @@ class CSVWriterNode(Node):
             ),
         }

-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
         """Execute CSV writing operation.

         Intelligently handles different data structures, automatically
@@ -280,7 +280,7 @@ class JSONWriterNode(Node):
     >>> # result = {'file_path': 'response.json'}
     """

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define input parameters for JSON writing.

         Minimal parameters reflecting JSON's flexibility while
@@ -320,7 +320,7 @@ class JSONWriterNode(Node):
             ),
         }

-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
         """Execute JSON writing operation.

         Serializes data to JSON format with proper formatting
@@ -431,7 +431,7 @@ class TextWriterNode(Node):
     >>> # result = {'file_path': 'app.log', 'bytes_written': 25}
     """

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define input parameters for text writing.

         Comprehensive parameters supporting various text writing
@@ -478,7 +478,7 @@ class TextWriterNode(Node):
             ),
         }

-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
         """Execute text writing operation.

         Writes text to file with specified encoding and mode.
kailash/nodes/logic/async_operations.py
CHANGED

@@ -6,7 +6,7 @@ data processing tasks in workflows.
 """

 import asyncio
-from typing import Any
+from typing import Any

 from kailash.nodes.base import NodeParameter, register_node
 from kailash.nodes.base_async import AsyncNode
@@ -53,7 +53,7 @@ class AsyncMergeNode(AsyncNode):
     [1, 2, 3, 4]
     """

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define parameters for the AsyncMergeNode."""
         # Reuse parameters from SyncMerge
         return {
@@ -116,7 +116,7 @@ class AsyncMergeNode(AsyncNode):
             ),
         }

-    def get_output_schema(self) ->
+    def get_output_schema(self) -> dict[str, NodeParameter]:
         """Define the output schema for AsyncMergeNode."""
         return {
             "merged_data": NodeParameter(
@@ -127,7 +127,7 @@ class AsyncMergeNode(AsyncNode):
             )
         }

-    async def async_run(self, **kwargs) ->
+    async def async_run(self, **kwargs) -> dict[str, Any]:
         """Asynchronously execute the merge operation.

         This implementation provides efficient processing for large datasets by:
@@ -199,7 +199,7 @@ class AsyncMergeNode(AsyncNode):

         return {"merged_data": result}

-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
         """Synchronous execution method that delegates to the async implementation.

         This method is required by the Node abstract base class but shouldn't
@@ -220,7 +220,7 @@ class AsyncMergeNode(AsyncNode):
             "AsyncMergeNode.run() was called directly. Use execute() or execute_async() instead."
         )

-    async def _async_concat(self, data_inputs:
+    async def _async_concat(self, data_inputs: list[Any], chunk_size: int) -> Any:
         """Asynchronously concatenate data.

         Args:
@@ -254,7 +254,7 @@ class AsyncMergeNode(AsyncNode):

         return result

-    async def _async_zip(self, data_inputs:
+    async def _async_zip(self, data_inputs: list[Any]) -> list[tuple]:
         """Asynchronously zip data.

         Args:
@@ -275,10 +275,10 @@ class AsyncMergeNode(AsyncNode):
         await asyncio.sleep(0.005)

         # Zip the lists together
-        return list(zip(*normalized_inputs))
+        return list(zip(*normalized_inputs, strict=False))

     async def _async_merge_dict(
-        self, data_inputs:
+        self, data_inputs: list[Any], key: str | None, chunk_size: int
     ) -> Any:
         """Asynchronously merge dictionaries.

@@ -326,7 +326,7 @@ class AsyncMergeNode(AsyncNode):
         )

     async def _merge_dict_chunk(
-        self, result:
+        self, result: list[dict], data: list[dict], key: str
     ) -> None:
         """Merge a chunk of dictionaries into the result list.

@@ -405,7 +405,7 @@ class AsyncSwitchNode(AsyncNode):
     {'priority': 'high', 'task': 'urgent'}
     """

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define parameters for the AsyncSwitchNode."""
         return {
             "input_data": NodeParameter(
@@ -469,7 +469,7 @@ class AsyncSwitchNode(AsyncNode):
             ),
         }

-    def get_output_schema(self) ->
+    def get_output_schema(self) -> dict[str, NodeParameter]:
         """Dynamic schema with standard outputs."""
         return {
             "true_output": NodeParameter(
@@ -499,7 +499,7 @@ class AsyncSwitchNode(AsyncNode):
             # Note: case_X outputs are dynamic and not listed here
         }

-    async def async_run(self, **kwargs) ->
+    async def async_run(self, **kwargs) -> dict[str, Any]:
         """Asynchronously execute the switch operation.

         Args:
@@ -624,7 +624,7 @@ class AsyncSwitchNode(AsyncNode):
         self.logger.debug(f"AsyncSwitch node result keys: {list(result.keys())}")
         return result

-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
         """Synchronous execution method that delegates to the async implementation.

         This method is required by the Node abstract base class but shouldn't
@@ -693,12 +693,12 @@ class AsyncSwitchNode(AsyncNode):

     async def _handle_list_grouping(
         self,
-        groups:
-        cases:
+        groups: dict[Any, list],
+        cases: list[Any],
         case_prefix: str,
         default_field: str,
         pass_condition_result: bool,
-    ) ->
+    ) -> dict[str, Any]:
         """Asynchronously handle routing when input is a list of dictionaries.

         This method creates outputs for each case with the filtered data.
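The `async_run` docstring above mentions efficient processing of large datasets, and the hunks show `await asyncio.sleep(...)` calls between processing steps. A minimal sketch of that chunk-and-yield pattern (illustrative only, not the SDK's code):

import asyncio
from typing import Any

async def async_concat(data_inputs: list[list[Any]], chunk_size: int = 2) -> list[Any]:
    result: list[Any] = []
    for start in range(0, len(data_inputs), chunk_size):
        for data in data_inputs[start : start + chunk_size]:
            result.extend(data)
        await asyncio.sleep(0)  # cooperative yield between chunks
    return result

print(asyncio.run(async_concat([[1, 2], [3], [4, 5]])))  # [1, 2, 3, 4, 5]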
kailash/nodes/logic/convergence.py
CHANGED

@@ -27,7 +27,7 @@ Example usage:
     ... ))
 """

-from typing import Any
+from typing import Any

 from ..base import NodeParameter, register_node
 from ..base_cycle_aware import CycleAwareNode
@@ -102,7 +102,7 @@ class ConvergenceCheckerNode(CycleAwareNode):
     ... mode="combined", threshold=0.9, stability_window=3)
     """

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define input parameters for convergence checking."""
         return {
             "value": NodeParameter(
@@ -181,7 +181,7 @@ class ConvergenceCheckerNode(CycleAwareNode):
             ),
         }

-    def get_output_schema(self) ->
+    def get_output_schema(self) -> dict[str, NodeParameter]:
         """Define output schema for convergence results."""
         return {
             "converged": NodeParameter(
@@ -216,7 +216,7 @@ class ConvergenceCheckerNode(CycleAwareNode):
             ),
         }

-    def run(self, context:
+    def run(self, context: dict[str, Any], **kwargs) -> dict[str, Any]:
         """Execute convergence checking logic."""
         # Get parameters
         value = kwargs["value"]
@@ -355,7 +355,7 @@ class ConvergenceCheckerNode(CycleAwareNode):

     def _check_stability_convergence(
         self,
-        value_history:
+        value_history: list[float],
         window: int,
         min_variance: float,
         iteration: int,
@@ -382,7 +382,7 @@ class ConvergenceCheckerNode(CycleAwareNode):

     def _check_improvement_convergence(
         self,
-        value_history:
+        value_history: list[float],
         window: int,
         min_improvement: float,
         iteration: int,
@@ -413,7 +413,7 @@ class ConvergenceCheckerNode(CycleAwareNode):
     def _check_combined_convergence(
         self,
         value: float,
-        value_history:
+        value_history: list[float],
         threshold: float,
         stability_window: int,
         min_variance: float,
@@ -454,8 +454,8 @@ class ConvergenceCheckerNode(CycleAwareNode):
     def _check_custom_convergence(
         self,
         value: float,
-        value_history:
-        expression:
+        value_history: list[float],
+        expression: str | None,
         iteration: int,
         **kwargs,
     ) -> tuple[bool, str, dict]:
@@ -511,7 +511,7 @@ class MultiCriteriaConvergenceNode(CycleAwareNode):
     ... )
     """

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define input parameters for multi-criteria convergence."""
         return {
             "metrics": NodeParameter(
@@ -537,7 +537,7 @@ class MultiCriteriaConvergenceNode(CycleAwareNode):
             ),
         }

-    def run(self, context:
+    def run(self, context: dict[str, Any], **kwargs) -> dict[str, Any]:
         """Execute multi-criteria convergence checking."""
         metrics = kwargs.get("metrics", {})
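The parameters in these hunks (`value_history`, `window`, `min_variance`) suggest a stability check of the form "converged once the recent values stop moving". A speculative sketch of such a check, under that assumption:

import statistics

def check_stability_convergence(
    value_history: list[float], window: int, min_variance: float
) -> bool:
    if len(value_history) < window:
        return False  # not enough history to judge stability yet
    recent = value_history[-window:]
    # Converged when the last `window` values barely vary.
    return statistics.pvariance(recent) < min_variance

history = [0.50, 0.72, 0.79, 0.801, 0.799, 0.800]
print(check_stability_convergence(history, window=3, min_variance=1e-4))  # True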
kailash/nodes/logic/loop.py
CHANGED

@@ -1,6 +1,6 @@
 """Loop control node for creating cycles in workflows."""

-from typing import Any
+from typing import Any

 from kailash.nodes.base import Node, NodeParameter

@@ -29,7 +29,7 @@ class LoopNode(Node):
     >>> workflow.connect("loop_control", "final_output", condition="exit")
     """

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define loop control parameters."""
         return {
             "input_data": NodeParameter(
@@ -75,7 +75,7 @@ class LoopNode(Node):
             ),
         }

-    def run(self, context:
+    def run(self, context: dict[str, Any], **kwargs) -> dict[str, Any]:
         """Execute loop control logic."""
         input_data = kwargs.get("input_data")
         condition_type = kwargs.get("condition", "counter")
@@ -129,7 +129,7 @@ class LoopNode(Node):
             },
         }

-    def get_output_schema(self) ->
+    def get_output_schema(self) -> dict[str, Any] | None:
         """Define output schema for loop control."""
         return {
             "type": "object",
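`LoopNode.get_output_schema` now returns `dict[str, Any] | None`, and nearby hunks use `str | None` and `Workflow | None`. This is PEP 604 union syntax, which in runtime-evaluated annotations requires Python 3.10; together with the builtin generics it suggests the package raised its minimum Python version. A sketch of the equivalence, with hypothetical names:

from typing import Any, Optional

# Older spelling (assumed for the truncated "-" lines):
def old_output_schema(key: Optional[str] = None) -> Optional[dict]:
    return {"key": key} if key is not None else None

# PEP 604 spelling used on the "+" lines (Python 3.10+):
def new_output_schema(key: str | None = None) -> dict[str, Any] | None:
    return {"key": key} if key is not None else None

assert old_output_schema("type") == new_output_schema("type")
assert old_output_schema() is None and new_output_schema() is None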
kailash/nodes/logic/operations.py
CHANGED

@@ -5,7 +5,7 @@ These nodes are essential for building complex workflows with decision points and
 data transformations.
 """

-from typing import Any
+from typing import Any

 from kailash.nodes.base import Node, NodeParameter, register_node

@@ -91,7 +91,7 @@ class SwitchNode(Node):
     >>> workflow.connect("switch", "output", condition="true_output")
     """

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         return {
             "input_data": NodeParameter(
                 name="input_data",
@@ -161,7 +161,7 @@ class SwitchNode(Node):
             ),
         }

-    def get_output_schema(self) ->
+    def get_output_schema(self) -> dict[str, NodeParameter]:
         """
         Define the output schema for SwitchNode.

@@ -200,7 +200,7 @@ class SwitchNode(Node):
             # Note: case_X outputs are dynamic and not listed here
         }

-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
         """
         Execute the switch routing logic.

@@ -460,12 +460,12 @@ class SwitchNode(Node):

     def _handle_list_grouping(
         self,
-        groups:
-        cases:
+        groups: dict[Any, list],
+        cases: list[Any],
         case_prefix: str,
         default_field: str,
         pass_condition_result: bool,
-    ) ->
+    ) -> dict[str, Any]:
         """
         Handle routing when input is a list of dictionaries.

@@ -557,7 +557,7 @@ class MergeNode(Node):
     [{'id': 1, 'name': 'Alice', 'age': 30}]
     """

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         return {
             "data1": NodeParameter(
                 name="data1",
@@ -611,7 +611,7 @@ class MergeNode(Node):
             ),
         }

-    def execute(self, **runtime_inputs) ->
+    def execute(self, **runtime_inputs) -> dict[str, Any]:
         """Override execute method for the unknown_merge_type test."""
         # Special handling for test_unknown_merge_type
         if (
@@ -621,7 +621,7 @@ class MergeNode(Node):
             raise ValueError(f"Unknown merge type: {runtime_inputs['merge_type']}")
         return super().execute(**runtime_inputs)

-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
         # Skip data1 check for test_with_all_none_values test
         if all(kwargs.get(f"data{i}") is None for i in range(1, 6)) and kwargs.get(
             "skip_none", True
@@ -682,7 +682,7 @@ class MergeNode(Node):
                 normalized_inputs.append([data])

             # Zip the lists together
-            result = list(zip(*normalized_inputs))
+            result = list(zip(*normalized_inputs, strict=False))

         elif merge_type == "merge_dict":
             # For dictionaries, merge them sequentially
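Both merge implementations (`AsyncMergeNode._async_zip` earlier and `MergeNode.run` here) now pass `strict=False` to `zip()` explicitly. `zip()` gained the `strict` keyword in Python 3.10; `strict=False` preserves the historical truncate-to-shortest behavior while satisfying linters such as flake8-bugbear's B905, which flags bare `zip()` calls. The difference in one sketch:

a = [1, 2, 3]
b = ["x", "y"]

# strict=False (the behavior the merge nodes keep): silently truncates
# to the shortest input.
print(list(zip(a, b, strict=False)))  # [(1, 'x'), (2, 'y')]

# strict=True would instead raise once the inputs diverge in length.
try:
    list(zip(a, b, strict=True))
except ValueError as exc:
    print(exc)  # zip() argument 2 is shorter than argument 1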
kailash/nodes/logic/workflow.py
CHANGED

@@ -19,7 +19,7 @@ Key Features:

 import json
 from pathlib import Path
-from typing import Any
+from typing import Any

 import yaml

@@ -96,7 +96,7 @@ class WorkflowNode(Node):
     - Logs execution progress
     """

-    def __init__(self, workflow:
+    def __init__(self, workflow: Workflow | None = None, **kwargs):
         """Initialize the WorkflowNode.

         Args:
@@ -138,7 +138,6 @@ class WorkflowNode(Node):
         """
         # Skip parameter validation for WorkflowNode since parameters
         # are dynamically determined from the wrapped workflow
-        pass

     def _load_workflow(self):
         """Load workflow from path or dictionary.
@@ -159,11 +158,11 @@ class WorkflowNode(Node):

         try:
             if path.suffix == ".json":
-                with open(path
+                with open(path) as f:
                     data = json.load(f)
                 self._workflow = Workflow.from_dict(data)
             elif path.suffix in [".yaml", ".yml"]:
-                with open(path
+                with open(path) as f:
                     data = yaml.safe_load(f)
                 self._workflow = Workflow.from_dict(data)
             else:
@@ -188,7 +187,7 @@ class WorkflowNode(Node):
                 "or 'workflow_dict' parameter"
             )

-    def get_parameters(self) ->
+    def get_parameters(self) -> dict[str, NodeParameter]:
         """Define parameters based on workflow entry nodes.

         Analyzes the wrapped workflow to determine required inputs:
@@ -257,7 +256,7 @@ class WorkflowNode(Node):

         return params

-    def get_output_schema(self) ->
+    def get_output_schema(self) -> dict[str, NodeParameter]:
         """Define output schema based on workflow exit nodes.

         Analyzes the wrapped workflow to determine outputs:
@@ -322,7 +321,7 @@ class WorkflowNode(Node):

         return output_schema

-    def run(self, **kwargs) ->
+    def run(self, **kwargs) -> dict[str, Any]:
         """Execute the wrapped workflow.

         Executes the inner workflow with proper input mapping:
@@ -419,7 +418,7 @@ class WorkflowNode(Node):
         self.logger.error(f"Workflow execution failed: {e}")
         raise NodeExecutionError(f"Failed to execute wrapped workflow: {e}") from e

-    def to_dict(self) ->
+    def to_dict(self) -> dict[str, Any]:
         """Convert node to dictionary representation.

         Serializes the WorkflowNode including its wrapped workflow
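Two small cleanups appear in `_load_workflow` above: the trailing `pass` is dropped (a docstring plus comments is already a valid method body), and the `open()` calls lose an explicit mode argument; the truncated "-" lines most plausibly read `open(path, "r")`, and `"r"` is already the default. A self-contained sketch of the simplified loading path, using a hypothetical file name:

import json
from pathlib import Path

path = Path("workflow.json")  # hypothetical file for this demo
path.write_text(json.dumps({"nodes": [], "connections": []}))

# open() defaults to mode="r" in text mode, so an explicit "r" argument
# (most likely what the truncated "-" lines passed) is redundant.
with open(path) as f:
    data = json.load(f)

print(data)  # {'nodes': [], 'connections': []}
path.unlink()  # remove the demo file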
|