kailash 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/api/__init__.py +17 -0
- kailash/api/gateway.py +394 -0
- kailash/api/mcp_integration.py +478 -0
- kailash/api/workflow_api.py +399 -0
- kailash/nodes/ai/__init__.py +4 -4
- kailash/nodes/ai/agents.py +4 -4
- kailash/nodes/ai/ai_providers.py +18 -22
- kailash/nodes/ai/embedding_generator.py +34 -38
- kailash/nodes/ai/llm_agent.py +351 -356
- kailash/nodes/api/http.py +0 -4
- kailash/nodes/api/rest.py +1 -1
- kailash/nodes/base.py +60 -64
- kailash/nodes/code/python.py +61 -42
- kailash/nodes/data/__init__.py +10 -10
- kailash/nodes/data/readers.py +27 -29
- kailash/nodes/data/retrieval.py +1 -1
- kailash/nodes/data/sharepoint_graph.py +23 -25
- kailash/nodes/data/sql.py +27 -29
- kailash/nodes/data/vector_db.py +2 -2
- kailash/nodes/data/writers.py +41 -44
- kailash/nodes/logic/__init__.py +10 -3
- kailash/nodes/logic/async_operations.py +14 -14
- kailash/nodes/logic/operations.py +18 -22
- kailash/nodes/logic/workflow.py +439 -0
- kailash/nodes/mcp/client.py +29 -33
- kailash/nodes/mcp/resource.py +1 -1
- kailash/nodes/mcp/server.py +10 -4
- kailash/nodes/transform/formatters.py +1 -1
- kailash/nodes/transform/processors.py +5 -3
- kailash/runtime/docker.py +2 -0
- kailash/tracking/metrics_collector.py +6 -7
- kailash/tracking/models.py +0 -20
- kailash/tracking/storage/database.py +4 -4
- kailash/tracking/storage/filesystem.py +0 -1
- kailash/utils/export.py +2 -2
- kailash/utils/templates.py +16 -16
- kailash/visualization/performance.py +7 -7
- kailash/visualization/reports.py +1 -1
- kailash/workflow/graph.py +4 -4
- kailash/workflow/mock_registry.py +1 -1
- {kailash-0.1.2.dist-info → kailash-0.1.4.dist-info}/METADATA +198 -27
- kailash-0.1.4.dist-info/RECORD +85 -0
- kailash-0.1.2.dist-info/RECORD +0 -80
- {kailash-0.1.2.dist-info → kailash-0.1.4.dist-info}/WHEEL +0 -0
- {kailash-0.1.2.dist-info → kailash-0.1.4.dist-info}/entry_points.txt +0 -0
- {kailash-0.1.2.dist-info → kailash-0.1.4.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.1.2.dist-info → kailash-0.1.4.dist-info}/top_level.txt +0 -0
kailash/nodes/data/writers.py
CHANGED
@@ -12,9 +12,9 @@ Design Philosophy:
     5. Progress tracking and feedback

 Node Categories:
-- CSVWriter: Tabular data to CSV files
-- JSONWriter: Structured data to JSON files
-- TextWriter: Raw text to any text file
+- CSVWriterNode: Tabular data to CSV files
+- JSONWriterNode: Structured data to JSON files
+- TextWriterNode: Raw text to any text file

 Upstream Components:
 - Reader nodes: Provide data to transform
@@ -37,7 +37,7 @@ from kailash.nodes.base import Node, NodeParameter, register_node


 @register_node()
-class CSVWriter(Node):
+class CSVWriterNode(Node):
     """Writes data to a CSV file.

     This node handles CSV file writing with support for both dictionary
@@ -81,19 +81,18 @@ class CSVWriter(Node):
     - TypeError: Invalid data structure
     - UnicodeEncodeError: Encoding issues

-    Example
-        ...
-        )
-        result = ...
-        # result = {'rows_written': 2, 'file_path': 'output.csv'}
+    Example:
+        >>> # Write customer data
+        >>> writer = CSVWriterNode(
+        ...     file_path='output.csv',
+        ...     data=[
+        ...         {'id': 1, 'name': 'John', 'age': 30},
+        ...         {'id': 2, 'name': 'Jane', 'age': 25}
+        ...     ],
+        ...     delimiter=','
+        ... )
+        >>> result = writer.execute()
+        >>> # result = {'rows_written': 2, 'file_path': 'output.csv'}
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:
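The rewritten example is valid doctest syntax, so it can be executed as a test. A minimal sketch of running it (the runner below is illustrative, not part of this diff; note the examples create real files in the working directory):

    import doctest

    import kailash.nodes.data.writers as writers

    # Run the >>> examples embedded in the module's docstrings.
    print(doctest.testmod(writers, verbose=False))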
@@ -218,7 +217,7 @@ class CSVWriter(Node):


 @register_node()
-class JSONWriter(Node):
+class JSONWriterNode(Node):
     """Writes data to a JSON file.

     This node handles JSON serialization with support for complex
@@ -262,20 +261,19 @@ class JSONWriter(Node):
     - OSError: Path or disk issues
     - JSONEncodeError: Encoding problems

-    Example
-        ...
-        )
-        result = ...
-        # result = {'file_path': 'response.json'}
+    Example:
+        >>> # Write API response
+        >>> writer = JSONWriterNode(
+        ...     file_path='response.json',
+        ...     data={
+        ...         'status': 'success',
+        ...         'results': [1, 2, 3],
+        ...         'metadata': {'version': '1.0'}
+        ...     },
+        ...     indent=2
+        ... )
+        >>> result = writer.execute()
+        >>> # result = {'file_path': 'response.json'}
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:
@@ -370,7 +368,7 @@ class JSONWriter(Node):


 @register_node()
-class TextWriter(Node):
+class TextWriterNode(Node):
     """Writes text to a file.

     This node provides flexible text file writing with support for
@@ -414,17 +412,16 @@ class TextWriter(Node):
     - UnicodeEncodeError: Encoding mismatch
     - MemoryError: Text too large

-    Example
-        ...
-        )
-        result = ...
-        # result = {'file_path': 'app.log', 'bytes_written': 25}
+    Example:
+        >>> # Append to log file
+        >>> writer = TextWriterNode(
+        ...     file_path='app.log',
+        ...     text='ERROR: Connection failed\\n',
+        ...     encoding='utf-8',
+        ...     append=True
+        ... )
+        >>> result = writer.execute()
+        >>> # result = {'file_path': 'app.log', 'bytes_written': 25}
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:
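All three writer classes keep the constructor-plus-execute() pattern shown in the doctests above; only the class names changed. A minimal sketch combining them, using only parameters visible in this diff (the import path mirrors the file path; package-level re-exports may differ):

    from kailash.nodes.data.writers import CSVWriterNode, JSONWriterNode, TextWriterNode

    records = [
        {'id': 1, 'name': 'John', 'age': 30},
        {'id': 2, 'name': 'Jane', 'age': 25},
    ]

    # Write the same records as CSV and JSON.
    csv_result = CSVWriterNode(file_path='output.csv', data=records, delimiter=',').execute()
    json_result = JSONWriterNode(file_path='output.json', data=records, indent=2).execute()

    # Append a status line to a log file.
    TextWriterNode(
        file_path='app.log',
        text=f"wrote {csv_result['rows_written']} rows\n",
        encoding='utf-8',
        append=True,
    ).execute()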
kailash/nodes/logic/__init__.py
CHANGED
@@ -1,6 +1,13 @@
 """Logic operation nodes for the Kailash SDK."""

-from kailash.nodes.logic.async_operations import AsyncMerge, AsyncSwitch
-from kailash.nodes.logic.operations import Merge, Switch
+from kailash.nodes.logic.async_operations import AsyncMergeNode, AsyncSwitchNode
+from kailash.nodes.logic.operations import MergeNode, SwitchNode
+from kailash.nodes.logic.workflow import WorkflowNode

-__all__ = [...]
+__all__ = [
+    "SwitchNode",
+    "MergeNode",
+    "AsyncSwitchNode",
+    "AsyncMergeNode",
+    "WorkflowNode",
+]
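For downstream code, the practical effect of this change is that the renamed classes, plus the new WorkflowNode, are importable from the package root:

    # Public API after this change, per the new __all__:
    from kailash.nodes.logic import (
        AsyncMergeNode,
        AsyncSwitchNode,
        MergeNode,
        SwitchNode,
        WorkflowNode,
    )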
kailash/nodes/logic/async_operations.py
CHANGED
@@ -13,14 +13,14 @@ from kailash.nodes.base_async import AsyncNode


 @register_node()
-class AsyncMerge(AsyncNode):
+class AsyncMergeNode(AsyncNode):
     """Asynchronously merges multiple data sources.

     Note: We implement run() to fulfill the Node abstract base class requirement,
     but it's just a pass-through to async_run().


-    This node extends the standard Merge with asynchronous execution capabilities,
+    This node extends the standard MergeNode with asynchronous execution capabilities,
     making it more efficient for:

     1. Combining large datasets from parallel branches
@@ -28,13 +28,13 @@ class AsyncMerge(AsyncNode):
     3. Processing streaming data in chunks
     4. Aggregating results from various API calls

-    The merge operation supports the same types as the standard Merge:
+    The merge operation supports the same types as the standard MergeNode:
     concat (list concatenation), zip (parallel iteration), and merge_dict
     (dictionary merging with optional key-based joining).

     Usage example:
-        # Create an AsyncMerge node in a workflow
-        async_merge = AsyncMerge(merge_type="merge_dict", key="id")
+        # Create an AsyncMergeNode in a workflow
+        async_merge = AsyncMergeNode(merge_type="merge_dict", key="id")
         workflow.add_node("data_combine", async_merge)

         # Connect multiple data sources
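The docstring's merge_dict example implies a key-based join of dict lists. A sketch of that usage under the new name; data1/data2 and the merged_data output key appear elsewhere in this diff, but the exact join result shown in the comment is an assumption:

    import asyncio

    from kailash.nodes.logic import AsyncMergeNode

    async def main():
        users = [{"id": 1, "name": "John"}, {"id": 2, "name": "Jane"}]
        scores = [{"id": 1, "score": 0.9}, {"id": 2, "score": 0.7}]

        # merge_dict joins dict lists on the given key, per the docstring above.
        node = AsyncMergeNode(merge_type="merge_dict", key="id")
        result = await node.execute_async(data1=users, data2=scores)
        print(result["merged_data"])
        # Expected (an assumption): [{'id': 1, 'name': 'John', 'score': 0.9}, ...]

    asyncio.run(main())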
@@ -44,7 +44,7 @@ class AsyncMerge(AsyncNode):
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:
-        """Define parameters for the AsyncMerge node."""
+        """Define parameters for the AsyncMergeNode."""
         # Reuse parameters from SyncMerge
         return {
             "data1": NodeParameter(
@@ -107,7 +107,7 @@ class AsyncMerge(AsyncNode):
         }

     def get_output_schema(self) -> Dict[str, NodeParameter]:
-        """Define the output schema for AsyncMerge."""
+        """Define the output schema for AsyncMergeNode."""
         return {
             "merged_data": NodeParameter(
                 name="merged_data",
@@ -155,7 +155,7 @@ class AsyncMerge(AsyncNode):

         # Check if we have at least one valid input
         if not data_inputs:
-            self.logger.warning("No valid data inputs provided to AsyncMerge")
+            self.logger.warning("No valid data inputs provided to AsyncMergeNode")
             return {"merged_data": None}

         # If only one input was provided, return it directly
@@ -207,7 +207,7 @@ class AsyncMerge(AsyncNode):
         # This will be properly wrapped by the execute() method
         # which will call it in a sync context
         raise RuntimeError(
-            "AsyncMerge.run() was called directly. Use execute() or execute_async() instead."
+            "AsyncMergeNode.run() was called directly. Use execute() or execute_async() instead."
         )

     async def _async_concat(self, data_inputs: List[Any], chunk_size: int) -> Any:
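Both async nodes follow the same convention: run() exists only to satisfy the Node abstract base class and raises if called directly, while async_run() does the real work. A generic sketch of that guard pattern (illustrative only, not the SDK's actual code):

    class AsyncOnlyNode:
        """Illustrative only: sync facade over an async core."""

        def run(self, **kwargs):
            # Satisfies the abstract interface but must never be called directly;
            # execute()/execute_async() are the supported entry points.
            raise RuntimeError(
                "AsyncOnlyNode.run() was called directly. "
                "Use execute() or execute_async() instead."
            )

        async def async_run(self, **kwargs):
            # The real (async) work happens here.
            return {"merged_data": kwargs.get("data1")}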
@@ -349,25 +349,25 @@ class AsyncMerge(AsyncNode):


 @register_node()
-class AsyncSwitch(AsyncNode):
+class AsyncSwitchNode(AsyncNode):
     """Asynchronously routes data to different outputs based on conditions.

     Note: We implement run() to fulfill the Node abstract base class requirement,
     but it's just a pass-through to async_run().

-    This node extends the standard Switch with asynchronous execution capabilities,
+    This node extends the standard SwitchNode with asynchronous execution capabilities,
     making it more efficient for:

     1. Processing conditional routing with I/O-bound condition evaluation
     2. Handling large datasets that need to be routed based on complex criteria
     3. Integrating with other asynchronous nodes in a workflow

-    The basic functionality is the same as the synchronous Switch but optimized
+    The basic functionality is the same as the synchronous SwitchNode but optimized
     for asynchronous execution.
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:
-        """Define parameters for the AsyncSwitch node."""
+        """Define parameters for the AsyncSwitchNode."""
         return {
             "input_data": NodeParameter(
                 name="input_data",
@@ -603,7 +603,7 @@ class AsyncSwitch(AsyncNode):
         # This will be properly wrapped by the execute() method
         # which will call it in a sync context
         raise RuntimeError(
-            "AsyncSwitch.run() was called directly. Use execute() or execute_async() instead."
+            "AsyncSwitchNode.run() was called directly. Use execute() or execute_async() instead."
        )

     async def _evaluate_condition(
kailash/nodes/logic/operations.py
CHANGED
@@ -11,7 +11,7 @@ from kailash.nodes.base import Node, NodeParameter, register_node


 @register_node()
-class Switch(Node):
+class SwitchNode(Node):
     """Routes data to different outputs based on conditions.

     The Switch node enables conditional branching in workflows by evaluating
@@ -23,25 +23,21 @@ class Switch(Node):
     3. Dynamic workflow paths based on data values

     The outputs of Switch nodes are typically connected to different processing
-    nodes, and those branches can be rejoined later using a Merge node.
-
-    Example usage
-        ...
-        switch_node = ...
-        ...
-        workflow.connect("router", "success_handler", {"case_success": "input"})
-        workflow.connect("router", "warning_handler", {"case_warning": "input"})
-        workflow.connect("router", "error_handler", {"case_error": "input"})
-        workflow.connect("router", "default_handler", {"default": "input"})
+    nodes, and those branches can be rejoined later using a MergeNode.
+
+    Example usage:
+    >>> # Simple boolean condition
+    >>> switch_node = SwitchNode(condition_field="status", operator="==", value="success")
+    >>> switch_node.metadata.name
+    'SwitchNode'
+
+    >>> # Multi-case switching
+    >>> switch_node = SwitchNode(
+    ...     condition_field="status",
+    ...     cases=["success", "warning", "error"]
+    ... )
+    >>> 'cases' in switch_node.get_parameters()
+    True
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:
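The old example's connect() mappings still describe the routing model: each case becomes its own output (case_success, case_warning, ...), with "default" as the fall-through. A sketch reconstructing it under the new class names; the mappings come from the removed lines, while the pre-built workflow object and handler nodes are assumptions:

    switch = SwitchNode(condition_field="status", cases=["success", "warning", "error"])
    workflow.add_node("router", switch)

    # Each case routes to its own handler; "default" catches everything else.
    workflow.connect("router", "success_handler", {"case_success": "input"})
    workflow.connect("router", "warning_handler", {"case_warning": "input"})
    workflow.connect("router", "error_handler", {"case_error": "input"})
    workflow.connect("router", "default_handler", {"default": "input"})

    # Branches can be rejoined afterwards with a MergeNode, as the docstring notes.
    workflow.add_node("data_combine", MergeNode(merge_type="concat"))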
@@ -360,7 +356,7 @@ class Switch(Node):


 @register_node()
-class Merge(Node):
+class MergeNode(Node):
     """Merges multiple data sources.

     This node can combine data from multiple input sources in various ways,
@@ -368,7 +364,7 @@ class Merge(Node):

     1. Combining results from parallel branches in a workflow
     2. Joining related data sets
-    3. Combining outputs after conditional branching with the Switch node
+    3. Combining outputs after conditional branching with the SwitchNode
     4. Aggregating collections of data

     The merge operation is determined by the merge_type parameter, which supports
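The supported merge_type values are named earlier in this diff: concat (list concatenation), zip (parallel iteration), and merge_dict (key-based dictionary merging). A usage sketch; the execute() keyword names follow the data1/data2 parameters shown for the async variant, and the commented output is an assumption:

    concat = MergeNode(merge_type="concat")                 # list concatenation
    zipped = MergeNode(merge_type="zip")                    # parallel iteration
    joined = MergeNode(merge_type="merge_dict", key="id")   # key-based dict join

    result = concat.execute(data1=[1, 2], data2=[3, 4])
    # result["merged_data"] == [1, 2, 3, 4]  (expected; an assumption)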
|