kailash 0.1.3__py3-none-any.whl → 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kailash/__init__.py +1 -1
- kailash/api/__init__.py +11 -1
- kailash/api/gateway.py +394 -0
- kailash/api/mcp_integration.py +478 -0
- kailash/api/workflow_api.py +29 -13
- kailash/nodes/ai/__init__.py +40 -4
- kailash/nodes/ai/a2a.py +1143 -0
- kailash/nodes/ai/agents.py +120 -6
- kailash/nodes/ai/ai_providers.py +224 -30
- kailash/nodes/ai/embedding_generator.py +34 -38
- kailash/nodes/ai/intelligent_agent_orchestrator.py +2114 -0
- kailash/nodes/ai/llm_agent.py +351 -356
- kailash/nodes/ai/self_organizing.py +1624 -0
- kailash/nodes/api/http.py +106 -25
- kailash/nodes/api/rest.py +116 -21
- kailash/nodes/base.py +60 -64
- kailash/nodes/code/python.py +61 -42
- kailash/nodes/data/__init__.py +10 -10
- kailash/nodes/data/readers.py +117 -66
- kailash/nodes/data/retrieval.py +1 -1
- kailash/nodes/data/sharepoint_graph.py +23 -25
- kailash/nodes/data/sql.py +24 -26
- kailash/nodes/data/writers.py +41 -44
- kailash/nodes/logic/__init__.py +9 -3
- kailash/nodes/logic/async_operations.py +60 -21
- kailash/nodes/logic/operations.py +43 -22
- kailash/nodes/logic/workflow.py +26 -18
- kailash/nodes/mcp/client.py +29 -33
- kailash/nodes/transform/__init__.py +8 -1
- kailash/nodes/transform/formatters.py +1 -1
- kailash/nodes/transform/processors.py +119 -4
- kailash/tracking/metrics_collector.py +6 -7
- kailash/utils/export.py +2 -2
- kailash/utils/templates.py +16 -16
- {kailash-0.1.3.dist-info → kailash-0.1.5.dist-info}/METADATA +293 -29
- {kailash-0.1.3.dist-info → kailash-0.1.5.dist-info}/RECORD +40 -35
- {kailash-0.1.3.dist-info → kailash-0.1.5.dist-info}/WHEEL +0 -0
- {kailash-0.1.3.dist-info → kailash-0.1.5.dist-info}/entry_points.txt +0 -0
- {kailash-0.1.3.dist-info → kailash-0.1.5.dist-info}/licenses/LICENSE +0 -0
- {kailash-0.1.3.dist-info → kailash-0.1.5.dist-info}/top_level.txt +0 -0
kailash/nodes/data/readers.py
CHANGED
@@ -12,9 +12,9 @@ Design Philosophy:
     5. Type-safe parameter validation

 Node Categories:
-    - CSVReader: …
-    - JSONReader: …
-    - TextReader: …
+    - CSVReaderNode: Tabular data from CSV files
+    - JSONReaderNode: Structured data from JSON files
+    - TextReaderNode: Raw text from any text file

 Upstream Components:
     - FileSystem: Provides files to read

@@ -36,61 +36,113 @@ from kailash.nodes.base import Node, NodeParameter, register_node


 @register_node()
-class CSVReader(Node):
-    """
-    …
-    3. Loading configuration from CSV
-    4. Ingesting sensor data logs
-
-    Upstream Sources:
-    - File system paths from user input
-    - Output paths from previous nodes
-    - Configuration management systems
+class CSVReaderNode(Node):
+    """
+    Reads data from CSV files with automatic header detection and type inference.
+
+    This node provides comprehensive CSV file reading capabilities, handling various
+    formats, encodings, and edge cases. It automatically detects headers, infers data
+    types, and provides consistent structured output for downstream processing in
+    Kailash workflows.
+
+    Design Philosophy:
+        The CSVReaderNode embodies the principle of "data accessibility without
+        complexity." It abstracts the intricacies of CSV parsing while providing
+        flexibility for various formats. The design prioritizes memory efficiency,
+        automatic format detection, and consistent output structure, making it easy
+        to integrate diverse CSV data sources into workflows.
+
+    Upstream Dependencies:
+        - File system providing CSV files
+        - Workflow orchestrators specifying file paths
+        - Configuration systems providing parsing options
+        - Previous nodes generating CSV file paths
+        - User inputs defining data sources

     Downstream Consumers:
-    …
+        - DataTransformNode: Processes tabular data
+        - FilterNode: Applies row/column filtering
+        - AggregatorNode: Summarizes data
+        - PythonCodeNode: Custom data processing
+        - WriterNodes: Exports to other formats
+        - Visualization nodes: Creates charts
+        - ML nodes: Uses as training data
+
+    Configuration:
+        The node supports extensive CSV parsing options:
+        - Delimiter detection (comma, tab, pipe, etc.)
+        - Header row identification
+        - Encoding specification (UTF-8, Latin-1, etc.)
+        - Quote character handling
+        - Skip rows/comments functionality
+        - Column type inference
+        - Missing value handling
+
+    Implementation Details:
+        - Uses Python's csv module for robust parsing
+        - Implements streaming for large files
+        - Automatic delimiter detection when not specified
+        - Header detection based on first row analysis
+        - Type inference for numeric/date columns
+        - Memory-efficient processing with generators
+        - Unicode normalization for consistent encoding

     Error Handling:
-    …
+        - FileNotFoundError: Clear message with path
+        - PermissionError: Access rights guidance
+        - UnicodeDecodeError: Encoding detection hints
+        - csv.Error: Malformed data diagnostics
+        - EmptyFileError: Handles zero-byte files
+        - Partial read recovery for corrupted files
+
+    Side Effects:
+        - Reads from file system
+        - May consume significant memory for large files
+        - Creates file handles (properly closed)
+        - Updates internal read statistics
+
+    Examples:
+        >>> # Basic CSV reading with headers
+        >>> reader = CSVReaderNode()
+        >>> result = reader.run(
+        ...     file_path="customers.csv",
+        ...     headers=True
+        ... )
+        >>> assert isinstance(result["data"], list)
+        >>> assert all(isinstance(row, dict) for row in result["data"])
+        >>> # Example output:
+        >>> # result["data"] = [
+        >>> #     {"id": "1", "name": "John Doe", "age": "30"},
+        >>> #     {"id": "2", "name": "Jane Smith", "age": "25"}
+        >>> # ]
+        >>>
+        >>> # Reading with custom delimiter
+        >>> result = reader.run(
+        ...     file_path="data.tsv",
+        ...     delimiter="\\t",
+        ...     headers=True
+        ... )
+        >>>
+        >>> # Reading without headers (returns list of lists)
+        >>> result = reader.run(
+        ...     file_path="data.csv",
+        ...     headers=False
+        ... )
+        >>> assert all(isinstance(row, list) for row in result["data"])
+        >>>
+        >>> # Reading with specific encoding
+        >>> result = reader.run(
+        ...     file_path="european_data.csv",
+        ...     encoding="iso-8859-1",
+        ...     headers=True
+        ... )
+        >>>
+        >>> # Handling quoted fields
+        >>> result = reader.run(
+        ...     file_path="complex.csv",
+        ...     headers=True,
+        ...     quotechar='"'
+        ... )
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:

@@ -235,7 +287,7 @@ class CSVReader(Node):


 @register_node()
-class JSONReader(Node):
+class JSONReaderNode(Node):
     """Reads data from a JSON file.

     This node handles JSON file reading with support for complex nested

@@ -280,7 +332,7 @@ class JSONReader(Node):

     Example:
         # Read API response data
-        reader = JSONReader(file_path='api_response.json')
+        reader = JSONReaderNode(file_path='api_response.json')
         result = reader.execute()
         # result['data'] = {
         #     'status': 'success',

@@ -359,7 +411,7 @@ class JSONReader(Node):


 @register_node()
-class TextReader(Node):
+class TextReaderNode(Node):
     """Reads text from a file.

     This node provides simple text file reading with encoding support.

@@ -403,15 +455,14 @@ class TextReader(Node):
     - UnicodeDecodeError: Wrong encoding
     - MemoryError: File too large

-    Example
-    …
-        )
-        result = …
-        # result['text'] = "2024-01-01 INFO: Application started\\n..."
+    Example:
+        >>> # Read a log file
+        >>> reader = TextReaderNode(
+        ...     file_path='application.log',
+        ...     encoding='utf-8'
+        ... )
+        >>> result = reader.execute()
+        >>> # result['text'] = "2024-01-01 INFO: Application started\\n..."
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:
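The net effect in this file is a rename with much richer documentation: CSVReader, JSONReader, and TextReader become CSVReaderNode, JSONReaderNode, and TextReaderNode. A minimal usage sketch under the new names (the import path follows this module's layout; the file name is illustrative):

    from kailash.nodes.data.readers import CSVReaderNode

    # Per the docstring above, headers=True returns a list of dicts
    # keyed by column name.
    reader = CSVReaderNode()
    result = reader.run(file_path="customers.csv", headers=True)
    for row in result["data"]:
        print(row["name"], row["age"])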
kailash/nodes/data/retrieval.py
CHANGED
@@ -102,7 +102,7 @@ class RelevanceScorerNode(Node):
     ) -> List[Dict]:
         """Score chunks using cosine similarity."""
         # Extract actual embedding vectors from the embedding objects
-        # …
+        # EmbeddingGeneratorNode returns embeddings in format: {"embedding": [...], "text": "...", "dimensions": X}

         # Handle query embedding - should be the first (and only) embedding in the list
         query_embedding_obj = query_embeddings[0] if query_embeddings else {}
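To make the updated comment concrete, here is a minimal sketch (not SDK code; the helper and sample data are illustrative) of unwrapping {"embedding": [...]} objects and ranking chunks by cosine similarity:

    import math

    def cosine_similarity(a, b):
        # dot(a, b) / (|a| * |b|), guarding against zero-length vectors
        dot = sum(x * y for x, y in zip(a, b))
        norm_a = math.sqrt(sum(x * x for x in a))
        norm_b = math.sqrt(sum(x * x for x in b))
        return dot / (norm_a * norm_b) if norm_a and norm_b else 0.0

    # Shapes mirror the EmbeddingGeneratorNode output format noted above
    query = {"embedding": [0.1, 0.9], "text": "query", "dimensions": 2}
    chunks = [
        {"embedding": [0.2, 0.8], "text": "chunk A", "dimensions": 2},
        {"embedding": [0.9, 0.1], "text": "chunk B", "dimensions": 2},
    ]
    ranked = sorted(
        chunks,
        key=lambda c: cosine_similarity(query["embedding"], c["embedding"]),
        reverse=True,
    )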
kailash/nodes/data/sharepoint_graph.py
CHANGED

@@ -56,18 +56,17 @@ class SharePointGraphReader(Node):
     3. Search for files by name
     4. Navigate folder structures

-    Example
-    …
-        )
+    Example:
+        >>> reader = SharePointGraphReader()
+        >>> result = reader.execute(
+        ...     tenant_id="your-tenant-id",
+        ...     client_id="your-client-id",
+        ...     client_secret="your-secret",
+        ...     site_url="https://company.sharepoint.com/sites/project",
+        ...     operation="list_files",
+        ...     library_name="Documents",
+        ...     folder_path="Reports/2024"
+        ... )
     """

     def get_metadata(self) -> NodeMetadata:

@@ -471,19 +470,18 @@ class SharePointGraphWriter(Node):
     This node handles file uploads to SharePoint document libraries,
     supporting folder structures and metadata.

-    Example
-    …
-        )
+    Example:
+        >>> writer = SharePointGraphWriter()
+        >>> result = writer.execute(
+        ...     tenant_id="your-tenant-id",
+        ...     client_id="your-client-id",
+        ...     client_secret="your-secret",
+        ...     site_url="https://company.sharepoint.com/sites/project",
+        ...     local_path="report.pdf",
+        ...     library_name="Documents",
+        ...     folder_path="Reports/2024",
+        ...     sharepoint_name="Q4_Report_2024.pdf"
+        ... )
     """

     def get_metadata(self) -> NodeMetadata:
kailash/nodes/data/sql.py
CHANGED
@@ -63,20 +63,19 @@ class SQLDatabaseNode(Node):
     - TimeoutError: Query execution timeout
     - PermissionError: Access denied

-    Example
-    …
-        )
-        result = …
-        # …
+    Example:
+        >>> # Query customer data
+        >>> sql_node = SQLDatabaseNode(
+        ...     connection_string='postgresql://user:pass@host/db',
+        ...     query='SELECT * FROM customers WHERE active = ?',
+        ...     parameters=[True],
+        ...     result_format='dict'
+        ... )
+        >>> result = sql_node.execute()
+        >>> # result['data'] = [
+        >>> #     {'id': 1, 'name': 'John', 'active': True},
+        >>> #     {'id': 2, 'name': 'Jane', 'active': True}
+        >>> # ]
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:

@@ -259,18 +258,17 @@ class SQLQueryBuilderNode(Node):
     3. Multi-table joins
     4. Aggregation queries

-    Example
-    …
-        )
-        result = …
-        # result['…
-        # result['parameters'] = [True, 'USA']
+    Example:
+        >>> builder = SQLQueryBuilderNode(
+        ...     table='customers',
+        ...     select=['name', 'email'],
+        ...     where={'active': True, 'country': 'USA'},
+        ...     order_by=['name'],
+        ...     limit=100
+        ... )
+        >>> result = builder.execute()
+        >>> # result['query'] = 'SELECT name, email FROM customers WHERE active = ? AND country = ? ORDER BY name LIMIT 100'
+        >>> # result['parameters'] = [True, 'USA']
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:
kailash/nodes/data/writers.py
CHANGED
@@ -12,9 +12,9 @@ Design Philosophy:
     5. Progress tracking and feedback

 Node Categories:
-    - CSVWriter: …
-    - JSONWriter: …
-    - TextWriter: …
+    - CSVWriterNode: Tabular data to CSV files
+    - JSONWriterNode: Structured data to JSON files
+    - TextWriterNode: Raw text to any text file

 Upstream Components:
     - Reader nodes: Provide data to transform

@@ -37,7 +37,7 @@ from kailash.nodes.base import Node, NodeParameter, register_node


 @register_node()
-class CSVWriter(Node):
+class CSVWriterNode(Node):
     """Writes data to a CSV file.

     This node handles CSV file writing with support for both dictionary

@@ -81,19 +81,18 @@ class CSVWriter(Node):
     - TypeError: Invalid data structure
     - UnicodeEncodeError: Encoding issues

-    Example
-    …
-        )
-        result = …
-        # result = {'rows_written': 2, 'file_path': 'output.csv'}
+    Example:
+        >>> # Write customer data
+        >>> writer = CSVWriterNode(
+        ...     file_path='output.csv',
+        ...     data=[
+        ...         {'id': 1, 'name': 'John', 'age': 30},
+        ...         {'id': 2, 'name': 'Jane', 'age': 25}
+        ...     ],
+        ...     delimiter=','
+        ... )
+        >>> result = writer.execute()
+        >>> # result = {'rows_written': 2, 'file_path': 'output.csv'}
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:

@@ -218,7 +217,7 @@ class CSVWriter(Node):


 @register_node()
-class JSONWriter(Node):
+class JSONWriterNode(Node):
     """Writes data to a JSON file.

     This node handles JSON serialization with support for complex

@@ -262,20 +261,19 @@ class JSONWriter(Node):
     - OSError: Path or disk issues
     - JSONEncodeError: Encoding problems

-    Example
-    …
-        )
-        result = …
-        # result = {'file_path': 'response.json'}
+    Example:
+        >>> # Write API response
+        >>> writer = JSONWriterNode(
+        ...     file_path='response.json',
+        ...     data={
+        ...         'status': 'success',
+        ...         'results': [1, 2, 3],
+        ...         'metadata': {'version': '1.0'}
+        ...     },
+        ...     indent=2
+        ... )
+        >>> result = writer.execute()
+        >>> # result = {'file_path': 'response.json'}
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:

@@ -370,7 +368,7 @@ class JSONWriter(Node):


 @register_node()
-class TextWriter(Node):
+class TextWriterNode(Node):
     """Writes text to a file.

     This node provides flexible text file writing with support for

@@ -414,17 +412,16 @@ class TextWriter(Node):
     - UnicodeEncodeError: Encoding mismatch
     - MemoryError: Text too large

-    Example
-    …
-        )
-        result = …
-        # result = {'file_path': 'app.log', 'bytes_written': 25}
+    Example:
+        >>> # Append to log file
+        >>> writer = TextWriterNode(
+        ...     file_path='app.log',
+        ...     text='ERROR: Connection failed\\n',
+        ...     encoding='utf-8',
+        ...     append=True
+        ... )
+        >>> result = writer.execute()
+        >>> # result = {'file_path': 'app.log', 'bytes_written': 25}
     """

     def get_parameters(self) -> Dict[str, NodeParameter]:
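As with the readers, the writer classes gain a Node suffix: CSVWriter → CSVWriterNode, JSONWriter → JSONWriterNode, TextWriter → TextWriterNode. A small round-trip sketch combining the renamed nodes (the composition is illustrative; parameters follow the docstrings above):

    from kailash.nodes.data.readers import CSVReaderNode
    from kailash.nodes.data.writers import JSONWriterNode

    # Read CSV rows as dicts, then re-serialize them as pretty-printed JSON.
    rows = CSVReaderNode().run(file_path="customers.csv", headers=True)["data"]
    JSONWriterNode(file_path="customers.json", data=rows, indent=2).execute()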
kailash/nodes/logic/__init__.py
CHANGED
@@ -1,7 +1,13 @@
 """Logic operation nodes for the Kailash SDK."""

-from kailash.nodes.logic.async_operations import …
-from kailash.nodes.logic.operations import …
+from kailash.nodes.logic.async_operations import AsyncMergeNode, AsyncSwitchNode
+from kailash.nodes.logic.operations import MergeNode, SwitchNode
 from kailash.nodes.logic.workflow import WorkflowNode

-__all__ = […]
+__all__ = [
+    "SwitchNode",
+    "MergeNode",
+    "AsyncSwitchNode",
+    "AsyncMergeNode",
+    "WorkflowNode",
+]