kailash 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69) hide show
  1. kailash/__init__.py +31 -0
  2. kailash/__main__.py +11 -0
  3. kailash/cli/__init__.py +5 -0
  4. kailash/cli/commands.py +563 -0
  5. kailash/manifest.py +778 -0
  6. kailash/nodes/__init__.py +23 -0
  7. kailash/nodes/ai/__init__.py +26 -0
  8. kailash/nodes/ai/agents.py +417 -0
  9. kailash/nodes/ai/models.py +488 -0
  10. kailash/nodes/api/__init__.py +52 -0
  11. kailash/nodes/api/auth.py +567 -0
  12. kailash/nodes/api/graphql.py +480 -0
  13. kailash/nodes/api/http.py +598 -0
  14. kailash/nodes/api/rate_limiting.py +572 -0
  15. kailash/nodes/api/rest.py +665 -0
  16. kailash/nodes/base.py +1032 -0
  17. kailash/nodes/base_async.py +128 -0
  18. kailash/nodes/code/__init__.py +32 -0
  19. kailash/nodes/code/python.py +1021 -0
  20. kailash/nodes/data/__init__.py +125 -0
  21. kailash/nodes/data/readers.py +496 -0
  22. kailash/nodes/data/sharepoint_graph.py +623 -0
  23. kailash/nodes/data/sql.py +380 -0
  24. kailash/nodes/data/streaming.py +1168 -0
  25. kailash/nodes/data/vector_db.py +964 -0
  26. kailash/nodes/data/writers.py +529 -0
  27. kailash/nodes/logic/__init__.py +6 -0
  28. kailash/nodes/logic/async_operations.py +702 -0
  29. kailash/nodes/logic/operations.py +551 -0
  30. kailash/nodes/transform/__init__.py +5 -0
  31. kailash/nodes/transform/processors.py +379 -0
  32. kailash/runtime/__init__.py +6 -0
  33. kailash/runtime/async_local.py +356 -0
  34. kailash/runtime/docker.py +697 -0
  35. kailash/runtime/local.py +434 -0
  36. kailash/runtime/parallel.py +557 -0
  37. kailash/runtime/runner.py +110 -0
  38. kailash/runtime/testing.py +347 -0
  39. kailash/sdk_exceptions.py +307 -0
  40. kailash/tracking/__init__.py +7 -0
  41. kailash/tracking/manager.py +885 -0
  42. kailash/tracking/metrics_collector.py +342 -0
  43. kailash/tracking/models.py +535 -0
  44. kailash/tracking/storage/__init__.py +0 -0
  45. kailash/tracking/storage/base.py +113 -0
  46. kailash/tracking/storage/database.py +619 -0
  47. kailash/tracking/storage/filesystem.py +543 -0
  48. kailash/utils/__init__.py +0 -0
  49. kailash/utils/export.py +924 -0
  50. kailash/utils/templates.py +680 -0
  51. kailash/visualization/__init__.py +62 -0
  52. kailash/visualization/api.py +732 -0
  53. kailash/visualization/dashboard.py +951 -0
  54. kailash/visualization/performance.py +808 -0
  55. kailash/visualization/reports.py +1471 -0
  56. kailash/workflow/__init__.py +15 -0
  57. kailash/workflow/builder.py +245 -0
  58. kailash/workflow/graph.py +827 -0
  59. kailash/workflow/mermaid_visualizer.py +628 -0
  60. kailash/workflow/mock_registry.py +63 -0
  61. kailash/workflow/runner.py +302 -0
  62. kailash/workflow/state.py +238 -0
  63. kailash/workflow/visualization.py +588 -0
  64. kailash-0.1.0.dist-info/METADATA +710 -0
  65. kailash-0.1.0.dist-info/RECORD +69 -0
  66. kailash-0.1.0.dist-info/WHEEL +5 -0
  67. kailash-0.1.0.dist-info/entry_points.txt +2 -0
  68. kailash-0.1.0.dist-info/licenses/LICENSE +21 -0
  69. kailash-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,379 @@
1
+ """Transform nodes for data processing."""
2
+
3
+ import traceback
4
+ from typing import Any, Dict
5
+
6
+ from kailash.nodes.base import Node, NodeParameter, register_node
7
+
8
+
9
@register_node()
class Filter(Node):
    """Filter a list of items with a field/operator/value condition.

    Items may be plain values or dicts. When ``field`` is given, dict items
    are compared on that field's value; otherwise the item itself is compared.
    Supported operators: ``==``, ``!=``, ``>``, ``<``, ``>=``, ``<=``,
    ``contains``. Items whose comparison fails for any reason are filtered out.
    """

    def get_parameters(self) -> Dict[str, NodeParameter]:
        """Declare the node's input parameters."""
        return {
            "data": NodeParameter(
                name="data",
                type=list,
                required=False,  # Data comes from workflow connections
                description="Input data to filter",
            ),
            "field": NodeParameter(
                name="field",
                type=str,
                required=False,
                description="Field name for dict-based filtering",
            ),
            "operator": NodeParameter(
                name="operator",
                type=str,
                required=False,
                default="==",
                description="Comparison operator (==, !=, >, <, >=, <=, contains)",
            ),
            "value": NodeParameter(
                name="value",
                type=Any,
                required=False,
                description="Value to compare against",
            ),
        }

    def run(self, **kwargs) -> Dict[str, Any]:
        """Filter ``data`` and return it under the ``filtered_data`` key.

        Fix: ``data`` is declared ``required=False`` (it normally arrives via
        a workflow connection), so use ``.get`` instead of indexing to avoid
        a ``KeyError`` when it was never supplied.
        """
        data = kwargs.get("data")
        field = kwargs.get("field")
        operator = kwargs.get("operator", "==")
        value = kwargs.get("value")

        if not data:
            # Covers both an absent parameter (None) and an empty list.
            return {"filtered_data": []}

        filtered_data = []
        for item in data:
            if field and isinstance(item, dict):
                item_value = item.get(field)
            else:
                item_value = item

            if self._apply_operator(item_value, operator, value):
                filtered_data.append(item)

        return {"filtered_data": filtered_data}

    def _apply_operator(
        self, item_value: Any, operator: str, compare_value: Any
    ) -> bool:
        """Apply *operator* between *item_value* and *compare_value*.

        Returns False (filtering the item out) whenever the comparison cannot
        be performed, instead of propagating the error — including the case of
        an unknown operator, whose ValueError is swallowed by the outer except.
        """
        try:
            # None fails every comparison except the equality checks.
            if item_value is None:
                if operator in ("==", "!="):
                    return (operator == "==") == (compare_value is None)
                return False  # None fails all other comparisons

            # For ordering operators, try to coerce strings to numbers so
            # e.g. "10" compares numerically against 5 when possible.
            if operator in (">", "<", ">=", "<="):
                try:
                    if isinstance(item_value, str):
                        item_value = float(item_value)
                    if isinstance(compare_value, str):
                        compare_value = float(compare_value)
                except (ValueError, TypeError):
                    # Conversion failed: fall back to comparing as-is.
                    pass

            if operator == "==":
                return item_value == compare_value
            elif operator == "!=":
                return item_value != compare_value
            elif operator == ">":
                return item_value > compare_value
            elif operator == "<":
                return item_value < compare_value
            elif operator == ">=":
                return item_value >= compare_value
            elif operator == "<=":
                return item_value <= compare_value
            elif operator == "contains":
                return compare_value in str(item_value)
            else:
                raise ValueError(f"Unknown operator: {operator}")
        except Exception:
            # If any comparison fails, return False (filter out the item)
            return False
106
+
107
+
108
@register_node()
class Map(Node):
    """Apply a simple per-item transformation to a list.

    Dict items keep all their keys: the transformed value is written either to
    ``new_field`` (when given) or back onto ``field``. Non-dict items are
    replaced by their transformed value. Operations: ``identity``, ``upper``,
    ``lower``, ``multiply``, ``add``.
    """

    def get_parameters(self) -> Dict[str, NodeParameter]:
        """Declare the node's input parameters."""
        return {
            "data": NodeParameter(
                name="data",
                type=list,
                required=False,  # Data comes from workflow connections
                description="Input data to transform",
            ),
            "field": NodeParameter(
                name="field",
                type=str,
                required=False,
                description="Field to extract from dict items",
            ),
            "new_field": NodeParameter(
                name="new_field",
                type=str,
                required=False,
                description="New field name for dict items",
            ),
            "operation": NodeParameter(
                name="operation",
                type=str,
                required=False,
                default="identity",
                description="Operation to apply (identity, upper, lower, multiply, add)",
            ),
            "value": NodeParameter(
                name="value",
                type=Any,
                required=False,
                description="Value for operations that need it",
            ),
        }

    def run(self, **kwargs) -> Dict[str, Any]:
        """Transform ``data`` and return it under the ``mapped_data`` key.

        Fix: ``data`` is declared ``required=False`` (it normally arrives via
        a workflow connection), so use ``.get`` instead of indexing; a missing
        or empty input maps to an empty result rather than a ``KeyError``.
        """
        data = kwargs.get("data") or []
        field = kwargs.get("field")
        new_field = kwargs.get("new_field")
        operation = kwargs.get("operation", "identity")
        value = kwargs.get("value")

        mapped_data = []
        for item in data:
            if isinstance(item, dict):
                # Copy so the caller's input items are never mutated.
                new_item = item.copy()
                if field:
                    item_value = item.get(field)
                    transformed = self._apply_operation(item_value, operation, value)
                    if new_field:
                        new_item[new_field] = transformed
                    else:
                        new_item[field] = transformed
                mapped_data.append(new_item)
            else:
                transformed = self._apply_operation(item, operation, value)
                mapped_data.append(transformed)

        return {"mapped_data": mapped_data}

    def _apply_operation(self, item_value: Any, operation: str, op_value: Any) -> Any:
        """Apply *operation* to *item_value*, using *op_value* where needed.

        Raises:
            ValueError: if *operation* is not one of the supported names.
        """
        if operation == "identity":
            return item_value
        elif operation == "upper":
            return str(item_value).upper()
        elif operation == "lower":
            return str(item_value).lower()
        elif operation == "multiply":
            return float(item_value) * float(op_value)
        elif operation == "add":
            # "add" is string concatenation for str inputs, numeric otherwise.
            if isinstance(item_value, str):
                return str(item_value) + str(op_value)
            return float(item_value) + float(op_value)
        else:
            raise ValueError(f"Unknown operation: {operation}")
188
+
189
+
190
@register_node()
class DataTransformer(Node):
    """
    Transforms data using custom transformation functions provided as strings.

    This node allows arbitrary data transformations by providing lambda functions
    or other Python code as strings. These are compiled and executed against the input data.

    SECURITY NOTE(review): transformation strings are run through ``eval``/
    ``exec``. The ``safe_globals`` whitelist restricts direct name lookup but
    does not sandbox execution — transformation strings must come from trusted
    configuration, never from untrusted user input.
    """

    def get_parameters(self) -> Dict[str, NodeParameter]:
        # Besides "data" and "transformations", the node accepts arg1..arg5 so
        # workflows can feed extra inputs that transformations reference by name.
        return {
            "data": NodeParameter(
                name="data",
                type=list,
                required=False,
                description="Primary input data to transform",
            ),
            "transformations": NodeParameter(
                name="transformations",
                type=list,
                required=True,
                description="List of transformation functions as strings",
            ),
            **{
                f"arg{i}": NodeParameter(
                    name=f"arg{i}",
                    type=Any,
                    required=False,
                    description=f"Additional argument {i}",
                )
                for i in range(1, 6)
            },  # Support for up to 5 additional arguments
        }

    def run(self, **kwargs) -> Dict[str, Any]:
        """Apply each transformation string in order, threading ``result`` through.

        Dispatch per string:
          * multi-line text  -> executed with ``exec``; ``result`` read back
            from the local namespace afterwards;
          * ``lambda ...``   -> compiled with ``eval`` and applied (mapped over
            a list result, with extra lambda arguments filled from the node's
            other inputs by parameter name);
          * anything else    -> evaluated as an expression with ``eval`` with
            ``result`` and the other inputs in scope.

        Raises:
            RuntimeError: if any transformation raises; the original traceback
                is embedded in the message.
        """
        # Extract the transformation functions
        transformations = kwargs.get("transformations", [])
        if not transformations:
            # No transformations: pass the primary input through unchanged.
            return {"result": kwargs.get("data", [])}

        # Get all input data
        input_data = {}
        for key, value in kwargs.items():
            if key != "transformations":
                input_data[key] = value

        # Execute the transformations
        result = input_data.get("data", [])

        for transform_str in transformations:
            try:
                # Create a safe globals dictionary with basic functions
                safe_globals = {
                    "len": len,
                    "sum": sum,
                    "min": min,
                    "max": max,
                    "dict": dict,
                    "list": list,
                    "set": set,
                    "str": str,
                    "int": int,
                    "float": float,
                    "bool": bool,
                    "sorted": sorted,
                }

                # For multi-line code blocks
                if "\n" in transform_str.strip():
                    # Prepare local context for execution
                    local_vars = input_data.copy()
                    local_vars["result"] = result

                    # Execute the code block
                    exec(transform_str, safe_globals, local_vars)

                    # Extract the result from local context
                    result = local_vars.get("result", result)

                # For single expressions or lambdas
                else:
                    # For lambda functions like: "lambda x: x * 2"
                    if transform_str.strip().startswith("lambda"):
                        # First, compile the lambda function
                        lambda_func = eval(transform_str, safe_globals)

                        # Apply the lambda function based on input data
                        if isinstance(result, list):
                            # If there are multiple arguments expected by the lambda
                            if (
                                "data" in input_data
                                and lambda_func.__code__.co_argcount > 1
                            ):
                                # For cases like "lambda tx, customers_dict: ..."
                                # co_varnames starts with the positional
                                # parameters, so this slice is the arg names.
                                arg_names = lambda_func.__code__.co_varnames[
                                    : lambda_func.__code__.co_argcount
                                ]

                                # Apply the lambda to each item
                                new_result = []
                                for item in result:
                                    args = {}
                                    # First arg is the item itself
                                    args[arg_names[0]] = item
                                    # Other args come from input_data
                                    self.logger.debug(
                                        f"Lambda expected args: {arg_names}"
                                    )
                                    self.logger.debug(
                                        f"Available input data keys: {input_data.keys()}"
                                    )
                                    for i, arg_name in enumerate(arg_names[1:], 1):
                                        if arg_name in input_data:
                                            args[arg_name] = input_data[arg_name]
                                            self.logger.debug(
                                                f"Found {arg_name} in input_data"
                                            )
                                        else:
                                            # NOTE(review): only logged — the
                                            # call below will then raise a
                                            # TypeError, caught by the outer
                                            # except and re-raised as
                                            # RuntimeError.
                                            self.logger.error(
                                                f"Missing required argument {arg_name} for lambda function"
                                            )

                                    # Apply function with the args
                                    transformed = lambda_func(**args)
                                    new_result.append(transformed)
                                result = new_result
                            else:
                                # Simple map operation: lambda x: x * 2
                                result = [lambda_func(item) for item in result]
                        else:
                            # Apply directly to a single value
                            result = lambda_func(result)

                    # For regular expressions like: "x * 2"
                    else:
                        local_vars = input_data.copy()
                        local_vars["result"] = result
                        result = eval(transform_str, safe_globals, local_vars)

            except Exception as e:
                tb = traceback.format_exc()
                self.logger.error(f"Error executing transformation: {e}")
                raise RuntimeError(
                    f"Error executing transformation '{transform_str}': {str(e)}\n{tb}"
                )

        return {"result": result}
337
+
338
+
339
@register_node()
class Sort(Node):
    """Sort a list ascending (or descending with ``reverse=True``).

    When ``field`` is given and the first item is a dict, items are sorted by
    that field's value; otherwise items are compared directly.
    """

    def get_parameters(self) -> Dict[str, NodeParameter]:
        """Declare the node's input parameters."""
        return {
            "data": NodeParameter(
                name="data",
                type=list,
                required=False,  # Data comes from workflow connections
                description="Input data to sort",
            ),
            "field": NodeParameter(
                name="field",
                type=str,
                required=False,
                description="Field to sort by for dict items",
            ),
            "reverse": NodeParameter(
                name="reverse",
                type=bool,
                required=False,
                default=False,
                description="Sort in descending order",
            ),
        }

    def run(self, **kwargs) -> Dict[str, Any]:
        """Sort ``data`` and return it under the ``sorted_data`` key.

        Fix: ``data`` is declared ``required=False`` (it normally arrives via
        a workflow connection), so use ``.get`` instead of indexing to avoid
        a ``KeyError`` when it was never supplied.
        """
        data = kwargs.get("data")
        field = kwargs.get("field")
        reverse = kwargs.get("reverse", False)

        if not data:
            # Covers both an absent parameter (None) and an empty list.
            return {"sorted_data": []}

        # Only the first item is inspected to decide dict-vs-plain sorting;
        # a heterogeneous list will raise TypeError from sorted() itself.
        if field and isinstance(data[0], dict):
            # NOTE(review): items missing `field` produce a None sort key,
            # which raises TypeError when compared with non-None keys —
            # confirm callers guarantee the field is present.
            sorted_data = sorted(data, key=lambda x: x.get(field), reverse=reverse)
        else:
            sorted_data = sorted(data, reverse=reverse)

        return {"sorted_data": sorted_data}
@@ -0,0 +1,6 @@
1
"""Runtime engines for the Kailash SDK."""

from kailash.runtime.local import LocalRuntime
from kailash.runtime.runner import WorkflowRunner

# Public API of the runtime package: the local execution engine and the
# workflow runner re-exported at package level.
__all__ = ["LocalRuntime", "WorkflowRunner"]