kailash 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. kailash/__init__.py +31 -0
  2. kailash/__main__.py +11 -0
  3. kailash/cli/__init__.py +5 -0
  4. kailash/cli/commands.py +563 -0
  5. kailash/manifest.py +778 -0
  6. kailash/nodes/__init__.py +23 -0
  7. kailash/nodes/ai/__init__.py +26 -0
  8. kailash/nodes/ai/agents.py +417 -0
  9. kailash/nodes/ai/models.py +488 -0
  10. kailash/nodes/api/__init__.py +52 -0
  11. kailash/nodes/api/auth.py +567 -0
  12. kailash/nodes/api/graphql.py +480 -0
  13. kailash/nodes/api/http.py +598 -0
  14. kailash/nodes/api/rate_limiting.py +572 -0
  15. kailash/nodes/api/rest.py +665 -0
  16. kailash/nodes/base.py +1032 -0
  17. kailash/nodes/base_async.py +128 -0
  18. kailash/nodes/code/__init__.py +32 -0
  19. kailash/nodes/code/python.py +1021 -0
  20. kailash/nodes/data/__init__.py +125 -0
  21. kailash/nodes/data/readers.py +496 -0
  22. kailash/nodes/data/sharepoint_graph.py +623 -0
  23. kailash/nodes/data/sql.py +380 -0
  24. kailash/nodes/data/streaming.py +1168 -0
  25. kailash/nodes/data/vector_db.py +964 -0
  26. kailash/nodes/data/writers.py +529 -0
  27. kailash/nodes/logic/__init__.py +6 -0
  28. kailash/nodes/logic/async_operations.py +702 -0
  29. kailash/nodes/logic/operations.py +551 -0
  30. kailash/nodes/transform/__init__.py +5 -0
  31. kailash/nodes/transform/processors.py +379 -0
  32. kailash/runtime/__init__.py +6 -0
  33. kailash/runtime/async_local.py +356 -0
  34. kailash/runtime/docker.py +697 -0
  35. kailash/runtime/local.py +434 -0
  36. kailash/runtime/parallel.py +557 -0
  37. kailash/runtime/runner.py +110 -0
  38. kailash/runtime/testing.py +347 -0
  39. kailash/sdk_exceptions.py +307 -0
  40. kailash/tracking/__init__.py +7 -0
  41. kailash/tracking/manager.py +885 -0
  42. kailash/tracking/metrics_collector.py +342 -0
  43. kailash/tracking/models.py +535 -0
  44. kailash/tracking/storage/__init__.py +0 -0
  45. kailash/tracking/storage/base.py +113 -0
  46. kailash/tracking/storage/database.py +619 -0
  47. kailash/tracking/storage/filesystem.py +543 -0
  48. kailash/utils/__init__.py +0 -0
  49. kailash/utils/export.py +924 -0
  50. kailash/utils/templates.py +680 -0
  51. kailash/visualization/__init__.py +62 -0
  52. kailash/visualization/api.py +732 -0
  53. kailash/visualization/dashboard.py +951 -0
  54. kailash/visualization/performance.py +808 -0
  55. kailash/visualization/reports.py +1471 -0
  56. kailash/workflow/__init__.py +15 -0
  57. kailash/workflow/builder.py +245 -0
  58. kailash/workflow/graph.py +827 -0
  59. kailash/workflow/mermaid_visualizer.py +628 -0
  60. kailash/workflow/mock_registry.py +63 -0
  61. kailash/workflow/runner.py +302 -0
  62. kailash/workflow/state.py +238 -0
  63. kailash/workflow/visualization.py +588 -0
  64. kailash-0.1.0.dist-info/METADATA +710 -0
  65. kailash-0.1.0.dist-info/RECORD +69 -0
  66. kailash-0.1.0.dist-info/WHEEL +5 -0
  67. kailash-0.1.0.dist-info/entry_points.txt +2 -0
  68. kailash-0.1.0.dist-info/licenses/LICENSE +21 -0
  69. kailash-0.1.0.dist-info/top_level.txt +1 -0
kailash/nodes/data/writers.py
@@ -0,0 +1,529 @@
+ """Data writer nodes for the Kailash SDK.
+
+ This module provides node implementations for writing data to various file formats.
+ These nodes serve as data sinks in workflows, persisting processed data to the
+ file system for storage, sharing, or further processing.
+
+ Design Philosophy:
+ 1. Consistent input interface across formats
+ 2. Flexible output options for each format
+ 3. Safe file operations with error handling
+ 4. Format-specific optimizations
+ 5. Progress tracking and feedback
+
+ Node Categories:
+ - CSVWriter: Tabular data to CSV files
+ - JSONWriter: Structured data to JSON files
+ - TextWriter: Raw text to any text file
+
+ Upstream Components:
+ - Reader nodes: Provide data to transform
+ - Transform nodes: Process data before writing
+ - Logic nodes: Filter data for output
+ - AI nodes: Generate content to save
+
+ Downstream Consumers:
+ - File system: Stores the written files
+ - External systems: Read the output files
+ - Other workflows: Use files as input
+ - Monitoring systems: Track file creation
+ """
+
+ import csv
+ import json
+ from typing import Any, Dict
+
+ from kailash.nodes.base import Node, NodeParameter, register_node
+
+
+ @register_node()
+ class CSVWriter(Node):
+     """Writes data to a CSV file.
+
+     This node handles CSV file writing with support for both dictionary
+     and list data structures. It automatically detects the data format
+     and applies the appropriate writing strategy.
+
+     Design Features:
+     1. Automatic format detection (dict vs list)
+     2. Header generation from dictionary keys
+     3. Configurable delimiters
+     4. Unicode support through encoding
+     5. Context-managed, exception-safe file handling
+
+     Data Flow:
+     - Input: Structured data (list of dicts or lists)
+     - Processing: Format detection and CSV generation
+     - Output: File creation confirmation
+
+     Common Usage Patterns:
+     1. Exporting processed data
+     2. Creating reports
+     3. Generating data backups
+     4. Producing import files
+     5. Saving analysis results
+
+     Upstream Sources:
+     - CSVReader: Modified data round-trip
+     - Transform nodes: Processed tabular data
+     - Aggregator: Summarized results
+     - API nodes: Structured responses
+
+     Downstream Consumers:
+     - File system: Stores the CSV
+     - External tools: Excel, databases
+     - Other workflows: Read the output
+     - Archive systems: Long-term storage
+
+     Error Handling:
+     - PermissionError: Write access denied
+     - OSError: Disk full or path issues
+     - TypeError: Invalid data structure
+     - UnicodeEncodeError: Encoding issues
+
+     Example:
+         # Write customer data
+         writer = CSVWriter(
+             file_path='output.csv',
+             data=[
+                 {'id': 1, 'name': 'John', 'age': 30},
+                 {'id': 2, 'name': 'Jane', 'age': 25}
+             ],
+             delimiter=','
+         )
+         result = writer.execute()
+         # result = {'rows_written': 2, 'file_path': 'output.csv'}
+     """
+
+     def get_parameters(self) -> Dict[str, NodeParameter]:
+         """Define input parameters for CSV writing.
+
+         Provides comprehensive parameters for flexible CSV output,
+         supporting various data structures and formatting options.
+
+         Parameter Design:
+         1. file_path: Required output location
+         2. data: Data to write (may be supplied at runtime by the workflow)
+         3. headers: Optional custom headers
+         4. delimiter: Optional separator
+
+         The parameters handle two main scenarios:
+         - Dict data: Auto-extracts headers from keys
+         - List data: Requires headers or writes raw
+
+         Returns:
+             Dictionary of parameter definitions for validation
+         """
+         return {
+             "file_path": NodeParameter(
+                 name="file_path",
+                 type=str,
+                 required=True,
+                 description="Path to write the CSV file",
+             ),
+             "data": NodeParameter(
+                 name="data",
+                 type=list,
+                 required=False,  # Not required at initialization for workflow usage
+                 description="Data to write (list of dicts or lists)",
+             ),
+             "headers": NodeParameter(
+                 name="headers",
+                 type=list,
+                 required=False,
+                 default=None,
+                 description="Column headers (auto-detected from dict keys if not provided)",
+             ),
+             "delimiter": NodeParameter(
+                 name="delimiter",
+                 type=str,
+                 required=False,
+                 default=",",
+                 description="CSV delimiter character",
+             ),
+         }
+
+     def run(self, **kwargs) -> Dict[str, Any]:
+         """Execute the CSV writing operation.
+
+         Intelligently handles different data structures, automatically
+         detecting the format and applying the appropriate writing strategy.
+
+         Processing Steps:
+         1. Detects data structure (dict vs list)
+         2. Determines headers (provided or extracted)
+         3. Creates the appropriate CSV writer
+         4. Writes headers if applicable
+         5. Writes data rows
+         6. Returns write statistics
+
+         Format Detection:
+         - Dict data: Uses DictWriter, auto-extracts headers
+         - List data: Uses standard writer, optional headers
+         - Empty data: Returns zero rows written
+
+         File Handling:
+         - Creates a new file (overwrites existing)
+         - Uses UTF-8 encoding
+         - Handles newlines correctly (cross-platform)
+         - Closes the file automatically
+
+         Args:
+             **kwargs: Validated parameters including:
+                 - file_path: Output file location
+                 - data: List of dicts or lists
+                 - headers: Optional column names
+                 - delimiter: Field separator
+
+         Returns:
+             Dictionary with:
+             - rows_written: Number of data rows
+             - file_path: Output file location
+
+         Raises:
+             PermissionError: If write access is denied
+             OSError: If path issues occur
+             TypeError: If the data structure is invalid
+
+         Downstream usage:
+         - File can be read by CSVReader
+         - External tools can process the output
+         - Metrics available for monitoring
+         """
+         file_path = kwargs["file_path"]
+         data = kwargs["data"]
+         headers = kwargs.get("headers")
+         delimiter = kwargs.get("delimiter", ",")
+
+         if not data:
+             # Include file_path so the return matches the documented contract
+             return {"rows_written": 0, "file_path": file_path}
+
+         with open(file_path, "w", newline="", encoding="utf-8") as f:
+             if isinstance(data[0], dict):
+                 # Writing dictionaries
+                 if not headers:
+                     headers = list(data[0].keys())
+                 writer = csv.DictWriter(f, fieldnames=headers, delimiter=delimiter)
+                 writer.writeheader()
+                 writer.writerows(data)
+             else:
+                 # Writing lists
+                 writer = csv.writer(f, delimiter=delimiter)
+                 if headers:
+                     writer.writerow(headers)
+                 writer.writerows(data)
+
+         return {"rows_written": len(data), "file_path": file_path}
+
+
+ @register_node()
+ class JSONWriter(Node):
+     """Writes data to a JSON file.
+
+     This node handles JSON serialization with support for complex
+     nested structures, pretty printing, and various data types.
+     It ensures data persistence while maintaining structure integrity.
+
+     Design Features:
+     1. Preserves complex data structures
+     2. Pretty printing with indentation
+     3. Unicode support by default
+     4. Type preservation for round-trips
+     5. Context-managed, exception-safe file handling
+
+     Data Flow:
+     - Input: Any JSON-serializable data
+     - Processing: JSON serialization
+     - Output: File creation confirmation
+
+     Common Usage Patterns:
+     1. Saving API responses
+     2. Persisting configuration
+     3. Caching structured data
+     4. Exporting analysis results
+     5. Creating data backups
+
+     Upstream Sources:
+     - JSONReader: Modified data round-trip
+     - API nodes: Response data
+     - Transform nodes: Processed structures
+     - Aggregator: Complex results
+
+     Downstream Consumers:
+     - File system: Stores JSON file
+     - JSONReader: Can reload data
+     - APIs: Import the data
+     - Version control: Track changes
+
+     Error Handling:
+     - TypeError: Non-serializable data
+     - PermissionError: Write access denied
+     - OSError: Path or disk issues
+     - ValueError: Circular references in the data
+
+     Example:
+         # Write API response
+         writer = JSONWriter(
+             file_path='response.json',
+             data={
+                 'status': 'success',
+                 'results': [1, 2, 3],
+                 'metadata': {'version': '1.0'}
+             },
+             indent=2
+         )
+         result = writer.execute()
+         # result = {'file_path': 'response.json'}
+     """
+
+     def get_parameters(self) -> Dict[str, NodeParameter]:
+         """Define input parameters for JSON writing.
+
+         Minimal parameters reflecting JSON's flexibility while
+         providing formatting control through indentation.
+
+         Parameter Design:
+         1. file_path: Required output location
+         2. data: Data to write (any serializable structure)
+         3. indent: Optional formatting control
+
+         The 'Any' type for data reflects JSON's ability to
+         handle various structures - validation happens at
+         serialization time.
+
+         Returns:
+             Dictionary of parameter definitions
+         """
+         return {
+             "file_path": NodeParameter(
+                 name="file_path",
+                 type=str,
+                 required=True,
+                 description="Path to write the JSON file",
+             ),
+             "data": NodeParameter(
+                 name="data",
+                 type=Any,
+                 required=False,  # Not required at initialization for workflow usage
+                 description="Data to write (must be JSON-serializable)",
+             ),
+             "indent": NodeParameter(
+                 name="indent",
+                 type=int,
+                 required=False,
+                 default=2,
+                 description="Indentation level for pretty printing",
+             ),
+         }
+
+     def run(self, **kwargs) -> Dict[str, Any]:
+         """Execute the JSON writing operation.
+
+         Serializes data to JSON format with proper formatting
+         and encoding. Handles complex nested structures while
+         maintaining readability through indentation.
+
+         Processing Steps:
+         1. Opens the file for writing
+         2. Serializes data to JSON
+         3. Applies formatting options
+         4. Ensures Unicode preservation
+         5. Closes the file automatically
+
+         Serialization Features:
+         - Pretty printing with indentation
+         - Unicode characters preserved
+         - Key insertion order preserved
+         - Null value handling
+         - Number precision maintained
+
+         Args:
+             **kwargs: Validated parameters including:
+                 - file_path: Output file location
+                 - data: Data to serialize
+                 - indent: Spaces for indentation
+
+         Returns:
+             Dictionary with:
+             - file_path: Written file location
+
+         Raises:
+             TypeError: If the data is not serializable
+             PermissionError: If write access is denied
+             OSError: If path issues occur
+
+         Downstream usage:
+         - JSONReader can reload the file
+         - Version control can track changes
+         - APIs can import the data
+         """
+         file_path = kwargs["file_path"]
+         data = kwargs["data"]
+         indent = kwargs.get("indent", 2)
+
+         with open(file_path, "w", encoding="utf-8") as f:
+             json.dump(data, f, indent=indent, ensure_ascii=False)
+
+         return {"file_path": file_path}
+
+
+ @register_node()
+ class TextWriter(Node):
+     """Writes text to a file.
+
+     This node provides flexible text file writing with support for
+     various encodings and append operations. It handles plain text
+     output for logs, documents, and generated content.
+
+     Design Features:
+     1. Flexible encoding support
+     2. Append mode for log files
+     3. Overwrite mode for fresh output
+     4. Byte counting for verification
+     5. Unicode-safe operations
+
+     Data Flow:
+     - Input: Text string and configuration
+     - Processing: Encode and write text
+     - Output: Write confirmation
+
+     Common Usage Patterns:
+     1. Writing log entries
+     2. Saving generated content
+     3. Creating documentation
+     4. Exporting text reports
+     5. Building configuration files
+
+     Upstream Sources:
+     - TextReader: Modified text round-trip
+     - Transform nodes: Processed text
+     - AI nodes: Generated content
+     - Template nodes: Formatted output
+
+     Downstream Consumers:
+     - File system: Stores text file
+     - Log analyzers: Process logs
+     - Documentation systems: Use output
+     - Version control: Track changes
+
+     Error Handling:
+     - PermissionError: Write access denied
+     - OSError: Path or disk issues
+     - UnicodeEncodeError: Encoding mismatch
+     - MemoryError: Text too large
+
+     Example:
+         # Append to log file
+         writer = TextWriter(
+             file_path='app.log',
+             text='ERROR: Connection failed\\n',
+             encoding='utf-8',
+             append=True
+         )
+         result = writer.execute()
+         # result = {'file_path': 'app.log', 'bytes_written': 25}
+     """
+
+     def get_parameters(self) -> Dict[str, NodeParameter]:
+         """Define input parameters for text writing.
+
+         Comprehensive parameters supporting various text writing
+         scenarios, from simple output to complex log management.
+
+         Parameter Design:
+         1. file_path: Required output location
+         2. text: Required content to write
+         3. encoding: Optional, for compatibility
+         4. append: Optional, for log patterns
+
+         The append parameter is crucial for:
+         - Log file management
+         - Continuous output streams
+         - Building files incrementally
+         - Preserving existing content
+
+         Returns:
+             Dictionary of parameter definitions
+         """
+         return {
+             "file_path": NodeParameter(
+                 name="file_path",
+                 type=str,
+                 required=True,
+                 description="Path to write the text file",
+             ),
+             "text": NodeParameter(
+                 name="text", type=str, required=True, description="Text to write"
+             ),
+             "encoding": NodeParameter(
+                 name="encoding",
+                 type=str,
+                 required=False,
+                 default="utf-8",
+                 description="File encoding",
+             ),
+             "append": NodeParameter(
+                 name="append",
+                 type=bool,
+                 required=False,
+                 default=False,
+                 description="Whether to append to an existing file",
+             ),
+         }
+
+     def run(self, **kwargs) -> Dict[str, Any]:
+         """Execute the text writing operation.
+
+         Writes text to a file with the specified encoding and mode.
+         Supports both overwrite and append operations for different
+         use cases such as logging and content generation.
+
+         Processing Steps:
+         1. Determines write mode (append/overwrite)
+         2. Opens the file with the requested encoding
+         3. Writes the text content
+         4. Calculates bytes written
+         5. Returns write statistics
+
+         Mode Selection:
+         - append=False: Creates new or overwrites
+         - append=True: Adds to existing file
+         - File is created if it doesn't exist (both modes)
+
+         Encoding Handling:
+         - Encodes text before counting bytes
+         - Supports any Python encoding
+         - UTF-8 default for compatibility
+
+         Args:
+             **kwargs: Validated parameters including:
+                 - file_path: Output file location
+                 - text: Content to write
+                 - encoding: Character encoding
+                 - append: Write mode selection
+
+         Returns:
+             Dictionary with:
+             - file_path: Written file location
+             - bytes_written: Size of written data
+
+         Raises:
+             PermissionError: If write access is denied
+             OSError: If path issues occur
+             UnicodeEncodeError: If encoding fails
+
+         Downstream usage:
+         - TextReader can read the file
+         - Log analyzers can process it
+         - Metrics available for monitoring
+         """
+         file_path = kwargs["file_path"]
+         text = kwargs["text"]
+         encoding = kwargs.get("encoding", "utf-8")
+         append = kwargs.get("append", False)
+
+         mode = "a" if append else "w"
+         with open(file_path, mode, encoding=encoding) as f:
+             f.write(text)
+
+         return {"file_path": file_path, "bytes_written": len(text.encode(encoding))}
kailash/nodes/logic/__init__.py
@@ -0,0 +1,6 @@
+ """Logic operation nodes for the Kailash SDK."""
+
+ from kailash.nodes.logic.async_operations import AsyncMerge, AsyncSwitch
+ from kailash.nodes.logic.operations import Merge, Switch
+
+ __all__ = ["Switch", "Merge", "AsyncSwitch", "AsyncMerge"]