kailash 0.3.0__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the versions exactly as they appear in their public registries.
Files changed (114)
  1. kailash/__init__.py +1 -1
  2. kailash/access_control.py +40 -39
  3. kailash/api/auth.py +26 -32
  4. kailash/api/custom_nodes.py +29 -29
  5. kailash/api/custom_nodes_secure.py +35 -35
  6. kailash/api/database.py +17 -17
  7. kailash/api/gateway.py +19 -19
  8. kailash/api/mcp_integration.py +24 -23
  9. kailash/api/studio.py +45 -45
  10. kailash/api/workflow_api.py +8 -8
  11. kailash/cli/commands.py +5 -8
  12. kailash/manifest.py +42 -42
  13. kailash/mcp/__init__.py +1 -1
  14. kailash/mcp/ai_registry_server.py +20 -20
  15. kailash/mcp/client.py +9 -11
  16. kailash/mcp/client_new.py +10 -10
  17. kailash/mcp/server.py +1 -2
  18. kailash/mcp/server_enhanced.py +449 -0
  19. kailash/mcp/servers/ai_registry.py +6 -6
  20. kailash/mcp/utils/__init__.py +31 -0
  21. kailash/mcp/utils/cache.py +267 -0
  22. kailash/mcp/utils/config.py +263 -0
  23. kailash/mcp/utils/formatters.py +293 -0
  24. kailash/mcp/utils/metrics.py +418 -0
  25. kailash/nodes/ai/agents.py +9 -9
  26. kailash/nodes/ai/ai_providers.py +33 -34
  27. kailash/nodes/ai/embedding_generator.py +31 -32
  28. kailash/nodes/ai/intelligent_agent_orchestrator.py +62 -66
  29. kailash/nodes/ai/iterative_llm_agent.py +48 -48
  30. kailash/nodes/ai/llm_agent.py +32 -33
  31. kailash/nodes/ai/models.py +13 -13
  32. kailash/nodes/ai/self_organizing.py +44 -44
  33. kailash/nodes/api/auth.py +11 -11
  34. kailash/nodes/api/graphql.py +13 -13
  35. kailash/nodes/api/http.py +19 -19
  36. kailash/nodes/api/monitoring.py +20 -20
  37. kailash/nodes/api/rate_limiting.py +9 -13
  38. kailash/nodes/api/rest.py +29 -29
  39. kailash/nodes/api/security.py +44 -47
  40. kailash/nodes/base.py +21 -23
  41. kailash/nodes/base_async.py +7 -7
  42. kailash/nodes/base_cycle_aware.py +12 -12
  43. kailash/nodes/base_with_acl.py +5 -5
  44. kailash/nodes/code/python.py +56 -55
  45. kailash/nodes/data/directory.py +6 -6
  46. kailash/nodes/data/event_generation.py +10 -10
  47. kailash/nodes/data/file_discovery.py +28 -31
  48. kailash/nodes/data/readers.py +8 -8
  49. kailash/nodes/data/retrieval.py +10 -10
  50. kailash/nodes/data/sharepoint_graph.py +17 -17
  51. kailash/nodes/data/sources.py +5 -5
  52. kailash/nodes/data/sql.py +13 -13
  53. kailash/nodes/data/streaming.py +25 -25
  54. kailash/nodes/data/vector_db.py +22 -22
  55. kailash/nodes/data/writers.py +7 -7
  56. kailash/nodes/logic/async_operations.py +17 -17
  57. kailash/nodes/logic/convergence.py +11 -11
  58. kailash/nodes/logic/loop.py +4 -4
  59. kailash/nodes/logic/operations.py +11 -11
  60. kailash/nodes/logic/workflow.py +8 -9
  61. kailash/nodes/mixins/mcp.py +17 -17
  62. kailash/nodes/mixins.py +8 -10
  63. kailash/nodes/transform/chunkers.py +3 -3
  64. kailash/nodes/transform/formatters.py +7 -7
  65. kailash/nodes/transform/processors.py +10 -10
  66. kailash/runtime/access_controlled.py +18 -18
  67. kailash/runtime/async_local.py +17 -19
  68. kailash/runtime/docker.py +20 -22
  69. kailash/runtime/local.py +16 -16
  70. kailash/runtime/parallel.py +23 -23
  71. kailash/runtime/parallel_cyclic.py +27 -27
  72. kailash/runtime/runner.py +6 -6
  73. kailash/runtime/testing.py +20 -20
  74. kailash/sdk_exceptions.py +0 -58
  75. kailash/security.py +14 -26
  76. kailash/tracking/manager.py +38 -38
  77. kailash/tracking/metrics_collector.py +15 -14
  78. kailash/tracking/models.py +53 -53
  79. kailash/tracking/storage/base.py +7 -17
  80. kailash/tracking/storage/database.py +22 -23
  81. kailash/tracking/storage/filesystem.py +38 -40
  82. kailash/utils/export.py +21 -21
  83. kailash/utils/templates.py +2 -3
  84. kailash/visualization/api.py +30 -34
  85. kailash/visualization/dashboard.py +17 -17
  86. kailash/visualization/performance.py +16 -16
  87. kailash/visualization/reports.py +25 -27
  88. kailash/workflow/builder.py +8 -8
  89. kailash/workflow/convergence.py +13 -12
  90. kailash/workflow/cycle_analyzer.py +30 -32
  91. kailash/workflow/cycle_builder.py +12 -12
  92. kailash/workflow/cycle_config.py +16 -15
  93. kailash/workflow/cycle_debugger.py +40 -40
  94. kailash/workflow/cycle_exceptions.py +29 -29
  95. kailash/workflow/cycle_profiler.py +21 -21
  96. kailash/workflow/cycle_state.py +20 -22
  97. kailash/workflow/cyclic_runner.py +44 -44
  98. kailash/workflow/graph.py +40 -40
  99. kailash/workflow/mermaid_visualizer.py +9 -11
  100. kailash/workflow/migration.py +22 -22
  101. kailash/workflow/mock_registry.py +6 -6
  102. kailash/workflow/runner.py +9 -9
  103. kailash/workflow/safety.py +12 -13
  104. kailash/workflow/state.py +8 -11
  105. kailash/workflow/templates.py +19 -19
  106. kailash/workflow/validation.py +14 -14
  107. kailash/workflow/visualization.py +22 -22
  108. {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/METADATA +53 -5
  109. kailash-0.3.1.dist-info/RECORD +136 -0
  110. kailash-0.3.0.dist-info/RECORD +0 -130
  111. {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/WHEEL +0 -0
  112. {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/entry_points.txt +0 -0
  113. {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/licenses/LICENSE +0 -0
  114. {kailash-0.3.0.dist-info → kailash-0.3.1.dist-info}/top_level.txt +0 -0
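
Nearly all of the churn in this release follows a single typing-modernization pattern: annotations move from the typing module's generics (Optional, List, Dict, Set) to PEP 604 unions and PEP 585 builtin generics, as the hunks below show. A minimal before/after sketch of the pattern (illustrative names, not taken from the package source):

    # Before: typing-module spellings, importable on older Pythons
    from typing import Dict, List, Optional

    def load_runs(name: Optional[str] = None) -> List[Dict[str, int]]: ...

    # After: builtin generics (3.9+) and | unions (3.10+), no typing import needed
    def load_runs(name: str | None = None) -> list[dict[str, int]]: ...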
kailash/tracking/storage/database.py CHANGED
@@ -2,7 +2,6 @@
 
 import json
 from datetime import datetime
-from typing import List, Optional
 from uuid import uuid4
 
 from ..models import TaskMetrics, TaskRun, TaskStatus, WorkflowRun
@@ -160,7 +159,7 @@ class DatabaseStorage(StorageBackend):
 
         self.conn.commit()
 
-    def load_run(self, run_id: str) -> Optional[WorkflowRun]:
+    def load_run(self, run_id: str) -> WorkflowRun | None:
         """Load a workflow run by ID."""
         cursor = self.conn.cursor()
 
@@ -177,7 +176,7 @@ class DatabaseStorage(StorageBackend):
 
         # Convert row to dict
         columns = [desc[0] for desc in cursor.description]
-        data = dict(zip(columns, row))
+        data = dict(zip(columns, row, strict=False))
 
         # Parse JSON metadata
         data["metadata"] = json.loads(data["metadata"] or "{}")
@@ -189,8 +188,8 @@ class DatabaseStorage(StorageBackend):
         return WorkflowRun.model_validate(data)
 
     def list_runs(
-        self, workflow_name: Optional[str] = None, status: Optional[str] = None
-    ) -> List[WorkflowRun]:
+        self, workflow_name: str | None = None, status: str | None = None
+    ) -> list[WorkflowRun]:
         """List workflow runs."""
         cursor = self.conn.cursor()
 
@@ -213,7 +212,7 @@
         columns = [desc[0] for desc in cursor.description]
 
         for row in cursor.fetchall():
-            data = dict(zip(columns, row))
+            data = dict(zip(columns, row, strict=False))
             data["metadata"] = json.loads(data["metadata"] or "{}")
 
             # Load task IDs
@@ -277,7 +276,7 @@
 
         self.conn.commit()
 
-    def load_task(self, task_id: str) -> Optional[TaskRun]:
+    def load_task(self, task_id: str) -> TaskRun | None:
         """Load a task by ID."""
         cursor = self.conn.cursor()
 
@@ -305,7 +304,7 @@
 
         # Convert row to dict
         columns = [desc[0] for desc in cursor.description]
-        data = dict(zip(columns, row))
+        data = dict(zip(columns, row, strict=False))
 
         # Parse JSON fields
         if data["result"]:
@@ -341,7 +340,7 @@
         metrics_row = cursor.fetchone()
         if metrics_row:
             metrics_columns = [desc[0] for desc in cursor.description]
-            metrics_data = dict(zip(metrics_columns, metrics_row))
+            metrics_data = dict(zip(metrics_columns, metrics_row, strict=False))
 
             # Parse custom metrics if present
             if metrics_data.get("custom_metrics"):
@@ -362,9 +361,9 @@
     def list_tasks(
         self,
         run_id: str,
-        node_id: Optional[str] = None,
-        status: Optional[TaskStatus] = None,
-    ) -> List[TaskRun]:
+        node_id: str | None = None,
+        status: TaskStatus | None = None,
+    ) -> list[TaskRun]:
         """List tasks for a run."""
         cursor = self.conn.cursor()
 
@@ -387,7 +386,7 @@
         columns = [desc[0] for desc in cursor.description]
 
         for row in cursor.fetchall():
-            data = dict(zip(columns, row))
+            data = dict(zip(columns, row, strict=False))
 
             # Parse JSON fields
             if data["result"]:
@@ -451,7 +450,7 @@
 
     def import_run(self, input_path: str) -> str:
         """Import a run and its tasks."""
-        with open(input_path, "r") as f:
+        with open(input_path) as f:
             import_data = json.load(f)
 
         # Import run
@@ -472,7 +471,7 @@
 
         return run.run_id
 
-    def get_task(self, task_id: str) -> Optional[TaskRun]:
+    def get_task(self, task_id: str) -> TaskRun | None:
         """Load a task by ID.
 
         Alias for load_task for API compatibility.
@@ -485,7 +484,7 @@
         """
         return self.load_task(task_id)
 
-    def get_all_tasks(self) -> List[TaskRun]:
+    def get_all_tasks(self) -> list[TaskRun]:
         """Get all tasks.
 
         Returns:
@@ -498,7 +497,7 @@
         columns = [desc[0] for desc in cursor.description]
 
         for row in cursor.fetchall():
-            data = dict(zip(columns, row))
+            data = dict(zip(columns, row, strict=False))
 
             # Parse JSON fields
             if data["result"]:
@@ -532,11 +531,11 @@
 
     def query_tasks(
         self,
-        node_id: Optional[str] = None,
-        status: Optional[TaskStatus] = None,
-        started_after: Optional[datetime] = None,
-        completed_before: Optional[datetime] = None,
-    ) -> List[TaskRun]:
+        node_id: str | None = None,
+        status: TaskStatus | None = None,
+        started_after: datetime | None = None,
+        completed_before: datetime | None = None,
+    ) -> list[TaskRun]:
        """Query tasks with filters.
 
        Args:
@@ -583,7 +582,7 @@
         columns = [desc[0] for desc in cursor.description]
 
         for row in cursor.fetchall():
-            data = dict(zip(columns, row))
+            data = dict(zip(columns, row, strict=False))
 
             # Parse JSON fields
             if data["result"]:
kailash/tracking/storage/filesystem.py CHANGED
@@ -2,9 +2,9 @@
 
 import json
 import os
-from datetime import datetime
+from datetime import UTC, datetime
 from pathlib import Path
-from typing import Any, List, Optional
+from typing import Any
 from uuid import uuid4
 
 from kailash.sdk_exceptions import KailashStorageError
@@ -16,7 +16,7 @@ from .base import StorageBackend
 class FileSystemStorage(StorageBackend):
     """Filesystem-based storage backend."""
 
-    def __init__(self, base_path: Optional[str] = None):
+    def __init__(self, base_path: str | None = None):
         """Initialize filesystem storage.
 
         Args:
@@ -47,26 +47,26 @@ class FileSystemStorage(StorageBackend):
         with open(run_path, "w") as f:
             json.dump(run.to_dict(), f, indent=2)
 
-    def load_run(self, run_id: str) -> Optional[WorkflowRun]:
+    def load_run(self, run_id: str) -> WorkflowRun | None:
         """Load a workflow run by ID."""
         run_path = self.runs_dir / f"{run_id}.json"
         if not run_path.exists():
             return None
 
-        with open(run_path, "r") as f:
+        with open(run_path) as f:
             data = json.load(f)
 
         return WorkflowRun.model_validate(data)
 
     def list_runs(
-        self, workflow_name: Optional[str] = None, status: Optional[str] = None
-    ) -> List[WorkflowRun]:
+        self, workflow_name: str | None = None, status: str | None = None
+    ) -> list[WorkflowRun]:
         """List workflow runs."""
         runs = []
 
         for run_file in self.runs_dir.glob("*.json"):
             try:
-                with open(run_file, "r") as f:
+                with open(run_file) as f:
                     data = json.load(f)
 
                 run = WorkflowRun.model_validate(data)
@@ -89,14 +89,13 @@ class FileSystemStorage(StorageBackend):
                 # Ensure datetime is timezone-aware
                 if run.started_at.tzinfo is None:
                     # Assume UTC for naive datetimes
-                    from datetime import timezone
 
-                    return run.started_at.replace(tzinfo=timezone.utc)
+                    return run.started_at.replace(tzinfo=UTC)
                 return run.started_at
             # Return a very old date for runs without started_at
-            from datetime import datetime, timezone
+            from datetime import datetime
 
-            return datetime.min.replace(tzinfo=timezone.utc)
+            return datetime.min.replace(tzinfo=UTC)
 
         runs.sort(key=safe_datetime_key, reverse=True)
         return runs
@@ -135,7 +134,7 @@ class FileSystemStorage(StorageBackend):
         except Exception as e:
             raise KailashStorageError(f"Failed to save task: {e}") from e
 
-    def get_task(self, task_id: str) -> Optional[TaskRun]:
+    def get_task(self, task_id: str) -> TaskRun | None:
         """Load a task by ID.
 
         Args:
@@ -151,14 +150,14 @@ class FileSystemStorage(StorageBackend):
             # First check direct path for tests
             task_path = self.tasks_dir / f"{task_id}.json"
             if task_path.exists():
-                with open(task_path, "r") as tf:
+                with open(task_path) as tf:
                     task_data = json.load(tf)
                 task = TaskRun.model_validate(task_data)
 
                 # Load metrics if available
                 metrics_path = self.metrics_dir / f"{task_id}.json"
                 if metrics_path.exists():
-                    with open(metrics_path, "r") as mf:
+                    with open(metrics_path) as mf:
                         metrics_data = json.load(mf)
                     task.metrics = TaskMetrics.model_validate(metrics_data)
 
@@ -167,20 +166,20 @@ class FileSystemStorage(StorageBackend):
             # Then check index for run_id
             index_path = self._get_index_file()
             if index_path.exists():
-                with open(index_path, "r") as f:
+                with open(index_path) as f:
                     index = json.load(f)
                 if task_id in index.get("tasks", {}):
                     run_id = index["tasks"][task_id]["run_id"]
                     run_task_path = self.tasks_dir / run_id / f"{task_id}.json"
                     if run_task_path.exists():
-                        with open(run_task_path, "r") as tf:
+                        with open(run_task_path) as tf:
                             task_data = json.load(tf)
                         task = TaskRun.model_validate(task_data)
 
                         # Load metrics if available
                         metrics_path = self.metrics_dir / f"{task_id}.json"
                         if metrics_path.exists():
-                            with open(metrics_path, "r") as mf:
+                            with open(metrics_path) as mf:
                                 metrics_data = json.load(mf)
                             task.metrics = TaskMetrics.model_validate(metrics_data)
 
@@ -193,7 +192,7 @@ class FileSystemStorage(StorageBackend):
                 raise
             raise KailashStorageError(f"Failed to get task: {e}") from e
 
-    def load_task(self, task_id: str) -> Optional[TaskRun]:
+    def load_task(self, task_id: str) -> TaskRun | None:
         """Load a task by ID."""
         # Search all run directories
         for run_dir in self.tasks_dir.iterdir():
@@ -202,7 +201,7 @@ class FileSystemStorage(StorageBackend):
 
             task_path = run_dir / f"{task_id}.json"
             if task_path.exists():
-                with open(task_path, "r") as f:
+                with open(task_path) as f:
                     data = json.load(f)
                 return TaskRun.model_validate(data)
 
@@ -211,9 +210,9 @@
     def list_tasks(
         self,
         run_id: str,
-        node_id: Optional[str] = None,
-        status: Optional[TaskStatus] = None,
-    ) -> List[TaskRun]:
+        node_id: str | None = None,
+        status: TaskStatus | None = None,
+    ) -> list[TaskRun]:
         """List tasks for a run."""
         tasks = []
         run_tasks_dir = self.tasks_dir / run_id
@@ -223,7 +222,7 @@
 
         for task_file in run_tasks_dir.glob("*.json"):
             try:
-                with open(task_file, "r") as f:
+                with open(task_file) as f:
                     data = json.load(f)
 
                 task = TaskRun.model_validate(data)
@@ -246,9 +245,8 @@
                 # Ensure datetime is timezone-aware
                 if task.started_at.tzinfo is None:
                     # Assume UTC for naive datetimes
-                    from datetime import timezone
 
-                    return task.started_at.replace(tzinfo=timezone.utc)
+                    return task.started_at.replace(tzinfo=UTC)
                 return task.started_at
             # Use task_id as fallback for tasks without started_at
             return task.task_id
@@ -290,7 +288,7 @@
 
     def import_run(self, input_path: str) -> str:
         """Import a run and its tasks."""
-        with open(input_path, "r") as f:
+        with open(input_path) as f:
             import_data = json.load(f)
 
         # Import run
@@ -383,7 +381,7 @@
             # Update index
             index_path = self._get_index_file()
             if index_path.exists():
-                with open(index_path, "r") as f:
+                with open(index_path) as f:
                     index = json.load(f)
 
                 if task_id in index.get("tasks", {}):
@@ -396,7 +394,7 @@
                 raise
             raise KailashStorageError(f"Failed to delete task: {e}") from e
 
-    def get_all_tasks(self) -> List[TaskRun]:
+    def get_all_tasks(self) -> list[TaskRun]:
         """Get all tasks.
 
         Returns:
@@ -411,7 +409,7 @@
             # First load tasks in the main tasks directory (for tests)
             for task_file in self.tasks_dir.glob("*.json"):
                 if task_file.is_file():
-                    with open(task_file, "r") as f:
+                    with open(task_file) as f:
                         task_data = json.load(f)
 
                     task = TaskRun.model_validate(task_data)
@@ -419,7 +417,7 @@
                     # Load metrics if available
                     metrics_path = self.metrics_dir / f"{task.task_id}.json"
                     if metrics_path.exists():
-                        with open(metrics_path, "r") as f:
+                        with open(metrics_path) as f:
                             metrics_data = json.load(f)
                         task.metrics = TaskMetrics.model_validate(metrics_data)
 
@@ -432,7 +430,7 @@
 
                 # Load all tasks in the run directory
                 for task_file in run_dir.glob("*.json"):
-                    with open(task_file, "r") as f:
+                    with open(task_file) as f:
                         task_data = json.load(f)
 
                     task = TaskRun.model_validate(task_data)
@@ -440,7 +438,7 @@
                     # Load metrics if available
                     metrics_path = self.metrics_dir / f"{task.task_id}.json"
                     if metrics_path.exists():
-                        with open(metrics_path, "r") as f:
+                        with open(metrics_path) as f:
                             metrics_data = json.load(f)
                         task.metrics = TaskMetrics.model_validate(metrics_data)
 
@@ -450,7 +448,7 @@
 
         except Exception as e:
             raise KailashStorageError(f"Failed to get all tasks: {e}") from e
-    def get_tasks_by_run(self, run_id: str) -> List[TaskRun]:
+    def get_tasks_by_run(self, run_id: str) -> list[TaskRun]:
         """Get all tasks for a specific run.
 
         Args:
@@ -466,11 +464,11 @@
 
     def query_tasks(
         self,
-        node_id: Optional[str] = None,
-        status: Optional[TaskStatus] = None,
-        started_after: Optional[datetime] = None,
-        completed_before: Optional[datetime] = None,
-    ) -> List[TaskRun]:
+        node_id: str | None = None,
+        status: TaskStatus | None = None,
+        started_after: datetime | None = None,
+        completed_before: datetime | None = None,
+    ) -> list[TaskRun]:
         """Query tasks with filters.
 
         Args:
@@ -536,7 +534,7 @@
         # Load existing index
         if index_path.exists():
             try:
-                with open(index_path, "r") as f:
+                with open(index_path) as f:
                     index = json.load(f)
             except json.JSONDecodeError:
                 # Handle case where the file is empty or invalid
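
Two smaller cleanups recur through the filesystem backend above: open(path, "r") drops the redundant mode argument, since "r" is already the default, and naive-datetime handling switches to datetime.UTC, the alias for timezone.utc added in Python 3.11. The equivalences, as a quick sketch:

    from datetime import UTC, datetime, timezone

    assert UTC is timezone.utc            # UTC is just an alias, added in 3.11
    naive = datetime(2024, 1, 1)
    aware = naive.replace(tzinfo=UTC)     # identical to tzinfo=timezone.utc

    # open("runs.json", "r") and open("runs.json") behave the same:
    # text mode, read-only is the default.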
kailash/utils/export.py CHANGED
@@ -5,7 +5,7 @@ import logging
 import re
 from copy import deepcopy
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Set
+from typing import Any
 
 import yaml
 from pydantic import BaseModel, Field, ValidationError
@@ -26,9 +26,9 @@ class ResourceSpec(BaseModel):
 
     cpu: str = Field("100m", description="CPU request")
     memory: str = Field("128Mi", description="Memory request")
-    cpu_limit: Optional[str] = Field(None, description="CPU limit")
-    memory_limit: Optional[str] = Field(None, description="Memory limit")
-    gpu: Optional[int] = Field(None, description="Number of GPUs")
+    cpu_limit: str | None = Field(None, description="CPU limit")
+    memory_limit: str | None = Field(None, description="Memory limit")
+    gpu: int | None = Field(None, description="Number of GPUs")
 
 
 class ContainerMapping(BaseModel):
@@ -36,15 +36,15 @@ class ContainerMapping(BaseModel):
 
     python_node: str = Field(..., description="Python node class name")
     container_image: str = Field(..., description="Docker container image")
-    command: List[str] = Field(default_factory=list, description="Container command")
-    args: List[str] = Field(default_factory=list, description="Container arguments")
-    env: Dict[str, str] = Field(
+    command: list[str] = Field(default_factory=list, description="Container command")
+    args: list[str] = Field(default_factory=list, description="Container arguments")
+    env: dict[str, str] = Field(
         default_factory=dict, description="Environment variables"
     )
     resources: ResourceSpec = Field(
         default_factory=ResourceSpec, description="Resource specs"
     )
-    mount_paths: Dict[str, str] = Field(
+    mount_paths: dict[str, str] = Field(
         default_factory=dict, description="Volume mount paths"
     )
 
@@ -58,7 +58,7 @@ class ExportConfig(BaseModel):
     include_resources: bool = Field(True, description="Include resource specifications")
     validate_output: bool = Field(True, description="Validate exported format")
     container_registry: str = Field("", description="Container registry URL")
-    partial_export: Set[str] = Field(default_factory=set, description="Nodes to export")
+    partial_export: set[str] = Field(default_factory=set, description="Nodes to export")
 
 
 class NodeMapper:
@@ -71,7 +71,7 @@ class NodeMapper:
             ConfigurationException: If initialization fails
         """
         try:
-            self.mappings: Dict[str, ContainerMapping] = {}
+            self.mappings: dict[str, ContainerMapping] = {}
             self._initialize_default_mappings()
         except Exception as e:
             raise ConfigurationException(
@@ -198,7 +198,7 @@ class ExportValidator:
     """Validates exported workflow formats."""
 
     @staticmethod
-    def validate_yaml(data: Dict[str, Any]) -> bool:
+    def validate_yaml(data: dict[str, Any]) -> bool:
         """Validate YAML export format.
 
         Args:
@@ -271,7 +271,7 @@
         return True
 
     @staticmethod
-    def validate_json(data: Dict[str, Any]) -> bool:
+    def validate_json(data: dict[str, Any]) -> bool:
         """Validate JSON export format.
 
         Args:
@@ -300,7 +300,7 @@ class ManifestGenerator:
 
     def generate_manifest(
         self, workflow: Workflow, node_mapper: NodeMapper
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         """Generate deployment manifest for a workflow.
 
         Args:
@@ -373,7 +373,7 @@
 
     def _generate_node_spec(
         self, node_id: str, node_instance, node: Node, node_mapper: NodeMapper
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         """Generate node specification for manifest.
 
         Args:
@@ -479,7 +479,7 @@
 class WorkflowExporter:
     """Main exporter for Kailash workflows."""
 
-    def __init__(self, config: Optional[ExportConfig] = None):
+    def __init__(self, config: ExportConfig | None = None):
         """Initialize the workflow exporter.
 
         Args:
@@ -506,7 +506,7 @@ class WorkflowExporter:
                 f"Failed to initialize workflow exporter: {e}"
             ) from e
 
-    def to_yaml(self, workflow: Workflow, output_path: Optional[str] = None) -> str:
+    def to_yaml(self, workflow: Workflow, output_path: str | None = None) -> str:
         """Export workflow to YAML format.
 
         Args:
@@ -552,7 +552,7 @@
         except Exception as e:
             raise ExportException(f"Failed to export workflow to YAML: {e}") from e
 
-    def to_json(self, workflow: Workflow, output_path: Optional[str] = None) -> str:
+    def to_json(self, workflow: Workflow, output_path: str | None = None) -> str:
         """Export workflow to JSON format.
 
         Args:
@@ -598,7 +598,7 @@
         except Exception as e:
             raise ExportException(f"Failed to export workflow to JSON: {e}") from e
 
-    def to_manifest(self, workflow: Workflow, output_path: Optional[str] = None) -> str:
+    def to_manifest(self, workflow: Workflow, output_path: str | None = None) -> str:
         """Export workflow as deployment manifest.
 
         Args:
@@ -645,7 +645,7 @@
 
     def export_with_templates(
         self, workflow: Workflow, template_name: str, output_dir: str
-    ) -> Dict[str, str]:
+    ) -> dict[str, str]:
         """Export workflow using predefined templates.
 
         Args:
@@ -722,7 +722,7 @@
 
         return exports
 
-    def _prepare_export_data(self, workflow: Workflow) -> Dict[str, Any]:
+    def _prepare_export_data(self, workflow: Workflow) -> dict[str, Any]:
         """Prepare workflow data for export.
 
         Args:
@@ -876,7 +876,7 @@
 def export_workflow(
     workflow: Workflow,
     format: str = "yaml",
-    output_path: Optional[str] = None,
+    output_path: str | None = None,
     **config,
 ) -> str:
     """Export a workflow to specified format.
kailash/utils/templates.py CHANGED
@@ -1,7 +1,6 @@
 """Project template system for Kailash SDK."""
 
 from pathlib import Path
-from typing import Dict, Optional
 
 from kailash.sdk_exceptions import TemplateError
 
@@ -266,7 +265,7 @@ __pycache__/
         },
     }
 
-    def get_template(self, template_name: str) -> Dict:
+    def get_template(self, template_name: str) -> dict:
         """Get an export template by name.
 
         Args:
@@ -286,7 +285,7 @@ __pycache__/
         self,
         project_name: str,
         template: str = "basic",
-        target_dir: Optional[str] = None,
+        target_dir: str | None = None,
     ) -> None:
         """Create a new project from a template.
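
Taken together, the new constructs imply an interpreter floor for 0.3.1 (an inference from the diff, not a documented requirement): X | None evaluated at runtime and zip(strict=...) need Python 3.10+, and datetime.UTC needs 3.11+. Code that must also run on 3.10 could shim the missing alias:

    # Illustrative compatibility shim, not part of the package:
    try:
        from datetime import UTC
    except ImportError:  # Python <= 3.10; the UTC alias arrived in 3.11
        from datetime import timezone
        UTC = timezone.utc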