kailash-0.1.0-py3-none-any.whl

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. kailash/__init__.py +31 -0
  2. kailash/__main__.py +11 -0
  3. kailash/cli/__init__.py +5 -0
  4. kailash/cli/commands.py +563 -0
  5. kailash/manifest.py +778 -0
  6. kailash/nodes/__init__.py +23 -0
  7. kailash/nodes/ai/__init__.py +26 -0
  8. kailash/nodes/ai/agents.py +417 -0
  9. kailash/nodes/ai/models.py +488 -0
  10. kailash/nodes/api/__init__.py +52 -0
  11. kailash/nodes/api/auth.py +567 -0
  12. kailash/nodes/api/graphql.py +480 -0
  13. kailash/nodes/api/http.py +598 -0
  14. kailash/nodes/api/rate_limiting.py +572 -0
  15. kailash/nodes/api/rest.py +665 -0
  16. kailash/nodes/base.py +1032 -0
  17. kailash/nodes/base_async.py +128 -0
  18. kailash/nodes/code/__init__.py +32 -0
  19. kailash/nodes/code/python.py +1021 -0
  20. kailash/nodes/data/__init__.py +125 -0
  21. kailash/nodes/data/readers.py +496 -0
  22. kailash/nodes/data/sharepoint_graph.py +623 -0
  23. kailash/nodes/data/sql.py +380 -0
  24. kailash/nodes/data/streaming.py +1168 -0
  25. kailash/nodes/data/vector_db.py +964 -0
  26. kailash/nodes/data/writers.py +529 -0
  27. kailash/nodes/logic/__init__.py +6 -0
  28. kailash/nodes/logic/async_operations.py +702 -0
  29. kailash/nodes/logic/operations.py +551 -0
  30. kailash/nodes/transform/__init__.py +5 -0
  31. kailash/nodes/transform/processors.py +379 -0
  32. kailash/runtime/__init__.py +6 -0
  33. kailash/runtime/async_local.py +356 -0
  34. kailash/runtime/docker.py +697 -0
  35. kailash/runtime/local.py +434 -0
  36. kailash/runtime/parallel.py +557 -0
  37. kailash/runtime/runner.py +110 -0
  38. kailash/runtime/testing.py +347 -0
  39. kailash/sdk_exceptions.py +307 -0
  40. kailash/tracking/__init__.py +7 -0
  41. kailash/tracking/manager.py +885 -0
  42. kailash/tracking/metrics_collector.py +342 -0
  43. kailash/tracking/models.py +535 -0
  44. kailash/tracking/storage/__init__.py +0 -0
  45. kailash/tracking/storage/base.py +113 -0
  46. kailash/tracking/storage/database.py +619 -0
  47. kailash/tracking/storage/filesystem.py +543 -0
  48. kailash/utils/__init__.py +0 -0
  49. kailash/utils/export.py +924 -0
  50. kailash/utils/templates.py +680 -0
  51. kailash/visualization/__init__.py +62 -0
  52. kailash/visualization/api.py +732 -0
  53. kailash/visualization/dashboard.py +951 -0
  54. kailash/visualization/performance.py +808 -0
  55. kailash/visualization/reports.py +1471 -0
  56. kailash/workflow/__init__.py +15 -0
  57. kailash/workflow/builder.py +245 -0
  58. kailash/workflow/graph.py +827 -0
  59. kailash/workflow/mermaid_visualizer.py +628 -0
  60. kailash/workflow/mock_registry.py +63 -0
  61. kailash/workflow/runner.py +302 -0
  62. kailash/workflow/state.py +238 -0
  63. kailash/workflow/visualization.py +588 -0
  64. kailash-0.1.0.dist-info/METADATA +710 -0
  65. kailash-0.1.0.dist-info/RECORD +69 -0
  66. kailash-0.1.0.dist-info/WHEEL +5 -0
  67. kailash-0.1.0.dist-info/entry_points.txt +2 -0
  68. kailash-0.1.0.dist-info/licenses/LICENSE +21 -0
  69. kailash-0.1.0.dist-info/top_level.txt +1 -0
kailash/utils/export.py
@@ -0,0 +1,924 @@
1
+ """Export functionality for converting Kailash Python SDK workflows to Kailash-compatible formats."""
2
+
3
+ import json
4
+ import logging
5
+ import re
6
+ from copy import deepcopy
7
+ from pathlib import Path
8
+ from typing import Any, Dict, List, Optional, Set
9
+
10
+ import yaml
11
+ from pydantic import BaseModel, Field, ValidationError
12
+
13
+ from kailash.nodes import Node
14
+ from kailash.sdk_exceptions import (
15
+ ConfigurationException,
16
+ ExportException,
17
+ ImportException,
18
+ )
19
+ from kailash.workflow import Workflow
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+
24
+ class ResourceSpec(BaseModel):
25
+ """Resource specifications for a node."""
26
+
27
+ cpu: str = Field("100m", description="CPU request")
28
+ memory: str = Field("128Mi", description="Memory request")
29
+ cpu_limit: Optional[str] = Field(None, description="CPU limit")
30
+ memory_limit: Optional[str] = Field(None, description="Memory limit")
31
+ gpu: Optional[int] = Field(None, description="Number of GPUs")
32
+
33
+
34
+ class ContainerMapping(BaseModel):
35
+ """Mapping from Python node to Kailash container."""
36
+
37
+ python_node: str = Field(..., description="Python node class name")
38
+ container_image: str = Field(..., description="Docker container image")
39
+ command: List[str] = Field(default_factory=list, description="Container command")
40
+ args: List[str] = Field(default_factory=list, description="Container arguments")
41
+ env: Dict[str, str] = Field(
42
+ default_factory=dict, description="Environment variables"
43
+ )
44
+ resources: ResourceSpec = Field(
45
+ default_factory=ResourceSpec, description="Resource specs"
46
+ )
47
+ mount_paths: Dict[str, str] = Field(
48
+ default_factory=dict, description="Volume mount paths"
49
+ )
50
+
51
+
52
+ class ExportConfig(BaseModel):
53
+ """Configuration for export process."""
54
+
55
+ version: str = Field("1.0", description="Export format version")
56
+ namespace: str = Field("default", description="Kubernetes namespace")
57
+ include_metadata: bool = Field(True, description="Include metadata in export")
58
+ include_resources: bool = Field(True, description="Include resource specifications")
59
+ validate_output: bool = Field(True, description="Validate exported format")
60
+ container_registry: str = Field("", description="Container registry URL")
61
+ partial_export: Set[str] = Field(default_factory=set, description="Nodes to export")
62
+
63
+
64
+ class NodeMapper:
65
+ """Maps Python nodes to Kailash containers."""
66
+
67
+ def __init__(self):
68
+ """Initialize the node mapper with default mappings.
69
+
70
+ Raises:
71
+ ConfigurationException: If initialization fails
72
+ """
73
+ try:
74
+ self.mappings: Dict[str, ContainerMapping] = {}
75
+ self._initialize_default_mappings()
76
+ except Exception as e:
77
+ raise ConfigurationException(
78
+ f"Failed to initialize node mapper: {e}"
79
+ ) from e
80
+
81
+ def _initialize_default_mappings(self):
82
+ """Set up default mappings for common node types."""
83
+ # Data reader nodes
84
+ self.mappings["FileReader"] = ContainerMapping(
85
+ python_node="FileReader",
86
+ container_image="kailash/file-reader:latest",
87
+ command=["python", "-m", "kailash.nodes.data.reader"],
88
+ resources=ResourceSpec(cpu="100m", memory="256Mi"),
89
+ )
90
+
91
+ self.mappings["CSVReader"] = ContainerMapping(
92
+ python_node="CSVReader",
93
+ container_image="kailash/csv-reader:latest",
94
+ command=["python", "-m", "kailash.nodes.data.csv_reader"],
95
+ resources=ResourceSpec(cpu="100m", memory="512Mi"),
96
+ )
97
+
98
+ # Data writer nodes
99
+ self.mappings["FileWriter"] = ContainerMapping(
100
+ python_node="FileWriter",
101
+ container_image="kailash/file-writer:latest",
102
+ command=["python", "-m", "kailash.nodes.data.writer"],
103
+ resources=ResourceSpec(cpu="100m", memory="256Mi"),
104
+ )
105
+
106
+ # Transform nodes
107
+ self.mappings["DataTransform"] = ContainerMapping(
108
+ python_node="DataTransform",
109
+ container_image="kailash/data-transform:latest",
110
+ command=["python", "-m", "kailash.nodes.transform.processor"],
111
+ resources=ResourceSpec(cpu="200m", memory="512Mi"),
112
+ )
113
+
114
+ # AI nodes
115
+ self.mappings["LLMNode"] = ContainerMapping(
116
+ python_node="LLMNode",
117
+ container_image="kailash/llm-node:latest",
118
+ command=["python", "-m", "kailash.nodes.ai.llm"],
119
+ resources=ResourceSpec(cpu="500m", memory="2Gi", gpu=1),
120
+ env={"MODEL_TYPE": "gpt-3.5", "MAX_TOKENS": "1000"},
121
+ )
122
+
123
+ # Logic nodes
124
+ self.mappings["ConditionalNode"] = ContainerMapping(
125
+ python_node="ConditionalNode",
126
+ container_image="kailash/conditional:latest",
127
+ command=["python", "-m", "kailash.nodes.logic.conditional"],
128
+ resources=ResourceSpec(cpu="50m", memory="128Mi"),
129
+ )
130
+
131
+ def register_mapping(self, mapping: ContainerMapping):
132
+ """Register a custom node mapping.
133
+
134
+ Args:
135
+ mapping: Container mapping to register
136
+
137
+ Raises:
138
+ ConfigurationException: If mapping is invalid
139
+ """
140
+ try:
141
+ if not mapping.python_node:
142
+ raise ConfigurationException("Python node name is required")
143
+ if not mapping.container_image:
144
+ raise ConfigurationException("Container image is required")
145
+
146
+ self.mappings[mapping.python_node] = mapping
147
+ logger.info(f"Registered mapping for node '{mapping.python_node}'")
148
+ except ValidationError as e:
149
+ raise ConfigurationException(f"Invalid container mapping: {e}") from e
150
+
151
+ def get_mapping(self, node_type: str) -> ContainerMapping:
152
+ """Get container mapping for a node type.
153
+
154
+ Args:
155
+ node_type: Python node type name
156
+
157
+ Returns:
158
+ Container mapping
159
+
160
+ Raises:
161
+ ConfigurationException: If mapping cannot be created
162
+ """
163
+ if not node_type:
164
+ raise ConfigurationException("Node type is required")
165
+
166
+ if node_type not in self.mappings:
167
+ logger.warning(
168
+ f"No mapping found for node type '{node_type}', creating default"
169
+ )
170
+ try:
171
+ # Try to create a default mapping
172
+ return ContainerMapping(
173
+ python_node=node_type,
174
+ container_image=f"kailash/{node_type.lower()}:latest",
175
+ command=["python", "-m", f"kailash.nodes.{node_type.lower()}"],
176
+ )
177
+ except Exception as e:
178
+ raise ConfigurationException(
179
+ f"Failed to create default mapping for node '{node_type}': {e}"
180
+ ) from e
181
+ return self.mappings[node_type]
182
+
183
+ def update_registry(self, registry_url: str):
184
+ """Update container image URLs with registry prefix.
185
+
186
+ Args:
187
+ registry_url: Container registry URL
188
+ """
189
+ if not registry_url:
190
+ return
191
+
192
+ for mapping in self.mappings.values():
193
+ if not mapping.container_image.startswith(registry_url):
194
+ mapping.container_image = f"{registry_url}/{mapping.container_image}"
195
+
196
+
197
+ class ExportValidator:
198
+ """Validates exported workflow formats."""
199
+
200
+ @staticmethod
201
+ def validate_yaml(data: Dict[str, Any]) -> bool:
202
+ """Validate YAML export format.
203
+
204
+ Args:
205
+ data: Exported data to validate
206
+
207
+ Returns:
208
+ True if valid
209
+
210
+ Raises:
211
+ ExportException: If validation fails
212
+ """
213
+ if not isinstance(data, dict):
214
+ raise ExportException("Export data must be a dictionary")
215
+
216
+ required_fields = ["metadata", "nodes", "connections"]
217
+
218
+ for field in required_fields:
219
+ if field not in data:
220
+ raise ExportException(
221
+ f"Missing required field: '{field}'. "
222
+ f"Required fields: {required_fields}"
223
+ )
224
+
225
+ # Validate metadata
226
+ metadata = data["metadata"]
227
+ if not isinstance(metadata, dict):
228
+ raise ExportException("Metadata must be a dictionary")
229
+
230
+ if "name" not in metadata:
231
+ raise ExportException("Metadata must contain 'name' field")
232
+
233
+ # Validate nodes
234
+ nodes = data["nodes"]
235
+ if not isinstance(nodes, dict):
236
+ raise ExportException("Nodes must be a dictionary")
237
+
238
+ if not nodes:
239
+ logger.warning("No nodes found in export data")
240
+
241
+ for node_id, node_data in nodes.items():
242
+ if not isinstance(node_data, dict):
243
+ raise ExportException(f"Node '{node_id}' must be a dictionary")
244
+
245
+ if "type" not in node_data:
246
+ raise ExportException(
247
+ f"Node '{node_id}' missing 'type' field. "
248
+ f"Available fields: {list(node_data.keys())}"
249
+ )
250
+ if "config" not in node_data:
251
+ raise ExportException(
252
+ f"Node '{node_id}' missing 'config' field. "
253
+ f"Available fields: {list(node_data.keys())}"
254
+ )
255
+
256
+ # Validate connections
257
+ connections = data["connections"]
258
+ if not isinstance(connections, list):
259
+ raise ExportException("Connections must be a list")
260
+
261
+ for i, conn in enumerate(connections):
262
+ if not isinstance(conn, dict):
263
+ raise ExportException(f"Connection {i} must be a dictionary")
264
+
265
+ if "from" not in conn or "to" not in conn:
266
+ raise ExportException(
267
+ f"Connection {i} missing 'from' or 'to' field. "
268
+ f"Connection data: {conn}"
269
+ )
270
+
271
+ return True
272
+
273
+ @staticmethod
274
+ def validate_json(data: Dict[str, Any]) -> bool:
275
+ """Validate JSON export format.
276
+
277
+ Args:
278
+ data: Exported data to validate
279
+
280
+ Returns:
281
+ True if valid
282
+
283
+ Raises:
284
+ ExportException: If validation fails
285
+ """
286
+ # JSON validation is the same as YAML for our purposes
287
+ return ExportValidator.validate_yaml(data)
288
+
289
+
290
+ class ManifestGenerator:
291
+ """Generates deployment manifests for Kailash workflows."""
292
+
293
+ def __init__(self, config: ExportConfig):
294
+ """Initialize the manifest generator.
295
+
296
+ Args:
297
+ config: Export configuration
298
+ """
299
+ self.config = config
300
+
301
+ def generate_manifest(
302
+ self, workflow: Workflow, node_mapper: NodeMapper
303
+ ) -> Dict[str, Any]:
304
+ """Generate deployment manifest for a workflow.
305
+
306
+ Args:
307
+ workflow: Workflow to generate manifest for
308
+ node_mapper: Node mapper for container mappings
309
+
310
+ Returns:
311
+ Deployment manifest
312
+
313
+ Raises:
314
+ ExportException: If manifest generation fails
315
+ """
316
+ try:
317
+ manifest = {
318
+ "apiVersion": "kailash.io/v1",
319
+ "kind": "Workflow",
320
+ "metadata": {
321
+ "name": self._sanitize_name(workflow.metadata.name),
322
+ "namespace": self.config.namespace,
323
+ "labels": {
324
+ "app": "kailash",
325
+ "workflow": self._sanitize_name(workflow.metadata.name),
326
+ "version": workflow.metadata.version,
327
+ },
328
+ "annotations": {
329
+ "description": workflow.metadata.description,
330
+ "author": workflow.metadata.author,
331
+ "created_at": workflow.metadata.created_at.isoformat(),
332
+ },
333
+ },
334
+ "spec": {"nodes": [], "edges": []},
335
+ }
336
+ except Exception as e:
337
+ raise ExportException(f"Failed to create manifest structure: {e}") from e
338
+
339
+ # Add nodes
340
+ for node_id, node_instance in workflow.nodes.items():
341
+ if self.config.partial_export and node_id not in self.config.partial_export:
342
+ continue
343
+
344
+ try:
345
+ node_spec = self._generate_node_spec(
346
+ node_id,
347
+ node_instance,
348
+ workflow._node_instances[node_id],
349
+ node_mapper,
350
+ )
351
+ manifest["spec"]["nodes"].append(node_spec)
352
+ except Exception as e:
353
+ raise ExportException(
354
+ f"Failed to generate spec for node '{node_id}': {e}"
355
+ ) from e
356
+
357
+ # Add connections
358
+ for connection in workflow.connections:
359
+ if self.config.partial_export:
360
+ if (
361
+ connection.source_node not in self.config.partial_export
362
+ or connection.target_node not in self.config.partial_export
363
+ ):
364
+ continue
365
+
366
+ edge_spec = {
367
+ "from": f"{connection.source_node}.{connection.source_output}",
368
+ "to": f"{connection.target_node}.{connection.target_input}",
369
+ }
370
+ manifest["spec"]["edges"].append(edge_spec)
371
+
372
+ return manifest
373
+
374
+ def _generate_node_spec(
375
+ self, node_id: str, node_instance, node: Node, node_mapper: NodeMapper
376
+ ) -> Dict[str, Any]:
377
+ """Generate node specification for manifest.
378
+
379
+ Args:
380
+ node_id: Node identifier
381
+ node_instance: Node instance from workflow
382
+ node: Actual node object
383
+ node_mapper: Node mapper for container info
384
+
385
+ Returns:
386
+ Node specification
387
+
388
+ Raises:
389
+ ExportException: If node spec generation fails
390
+ """
391
+ try:
392
+ mapping = node_mapper.get_mapping(node_instance.node_type)
393
+ except Exception as e:
394
+ raise ExportException(
395
+ f"Failed to get mapping for node '{node_id}': {e}"
396
+ ) from e
397
+
398
+ node_spec = {
399
+ "name": node_id,
400
+ "type": node_instance.node_type,
401
+ "container": {
402
+ "image": mapping.container_image,
403
+ "command": mapping.command,
404
+ "args": mapping.args,
405
+ "env": [],
406
+ },
407
+ }
408
+
409
+ # Add environment variables
410
+ for key, value in mapping.env.items():
411
+ node_spec["container"]["env"].append({"name": key, "value": str(value)})
412
+
413
+ # Add config as environment variables
414
+ for key, value in node_instance.config.items():
415
+ node_spec["container"]["env"].append(
416
+ {"name": f"CONFIG_{key.upper()}", "value": str(value)}
417
+ )
418
+
419
+ # Add resources if enabled
420
+ if self.config.include_resources:
421
+ node_spec["container"]["resources"] = {
422
+ "requests": {
423
+ "cpu": mapping.resources.cpu,
424
+ "memory": mapping.resources.memory,
425
+ }
426
+ }
427
+
428
+ limits = {}
429
+ if mapping.resources.cpu_limit:
430
+ limits["cpu"] = mapping.resources.cpu_limit
431
+ if mapping.resources.memory_limit:
432
+ limits["memory"] = mapping.resources.memory_limit
433
+ if mapping.resources.gpu:
434
+ limits["nvidia.com/gpu"] = str(mapping.resources.gpu)
435
+
436
+ if limits:
437
+ node_spec["container"]["resources"]["limits"] = limits
438
+
439
+ # Add volume mounts
440
+ if mapping.mount_paths:
441
+ node_spec["container"]["volumeMounts"] = []
442
+ for name, path in mapping.mount_paths.items():
443
+ node_spec["container"]["volumeMounts"].append(
444
+ {"name": name, "mountPath": path}
445
+ )
446
+
447
+ return node_spec
448
+
449
+ def _sanitize_name(self, name: str) -> str:
450
+ """Sanitize name for Kubernetes compatibility.
451
+
452
+ Args:
453
+ name: Name to sanitize
454
+
455
+ Returns:
456
+ Sanitized name
457
+ """
458
+ if not name:
459
+ raise ExportException("Name cannot be empty")
460
+
461
+ # Replace non-alphanumeric characters with hyphens
462
+ sanitized = re.sub(r"[^a-zA-Z0-9-]", "-", name.lower())
463
+ # Remove leading/trailing hyphens
464
+ sanitized = sanitized.strip("-")
465
+ # Ensure it doesn't start with a number
466
+ if sanitized and sanitized[0].isdigit():
467
+ sanitized = f"w-{sanitized}"
468
+ # Truncate to 63 characters (Kubernetes limit)
469
+ sanitized = sanitized[:63]
470
+
471
+ if not sanitized:
472
+ raise ExportException(
473
+ f"Name '{name}' cannot be sanitized to a valid Kubernetes name"
474
+ )
475
+
476
+ return sanitized
477
+
478
+
479
+ class WorkflowExporter:
480
+ """Main exporter for Kailash workflows."""
481
+
482
+ def __init__(self, config: Optional[ExportConfig] = None):
483
+ """Initialize the workflow exporter.
484
+
485
+ Args:
486
+ config: Export configuration
487
+
488
+ Raises:
489
+ ConfigurationException: If initialization fails
490
+ """
491
+ try:
492
+ self.config = config or ExportConfig()
493
+ self.node_mapper = NodeMapper()
494
+ self.validator = ExportValidator()
495
+ self.manifest_generator = ManifestGenerator(self.config)
496
+
497
+ # Update registry if provided
498
+ if self.config.container_registry:
499
+ self.node_mapper.update_registry(self.config.container_registry)
500
+
501
+ self.pre_export_hook = None
502
+ self.post_export_hook = None
503
+
504
+ except Exception as e:
505
+ raise ConfigurationException(
506
+ f"Failed to initialize workflow exporter: {e}"
507
+ ) from e
508
+
509
+ def to_yaml(self, workflow: Workflow, output_path: Optional[str] = None) -> str:
510
+ """Export workflow to YAML format.
511
+
512
+ Args:
513
+ workflow: Workflow to export
514
+ output_path: Optional path to write YAML file
515
+
516
+ Returns:
517
+ YAML string
518
+
519
+ Raises:
520
+ ExportException: If export fails
521
+ """
522
+ if not workflow:
523
+ raise ExportException("Workflow is required")
524
+
525
+ try:
526
+ if self.pre_export_hook:
527
+ self.pre_export_hook(workflow, "yaml")
528
+
529
+ data = self._prepare_export_data(workflow)
530
+
531
+ if self.config.validate_output:
532
+ self.validator.validate_yaml(data)
533
+
534
+ yaml_str = yaml.dump(data, default_flow_style=False, sort_keys=False)
535
+
536
+ if output_path:
537
+ try:
538
+ Path(output_path).parent.mkdir(parents=True, exist_ok=True)
539
+ Path(output_path).write_text(yaml_str)
540
+ except Exception as e:
541
+ raise ExportException(
542
+ f"Failed to write YAML to '{output_path}': {e}"
543
+ ) from e
544
+
545
+ if self.post_export_hook:
546
+ self.post_export_hook(workflow, "yaml", yaml_str)
547
+
548
+ return yaml_str
549
+
550
+ except ExportException:
551
+ raise
552
+ except Exception as e:
553
+ raise ExportException(f"Failed to export workflow to YAML: {e}") from e
554
+
555
+ def to_json(self, workflow: Workflow, output_path: Optional[str] = None) -> str:
556
+ """Export workflow to JSON format.
557
+
558
+ Args:
559
+ workflow: Workflow to export
560
+ output_path: Optional path to write JSON file
561
+
562
+ Returns:
563
+ JSON string
564
+
565
+ Raises:
566
+ ExportException: If export fails
567
+ """
568
+ if not workflow:
569
+ raise ExportException("Workflow is required")
570
+
571
+ try:
572
+ if self.pre_export_hook:
573
+ self.pre_export_hook(workflow, "json")
574
+
575
+ data = self._prepare_export_data(workflow)
576
+
577
+ if self.config.validate_output:
578
+ self.validator.validate_json(data)
579
+
580
+ json_str = json.dumps(data, indent=2, default=str)
581
+
582
+ if output_path:
583
+ try:
584
+ Path(output_path).parent.mkdir(parents=True, exist_ok=True)
585
+ Path(output_path).write_text(json_str)
586
+ except Exception as e:
587
+ raise ExportException(
588
+ f"Failed to write JSON to '{output_path}': {e}"
589
+ ) from e
590
+
591
+ if self.post_export_hook:
592
+ self.post_export_hook(workflow, "json", json_str)
593
+
594
+ return json_str
595
+
596
+ except ExportException:
597
+ raise
598
+ except Exception as e:
599
+ raise ExportException(f"Failed to export workflow to JSON: {e}") from e
600
+
601
+ def to_manifest(self, workflow: Workflow, output_path: Optional[str] = None) -> str:
602
+ """Export workflow as deployment manifest.
603
+
604
+ Args:
605
+ workflow: Workflow to export
606
+ output_path: Optional path to write manifest file
607
+
608
+ Returns:
609
+ Manifest YAML string
610
+
611
+ Raises:
612
+ ExportException: If export fails
613
+ """
614
+ if not workflow:
615
+ raise ExportException("Workflow is required")
616
+
617
+ try:
618
+ if self.pre_export_hook:
619
+ self.pre_export_hook(workflow, "manifest")
620
+
621
+ manifest = self.manifest_generator.generate_manifest(
622
+ workflow, self.node_mapper
623
+ )
624
+
625
+ yaml_str = yaml.dump(manifest, default_flow_style=False, sort_keys=False)
626
+
627
+ if output_path:
628
+ try:
629
+ Path(output_path).parent.mkdir(parents=True, exist_ok=True)
630
+ Path(output_path).write_text(yaml_str)
631
+ except Exception as e:
632
+ raise ExportException(
633
+ f"Failed to write manifest to '{output_path}': {e}"
634
+ ) from e
635
+
636
+ if self.post_export_hook:
637
+ self.post_export_hook(workflow, "manifest", yaml_str)
638
+
639
+ return yaml_str
640
+
641
+ except ExportException:
642
+ raise
643
+ except Exception as e:
644
+ raise ExportException(f"Failed to export workflow manifest: {e}") from e
645
+
646
+ def export_with_templates(
647
+ self, workflow: Workflow, template_name: str, output_dir: str
648
+ ) -> Dict[str, str]:
649
+ """Export workflow using predefined templates.
650
+
651
+ Args:
652
+ workflow: Workflow to export
653
+ template_name: Name of template to use
654
+ output_dir: Directory to write files
655
+
656
+ Returns:
657
+ Dictionary of file paths to content
658
+
659
+ Raises:
660
+ ExportException: If export fails
661
+ ImportException: If template import fails
662
+ """
663
+ if not workflow:
664
+ raise ExportException("Workflow is required")
665
+ if not template_name:
666
+ raise ExportException("Template name is required")
667
+ if not output_dir:
668
+ raise ExportException("Output directory is required")
669
+
670
+ try:
671
+ from kailash.utils.templates import TemplateManager
672
+ except ImportError as e:
673
+ raise ImportException(f"Failed to import template manager: {e}") from e
674
+
675
+ try:
676
+ template_manager = TemplateManager()
677
+ template = template_manager.get_template(template_name)
678
+ except Exception as e:
679
+ raise ExportException(
680
+ f"Failed to get template '{template_name}': {e}"
681
+ ) from e
682
+
683
+ output_dir = Path(output_dir)
684
+ try:
685
+ output_dir.mkdir(parents=True, exist_ok=True)
686
+ except Exception as e:
687
+ raise ExportException(
688
+ f"Failed to create output directory '{output_dir}': {e}"
689
+ ) from e
690
+
691
+ exports = {}
692
+
693
+ # Generate files based on template
694
+ if template.get("yaml", True):
695
+ yaml_path = output_dir / f"{workflow.metadata.name}.yaml"
696
+ yaml_content = self.to_yaml(workflow, str(yaml_path))
697
+ exports[str(yaml_path)] = yaml_content
698
+
699
+ if template.get("json", False):
700
+ json_path = output_dir / f"{workflow.metadata.name}.json"
701
+ json_content = self.to_json(workflow, str(json_path))
702
+ exports[str(json_path)] = json_content
703
+
704
+ if template.get("manifest", True):
705
+ manifest_path = output_dir / f"{workflow.metadata.name}-manifest.yaml"
706
+ manifest_content = self.to_manifest(workflow, str(manifest_path))
707
+ exports[str(manifest_path)] = manifest_content
708
+
709
+ # Generate additional files from template
710
+ for filename, content_template in template.get("files", {}).items():
711
+ file_path = output_dir / filename
712
+ try:
713
+ content = content_template.format(
714
+ workflow_name=workflow.metadata.name,
715
+ workflow_version=workflow.metadata.version,
716
+ namespace=self.config.namespace,
717
+ )
718
+ file_path.write_text(content)
719
+ exports[str(file_path)] = content
720
+ except Exception as e:
721
+ logger.warning(f"Failed to generate file '{filename}': {e}")
722
+
723
+ return exports
724
+
725
+ def _prepare_export_data(self, workflow: Workflow) -> Dict[str, Any]:
726
+ """Prepare workflow data for export.
727
+
728
+ Args:
729
+ workflow: Workflow to prepare
730
+
731
+ Returns:
732
+ Export data dictionary
733
+
734
+ Raises:
735
+ ExportException: If preparation fails
736
+ """
737
+ data = {
738
+ "version": self.config.version,
739
+ "metadata": {},
740
+ "nodes": {},
741
+ "connections": [],
742
+ }
743
+
744
+ # Add metadata if enabled
745
+ if self.config.include_metadata:
746
+ try:
747
+ # workflow.metadata is a dict, not a pydantic model
748
+ data["metadata"] = {
749
+ "name": workflow.name,
750
+ "description": workflow.description,
751
+ "version": workflow.version,
752
+ "author": workflow.author,
753
+ }
754
+ # Add any additional metadata from the dict
755
+ if workflow.metadata:
756
+ data["metadata"].update(workflow.metadata)
757
+
758
+ # Convert datetime to string if present
759
+ if "created_at" in data["metadata"] and hasattr(
760
+ data["metadata"]["created_at"], "isoformat"
761
+ ):
762
+ data["metadata"]["created_at"] = data["metadata"][
763
+ "created_at"
764
+ ].isoformat()
765
+
766
+ # Convert set to list for JSON serialization if present
767
+ if "tags" in data["metadata"] and isinstance(
768
+ data["metadata"]["tags"], set
769
+ ):
770
+ data["metadata"]["tags"] = list(data["metadata"]["tags"])
771
+ except Exception as e:
772
+ raise ExportException(f"Failed to export metadata: {e}") from e
773
+ else:
774
+ data["metadata"] = {"name": workflow.name}
775
+
776
+ # Add nodes
777
+ for node_id, node_instance in workflow.nodes.items():
778
+ if self.config.partial_export and node_id not in self.config.partial_export:
779
+ continue
780
+
781
+ try:
782
+ node_data = {
783
+ "type": node_instance.node_type,
784
+ "config": deepcopy(node_instance.config),
785
+ }
786
+
787
+ # Try to add container info
788
+ try:
789
+ mapping = self.node_mapper.get_mapping(node_instance.node_type)
790
+ node_data["container"] = {
791
+ "image": mapping.container_image,
792
+ "command": mapping.command,
793
+ "args": mapping.args,
794
+ "env": mapping.env,
795
+ }
796
+
797
+ # Add resources if enabled
798
+ if self.config.include_resources:
799
+ node_data["resources"] = mapping.resources.model_dump()
800
+ except Exception as e:
801
+ logger.warning(
802
+ f"No container mapping for node type '{node_instance.node_type}': {e}"
803
+ )
804
+
805
+ # Add position for visualization
806
+ node_data["position"] = {
807
+ "x": node_instance.position[0],
808
+ "y": node_instance.position[1],
809
+ }
810
+
811
+ data["nodes"][node_id] = node_data
812
+
813
+ except Exception as e:
814
+ raise ExportException(f"Failed to export node '{node_id}': {e}") from e
815
+
816
+ # Add connections
817
+ for connection in workflow.connections:
818
+ if self.config.partial_export:
819
+ if (
820
+ connection.source_node not in self.config.partial_export
821
+ or connection.target_node not in self.config.partial_export
822
+ ):
823
+ continue
824
+
825
+ try:
826
+ conn_data = {
827
+ "from": connection.source_node,
828
+ "to": connection.target_node,
829
+ "from_output": connection.source_output,
830
+ "to_input": connection.target_input,
831
+ }
832
+ data["connections"].append(conn_data)
833
+ except Exception as e:
834
+ raise ExportException(f"Failed to export connection: {e}") from e
835
+
836
+ return data
837
+
838
+ def register_custom_mapping(self, node_type: str, container_image: str, **kwargs):
839
+ """Register a custom node to container mapping.
840
+
841
+ Args:
842
+ node_type: Python node type name
843
+ container_image: Docker container image
844
+ **kwargs: Additional mapping configuration
845
+
846
+ Raises:
847
+ ConfigurationException: If registration fails
848
+ """
849
+ if not node_type:
850
+ raise ConfigurationException("Node type is required")
851
+ if not container_image:
852
+ raise ConfigurationException("Container image is required")
853
+
854
+ try:
855
+ mapping = ContainerMapping(
856
+ python_node=node_type, container_image=container_image, **kwargs
857
+ )
858
+ self.node_mapper.register_mapping(mapping)
859
+ except Exception as e:
860
+ raise ConfigurationException(
861
+ f"Failed to register custom mapping: {e}"
862
+ ) from e
863
+
864
+ def set_export_hooks(self, pre_export=None, post_export=None):
865
+ """Set custom hooks for export process.
866
+
867
+ Args:
868
+ pre_export: Function to call before export
869
+ post_export: Function to call after export
870
+ """
871
+ self.pre_export_hook = pre_export
872
+ self.post_export_hook = post_export
873
+
874
+
875
+ # Convenience functions
876
+ def export_workflow(
877
+ workflow: Workflow,
878
+ format: str = "yaml",
879
+ output_path: Optional[str] = None,
880
+ **config,
881
+ ) -> str:
882
+ """Export a workflow to specified format.
883
+
884
+ Args:
885
+ workflow: Workflow to export
886
+ format: Export format (yaml, json, manifest)
887
+ output_path: Optional output file path
888
+ **config: Export configuration options
889
+
890
+ Returns:
891
+ Exported content as string
892
+
893
+ Raises:
894
+ ExportException: If export fails
895
+ """
896
+ if not workflow:
897
+ raise ExportException("Workflow is required")
898
+
899
+ supported_formats = ["yaml", "json", "manifest"]
900
+ if format not in supported_formats:
901
+ raise ExportException(
902
+ f"Unknown export format: '{format}'. "
903
+ f"Supported formats: {supported_formats}"
904
+ )
905
+
906
+ try:
907
+ export_config = ExportConfig(**config)
908
+ exporter = WorkflowExporter(export_config)
909
+
910
+ if format == "yaml":
911
+ return exporter.to_yaml(workflow, output_path)
912
+ elif format == "json":
913
+ return exporter.to_json(workflow, output_path)
914
+ elif format == "manifest":
915
+ return exporter.to_manifest(workflow, output_path)
916
+
917
+ except Exception as e:
918
+ if isinstance(e, ExportException):
919
+ raise
920
+ raise ExportException(f"Failed to export workflow: {e}") from e
921
+
922
+
923
+ # Legacy compatibility aliases
924
+ KailashExporter = WorkflowExporter
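
For orientation, a minimal usage sketch follows, based only on the API visible in export.py above. It assumes `workflow` is an already-built kailash.workflow.Workflow instance (constructing one is outside this module), and the namespace, registry URL, node type, and image tag are placeholders, not values shipped with the package.

# Minimal usage sketch; assumptions noted in the paragraph above.
from kailash.utils.export import ExportConfig, WorkflowExporter, export_workflow

def export_example(workflow):
    # One-shot convenience helper; extra keyword arguments become ExportConfig fields.
    yaml_text = export_workflow(
        workflow,
        format="yaml",
        output_path="out/workflow.yaml",
        namespace="analytics",                      # hypothetical namespace
        container_registry="registry.example.com",  # hypothetical registry URL
    )

    # Explicit exporter for custom node-to-container mappings and export hooks.
    exporter = WorkflowExporter(ExportConfig(namespace="analytics", include_resources=True))
    exporter.register_custom_mapping(
        node_type="MyCustomNode",             # hypothetical node class name
        container_image="myorg/my-node:1.0",  # hypothetical image
        env={"LOG_LEVEL": "debug"},
    )
    exporter.set_export_hooks(
        pre_export=lambda wf, fmt: print(f"exporting as {fmt}"),
        post_export=lambda wf, fmt, content: print(f"{fmt} export: {len(content)} characters"),
    )
    manifest_text = exporter.to_manifest(workflow, "out/workflow-manifest.yaml")
    return yaml_text, manifest_text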