kailash-0.1.0-py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.
Files changed (69)
  1. kailash/__init__.py +31 -0
  2. kailash/__main__.py +11 -0
  3. kailash/cli/__init__.py +5 -0
  4. kailash/cli/commands.py +563 -0
  5. kailash/manifest.py +778 -0
  6. kailash/nodes/__init__.py +23 -0
  7. kailash/nodes/ai/__init__.py +26 -0
  8. kailash/nodes/ai/agents.py +417 -0
  9. kailash/nodes/ai/models.py +488 -0
  10. kailash/nodes/api/__init__.py +52 -0
  11. kailash/nodes/api/auth.py +567 -0
  12. kailash/nodes/api/graphql.py +480 -0
  13. kailash/nodes/api/http.py +598 -0
  14. kailash/nodes/api/rate_limiting.py +572 -0
  15. kailash/nodes/api/rest.py +665 -0
  16. kailash/nodes/base.py +1032 -0
  17. kailash/nodes/base_async.py +128 -0
  18. kailash/nodes/code/__init__.py +32 -0
  19. kailash/nodes/code/python.py +1021 -0
  20. kailash/nodes/data/__init__.py +125 -0
  21. kailash/nodes/data/readers.py +496 -0
  22. kailash/nodes/data/sharepoint_graph.py +623 -0
  23. kailash/nodes/data/sql.py +380 -0
  24. kailash/nodes/data/streaming.py +1168 -0
  25. kailash/nodes/data/vector_db.py +964 -0
  26. kailash/nodes/data/writers.py +529 -0
  27. kailash/nodes/logic/__init__.py +6 -0
  28. kailash/nodes/logic/async_operations.py +702 -0
  29. kailash/nodes/logic/operations.py +551 -0
  30. kailash/nodes/transform/__init__.py +5 -0
  31. kailash/nodes/transform/processors.py +379 -0
  32. kailash/runtime/__init__.py +6 -0
  33. kailash/runtime/async_local.py +356 -0
  34. kailash/runtime/docker.py +697 -0
  35. kailash/runtime/local.py +434 -0
  36. kailash/runtime/parallel.py +557 -0
  37. kailash/runtime/runner.py +110 -0
  38. kailash/runtime/testing.py +347 -0
  39. kailash/sdk_exceptions.py +307 -0
  40. kailash/tracking/__init__.py +7 -0
  41. kailash/tracking/manager.py +885 -0
  42. kailash/tracking/metrics_collector.py +342 -0
  43. kailash/tracking/models.py +535 -0
  44. kailash/tracking/storage/__init__.py +0 -0
  45. kailash/tracking/storage/base.py +113 -0
  46. kailash/tracking/storage/database.py +619 -0
  47. kailash/tracking/storage/filesystem.py +543 -0
  48. kailash/utils/__init__.py +0 -0
  49. kailash/utils/export.py +924 -0
  50. kailash/utils/templates.py +680 -0
  51. kailash/visualization/__init__.py +62 -0
  52. kailash/visualization/api.py +732 -0
  53. kailash/visualization/dashboard.py +951 -0
  54. kailash/visualization/performance.py +808 -0
  55. kailash/visualization/reports.py +1471 -0
  56. kailash/workflow/__init__.py +15 -0
  57. kailash/workflow/builder.py +245 -0
  58. kailash/workflow/graph.py +827 -0
  59. kailash/workflow/mermaid_visualizer.py +628 -0
  60. kailash/workflow/mock_registry.py +63 -0
  61. kailash/workflow/runner.py +302 -0
  62. kailash/workflow/state.py +238 -0
  63. kailash/workflow/visualization.py +588 -0
  64. kailash-0.1.0.dist-info/METADATA +710 -0
  65. kailash-0.1.0.dist-info/RECORD +69 -0
  66. kailash-0.1.0.dist-info/WHEEL +5 -0
  67. kailash-0.1.0.dist-info/entry_points.txt +2 -0
  68. kailash-0.1.0.dist-info/licenses/LICENSE +21 -0
  69. kailash-0.1.0.dist-info/top_level.txt +1 -0
kailash/manifest.py ADDED
@@ -0,0 +1,778 @@
+ """Workflow manifest generation for Kailash deployment."""
+
+ import json
+ from datetime import datetime, timezone
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional, Union
+
+ import yaml
+ from pydantic import BaseModel, Field
+
+ from kailash.sdk_exceptions import ManifestError
+ from kailash.workflow import Workflow
+
+
+ class KailashManifest(BaseModel):
+     """Represents a complete Kailash deployment manifest."""
+
+     model_config = {"arbitrary_types_allowed": True}
+
+     metadata: Dict[str, Any] = Field(..., description="Manifest metadata")
+     workflow: Optional[Workflow] = Field(None, description="Associated workflow")
+     resources: Optional[Dict[str, Any]] = Field(
+         default_factory=dict, description="Additional deployment resources"
+     )
+
+     def to_dict(self) -> Dict[str, Any]:
+         """Convert manifest to dictionary.
+
+         Returns:
+             Dictionary representation
+         """
+         result = {"metadata": self.metadata}
+
+         if self.workflow:
+             result["workflow"] = self.workflow.to_dict()
+
+         if self.resources:
+             result["resources"] = self.resources
+
+         return result
+
+     def to_yaml(self) -> str:
+         """Convert manifest to YAML string.
+
+         Returns:
+             YAML representation
+         """
+         return yaml.dump(self.to_dict(), default_flow_style=False, sort_keys=False)
+
+     def to_json(self) -> str:
+         """Convert manifest to JSON string.
+
+         Returns:
+             JSON representation
+         """
+         return json.dumps(self.to_dict(), indent=2)
+
+     def save(self, path: Union[str, Path], format: str = "yaml") -> None:
+         """Save manifest to file.
+
+         Args:
+             path: File path
+             format: Output format (yaml or json)
+
+         Raises:
+             ValueError: If format is invalid
+         """
+         output_path = Path(path)
+         output_path.parent.mkdir(parents=True, exist_ok=True)
+
+         if format == "yaml":
+             with open(output_path, "w") as f:
+                 f.write(self.to_yaml())
+         elif format == "json":
+             with open(output_path, "w") as f:
+                 f.write(self.to_json())
+         else:
+             raise ValueError(f"Unknown format: {format}")
+
+     @classmethod
+     def from_workflow(cls, workflow: Workflow, **metadata) -> "KailashManifest":
+         """Create manifest from workflow.
+
+         Args:
+             workflow: Workflow to include
+             **metadata: Additional metadata
+
+         Returns:
+             KailashManifest instance
+         """
+         # Default metadata
+         default_metadata = {
+             "id": workflow.metadata.name,
+             "name": workflow.metadata.name,
+             "version": workflow.metadata.version,
+             "author": workflow.metadata.author,
+             "description": workflow.metadata.description,
+             "created_at": datetime.now(timezone.utc).isoformat(),
+         }
+
+         # Override defaults with provided metadata
+         default_metadata.update(metadata)
+
+         return cls(metadata=default_metadata, workflow=workflow)
+
+     @classmethod
+     def from_dict(cls, data: Dict[str, Any]) -> "KailashManifest":
+         """Create manifest from dictionary.
+
+         Args:
+             data: Dictionary representation
+
+         Returns:
+             KailashManifest instance
+
+         Raises:
+             ManifestError: If data is invalid
+         """
+         try:
+             metadata = data.get("metadata", {})
+
+             workflow = None
+             if "workflow" in data:
+                 from kailash.workflow import Workflow
+
+                 workflow = Workflow.from_dict(data["workflow"])
+
+             resources = data.get("resources", {})
+
+             return cls(metadata=metadata, workflow=workflow, resources=resources)
+         except Exception as e:
+             raise ManifestError(f"Failed to create manifest from data: {e}") from e
+
+     @classmethod
+     def load(cls, path: Union[str, Path]) -> "KailashManifest":
+         """Load manifest from file.
+
+         Args:
+             path: File path
+
+         Returns:
+             KailashManifest instance
+
+         Raises:
+             ManifestError: If loading fails
+         """
+         try:
+             file_path = Path(path)
+             if not file_path.exists():
+                 raise FileNotFoundError(f"File not found: {file_path}")
+
+             with open(file_path, "r") as f:
+                 content = f.read()
+
+             # Parse based on file extension
+             if file_path.suffix.lower() in (".yaml", ".yml"):
+                 data = yaml.safe_load(content)
+             elif file_path.suffix.lower() == ".json":
+                 data = json.loads(content)
+             else:
+                 raise ValueError(f"Unsupported file format: {file_path.suffix}")
+
+             return cls.from_dict(data)
+         except Exception as e:
+             raise ManifestError(f"Failed to load manifest from {path}: {e}") from e
+
+
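A minimal usage sketch of the class above, relying only on the constructor and the save/load round trip defined in this file; the optional workflow field is left unset so nothing outside this module is needed, and the file path is illustrative.

    from kailash.manifest import KailashManifest

    # Metadata-only manifest; `workflow` is Optional, so no Workflow object is required here.
    manifest = KailashManifest(metadata={"id": "demo", "name": "demo", "version": "0.1.0"})
    manifest.save("deploy/manifest.yaml", format="yaml")

    # load() dispatches on the file extension and wraps failures in ManifestError.
    restored = KailashManifest.load("deploy/manifest.yaml")
    print(restored.metadata["name"])  # demo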
+ class DeploymentConfig(BaseModel):
+     """Configuration for deployment manifest."""
+
+     name: str = Field(..., description="Deployment name")
+     namespace: str = Field("default", description="Kubernetes namespace")
+     replicas: int = Field(1, description="Number of replicas")
+     strategy: str = Field("RollingUpdate", description="Deployment strategy")
+     labels: Dict[str, str] = Field(
+         default_factory=dict, description="Kubernetes labels"
+     )
+     annotations: Dict[str, str] = Field(
+         default_factory=dict, description="Kubernetes annotations"
+     )
+     image_pull_policy: str = Field("IfNotPresent", description="Image pull policy")
+     service_account: Optional[str] = Field(None, description="Service account name")
+     node_selector: Dict[str, str] = Field(
+         default_factory=dict, description="Node selector"
+     )
+     tolerations: List[Dict[str, Any]] = Field(
+         default_factory=list, description="Pod tolerations"
+     )
+     affinity: Optional[Dict[str, Any]] = Field(None, description="Pod affinity rules")
+
+
+ class ServiceConfig(BaseModel):
+     """Configuration for Kubernetes service."""
+
+     name: str = Field(..., description="Service name")
+     type: str = Field("ClusterIP", description="Service type")
+     ports: List[Dict[str, Any]] = Field(
+         default_factory=list, description="Service ports"
+     )
+     selector: Dict[str, str] = Field(default_factory=dict, description="Pod selector")
+     labels: Dict[str, str] = Field(default_factory=dict, description="Service labels")
+
+
+ class VolumeConfig(BaseModel):
+     """Configuration for volumes."""
+
+     name: str = Field(..., description="Volume name")
+     type: str = Field("configMap", description="Volume type")
+     source: str = Field(..., description="Volume source")
+     mount_path: str = Field(..., description="Mount path in container")
+     read_only: bool = Field(True, description="Read-only mount")
+     sub_path: Optional[str] = Field(None, description="Sub-path within volume")
+
+
+ class ConfigMapConfig(BaseModel):
+     """Configuration for ConfigMap."""
+
+     name: str = Field(..., description="ConfigMap name")
+     namespace: str = Field("default", description="Namespace")
+     data: Dict[str, str] = Field(default_factory=dict, description="ConfigMap data")
+     binary_data: Dict[str, str] = Field(default_factory=dict, description="Binary data")
+     labels: Dict[str, str] = Field(default_factory=dict, description="Labels")
+
+
+ class SecretConfig(BaseModel):
+     """Configuration for Secret."""
+
+     name: str = Field(..., description="Secret name")
+     namespace: str = Field("default", description="Namespace")
+     type: str = Field("Opaque", description="Secret type")
+     data: Dict[str, str] = Field(default_factory=dict, description="Secret data")
+     string_data: Dict[str, str] = Field(default_factory=dict, description="String data")
+     labels: Dict[str, str] = Field(default_factory=dict, description="Labels")
+
+
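The pydantic models above are plain value objects consumed by the builder further down. A short sketch of how they are typically filled in; field names and defaults are taken from the definitions above, the values are illustrative.

    from kailash.manifest import DeploymentConfig, ServiceConfig, VolumeConfig

    deploy = DeploymentConfig(name="demo")  # namespace="default", replicas=1, strategy="RollingUpdate"
    svc = ServiceConfig(name="demo-svc", ports=[{"name": "http", "port": 80, "targetPort": 8080}])
    vol = VolumeConfig(name="cfg", source="demo-config", mount_path="/config")  # type defaults to "configMap"
    print(deploy.replicas, svc.type, vol.read_only)  # 1 ClusterIP True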
+ class ManifestBuilder:
+     """Builder for creating deployment manifests."""
+
+     def __init__(self, workflow: Workflow):
+         """Initialize the manifest builder.
+
+         Args:
+             workflow: Workflow to build manifest for
+         """
+         self.workflow = workflow
+         self.deployment_config = None
+         self.service_configs: List[ServiceConfig] = []
+         self.volume_configs: List[VolumeConfig] = []
+         self.configmap_configs: List[ConfigMapConfig] = []
+         self.secret_configs: List[SecretConfig] = []
+
+     def with_deployment(self, config: DeploymentConfig) -> "ManifestBuilder":
+         """Add deployment configuration.
+
+         Args:
+             config: Deployment configuration
+
+         Returns:
+             Self for chaining
+         """
+         self.deployment_config = config
+         return self
+
+     def with_service(self, config: ServiceConfig) -> "ManifestBuilder":
+         """Add service configuration.
+
+         Args:
+             config: Service configuration
+
+         Returns:
+             Self for chaining
+         """
+         self.service_configs.append(config)
+         return self
+
+     def with_volume(self, config: VolumeConfig) -> "ManifestBuilder":
+         """Add volume configuration.
+
+         Args:
+             config: Volume configuration
+
+         Returns:
+             Self for chaining
+         """
+         self.volume_configs.append(config)
+         return self
+
+     def with_configmap(self, config: ConfigMapConfig) -> "ManifestBuilder":
+         """Add ConfigMap configuration.
+
+         Args:
+             config: ConfigMap configuration
+
+         Returns:
+             Self for chaining
+         """
+         self.configmap_configs.append(config)
+         return self
+
+     def with_secret(self, config: SecretConfig) -> "ManifestBuilder":
+         """Add Secret configuration.
+
+         Args:
+             config: Secret configuration
+
+         Returns:
+             Self for chaining
+         """
+         self.secret_configs.append(config)
+         return self
+
+     def build(self) -> Dict[str, Any]:
+         """Build the complete manifest.
+
+         Returns:
+             Complete manifest dictionary
+         """
+         if not self.deployment_config:
+             raise ManifestError("Deployment configuration is required")
+
+         manifest = {"apiVersion": "v1", "kind": "List", "items": []}
+
+         # Add ConfigMaps
+         for configmap in self.configmap_configs:
+             manifest["items"].append(self._build_configmap(configmap))
+
+         # Add Secrets
+         for secret in self.secret_configs:
+             manifest["items"].append(self._build_secret(secret))
+
+         # Add Deployment
+         manifest["items"].append(self._build_deployment())
+
+         # Add Services
+         for service in self.service_configs:
+             manifest["items"].append(self._build_service(service))
+
+         # Add Workflow CRD
+         manifest["items"].append(self._build_workflow_crd())
+
+         return manifest
+
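Before the private _build_* helpers below, a sketch of the intended fluent usage. `wf` stands in for a kailash.workflow.Workflow built elsewhere (its construction is outside this module), and build() additionally pulls in kailash.utils.export to emit the workflow custom resource.

    from kailash.manifest import DeploymentConfig, ManifestBuilder, ServiceConfig

    manifest = (
        ManifestBuilder(wf)  # wf: a Workflow instance created elsewhere in the SDK
        .with_deployment(DeploymentConfig(name="demo", namespace="default"))
        .with_service(ServiceConfig(name="demo-svc", ports=[{"name": "http", "port": 80, "targetPort": 8080}]))
        .build()
    )
    print(manifest["kind"], len(manifest["items"]))  # "List" plus one item per resource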
+     def _build_deployment(self) -> Dict[str, Any]:
+         """Build deployment manifest."""
+         config = self.deployment_config
+
+         deployment = {
+             "apiVersion": "apps/v1",
+             "kind": "Deployment",
+             "metadata": {
+                 "name": config.name,
+                 "namespace": config.namespace,
+                 "labels": config.labels,
+                 "annotations": config.annotations,
+             },
+             "spec": {
+                 "replicas": config.replicas,
+                 "strategy": {"type": config.strategy},
+                 "selector": {
+                     "matchLabels": {
+                         "app": config.name,
+                         "workflow": self.workflow.metadata.name,
+                     }
+                 },
+                 "template": {
+                     "metadata": {
+                         "labels": {
+                             "app": config.name,
+                             "workflow": self.workflow.metadata.name,
+                             **config.labels,
+                         }
+                     },
+                     "spec": {"containers": []},
+                 },
+             },
+         }
+
+         # Add service account if specified
+         if config.service_account:
+             deployment["spec"]["template"]["spec"][
+                 "serviceAccountName"
+             ] = config.service_account
+
+         # Add node selector
+         if config.node_selector:
+             deployment["spec"]["template"]["spec"][
+                 "nodeSelector"
+             ] = config.node_selector
+
+         # Add tolerations
+         if config.tolerations:
+             deployment["spec"]["template"]["spec"]["tolerations"] = config.tolerations
+
+         # Add affinity
+         if config.affinity:
+             deployment["spec"]["template"]["spec"]["affinity"] = config.affinity
+
+         # Add volumes
+         if self.volume_configs:
+             volumes = []
+             volume_mounts = []
+
+             for vol_config in self.volume_configs:
+                 volume = {"name": vol_config.name}
+
+                 if vol_config.type == "configMap":
+                     volume["configMap"] = {"name": vol_config.source}
+                 elif vol_config.type == "secret":
+                     volume["secret"] = {"secretName": vol_config.source}
+                 elif vol_config.type == "persistentVolumeClaim":
+                     volume["persistentVolumeClaim"] = {"claimName": vol_config.source}
+
+                 volumes.append(volume)
+
+                 mount = {
+                     "name": vol_config.name,
+                     "mountPath": vol_config.mount_path,
+                     "readOnly": vol_config.read_only,
+                 }
+
+                 if vol_config.sub_path:
+                     mount["subPath"] = vol_config.sub_path
+
+                 volume_mounts.append(mount)
+
+             deployment["spec"]["template"]["spec"]["volumes"] = volumes
+
+         # Add workflow controller container
+         controller_container = {
+             "name": "workflow-controller",
+             "image": "kailash/workflow-controller:latest",
+             "imagePullPolicy": config.image_pull_policy,
+             "env": [
+                 {"name": "WORKFLOW_NAME", "value": self.workflow.metadata.name},
+                 {"name": "NAMESPACE", "value": config.namespace},
+             ],
+             "resources": {"requests": {"cpu": "100m", "memory": "256Mi"}},
+         }
+
+         # Attach the mounts collected above when volumes were configured.
+         # (The previous `hasattr(self, "volume_mounts")` guard could never be
+         # true because volume_mounts is a local variable, so mounts were
+         # silently dropped.)
+         if self.volume_configs:
+             controller_container["volumeMounts"] = volume_mounts
+
+         deployment["spec"]["template"]["spec"]["containers"].append(
+             controller_container
+         )
+
+         return deployment
+
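The volume handling above maps VolumeConfig.type onto the matching Kubernetes volume-source key. Restated as a standalone helper for illustration only (not part of the SDK):

    def volume_entry(name: str, vol_type: str, source: str) -> dict:
        # Same mapping as _build_deployment: configMap / secret / persistentVolumeClaim.
        key_by_type = {
            "configMap": ("configMap", "name"),
            "secret": ("secret", "secretName"),
            "persistentVolumeClaim": ("persistentVolumeClaim", "claimName"),
        }
        kind, name_key = key_by_type[vol_type]
        return {"name": name, kind: {name_key: source}}

    print(volume_entry("workflow-config", "configMap", "demo-config"))
    # {'name': 'workflow-config', 'configMap': {'name': 'demo-config'}}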
+     def _build_service(self, config: ServiceConfig) -> Dict[str, Any]:
+         """Build service manifest."""
+         service = {
+             "apiVersion": "v1",
+             "kind": "Service",
+             "metadata": {
+                 "name": config.name,
+                 "namespace": self.deployment_config.namespace,
+                 "labels": config.labels,
+             },
+             "spec": {
+                 "type": config.type,
+                 "selector": config.selector
+                 or {
+                     "app": self.deployment_config.name,
+                     "workflow": self.workflow.metadata.name,
+                 },
+                 "ports": config.ports,
+             },
+         }
+
+         return service
+
+     def _build_configmap(self, config: ConfigMapConfig) -> Dict[str, Any]:
+         """Build ConfigMap manifest."""
+         configmap = {
+             "apiVersion": "v1",
+             "kind": "ConfigMap",
+             "metadata": {
+                 "name": config.name,
+                 "namespace": config.namespace,
+                 "labels": config.labels,
+             },
+             "data": config.data,
+         }
+
+         if config.binary_data:
+             configmap["binaryData"] = config.binary_data
+
+         return configmap
+
+     def _build_secret(self, config: SecretConfig) -> Dict[str, Any]:
+         """Build Secret manifest."""
+         secret = {
+             "apiVersion": "v1",
+             "kind": "Secret",
+             "metadata": {
+                 "name": config.name,
+                 "namespace": config.namespace,
+                 "labels": config.labels,
+             },
+             "type": config.type,
+             "data": config.data,
+         }
+
+         if config.string_data:
+             secret["stringData"] = config.string_data
+
+         return secret
+
+     def _build_workflow_crd(self) -> Dict[str, Any]:
+         """Build workflow custom resource."""
+         from kailash.utils.export import ExportConfig, WorkflowExporter
+
+         # Use exporter to get workflow data
+         export_config = ExportConfig(
+             namespace=self.deployment_config.namespace,
+             include_metadata=True,
+             include_resources=True,
+         )
+         exporter = WorkflowExporter(export_config)
+
+         return exporter.manifest_generator.generate_manifest(
+             self.workflow, exporter.node_mapper
+         )
+
+
+ class ManifestGenerator:
+     """Generator for creating deployment manifests from workflows."""
+
+     @staticmethod
+     def generate_simple_manifest(
+         workflow: Workflow, name: str, namespace: str = "default"
+     ) -> Dict[str, Any]:
+         """Generate a simple deployment manifest.
+
+         Args:
+             workflow: Workflow to deploy
+             name: Deployment name
+             namespace: Kubernetes namespace
+
+         Returns:
+             Deployment manifest
+         """
+         builder = ManifestBuilder(workflow)
+
+         # Add deployment
+         deployment_config = DeploymentConfig(
+             name=name,
+             namespace=namespace,
+             labels={
+                 "app": name,
+                 "workflow": workflow.metadata.name,
+                 "version": workflow.metadata.version,
+             },
+         )
+         builder.with_deployment(deployment_config)
+
+         # Add service
+         service_config = ServiceConfig(
+             name=f"{name}-service",
+             ports=[{"name": "http", "port": 80, "targetPort": 8080}],
+         )
+         builder.with_service(service_config)
+
+         # Add workflow ConfigMap
+         configmap_config = ConfigMapConfig(
+             name=f"{name}-config",
+             namespace=namespace,
+             data={"workflow.yaml": yaml.dump(workflow.to_dict())},
+         )
+         builder.with_configmap(configmap_config)
+
+         # Add volume for ConfigMap
+         volume_config = VolumeConfig(
+             name="workflow-config",
+             type="configMap",
+             source=f"{name}-config",
+             mount_path="/config",
+         )
+         builder.with_volume(volume_config)
+
+         return builder.build()
+
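A usage sketch for the simple path; `wf` again stands in for a Workflow created elsewhere, and the result is a single v1 List containing the ConfigMap, Deployment, Service, and workflow resource assembled above.

    from kailash.manifest import ManifestGenerator

    manifest = ManifestGenerator.generate_simple_manifest(wf, name="demo", namespace="default")
    print([item["kind"] for item in manifest["items"]])  # e.g. ['ConfigMap', 'Deployment', 'Service', ...]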
+     @staticmethod
+     def generate_advanced_manifest(
+         workflow: Workflow,
+         name: str,
+         namespace: str = "default",
+         replicas: int = 1,
+         resources: Optional[Dict[str, Any]] = None,
+         **kwargs,
+     ) -> Dict[str, Any]:
+         """Generate an advanced deployment manifest with custom configuration.
+
+         Args:
+             workflow: Workflow to deploy
+             name: Deployment name
+             namespace: Kubernetes namespace
+             replicas: Number of replicas
+             resources: Resource requirements
+             **kwargs: Additional configuration options
+
+         Returns:
+             Deployment manifest
+         """
+         builder = ManifestBuilder(workflow)
+
+         # Add deployment with advanced configuration
+         deployment_config = DeploymentConfig(
+             name=name,
+             namespace=namespace,
+             replicas=replicas,
+             labels=kwargs.get(
+                 "labels",
+                 {
+                     "app": name,
+                     "workflow": workflow.metadata.name,
+                     "version": workflow.metadata.version,
+                 },
+             ),
+             annotations=kwargs.get("annotations", {}),
+             node_selector=kwargs.get("node_selector", {}),
+             tolerations=kwargs.get("tolerations", []),
+             affinity=kwargs.get("affinity", None),
+             service_account=kwargs.get("service_account", None),
+         )
+         builder.with_deployment(deployment_config)
+
+         # Add services
+         if kwargs.get("expose_external", False):
+             service_config = ServiceConfig(
+                 name=f"{name}-external",
+                 type="LoadBalancer",
+                 ports=[{"name": "http", "port": 80, "targetPort": 8080}],
+             )
+             builder.with_service(service_config)
+
+         # Internal service
+         internal_service = ServiceConfig(
+             name=f"{name}-internal",
+             type="ClusterIP",
+             ports=[
+                 {"name": "http", "port": 8080, "targetPort": 8080},
+                 {"name": "metrics", "port": 9090, "targetPort": 9090},
+             ],
+         )
+         builder.with_service(internal_service)
+
+         # Add ConfigMaps
+         # Workflow config
+         workflow_config = ConfigMapConfig(
+             name=f"{name}-workflow",
+             namespace=namespace,
+             data={
+                 "workflow.yaml": yaml.dump(workflow.to_dict()),
+                 "workflow.json": json.dumps(workflow.to_dict(), indent=2),
+             },
+         )
+         builder.with_configmap(workflow_config)
+
+         # Runtime config
+         runtime_config = ConfigMapConfig(
+             name=f"{name}-runtime",
+             namespace=namespace,
+             data=kwargs.get(
+                 "runtime_config",
+                 {
+                     "log_level": "INFO",
+                     "metrics_enabled": "true",
+                     "trace_enabled": "false",
+                 },
+             ),
+         )
+         builder.with_configmap(runtime_config)
+
+         # Add Secrets if provided
+         if "secrets" in kwargs:
+             for secret_name, secret_data in kwargs["secrets"].items():
+                 secret_config = SecretConfig(
+                     name=f"{name}-{secret_name}", namespace=namespace, data=secret_data
+                 )
+                 builder.with_secret(secret_config)
+
+         # Add Volumes
+         # Workflow config volume
+         workflow_volume = VolumeConfig(
+             name="workflow-config",
+             type="configMap",
+             source=f"{name}-workflow",
+             mount_path="/config/workflow",
+         )
+         builder.with_volume(workflow_volume)
+
+         # Runtime config volume
+         runtime_volume = VolumeConfig(
+             name="runtime-config",
+             type="configMap",
+             source=f"{name}-runtime",
+             mount_path="/config/runtime",
+         )
+         builder.with_volume(runtime_volume)
+
+         # Data volume if specified
+         if kwargs.get("persistent_storage", False):
+             data_volume = VolumeConfig(
+                 name="data",
+                 type="persistentVolumeClaim",
+                 source=f"{name}-data",
+                 mount_path="/data",
+                 read_only=False,
+             )
+             builder.with_volume(data_volume)
+
+         return builder.build()
+
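The advanced path is driven almost entirely by keyword options. A sketch exercising the ones read above (expose_external, secrets, persistent_storage); `wf` is again a placeholder for a Workflow built elsewhere, and the secret value is illustrative.

    manifest = ManifestGenerator.generate_advanced_manifest(
        wf,
        name="demo",
        namespace="prod",
        replicas=3,
        expose_external=True,                         # adds a LoadBalancer service next to the ClusterIP one
        secrets={"api-keys": {"token": "czNjcjN0"}},  # becomes Secret "demo-api-keys"; values go into .data
        persistent_storage=True,                      # mounts PVC "demo-data" at /data
    )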
+     @staticmethod
+     def save_manifest(manifest: Dict[str, Any], path: str, format: str = "yaml"):
+         """Save manifest to file.
+
+         Args:
+             manifest: Manifest dictionary
+             path: Output file path
+             format: Output format (yaml or json)
+         """
+         output_path = Path(path)
+         output_path.parent.mkdir(parents=True, exist_ok=True)
+
+         if format == "yaml":
+             with open(output_path, "w") as f:
+                 yaml.dump(manifest, f, default_flow_style=False, sort_keys=False)
+         elif format == "json":
+             with open(output_path, "w") as f:
+                 json.dump(manifest, f, indent=2)
+         else:
+             raise ValueError(f"Unknown format: {format}")
+
+
+ # Convenience functions
+ def create_deployment_manifest(
+     workflow: Workflow, deployment_name: str, **config
+ ) -> Dict[str, Any]:
+     """Create a deployment manifest for a workflow.
+
+     Args:
+         workflow: Workflow to deploy
+         deployment_name: Name for the deployment
+         **config: Additional configuration
+
+     Returns:
+         Deployment manifest
+     """
+     if config.get("advanced", False):
+         return ManifestGenerator.generate_advanced_manifest(
+             workflow, deployment_name, **config
+         )
+     else:
+         return ManifestGenerator.generate_simple_manifest(
+             workflow, deployment_name, namespace=config.get("namespace", "default")
+         )
+
+
+ def save_deployment_manifest(
+     workflow: Workflow,
+     deployment_name: str,
+     output_path: str,
+     format: str = "yaml",
+     **config,
+ ):
+     """Create and save a deployment manifest.
+
+     Args:
+         workflow: Workflow to deploy
+         deployment_name: Name for the deployment
+         output_path: Output file path
+         format: Output format (yaml or json)
+         **config: Additional configuration
+     """
+     manifest = create_deployment_manifest(workflow, deployment_name, **config)
+     ManifestGenerator.save_manifest(manifest, output_path, format)
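The convenience wrappers tie the two generators together. A one-call sketch, with `wf` as a pre-built Workflow and arbitrary output paths:

    from kailash.manifest import save_deployment_manifest

    # Simple manifest straight to disk...
    save_deployment_manifest(wf, "demo", "deploy/demo.yaml")

    # ...or the advanced path by flipping the flag; remaining kwargs flow through to generate_advanced_manifest.
    save_deployment_manifest(wf, "demo", "deploy/demo-prod.json", format="json", advanced=True, replicas=3)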