flowyml 1.7.1__py3-none-any.whl → 1.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (137)
  1. flowyml/assets/base.py +15 -0
  2. flowyml/assets/dataset.py +570 -17
  3. flowyml/assets/metrics.py +5 -0
  4. flowyml/assets/model.py +1052 -15
  5. flowyml/cli/main.py +709 -0
  6. flowyml/cli/stack_cli.py +138 -25
  7. flowyml/core/__init__.py +17 -0
  8. flowyml/core/executor.py +231 -37
  9. flowyml/core/image_builder.py +129 -0
  10. flowyml/core/log_streamer.py +227 -0
  11. flowyml/core/orchestrator.py +59 -4
  12. flowyml/core/pipeline.py +65 -13
  13. flowyml/core/routing.py +558 -0
  14. flowyml/core/scheduler.py +88 -5
  15. flowyml/core/step.py +9 -1
  16. flowyml/core/step_grouping.py +49 -35
  17. flowyml/core/types.py +407 -0
  18. flowyml/integrations/keras.py +247 -82
  19. flowyml/monitoring/alerts.py +10 -0
  20. flowyml/monitoring/notifications.py +104 -25
  21. flowyml/monitoring/slack_blocks.py +323 -0
  22. flowyml/plugins/__init__.py +251 -0
  23. flowyml/plugins/alerters/__init__.py +1 -0
  24. flowyml/plugins/alerters/slack.py +168 -0
  25. flowyml/plugins/base.py +752 -0
  26. flowyml/plugins/config.py +478 -0
  27. flowyml/plugins/deployers/__init__.py +22 -0
  28. flowyml/plugins/deployers/gcp_cloud_run.py +200 -0
  29. flowyml/plugins/deployers/sagemaker.py +306 -0
  30. flowyml/plugins/deployers/vertex.py +290 -0
  31. flowyml/plugins/integration.py +369 -0
  32. flowyml/plugins/manager.py +510 -0
  33. flowyml/plugins/model_registries/__init__.py +22 -0
  34. flowyml/plugins/model_registries/mlflow.py +159 -0
  35. flowyml/plugins/model_registries/sagemaker.py +489 -0
  36. flowyml/plugins/model_registries/vertex.py +386 -0
  37. flowyml/plugins/orchestrators/__init__.py +13 -0
  38. flowyml/plugins/orchestrators/sagemaker.py +443 -0
  39. flowyml/plugins/orchestrators/vertex_ai.py +461 -0
  40. flowyml/plugins/registries/__init__.py +13 -0
  41. flowyml/plugins/registries/ecr.py +321 -0
  42. flowyml/plugins/registries/gcr.py +313 -0
  43. flowyml/plugins/registry.py +454 -0
  44. flowyml/plugins/stack.py +494 -0
  45. flowyml/plugins/stack_config.py +537 -0
  46. flowyml/plugins/stores/__init__.py +13 -0
  47. flowyml/plugins/stores/gcs.py +460 -0
  48. flowyml/plugins/stores/s3.py +453 -0
  49. flowyml/plugins/trackers/__init__.py +11 -0
  50. flowyml/plugins/trackers/mlflow.py +316 -0
  51. flowyml/plugins/validators/__init__.py +3 -0
  52. flowyml/plugins/validators/deepchecks.py +119 -0
  53. flowyml/registry/__init__.py +2 -1
  54. flowyml/registry/model_environment.py +109 -0
  55. flowyml/registry/model_registry.py +241 -96
  56. flowyml/serving/__init__.py +17 -0
  57. flowyml/serving/model_server.py +628 -0
  58. flowyml/stacks/__init__.py +60 -0
  59. flowyml/stacks/aws.py +93 -0
  60. flowyml/stacks/base.py +62 -0
  61. flowyml/stacks/components.py +12 -0
  62. flowyml/stacks/gcp.py +44 -9
  63. flowyml/stacks/plugins.py +115 -0
  64. flowyml/stacks/registry.py +2 -1
  65. flowyml/storage/sql.py +401 -12
  66. flowyml/tracking/experiment.py +8 -5
  67. flowyml/ui/backend/Dockerfile +87 -16
  68. flowyml/ui/backend/auth.py +12 -2
  69. flowyml/ui/backend/main.py +149 -5
  70. flowyml/ui/backend/routers/ai_context.py +226 -0
  71. flowyml/ui/backend/routers/assets.py +23 -4
  72. flowyml/ui/backend/routers/auth.py +96 -0
  73. flowyml/ui/backend/routers/deployments.py +660 -0
  74. flowyml/ui/backend/routers/model_explorer.py +597 -0
  75. flowyml/ui/backend/routers/plugins.py +103 -51
  76. flowyml/ui/backend/routers/projects.py +91 -8
  77. flowyml/ui/backend/routers/runs.py +132 -1
  78. flowyml/ui/backend/routers/schedules.py +54 -29
  79. flowyml/ui/backend/routers/templates.py +319 -0
  80. flowyml/ui/backend/routers/websocket.py +2 -2
  81. flowyml/ui/frontend/Dockerfile +55 -6
  82. flowyml/ui/frontend/dist/assets/index-B5AsPTSz.css +1 -0
  83. flowyml/ui/frontend/dist/assets/index-dFbZ8wD8.js +753 -0
  84. flowyml/ui/frontend/dist/index.html +2 -2
  85. flowyml/ui/frontend/dist/logo.png +0 -0
  86. flowyml/ui/frontend/nginx.conf +65 -4
  87. flowyml/ui/frontend/package-lock.json +1415 -74
  88. flowyml/ui/frontend/package.json +4 -0
  89. flowyml/ui/frontend/public/logo.png +0 -0
  90. flowyml/ui/frontend/src/App.jsx +10 -7
  91. flowyml/ui/frontend/src/app/assets/page.jsx +890 -321
  92. flowyml/ui/frontend/src/app/auth/Login.jsx +90 -0
  93. flowyml/ui/frontend/src/app/dashboard/page.jsx +8 -8
  94. flowyml/ui/frontend/src/app/deployments/page.jsx +786 -0
  95. flowyml/ui/frontend/src/app/model-explorer/page.jsx +1031 -0
  96. flowyml/ui/frontend/src/app/pipelines/page.jsx +12 -2
  97. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectExperimentsList.jsx +19 -6
  98. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectMetricsPanel.jsx +1 -1
  99. flowyml/ui/frontend/src/app/runs/[runId]/page.jsx +601 -101
  100. flowyml/ui/frontend/src/app/runs/page.jsx +8 -2
  101. flowyml/ui/frontend/src/app/settings/page.jsx +267 -253
  102. flowyml/ui/frontend/src/components/ArtifactViewer.jsx +62 -2
  103. flowyml/ui/frontend/src/components/AssetDetailsPanel.jsx +424 -29
  104. flowyml/ui/frontend/src/components/AssetTreeHierarchy.jsx +119 -11
  105. flowyml/ui/frontend/src/components/DatasetViewer.jsx +753 -0
  106. flowyml/ui/frontend/src/components/Layout.jsx +6 -0
  107. flowyml/ui/frontend/src/components/PipelineGraph.jsx +79 -29
  108. flowyml/ui/frontend/src/components/RunDetailsPanel.jsx +36 -6
  109. flowyml/ui/frontend/src/components/RunMetaPanel.jsx +113 -0
  110. flowyml/ui/frontend/src/components/TrainingHistoryChart.jsx +514 -0
  111. flowyml/ui/frontend/src/components/TrainingMetricsPanel.jsx +175 -0
  112. flowyml/ui/frontend/src/components/ai/AIAssistantButton.jsx +71 -0
  113. flowyml/ui/frontend/src/components/ai/AIAssistantPanel.jsx +420 -0
  114. flowyml/ui/frontend/src/components/header/Header.jsx +22 -0
  115. flowyml/ui/frontend/src/components/plugins/PluginManager.jsx +4 -4
  116. flowyml/ui/frontend/src/components/plugins/{ZenMLIntegration.jsx → StackImport.jsx} +38 -12
  117. flowyml/ui/frontend/src/components/sidebar/Sidebar.jsx +36 -13
  118. flowyml/ui/frontend/src/contexts/AIAssistantContext.jsx +245 -0
  119. flowyml/ui/frontend/src/contexts/AuthContext.jsx +108 -0
  120. flowyml/ui/frontend/src/hooks/useAIContext.js +156 -0
  121. flowyml/ui/frontend/src/hooks/useWebGPU.js +54 -0
  122. flowyml/ui/frontend/src/layouts/MainLayout.jsx +6 -0
  123. flowyml/ui/frontend/src/router/index.jsx +47 -20
  124. flowyml/ui/frontend/src/services/pluginService.js +3 -1
  125. flowyml/ui/server_manager.py +5 -5
  126. flowyml/ui/utils.py +157 -39
  127. flowyml/utils/config.py +37 -15
  128. flowyml/utils/model_introspection.py +123 -0
  129. flowyml/utils/observability.py +30 -0
  130. flowyml-1.8.0.dist-info/METADATA +174 -0
  131. {flowyml-1.7.1.dist-info → flowyml-1.8.0.dist-info}/RECORD +134 -73
  132. {flowyml-1.7.1.dist-info → flowyml-1.8.0.dist-info}/WHEEL +1 -1
  133. flowyml/ui/frontend/dist/assets/index-BqDQvp63.js +0 -630
  134. flowyml/ui/frontend/dist/assets/index-By4trVyv.css +0 -1
  135. flowyml-1.7.1.dist-info/METADATA +0 -477
  136. {flowyml-1.7.1.dist-info → flowyml-1.8.0.dist-info}/entry_points.txt +0 -0
  137. {flowyml-1.7.1.dist-info → flowyml-1.8.0.dist-info}/licenses/LICENSE +0 -0
flowyml/stacks/aws.py CHANGED
@@ -290,6 +290,45 @@ class AWSBatchOrchestrator(RemoteOrchestrator):
             print(f"Error fetching job status: {e}")
             return ExecutionStatus.FAILED

+    def get_run_logs(self, job_id: str) -> str:
+        """Get logs for an AWS Batch job.
+
+        Args:
+            job_id: The job ID.
+
+        Returns:
+            String containing the logs.
+        """
+        import boto3
+
+        try:
+            client = self._client()
+            response = client.describe_jobs(jobs=[job_id])
+
+            if not response.get("jobs"):
+                return "Job not found."
+
+            job = response["jobs"][0]
+            if "container" not in job or "logStreamName" not in job["container"]:
+                # Job might not have started yet or failed before logging
+                return "Logs not available yet (no log stream)."
+
+            log_stream_name = job["container"]["logStreamName"]
+            logs_client = boto3.client("logs", region_name=self.region)
+
+            log_events = logs_client.get_log_events(
+                logGroupName="/aws/batch/job",
+                logStreamName=log_stream_name,
+                limit=100,
+                startFromHead=False,
+            )
+
+            messages = [event["message"] for event in log_events.get("events", [])]
+            return "\n".join(messages) if messages else "No log messages found."
+
+        except Exception as e:
+            return f"Failed to fetch logs: {e}"
+
     def stop_run(self, job_id: str, graceful: bool = True) -> None:
         """Stop an AWS Batch job.

@@ -516,6 +555,52 @@ class SageMakerOrchestrator(RemoteOrchestrator):
             print(f"Error fetching training job status: {e}")
             return ExecutionStatus.FAILED

+    def get_run_logs(self, job_id: str) -> str:
+        """Get logs for a SageMaker training job.
+
+        Args:
+            job_id: The training job name.
+
+        Returns:
+            String containing the logs.
+        """
+        import boto3
+
+        try:
+            self._client()
+            logs_client = boto3.client("logs", region_name=self.region)
+
+            # SageMaker logs to /aws/sagemaker/TrainingJobs
+            log_group_name = "/aws/sagemaker/TrainingJobs"
+
+            # Find log stream
+            # Streams are usually like <job_name>/algo-1-123456789
+            streams = logs_client.describe_log_streams(
+                logGroupName=log_group_name,
+                logStreamNamePrefix=job_id,
+                orderBy="LastEventTime",
+                descending=True,
+                limit=1,
+            )
+
+            if not streams.get("logStreams"):
+                return "No log streams found for this job."
+
+            log_stream_name = streams["logStreams"][0]["logStreamName"]
+
+            log_events = logs_client.get_log_events(
+                logGroupName=log_group_name,
+                logStreamName=log_stream_name,
+                limit=100,
+                startFromHead=False,
+            )
+
+            messages = [event["message"] for event in log_events.get("events", [])]
+            return "\n".join(messages) if messages else "No log messages found."
+
+        except Exception as e:
+            return f"Failed to fetch logs: {e}"
+
     def stop_run(self, job_id: str, graceful: bool = True) -> None:
         """Stop a SageMaker training job.

@@ -556,6 +641,7 @@ class AWSStack(Stack):
         orchestrator_type: str = "batch",
         role_arn: str | None = None,
         metadata_store: Any | None = None,
+        model_deployer: Any | None = None,
     ):
         orchestrator: Orchestrator
         if orchestrator_type == "sagemaker":
@@ -569,6 +655,11 @@ class AWSStack(Stack):
         if metadata_store is None:
             metadata_store = SQLiteMetadataStore()

+        if model_deployer is None:
+            from flowyml.plugins.deployers.sagemaker import SageMakerEndpointDeployer
+
+            model_deployer = SageMakerEndpointDeployer(region=region, role_arn=role_arn)
+
         super().__init__(
             name=name,
             executor=None,
@@ -576,6 +667,7 @@ class AWSStack(Stack):
             metadata_store=metadata_store,
             container_registry=container_registry,
             orchestrator=orchestrator,
+            model_deployer=model_deployer,
         )

         self.region = region
@@ -596,4 +688,5 @@ class AWSStack(Stack):
             "orchestrator": self.orchestrator.to_dict(),
             "artifact_store": self.artifact_store.to_dict(),
             "container_registry": self.container_registry.to_dict(),
+            "model_deployer": self.model_deployer.to_dict() if self.model_deployer else None,
         }
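
Both new get_run_logs implementations read recent events from CloudWatch Logs and return them as a single newline-joined string, capped at the last 100 events (startFromHead=False fetches the tail of the stream). A minimal usage sketch, assuming an already configured orchestrator instance; constructor arguments are not shown in this diff and are omitted here:

    # Hypothetical usage; `orchestrator` stands for a configured AWSBatchOrchestrator
    # (or SageMakerOrchestrator) with a valid region and AWS credentials.
    logs = orchestrator.get_run_logs(job_id="example-job-id")
    print(logs)  # e.g. "Logs not available yet (no log stream)." until the job starts logging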
flowyml/stacks/base.py CHANGED
@@ -14,6 +14,7 @@ class StackConfig:
    metadata_store: str
    container_registry: str | None = None
    orchestrator: str | None = None
+    model_deployer: str | None = None

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary."""
@@ -24,6 +25,7 @@ class StackConfig:
            "metadata_store": self.metadata_store,
            "container_registry": self.container_registry,
            "orchestrator": self.orchestrator,
+            "model_deployer": self.model_deployer,
        }


@@ -46,6 +48,7 @@ class Stack:
        metadata_store: Any,
        container_registry: Any | None = None,
        orchestrator: Any | None = None,
+        model_deployer: Any | None = None,
    ):
        self.name = name
        self.executor = executor
@@ -53,6 +56,7 @@ class Stack:
        self.metadata_store = metadata_store
        self.container_registry = container_registry
        self.orchestrator = orchestrator
+        self.model_deployer = model_deployer

        self.config = StackConfig(
            name=name,
@@ -61,6 +65,7 @@ class Stack:
            metadata_store=type(metadata_store).__name__,
            container_registry=type(container_registry).__name__ if container_registry else None,
            orchestrator=type(orchestrator).__name__ if orchestrator else None,
+            model_deployer=type(model_deployer).__name__ if model_deployer else None,
        )

    def activate(self) -> None:
@@ -68,6 +73,63 @@ class Stack:
        # In a real implementation, this would set the global active stack
        pass

+    def prepare_docker_image(self, docker_config: Any, pipeline_name: str, project_name: str | None = None) -> str:
+        """Prepare the Docker image for execution.
+
+        Args:
+            docker_config: Docker configuration object.
+            pipeline_name: Name of the pipeline being built.
+            project_name: Optional name of the project.
+
+        Returns:
+            str: The full URI of the docker image to use.
+
+        Raises:
+            ValueError: If image cannot be prepared (e.g. no registry configured for build).
+        """
+        # 1. If explicit image provided, use it
+        if docker_config.image:
+            return docker_config.image
+
+        # 2. If no registry, we cannot build/push for remote execution
+        if not self.container_registry:
+            raise ValueError(
+                "Remote execution requires a specific 'image' in DockerConfiguration "
+                "or a configured 'container_registry' in the Stack for automatic building.",
+            )
+
+        # 3. Trigger build and push
+        # Use safe naming: registry/project/pipeline:latest OR registry/pipeline:latest
+        if project_name:
+            image_name = f"{project_name}-{pipeline_name}"
+        else:
+            image_name = pipeline_name
+
+        # Clean image name to be docker compatible (lowercase, alphanumeric)
+        import re
+
+        safe_name = re.sub(r"[^a-zA-Z0-9_\-]", "_", image_name).lower()
+
+        image_tag = f"{self.container_registry.registry_uri}/{safe_name}:latest"
+
+        # Build
+        try:
+            from flowyml.core.image_builder import DockerImageBuilder
+
+            builder = DockerImageBuilder()
+            builder.build_image(docker_config, image_tag)
+        except ImportError:
+            # Fallback if file not found (shouldn't happen in prod)
+            print("Warning: DockerImageBuilder not found. Skipping build.")
+
+        # Push
+        print(f"🚀 Pushing image: {image_tag}")
+        try:
+            pushed_uri = self.container_registry.push_image(image_tag)
+            return pushed_uri
+        except Exception as e:
+            raise RuntimeError(f"Failed to push image to registry: {e}")
+
    def validate(self) -> bool:
        """Validate that all stack components are properly configured."""
        # Check that all components are properly configured
flowyml/stacks/components.py CHANGED
@@ -18,6 +18,7 @@ class ComponentType(Enum):
    CONTAINER_REGISTRY = "container_registry"
    METADATA_STORE = "metadata_store"
    EXECUTOR = "executor"
+    MODEL_DEPLOYER = "model_deployer"


 @dataclass
@@ -130,6 +131,17 @@ class Orchestrator(StackComponent):
        """Get status of a pipeline run."""
        pass

+    def get_run_logs(self, run_id: str) -> str:
+        """Get logs for a pipeline run.
+
+        Args:
+            run_id: The run identifier.
+
+        Returns:
+            String containing the logs.
+        """
+        return "Logs not available for this orchestrator."
+

 class ArtifactStore(StackComponent):
     """Base class for artifact stores."""
flowyml/stacks/gcp.py CHANGED
@@ -41,7 +41,7 @@ class VertexAIOrchestrator(RemoteOrchestrator):
        self,
        name: str = "vertex_ai",
        project_id: str | None = None,
-        region: str = "us-central1",
+        region: str = "europe-west1",
        service_account: str | None = None,
        network: str | None = None,
        encryption_key: str | None = None,
@@ -176,6 +176,36 @@ class VertexAIOrchestrator(RemoteOrchestrator):
            print(f"Error fetching job status: {e}")
            return ExecutionStatus.FAILED

+    def get_run_logs(self, job_id: str) -> str:
+        """Get logs for a Vertex AI job.
+
+        Args:
+            job_id: The job resource name.
+
+        Returns:
+            String containing the logs.
+        """
+        try:
+            from google.cloud import logging
+
+            client = logging.Client(project=self.project_id)
+            job_name = job_id.split("/")[-1]
+
+            # Filter logs for this job
+            # Note: This is a simplified filter; exact filter depends on Vertex AI logging format
+            filter_str = f'resource.type="ml_job" AND conversion_id="{job_name}"'
+
+            entries = client.list_entries(filter_=filter_str, order_by=logging.DESCENDING, max_results=100)
+            logs = []
+            for entry in entries:
+                if entry.payload:
+                    logs.append(str(entry.payload))
+
+            return "\n".join(reversed(logs)) if logs else "No logs found."
+
+        except Exception as e:
+            return f"Failed to fetch logs: {e}"
+
    def stop_run(self, job_id: str, graceful: bool = True) -> None:
        """Cancel a Vertex AI job."""
        from google.cloud import aiplatform
@@ -489,11 +519,12 @@ class GCPStack(Stack):
        self,
        name: str = "gcp",
        project_id: str | None = None,
-        region: str = "us-central1",
+        region: str = "europe-west1",
        bucket_name: str | None = None,
        registry_uri: str | None = None,
        service_account: str | None = None,
        metadata_store: Any | None = None,
+        model_deployer: Any | None = None,
    ):
        """Initialize GCP stack.

@@ -505,6 +536,7 @@ class GCPStack(Stack):
            registry_uri: Container registry URI
            service_account: Service account for job execution
            metadata_store: Metadata store (optional, defaults to local SQLite)
+            model_deployer: Optional model deployer
        """
        # Create GCP components
        orchestrator = VertexAIOrchestrator(
@@ -524,11 +556,12 @@ class GCPStack(Stack):
            region=region,
        )

-        # Use local metadata store if not provided
-        if metadata_store is None:
-            from flowyml.storage.metadata import SQLiteMetadataStore
+        # Use new generic deployer if provided, else use CloudRun default if desired,
+        # but better to stick to generic injection or default creation
+        if model_deployer is None:
+            from flowyml.plugins.deployers.gcp_cloud_run import GCPCloudRunDeployer

-            metadata_store = SQLiteMetadataStore()
+            model_deployer = GCPCloudRunDeployer(project_id=project_id, region=region)

        # Initialize base stack
        super().__init__(
@@ -538,12 +571,13 @@ class GCPStack(Stack):
            metadata_store=metadata_store,
            container_registry=container_registry,
            orchestrator=orchestrator,
+            model_deployer=model_deployer,
        )

        self.project_id = project_id
        self.region = region
+        # Legacy helpers kept for backward compatibility if needed, but stack now uses proper components
        self.vertex_endpoints = VertexEndpointManager(project_id=project_id, region=region)
-        self.cloud_run = CloudRunDeployer(project_id=project_id, region=region)

    def validate(self) -> bool:
        """Validate all GCP stack components."""
@@ -562,13 +596,14 @@ class GCPStack(Stack):
            "orchestrator": self.orchestrator.to_dict(),
            "artifact_store": self.artifact_store.to_dict(),
            "container_registry": self.container_registry.to_dict(),
+            "model_deployer": self.model_deployer.to_dict() if self.model_deployer else None,
        }


 class VertexEndpointManager:
     """Deploy trained models as Vertex AI endpoints."""

-    def __init__(self, project_id: str | None, region: str = "us-central1"):
+    def __init__(self, project_id: str | None, region: str = "europe-west1"):
        self.project_id = project_id
        self.region = region

@@ -600,7 +635,7 @@ class VertexEndpointManager:
 class CloudRunDeployer:
     """Deploy container images to Cloud Run."""

-    def __init__(self, project_id: str | None, region: str = "us-central1"):
+    def __init__(self, project_id: str | None, region: str = "europe-west1"):
        self.project_id = project_id
        self.region = region
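
Two behavioural changes in this file are easy to miss: the default region for VertexAIOrchestrator, GCPStack, VertexEndpointManager, and CloudRunDeployer moves from "us-central1" to "europe-west1", and the local SQLiteMetadataStore fallback on the removed lines is gone (GCPStack now wires a GCPCloudRunDeployer as its default model deployer instead). A hedged sketch of pinning these choices explicitly, using only the keyword arguments visible in the __init__ signature above:

    from flowyml.stacks.gcp import GCPStack

    stack = GCPStack(
        name="gcp",
        project_id="my-project",
        region="us-central1",  # pin the old default explicitly if your resources live there
        bucket_name="my-artifacts-bucket",
        registry_uri="gcr.io/my-project",
        model_deployer=None,   # None now resolves to GCPCloudRunDeployer(project_id=..., region=...)
    )
    print(stack.to_dict()["model_deployer"])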
 
flowyml/stacks/plugins.py CHANGED
@@ -26,6 +26,14 @@ from flowyml.stacks.components import (
 from flowyml.stacks.bridge import GenericBridge, AdaptationRule


+# Lazy import to avoid circular dependencies
+def _get_zenml_bridge():
+    """Lazy import of ZenMLBridge to avoid import errors when ZenML is not installed."""
+    from flowyml.stacks.zenml_bridge import ZenMLBridge
+
+    return ZenMLBridge()
+
+
 @dataclass
 class PluginInfo:
     """Metadata about a plugin."""
@@ -334,6 +342,113 @@ class ComponentRegistry:
        except subprocess.CalledProcessError:
            return False

+    # ==================== ZenML Integration Methods ====================
+
+    def list_zenml_integrations(self) -> list[str]:
+        """List all available ZenML integrations.
+
+        Returns:
+            List of integration names (e.g., ['mlflow', 'kubernetes', 'aws']).
+        """
+        try:
+            bridge = _get_zenml_bridge()
+            return bridge.list_available_integrations()
+        except Exception:
+            return []
+
+    def list_installed_zenml_integrations(self) -> list[str]:
+        """List installed ZenML integrations.
+
+        Returns:
+            List of installed integration names.
+        """
+        try:
+            bridge = _get_zenml_bridge()
+            return bridge.list_installed_integrations()
+        except Exception:
+            return []
+
+    def install_zenml_integration(self, integration_name: str) -> bool:
+        """Install a ZenML integration and its dependencies.
+
+        Args:
+            integration_name: Name of the integration (e.g., "mlflow", "kubernetes").
+
+        Returns:
+            True if installation was successful.
+
+        Example:
+            >>> registry = get_component_registry()
+            >>> registry.install_zenml_integration("mlflow")
+            True
+        """
+        try:
+            bridge = _get_zenml_bridge()
+            return bridge.install_integration(integration_name)
+        except Exception as e:
+            print(f"Failed to install ZenML integration: {e}")
+            return False
+
+    def import_zenml_integration(self, integration_name: str) -> list[type[StackComponent]]:
+        """Import all components from a ZenML integration.
+
+        This discovers all flavors provided by a ZenML integration and
+        registers them as FlowyML components.
+
+        Args:
+            integration_name: Name of the integration to import.
+
+        Returns:
+            List of wrapped FlowyML component classes.
+
+        Example:
+            >>> registry = get_component_registry()
+            >>> components = registry.import_zenml_integration("mlflow")
+            >>> print([c.__name__ for c in components])
+            ['ZenMLMLFlowExperimentTrackerWrapper']
+        """
+        try:
+            bridge = _get_zenml_bridge()
+            components = bridge.import_integration(integration_name)
+
+            # Register all imported components
+            for component_class in components:
+                self.register(component_class)
+
+            return components
+        except Exception as e:
+            print(f"Failed to import ZenML integration: {e}")
+            return []
+
+    def import_all_zenml(self) -> dict[str, list[type[StackComponent]]]:
+        """Import all components from all installed ZenML integrations.
+
+        This is the easiest way to make all ZenML components available
+        in FlowyML with a single call.
+
+        Returns:
+            Dictionary mapping integration names to lists of wrapped components.
+
+        Example:
+            >>> registry = get_component_registry()
+            >>> all_components = registry.import_all_zenml()
+            >>> print(all_components.keys())
+            dict_keys(['mlflow', 'kubernetes', 'aws'])
+        """
+        try:
+            bridge = _get_zenml_bridge()
+            result = bridge.import_all()
+
+            # Register all imported components
+            for _integration_name, components in result.items():
+                for component_class in components:
+                    self.register(component_class)
+
+            return result
+        except Exception as e:
+            print(f"Failed to import ZenML integrations: {e}")
+            return {}
+
    @staticmethod
    def _class_to_snake_case(name: str) -> str:
        """Convert ClassName to class_name."""
flowyml/stacks/registry.py CHANGED
@@ -226,7 +226,8 @@ def get_active_stack() -> Stack | None:
    from flowyml.stacks.base import Stack
    import os

-    api_token = os.getenv("FLOWYML_API_TOKEN")
+    # Prefer token from config, fallback to env var
+    api_token = config.api_token or os.getenv("FLOWYML_API_TOKEN")

    return Stack(
        name="remote_logging",