flowyml 1.7.2__py3-none-any.whl → 1.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126)
  1. flowyml/assets/base.py +15 -0
  2. flowyml/assets/metrics.py +5 -0
  3. flowyml/cli/main.py +709 -0
  4. flowyml/cli/stack_cli.py +138 -25
  5. flowyml/core/__init__.py +17 -0
  6. flowyml/core/executor.py +161 -26
  7. flowyml/core/image_builder.py +129 -0
  8. flowyml/core/log_streamer.py +227 -0
  9. flowyml/core/orchestrator.py +22 -2
  10. flowyml/core/pipeline.py +34 -10
  11. flowyml/core/routing.py +558 -0
  12. flowyml/core/step.py +9 -1
  13. flowyml/core/step_grouping.py +49 -35
  14. flowyml/core/types.py +407 -0
  15. flowyml/monitoring/alerts.py +10 -0
  16. flowyml/monitoring/notifications.py +104 -25
  17. flowyml/monitoring/slack_blocks.py +323 -0
  18. flowyml/plugins/__init__.py +251 -0
  19. flowyml/plugins/alerters/__init__.py +1 -0
  20. flowyml/plugins/alerters/slack.py +168 -0
  21. flowyml/plugins/base.py +752 -0
  22. flowyml/plugins/config.py +478 -0
  23. flowyml/plugins/deployers/__init__.py +22 -0
  24. flowyml/plugins/deployers/gcp_cloud_run.py +200 -0
  25. flowyml/plugins/deployers/sagemaker.py +306 -0
  26. flowyml/plugins/deployers/vertex.py +290 -0
  27. flowyml/plugins/integration.py +369 -0
  28. flowyml/plugins/manager.py +510 -0
  29. flowyml/plugins/model_registries/__init__.py +22 -0
  30. flowyml/plugins/model_registries/mlflow.py +159 -0
  31. flowyml/plugins/model_registries/sagemaker.py +489 -0
  32. flowyml/plugins/model_registries/vertex.py +386 -0
  33. flowyml/plugins/orchestrators/__init__.py +13 -0
  34. flowyml/plugins/orchestrators/sagemaker.py +443 -0
  35. flowyml/plugins/orchestrators/vertex_ai.py +461 -0
  36. flowyml/plugins/registries/__init__.py +13 -0
  37. flowyml/plugins/registries/ecr.py +321 -0
  38. flowyml/plugins/registries/gcr.py +313 -0
  39. flowyml/plugins/registry.py +454 -0
  40. flowyml/plugins/stack.py +494 -0
  41. flowyml/plugins/stack_config.py +537 -0
  42. flowyml/plugins/stores/__init__.py +13 -0
  43. flowyml/plugins/stores/gcs.py +460 -0
  44. flowyml/plugins/stores/s3.py +453 -0
  45. flowyml/plugins/trackers/__init__.py +11 -0
  46. flowyml/plugins/trackers/mlflow.py +316 -0
  47. flowyml/plugins/validators/__init__.py +3 -0
  48. flowyml/plugins/validators/deepchecks.py +119 -0
  49. flowyml/registry/__init__.py +2 -1
  50. flowyml/registry/model_environment.py +109 -0
  51. flowyml/registry/model_registry.py +241 -96
  52. flowyml/serving/__init__.py +17 -0
  53. flowyml/serving/model_server.py +628 -0
  54. flowyml/stacks/__init__.py +60 -0
  55. flowyml/stacks/aws.py +93 -0
  56. flowyml/stacks/base.py +62 -0
  57. flowyml/stacks/components.py +12 -0
  58. flowyml/stacks/gcp.py +44 -9
  59. flowyml/stacks/plugins.py +115 -0
  60. flowyml/stacks/registry.py +2 -1
  61. flowyml/storage/sql.py +401 -12
  62. flowyml/tracking/experiment.py +8 -5
  63. flowyml/ui/backend/Dockerfile +87 -16
  64. flowyml/ui/backend/auth.py +12 -2
  65. flowyml/ui/backend/main.py +149 -5
  66. flowyml/ui/backend/routers/ai_context.py +226 -0
  67. flowyml/ui/backend/routers/assets.py +23 -4
  68. flowyml/ui/backend/routers/auth.py +96 -0
  69. flowyml/ui/backend/routers/deployments.py +660 -0
  70. flowyml/ui/backend/routers/model_explorer.py +597 -0
  71. flowyml/ui/backend/routers/plugins.py +103 -51
  72. flowyml/ui/backend/routers/projects.py +91 -8
  73. flowyml/ui/backend/routers/runs.py +20 -1
  74. flowyml/ui/backend/routers/schedules.py +22 -17
  75. flowyml/ui/backend/routers/templates.py +319 -0
  76. flowyml/ui/backend/routers/websocket.py +2 -2
  77. flowyml/ui/frontend/Dockerfile +55 -6
  78. flowyml/ui/frontend/dist/assets/index-B5AsPTSz.css +1 -0
  79. flowyml/ui/frontend/dist/assets/index-dFbZ8wD8.js +753 -0
  80. flowyml/ui/frontend/dist/index.html +2 -2
  81. flowyml/ui/frontend/dist/logo.png +0 -0
  82. flowyml/ui/frontend/nginx.conf +65 -4
  83. flowyml/ui/frontend/package-lock.json +1404 -74
  84. flowyml/ui/frontend/package.json +3 -0
  85. flowyml/ui/frontend/public/logo.png +0 -0
  86. flowyml/ui/frontend/src/App.jsx +10 -7
  87. flowyml/ui/frontend/src/app/auth/Login.jsx +90 -0
  88. flowyml/ui/frontend/src/app/dashboard/page.jsx +8 -8
  89. flowyml/ui/frontend/src/app/deployments/page.jsx +786 -0
  90. flowyml/ui/frontend/src/app/model-explorer/page.jsx +1031 -0
  91. flowyml/ui/frontend/src/app/pipelines/page.jsx +12 -2
  92. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectExperimentsList.jsx +19 -6
  93. flowyml/ui/frontend/src/app/runs/[runId]/page.jsx +36 -24
  94. flowyml/ui/frontend/src/app/runs/page.jsx +8 -2
  95. flowyml/ui/frontend/src/app/settings/page.jsx +267 -253
  96. flowyml/ui/frontend/src/components/AssetDetailsPanel.jsx +29 -7
  97. flowyml/ui/frontend/src/components/Layout.jsx +6 -0
  98. flowyml/ui/frontend/src/components/PipelineGraph.jsx +79 -29
  99. flowyml/ui/frontend/src/components/RunDetailsPanel.jsx +36 -6
  100. flowyml/ui/frontend/src/components/RunMetaPanel.jsx +113 -0
  101. flowyml/ui/frontend/src/components/ai/AIAssistantButton.jsx +71 -0
  102. flowyml/ui/frontend/src/components/ai/AIAssistantPanel.jsx +420 -0
  103. flowyml/ui/frontend/src/components/header/Header.jsx +22 -0
  104. flowyml/ui/frontend/src/components/plugins/PluginManager.jsx +4 -4
  105. flowyml/ui/frontend/src/components/plugins/{ZenMLIntegration.jsx → StackImport.jsx} +38 -12
  106. flowyml/ui/frontend/src/components/sidebar/Sidebar.jsx +36 -13
  107. flowyml/ui/frontend/src/contexts/AIAssistantContext.jsx +245 -0
  108. flowyml/ui/frontend/src/contexts/AuthContext.jsx +108 -0
  109. flowyml/ui/frontend/src/hooks/useAIContext.js +156 -0
  110. flowyml/ui/frontend/src/hooks/useWebGPU.js +54 -0
  111. flowyml/ui/frontend/src/layouts/MainLayout.jsx +6 -0
  112. flowyml/ui/frontend/src/router/index.jsx +47 -20
  113. flowyml/ui/frontend/src/services/pluginService.js +3 -1
  114. flowyml/ui/server_manager.py +5 -5
  115. flowyml/ui/utils.py +157 -39
  116. flowyml/utils/config.py +37 -15
  117. flowyml/utils/model_introspection.py +123 -0
  118. flowyml/utils/observability.py +30 -0
  119. flowyml-1.8.0.dist-info/METADATA +174 -0
  120. {flowyml-1.7.2.dist-info → flowyml-1.8.0.dist-info}/RECORD +123 -65
  121. {flowyml-1.7.2.dist-info → flowyml-1.8.0.dist-info}/WHEEL +1 -1
  122. flowyml/ui/frontend/dist/assets/index-B40RsQDq.css +0 -1
  123. flowyml/ui/frontend/dist/assets/index-CjI0zKCn.js +0 -685
  124. flowyml-1.7.2.dist-info/METADATA +0 -477
  125. {flowyml-1.7.2.dist-info → flowyml-1.8.0.dist-info}/entry_points.txt +0 -0
  126. {flowyml-1.7.2.dist-info → flowyml-1.8.0.dist-info}/licenses/LICENSE +0 -0
flowyml/cli/stack_cli.py CHANGED
@@ -98,23 +98,28 @@ def stack() -> None:
 @click.option("--config", "-c", help="Path to flowyml.yaml")
 def list_stacks(config: str | None) -> None:
     """List all configured stacks."""
-    from flowyml.utils.stack_config import load_config
+    from flowyml.plugins.stack_config import get_stack_manager
+    from flowyml.plugins.config import get_config
 
-    loader = load_config(config)
-    stacks = loader.list_stacks()
+    # Initialize config if path provided
+    if config:
+        get_config(config)
+
+    manager = get_stack_manager()
+    stacks = manager.list_stacks()
 
     if not stacks:
-        click.echo("No stacks configured. Create a flowyml.yaml file.")
+        click.echo("No stacks configured. Create a flowyml.yaml file with 'stacks:' section.")
         return
 
-    default = loader.get_default_stack()
+    active = manager.active_stack_name
 
-    click.echo("\nConfigured stacks:")
+    click.echo("\n📦 Configured stacks:")
     for stack_name in stacks:
-        marker = " (default)" if stack_name == default else ""
-        config_data = loader.get_stack_config(stack_name)
-        stack_type = config_data.get("type", "unknown")
-        click.echo(f" • {stack_name}{marker} [{stack_type}]")
+        marker = " (active)" if stack_name == active else ""
+        stack = manager.get_stack(stack_name)
+        orch_type = stack.orchestrator.get("type", "local") if stack and stack.orchestrator else "local"
+        click.echo(f" • {stack_name}{marker} [orchestrator: {orch_type}]")
     click.echo()
 
 
@@ -123,35 +128,143 @@ def list_stacks(config: str | None) -> None:
 @click.option("--config", "-c", help="Path to flowyml.yaml")
 def show_stack(stack_name: str, config: str | None) -> None:
     """Show detailed stack configuration."""
-    from flowyml.utils.stack_config import load_config
-    import yaml
+    from flowyml.plugins.stack_config import get_stack_manager
+    from flowyml.plugins.config import get_config
 
-    loader = load_config(config)
-    stack_config = loader.get_stack_config(stack_name)
+    # Initialize config if path provided
+    if config:
+        get_config(config)
 
-    if not stack_config:
+    manager = get_stack_manager()
+    stack = manager.get_stack(stack_name)
+
+    if not stack:
         click.echo(f"Stack '{stack_name}' not found", err=True)
+        available = manager.list_stacks()
+        if available:
+            click.echo(f"Available stacks: {', '.join(available)}")
         sys.exit(1)
 
-    click.echo(f"\nStack: {stack_name}")
-    click.echo(yaml.dump(stack_config, default_flow_style=False))
+    is_active = stack_name == manager.active_stack_name
+    status = " (active)" if is_active else ""
+
+    click.echo(f"\n📦 Stack: {stack_name}{status}")
+    click.echo("─" * 40)
+
+    # Show components
+    if stack.orchestrator:
+        click.echo(f"\n🎯 Orchestrator: {stack.orchestrator.get('type', 'unknown')}")
+        for k, v in stack.orchestrator.items():
+            if k != "type":
+                click.echo(f" {k}: {v}")
+
+    if stack.artifact_store:
+        click.echo(f"\n💾 Artifact Store: {stack.artifact_store.get('type', 'unknown')}")
+        for k, v in stack.artifact_store.items():
+            if k != "type":
+                click.echo(f" {k}: {v}")
+
+    if stack.experiment_tracker:
+        click.echo(f"\n📊 Experiment Tracker: {stack.experiment_tracker.get('type', 'unknown')}")
+
+    if stack.model_registry:
+        click.echo(f"\n📝 Model Registry: {stack.model_registry.get('type', 'unknown')}")
+
+    if stack.model_deployer:
+        click.echo(f"\n🚀 Model Deployer: {stack.model_deployer.get('type', 'unknown')}")
+
+    if stack.container_registry:
+        click.echo(f"\n🐳 Container Registry: {stack.container_registry.get('type', 'unknown')}")
+
+    if stack.artifact_routing:
+        click.echo("\n📍 Artifact Routing:")
+        for type_name, rule in stack.artifact_routing.rules.items():
+            click.echo(f" {type_name}: store={rule.store}, register={rule.register}")
+
+    click.echo()
 
 
 @stack.command("set-default")
 @click.argument("stack_name")
 @click.option("--config", "-c", help="Path to flowyml.yaml")
-def set_default_stack(stack_name: str, config: str | None) -> None:
-    """Set the default stack."""
-    from flowyml.stacks.registry import get_registry
+def set_active_stack(stack_name: str, config: str | None) -> None:
+    """Set the active stack (alias for switch)."""
+    switch_stack_impl(stack_name, config)
 
-    registry = get_registry()
 
-    if stack_name not in registry.list_stacks():
-        click.echo(f"Stack '{stack_name}' not found", err=True)
+@stack.command("switch")
+@click.argument("stack_name")
+@click.option("--config", "-c", help="Path to flowyml.yaml")
+@click.option("--validate/--no-validate", default=True, help="Validate stack configuration after switching")
+def switch_stack(stack_name: str, config: str | None, validate: bool) -> None:
+    """Switch to a different stack."""
+    switch_stack_impl(stack_name, config, validate)
+
+
+def switch_stack_impl(stack_name: str, config: str | None, validate: bool = False) -> None:
+    from flowyml.plugins.stack_config import get_stack_manager
+    from flowyml.plugins.config import get_config
+
+    from rich.console import Console
+
+    console = Console()
+
+    # Initialize config if path provided
+    if config:
+        get_config(config)
+
+    manager = get_stack_manager()
+
+    if manager.set_active_stack(stack_name):
+        console.print(f"[bold green]✅ Active stack set to '{stack_name}'[/bold green]")
+
+        if validate:
+            stack = manager.get_stack(stack_name)
+            console.print(f"🔍 Validating stack '{stack_name}' environment...")
+
+            # Check for remote requirements
+            if stack.orchestrator and stack.orchestrator.get("type") != "local":
+                console.print("[yellow]Remote stack detected. Checking Docker configuration...[/yellow]")
+                if not stack.container_registry:
+                    console.print(
+                        "[bold red]❌ Warning: Remote stack usually requires a Container Registry for automatic builds.[/bold red]",
+                    )
+                    console.print(
+                        " Please configure 'container_registry' in your stack or ensure you provide pre-built image URIs.",
+                    )
+    else:
+        console.print(f"[bold red]❌ Stack '{stack_name}' not found[/bold red]")
+        available = manager.list_stacks()
+        if available:
+            console.print(f"Available stacks: {', '.join(available)}")
+        sys.exit(1)
+
+
+@stack.command("register")
+@click.argument("stack_name")
+@click.option("--file", "-f", "config_file", required=True, help="Path to stack config file")
+def register_stack(stack_name: str, config_file: str) -> None:
+    """Register a new stack from a config file."""
+    import yaml
+    from flowyml.plugins.stack_config import get_stack_manager, StackConfig
+
+    config_path = Path(config_file)
+    if not config_path.exists():
+        click.echo(f"Config file not found: {config_file}", err=True)
         sys.exit(1)
 
-    registry.set_active_stack(stack_name)
-    click.echo(f"Set '{stack_name}' as active stack")
+    try:
+        with open(config_path) as f:
+            stack_data = yaml.safe_load(f)
+
+        stack_config = StackConfig.from_dict(stack_name, stack_data)
+        manager = get_stack_manager()
+        manager.register_stack(stack_name, stack_config)
+
+        click.echo(f"✅ Registered stack '{stack_name}' from {config_file}")
+    except Exception as e:
+        click.echo(f"❌ Error registering stack: {e}", err=True)
+        sys.exit(1)
 
 
 @cli.command()
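The switch/register flow above goes through the new plugin-based stack manager rather than the old flowyml.stacks registry. A minimal sketch of driving the same code path programmatically: get_stack_manager, StackConfig.from_dict, register_stack, and set_active_stack all appear in this diff, but the shape of the stack dict (per-component "type" keys) is only inferred from show_stack and should be treated as an assumption, not the documented schema.

# Sketch only: the dict layout below is assumed; see flowyml/plugins/stack_config.py for the real schema.
from flowyml.plugins.stack_config import StackConfig, get_stack_manager

stack_data = {
    "orchestrator": {"type": "vertex_ai", "project": "my-gcp-project"},
    "artifact_store": {"type": "gcs", "bucket": "my-artifacts"},
    "container_registry": {"type": "gcr", "uri": "gcr.io/my-gcp-project"},
}

manager = get_stack_manager()
manager.register_stack("gcp-prod", StackConfig.from_dict("gcp-prod", stack_data))
manager.set_active_stack("gcp-prod")  # what `flowyml stack switch gcp-prod` calls under the hood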
flowyml/core/__init__.py CHANGED
@@ -27,6 +27,15 @@ from flowyml.core.resources import (
     NodeAffinity,
     resources,
 )
+from flowyml.core.types import (
+    Artifact,
+    Model,
+    Dataset,
+    Metrics,
+    Parameters,
+    is_artifact_type,
+    get_artifact_type_name,
+)
 
 __all__ = [
     # Context
@@ -65,4 +74,12 @@ __all__ = [
     "GPUConfig",
     "NodeAffinity",
     "resources",
+    # Artifact Types
+    "Artifact",
+    "Model",
+    "Dataset",
+    "Metrics",
+    "Parameters",
+    "is_artifact_type",
+    "get_artifact_type_name",
 ]
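flowyml.core now re-exports the typed artifact classes from the new flowyml/core/types.py. A short sketch of the intended use, assuming steps annotate their returns with these types so the executor's type-based routing (get_step_return_type / route_artifact in executor.py below) can pick them up; only the imported names are confirmed by this diff, the step-annotation usage is an assumption.

# Sketch only: Model/Dataset come from the flowyml.core re-exports added above.
from flowyml.core import Dataset, Model

def train(data: Dataset) -> Model:  # the return annotation is what routing inspects
    ...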
flowyml/core/executor.py CHANGED
@@ -12,6 +12,7 @@ import ctypes
 import requests
 import os
 import inspect
+import psutil
 
 
 class StopExecutionError(Exception):
@@ -133,10 +134,25 @@ class MonitorThread(threading.Thread):
     def run(self):
         while not self._stop_event.is_set():
             try:
-                # Send heartbeat
+                # Collect metrics
+                process = psutil.Process()
+                with process.oneshot():
+                    cpu_percent = process.cpu_percent(interval=None)
+                    memory_info = process.memory_info()
+                    memory_mb = memory_info.rss / 1024 / 1024
+
+                # Send heartbeat with metrics
                 response = requests.post(
                     f"{self.api_url}/api/runs/{self.run_id}/steps/{self.step_name}/heartbeat",
-                    json={"step_name": self.step_name, "status": "running"},
+                    json={
+                        "step_name": self.step_name,
+                        "status": "running",
+                        "metrics": {
+                            "cpu_percent": cpu_percent,
+                            "memory_mb": memory_mb,
+                            "timestamp": datetime.now().isoformat(),
+                        },
+                    },
                     timeout=2,
                 )
                 if response.status_code == 200:
@@ -194,6 +210,10 @@ class Executor:
         inputs: dict[str, Any],
         context_params: dict[str, Any],
         cache_store: Any | None = None,
+        artifact_store: Any | None = None,
+        run_id: str | None = None,
+        project_name: str = "default",
+        all_outputs: dict[str, Any] | None = None,
     ) -> ExecutionResult:
         """Execute a single step.
 
@@ -202,6 +222,10 @@
             inputs: Input data for the step
             context_params: Parameters from context
             cache_store: Cache store for caching
+            artifact_store: Artifact store for logging results
+            run_id: Unique ID for this pipeline run
+            project_name: Name of the project
+            all_outputs: Collection of all step outputs for conditional evaluation
 
         Returns:
             ExecutionResult with output or error
@@ -249,21 +273,43 @@ class LocalExecutor(Executor):
         artifact_store: Any | None = None,
         run_id: str | None = None,
         project_name: str = "default",
+        all_outputs: dict[str, Any] | None = None,
     ) -> ExecutionResult:
-        """Execute step locally with retry, caching, and materialization."""
+        """Execute step locally with retry, caching, and materialization.
+
+        Args:
+            step: Step to execute
+            inputs: Input data for the step
+            context_params: Parameters from context
+            cache_store: Cache store for caching
+            artifact_store: Artifact store for logging results
+            run_id: Unique ID for this pipeline run
+            project_name: Name of the project
+            all_outputs: Collection of all step outputs for conditional evaluation
+
+        Returns:
+            ExecutionResult with output or error
+        """
         start_time = time.time()
         retries = 0
 
         # Check condition
         if step.condition:
            try:
-                # We pass inputs and context params to condition if it accepts them
-                # For simplicity, let's try to inspect the condition function
-                # or just pass what we can.
-                # A simple approach: pass nothing if it takes no args, or kwargs if it does.
-                # But inspect is safer.
+                # Prepare kwargs for condition: inputs + context_params + all_outputs
                sig = inspect.signature(step.condition)
-                kwargs = {**inputs, **context_params}
+                kwargs = {**context_params}
+
+                # Add all outputs so far (paths like 'data/processed')
+                if all_outputs:
+                    kwargs.update(all_outputs)
+                    # Also flatten dict outputs to allow access to keys like 'quality_score'
+                    for val in all_outputs.values():
+                        if isinstance(val, dict):
+                            kwargs.update({k: v for k, v in val.items() if k not in kwargs})
+
+                # Add direct inputs (might override all_outputs if paths match)
+                kwargs.update(inputs)
 
                 # Filter kwargs to only what condition accepts
                 cond_kwargs = {k: v for k, v in kwargs.items() if k in sig.parameters}
@@ -337,7 +383,12 @@
             )
             monitor_thread.start()
 
-            result = step.func(**kwargs)
+            # Filter kwargs to only what the function accepts
+            func_sig = inspect.signature(step.func)
+            # Handle *args/**kwargs if needed, but for now strict matching is safer for steps
+            filtered_kwargs = {k: v for k, v in kwargs.items() if k in func_sig.parameters}
+
+            result = step.func(**filtered_kwargs)
         except StopExecution:
             duration = time.time() - start_time
             return ExecutionResult(
@@ -364,16 +415,57 @@
             monitor_thread.join()
 
         # Materialize output if artifact store is available
+        # Only upload if the result is an Asset with upload=True
         artifact_uri = None
         if artifact_store and result is not None and run_id:
-            with contextlib.suppress(Exception):
-                artifact_uri = artifact_store.materialize(
-                    obj=result,
-                    name="output",  # Default name for single output
-                    run_id=run_id,
+            # Check if result is an Asset and respects upload flag
+            should_upload = True
+            try:
+                from flowyml.assets.base import Asset
+
+                if isinstance(result, Asset):
+                    should_upload = getattr(result, "upload", False)
+            except ImportError:
+                pass
+
+            if should_upload:
+                with contextlib.suppress(Exception):
+                    artifact_uri = artifact_store.materialize(
+                        obj=result,
+                        name="output",  # Default name for single output
+                        run_id=run_id,
+                        step_name=step.name,
+                        project_name=project_name,
+                    )
+
+        # Type-based artifact routing
+        routing_result = None
+        try:
+            from flowyml.core.routing import route_artifact, should_route
+
+            if should_route(result):
+                # Get return type annotation if available
+                return_type = None
+                try:
+                    from flowyml.core.routing import get_step_return_type
+
+                    return_type = get_step_return_type(step.func)
+                except Exception:
+                    pass
+
+                routing_result = route_artifact(
+                    output=result,
                     step_name=step.name,
+                    run_id=run_id or "local",
+                    return_type=return_type,
                     project_name=project_name,
                 )
+                if routing_result and routing_result.store_uri:
+                    artifact_uri = routing_result.store_uri
+        except ImportError:
+            pass  # Routing module not available
+        except Exception:
+            pass  # Routing failed, continue with normal flow
 
         # Cache result
         if cache_store and step.cache:
@@ -574,33 +666,76 @@ class DistributedExecutor(Executor):
         inputs: dict[str, Any],
         context_params: dict[str, Any],
         cache_store: Any | None = None,
+        artifact_store: Any | None = None,
+        run_id: str | None = None,
+        project_name: str = "default",
+        all_outputs: dict[str, Any] | None = None,
     ) -> ExecutionResult:
-        """Execute step in distributed manner."""
+        """Execute step in distributed manner.
+
+        Args:
+            step: Step to execute
+            inputs: Input data for the step
+            context_params: Parameters from context
+            cache_store: Cache store for caching
+            artifact_store: Artifact store for logging results
+            run_id: Unique ID for this pipeline run
+            project_name: Name of the project
+            all_outputs: Collection of all step outputs for conditional evaluation
+
+        Returns:
+            ExecutionResult with output or error
+        """
         # Placeholder - would use Ray, Dask, or similar
         # For now, fall back to local execution
         local_executor = LocalExecutor()
-        return local_executor.execute_step(step, inputs, context_params, cache_store)
+        return local_executor.execute_step(
+            step,
+            inputs,
+            context_params,
+            cache_store,
+            artifact_store,
+            run_id,
+            project_name,
+            all_outputs,
+        )
 
     def execute_step_group(
         self,
         step_group,  # StepGroup
         inputs: dict[str, Any],
-        context_params: dict[str, Any],
+        context: Any | None = None,  # Context object for per-step injection
+        context_params: dict[str, Any] | None = None,  # Deprecated: use context instead
         cache_store: Any | None = None,
         artifact_store: Any | None = None,
         run_id: str | None = None,
         project_name: str = "default",
     ) -> list[ExecutionResult]:
-        """Execute step group in distributed manner."""
+        """Execute step group in distributed manner.
+
+        Args:
+            step_group: StepGroup to execute
+            inputs: Input data available to the group
+            context: Context object for per-step parameter injection (preferred)
+            context_params: Parameters from context (deprecated, use context instead)
+            cache_store: Cache store for caching
+            artifact_store: Artifact store for materialization
+            run_id: Run identifier
+            project_name: Project name
+
+        Returns:
+            List of ExecutionResult (one per step)
+        """
         # Placeholder - in real implementation, would send entire group to remote worker
         # For now, fall back to local execution
        local_executor = LocalExecutor()
        return local_executor.execute_step_group(
-            step_group,
-            inputs,
-            context_params,
-            cache_store,
-            artifact_store,
-            run_id,
-            project_name,
+            step_group=step_group,
+            inputs=inputs,
+            context=context,
+            context_params=context_params,
+            cache_store=cache_store,
+            artifact_store=artifact_store,
+            run_id=run_id,
+            project_name=project_name,
        )
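Both the condition check and the step call above use the same pattern: pool together context params, accumulated step outputs (flattening dict outputs so keys such as 'quality_score' are reachable), and direct inputs, then keep only the names the target callable declares. A standalone illustration of that inspect-based filtering in plain Python, independent of any flowyml API:

import inspect

def filter_kwargs(func, pool):
    # Keep only the keys that appear in func's signature, as executor.py does.
    sig = inspect.signature(func)
    return {k: v for k, v in pool.items() if k in sig.parameters}

def deploy(quality_score: float, model_path: str) -> str:
    return f"deploying {model_path} (score={quality_score})"

all_outputs = {"train/model": "s3://bucket/model.pkl", "evaluate": {"quality_score": 0.93}}
pool = {"environment": "staging"}              # context params
pool.update(all_outputs)                       # outputs keyed by step/output path
for val in all_outputs.values():               # flatten dict outputs
    if isinstance(val, dict):
        pool.update({k: v for k, v in val.items() if k not in pool})
pool["model_path"] = "s3://bucket/model.pkl"   # direct inputs are applied last

print(deploy(**filter_kwargs(deploy, pool)))   # only quality_score and model_path get through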
flowyml/core/image_builder.py ADDED
@@ -0,0 +1,129 @@
+import subprocess
+from pathlib import Path
+from flowyml.stacks.components import DockerConfig
+
+
+class DockerImageBuilder:
+    """Handles building and pushing Docker images for remote execution."""
+
+    def build_image(self, docker_config: DockerConfig, tag: str) -> str:
+        """Build a Docker image from the configuration.
+
+        Args:
+            docker_config: The Docker configuration.
+            tag: The tag to apply to the built image.
+
+        Returns:
+            The full image tag that was built.
+        """
+        build_context = Path(docker_config.build_context)
+        if not build_context.exists():
+            raise FileNotFoundError(f"Build context not found: {build_context}")
+
+        # Auto-generate Dockerfile if needed
+        dockerfile_path = self._ensure_dockerfile(docker_config, build_context)
+
+        cmd = [
+            "docker",
+            "build",
+            "-t",
+            tag,
+            "-f",
+            str(dockerfile_path),
+            str(build_context),
+        ]
+
+        # Add build args
+        for k, v in docker_config.build_args.items():
+            cmd.extend(["--build-arg", f"{k}={v}"])
+
+        print(f"🐳 Building image: {tag}")
+        try:
+            subprocess.run(cmd, check=True)
+            print("✅ Build successful!")
+            return tag
+        except subprocess.CalledProcessError as e:
+            raise RuntimeError(f"Docker build failed: {e}")
+
+    def _ensure_dockerfile(self, config: DockerConfig, context: Path) -> Path:
+        """Get path to Dockerfile or generate one."""
+        if config.dockerfile:
+            path = context / config.dockerfile
+            if not path.exists():
+                # Try absolute path
+                path = Path(config.dockerfile)
+                if not path.exists():
+                    raise FileNotFoundError(f"Dockerfile not found: {config.dockerfile}")
+            return path
+
+        # Generate temporary Dockerfile
+        generated_path = context / ".flowyml.Dockerfile"
+        content = self._generate_dockerfile_content(config)
+        generated_path.write_text(content)
+        return generated_path
+
+    def _generate_dockerfile_content(self, config: DockerConfig) -> str:
+        """Generate Dockerfile content based on requirements.
+
+        Prioritizes:
+        1. uv.lock -> uv sync
+        2. poetry.lock -> poetry install
+        3. requirements.txt -> uv pip install
+        4. list -> uv pip install
+        """
+        lines = [f"FROM {config.base_image}", "WORKDIR /app"]
+
+        # Install system dependencies if any
+        # lines.append("RUN apt-get update && apt-get install -y ...")
+
+        context_path = Path(config.build_context)
+
+        # 0. Always install uv as it's our preferred installer for pip/reqs too
+        # We install it via the official installer script for speed and isolation
+        lines.append("RUN pip install uv")
+        lines.append("ENV VIRTUAL_ENV=/app/.venv")
+        lines.append('ENV PATH="$VIRTUAL_ENV/bin:$PATH"')
+
+        # 1. Check for uv.lock
+        if (context_path / "uv.lock").exists():
+            print("📦 Detected uv based project")
+            lines.append("COPY pyproject.toml uv.lock ./")
+            # Create venv and sync
+            lines.append("RUN uv venv && uv sync --frozen --no-install-project")
+
+        # 2. Check for poetry.lock
+        elif (context_path / "poetry.lock").exists() or (context_path / "pyproject.toml").exists():
+            print("📦 Detected Poetry based project")
+            lines.append("RUN pip install poetry")
+            lines.append("COPY pyproject.toml poetry.lock* ./")
+            lines.append("RUN poetry config virtualenvs.in-project true")
+            lines.append("RUN poetry install --no-interaction --no-ansi --no-root")
+            # Add local venv to path if poetry created one
+            lines.append('ENV PATH="/app/.venv/bin:$PATH"')
+
+        # 3. Check for requirements.txt (Use uv for speed)
+        elif (context_path / "requirements.txt").exists():
+            print("📦 Detected requirements.txt")
+            lines.append("COPY requirements.txt .")
+            lines.append("RUN uv venv && uv pip install -r requirements.txt")
+
+        # 4. Check for dynamic requirements list (Use uv for speed)
+        elif config.requirements:
+            print("📦 Detected dynamic requirements list")
+            reqs_str = " ".join([f'"{r}"' for r in config.requirements])
+            lines.append(f"RUN uv venv && uv pip install {reqs_str}")
+
+        # Copy source code
+        lines.append("COPY . .")
+
+        # Install project itself if needed (for uv/poetry)
+        if (context_path / "uv.lock").exists():
+            lines.append("RUN uv sync --frozen")
+        elif (context_path / "poetry.lock").exists():
+            lines.append("RUN poetry install --no-interaction --no-ansi")
+
+        # Env vars
+        for k, v in config.env_vars.items():
+            lines.append(f"ENV {k}={v}")
+
+        return "\n".join(lines)
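A usage sketch for DockerImageBuilder. The DockerConfig field names (base_image, build_context, dockerfile, build_args, requirements, env_vars) are taken from the attribute accesses in this file, but its constructor signature is assumed rather than confirmed by the diff, so treat this as illustrative only:

# Assumed DockerConfig constructor; only the attribute names are visible in this diff.
from flowyml.stacks.components import DockerConfig
from flowyml.core.image_builder import DockerImageBuilder

config = DockerConfig(
    base_image="python:3.11-slim",
    build_context=".",
    dockerfile=None,                # let _ensure_dockerfile() generate .flowyml.Dockerfile
    build_args={"PIP_INDEX_URL": "https://pypi.org/simple"},
    requirements=["scikit-learn", "pandas"],
    env_vars={"FLOWYML_ENV": "prod"},
)

builder = DockerImageBuilder()
image_tag = builder.build_image(config, tag="gcr.io/my-project/flowyml-pipeline:1.8.0")
print(image_tag)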