flowyml-1.7.1-py3-none-any.whl → flowyml-1.8.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (137)
  1. flowyml/assets/base.py +15 -0
  2. flowyml/assets/dataset.py +570 -17
  3. flowyml/assets/metrics.py +5 -0
  4. flowyml/assets/model.py +1052 -15
  5. flowyml/cli/main.py +709 -0
  6. flowyml/cli/stack_cli.py +138 -25
  7. flowyml/core/__init__.py +17 -0
  8. flowyml/core/executor.py +231 -37
  9. flowyml/core/image_builder.py +129 -0
  10. flowyml/core/log_streamer.py +227 -0
  11. flowyml/core/orchestrator.py +59 -4
  12. flowyml/core/pipeline.py +65 -13
  13. flowyml/core/routing.py +558 -0
  14. flowyml/core/scheduler.py +88 -5
  15. flowyml/core/step.py +9 -1
  16. flowyml/core/step_grouping.py +49 -35
  17. flowyml/core/types.py +407 -0
  18. flowyml/integrations/keras.py +247 -82
  19. flowyml/monitoring/alerts.py +10 -0
  20. flowyml/monitoring/notifications.py +104 -25
  21. flowyml/monitoring/slack_blocks.py +323 -0
  22. flowyml/plugins/__init__.py +251 -0
  23. flowyml/plugins/alerters/__init__.py +1 -0
  24. flowyml/plugins/alerters/slack.py +168 -0
  25. flowyml/plugins/base.py +752 -0
  26. flowyml/plugins/config.py +478 -0
  27. flowyml/plugins/deployers/__init__.py +22 -0
  28. flowyml/plugins/deployers/gcp_cloud_run.py +200 -0
  29. flowyml/plugins/deployers/sagemaker.py +306 -0
  30. flowyml/plugins/deployers/vertex.py +290 -0
  31. flowyml/plugins/integration.py +369 -0
  32. flowyml/plugins/manager.py +510 -0
  33. flowyml/plugins/model_registries/__init__.py +22 -0
  34. flowyml/plugins/model_registries/mlflow.py +159 -0
  35. flowyml/plugins/model_registries/sagemaker.py +489 -0
  36. flowyml/plugins/model_registries/vertex.py +386 -0
  37. flowyml/plugins/orchestrators/__init__.py +13 -0
  38. flowyml/plugins/orchestrators/sagemaker.py +443 -0
  39. flowyml/plugins/orchestrators/vertex_ai.py +461 -0
  40. flowyml/plugins/registries/__init__.py +13 -0
  41. flowyml/plugins/registries/ecr.py +321 -0
  42. flowyml/plugins/registries/gcr.py +313 -0
  43. flowyml/plugins/registry.py +454 -0
  44. flowyml/plugins/stack.py +494 -0
  45. flowyml/plugins/stack_config.py +537 -0
  46. flowyml/plugins/stores/__init__.py +13 -0
  47. flowyml/plugins/stores/gcs.py +460 -0
  48. flowyml/plugins/stores/s3.py +453 -0
  49. flowyml/plugins/trackers/__init__.py +11 -0
  50. flowyml/plugins/trackers/mlflow.py +316 -0
  51. flowyml/plugins/validators/__init__.py +3 -0
  52. flowyml/plugins/validators/deepchecks.py +119 -0
  53. flowyml/registry/__init__.py +2 -1
  54. flowyml/registry/model_environment.py +109 -0
  55. flowyml/registry/model_registry.py +241 -96
  56. flowyml/serving/__init__.py +17 -0
  57. flowyml/serving/model_server.py +628 -0
  58. flowyml/stacks/__init__.py +60 -0
  59. flowyml/stacks/aws.py +93 -0
  60. flowyml/stacks/base.py +62 -0
  61. flowyml/stacks/components.py +12 -0
  62. flowyml/stacks/gcp.py +44 -9
  63. flowyml/stacks/plugins.py +115 -0
  64. flowyml/stacks/registry.py +2 -1
  65. flowyml/storage/sql.py +401 -12
  66. flowyml/tracking/experiment.py +8 -5
  67. flowyml/ui/backend/Dockerfile +87 -16
  68. flowyml/ui/backend/auth.py +12 -2
  69. flowyml/ui/backend/main.py +149 -5
  70. flowyml/ui/backend/routers/ai_context.py +226 -0
  71. flowyml/ui/backend/routers/assets.py +23 -4
  72. flowyml/ui/backend/routers/auth.py +96 -0
  73. flowyml/ui/backend/routers/deployments.py +660 -0
  74. flowyml/ui/backend/routers/model_explorer.py +597 -0
  75. flowyml/ui/backend/routers/plugins.py +103 -51
  76. flowyml/ui/backend/routers/projects.py +91 -8
  77. flowyml/ui/backend/routers/runs.py +132 -1
  78. flowyml/ui/backend/routers/schedules.py +54 -29
  79. flowyml/ui/backend/routers/templates.py +319 -0
  80. flowyml/ui/backend/routers/websocket.py +2 -2
  81. flowyml/ui/frontend/Dockerfile +55 -6
  82. flowyml/ui/frontend/dist/assets/index-B5AsPTSz.css +1 -0
  83. flowyml/ui/frontend/dist/assets/index-dFbZ8wD8.js +753 -0
  84. flowyml/ui/frontend/dist/index.html +2 -2
  85. flowyml/ui/frontend/dist/logo.png +0 -0
  86. flowyml/ui/frontend/nginx.conf +65 -4
  87. flowyml/ui/frontend/package-lock.json +1415 -74
  88. flowyml/ui/frontend/package.json +4 -0
  89. flowyml/ui/frontend/public/logo.png +0 -0
  90. flowyml/ui/frontend/src/App.jsx +10 -7
  91. flowyml/ui/frontend/src/app/assets/page.jsx +890 -321
  92. flowyml/ui/frontend/src/app/auth/Login.jsx +90 -0
  93. flowyml/ui/frontend/src/app/dashboard/page.jsx +8 -8
  94. flowyml/ui/frontend/src/app/deployments/page.jsx +786 -0
  95. flowyml/ui/frontend/src/app/model-explorer/page.jsx +1031 -0
  96. flowyml/ui/frontend/src/app/pipelines/page.jsx +12 -2
  97. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectExperimentsList.jsx +19 -6
  98. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectMetricsPanel.jsx +1 -1
  99. flowyml/ui/frontend/src/app/runs/[runId]/page.jsx +601 -101
  100. flowyml/ui/frontend/src/app/runs/page.jsx +8 -2
  101. flowyml/ui/frontend/src/app/settings/page.jsx +267 -253
  102. flowyml/ui/frontend/src/components/ArtifactViewer.jsx +62 -2
  103. flowyml/ui/frontend/src/components/AssetDetailsPanel.jsx +424 -29
  104. flowyml/ui/frontend/src/components/AssetTreeHierarchy.jsx +119 -11
  105. flowyml/ui/frontend/src/components/DatasetViewer.jsx +753 -0
  106. flowyml/ui/frontend/src/components/Layout.jsx +6 -0
  107. flowyml/ui/frontend/src/components/PipelineGraph.jsx +79 -29
  108. flowyml/ui/frontend/src/components/RunDetailsPanel.jsx +36 -6
  109. flowyml/ui/frontend/src/components/RunMetaPanel.jsx +113 -0
  110. flowyml/ui/frontend/src/components/TrainingHistoryChart.jsx +514 -0
  111. flowyml/ui/frontend/src/components/TrainingMetricsPanel.jsx +175 -0
  112. flowyml/ui/frontend/src/components/ai/AIAssistantButton.jsx +71 -0
  113. flowyml/ui/frontend/src/components/ai/AIAssistantPanel.jsx +420 -0
  114. flowyml/ui/frontend/src/components/header/Header.jsx +22 -0
  115. flowyml/ui/frontend/src/components/plugins/PluginManager.jsx +4 -4
  116. flowyml/ui/frontend/src/components/plugins/{ZenMLIntegration.jsx → StackImport.jsx} +38 -12
  117. flowyml/ui/frontend/src/components/sidebar/Sidebar.jsx +36 -13
  118. flowyml/ui/frontend/src/contexts/AIAssistantContext.jsx +245 -0
  119. flowyml/ui/frontend/src/contexts/AuthContext.jsx +108 -0
  120. flowyml/ui/frontend/src/hooks/useAIContext.js +156 -0
  121. flowyml/ui/frontend/src/hooks/useWebGPU.js +54 -0
  122. flowyml/ui/frontend/src/layouts/MainLayout.jsx +6 -0
  123. flowyml/ui/frontend/src/router/index.jsx +47 -20
  124. flowyml/ui/frontend/src/services/pluginService.js +3 -1
  125. flowyml/ui/server_manager.py +5 -5
  126. flowyml/ui/utils.py +157 -39
  127. flowyml/utils/config.py +37 -15
  128. flowyml/utils/model_introspection.py +123 -0
  129. flowyml/utils/observability.py +30 -0
  130. flowyml-1.8.0.dist-info/METADATA +174 -0
  131. {flowyml-1.7.1.dist-info → flowyml-1.8.0.dist-info}/RECORD +134 -73
  132. {flowyml-1.7.1.dist-info → flowyml-1.8.0.dist-info}/WHEEL +1 -1
  133. flowyml/ui/frontend/dist/assets/index-BqDQvp63.js +0 -630
  134. flowyml/ui/frontend/dist/assets/index-By4trVyv.css +0 -1
  135. flowyml-1.7.1.dist-info/METADATA +0 -477
  136. {flowyml-1.7.1.dist-info → flowyml-1.8.0.dist-info}/entry_points.txt +0 -0
  137. {flowyml-1.7.1.dist-info → flowyml-1.8.0.dist-info}/licenses/LICENSE +0 -0
flowyml/core/image_builder.py ADDED
@@ -0,0 +1,129 @@
+ import subprocess
+ from pathlib import Path
+ from flowyml.stacks.components import DockerConfig
+
+
+ class DockerImageBuilder:
+     """Handles building and pushing Docker images for remote execution."""
+
+     def build_image(self, docker_config: DockerConfig, tag: str) -> str:
+         """Build a Docker image from the configuration.
+
+         Args:
+             docker_config: The Docker configuration.
+             tag: The tag to apply to the built image.
+
+         Returns:
+             The full image tag that was built.
+         """
+         build_context = Path(docker_config.build_context)
+         if not build_context.exists():
+             raise FileNotFoundError(f"Build context not found: {build_context}")
+
+         # Auto-generate Dockerfile if needed
+         dockerfile_path = self._ensure_dockerfile(docker_config, build_context)
+
+         cmd = [
+             "docker",
+             "build",
+             "-t",
+             tag,
+             "-f",
+             str(dockerfile_path),
+             str(build_context),
+         ]
+
+         # Add build args
+         for k, v in docker_config.build_args.items():
+             cmd.extend(["--build-arg", f"{k}={v}"])
+
+         print(f"🐳 Building image: {tag}")
+         try:
+             subprocess.run(cmd, check=True)
+             print("✅ Build successful!")
+             return tag
+         except subprocess.CalledProcessError as e:
+             raise RuntimeError(f"Docker build failed: {e}")
+
+     def _ensure_dockerfile(self, config: DockerConfig, context: Path) -> Path:
+         """Get path to Dockerfile or generate one."""
+         if config.dockerfile:
+             path = context / config.dockerfile
+             if not path.exists():
+                 # Try absolute path
+                 path = Path(config.dockerfile)
+                 if not path.exists():
+                     raise FileNotFoundError(f"Dockerfile not found: {config.dockerfile}")
+             return path
+
+         # Generate temporary Dockerfile
+         generated_path = context / ".flowyml.Dockerfile"
+         content = self._generate_dockerfile_content(config)
+         generated_path.write_text(content)
+         return generated_path
+
+     def _generate_dockerfile_content(self, config: DockerConfig) -> str:
+         """Generate Dockerfile content based on requirements.
+
+         Prioritizes:
+         1. uv.lock -> uv sync
+         2. poetry.lock -> poetry install
+         3. requirements.txt -> uv pip install
+         4. list -> uv pip install
+         """
+         lines = [f"FROM {config.base_image}", "WORKDIR /app"]
+
+         # Install system dependencies if any
+         # lines.append("RUN apt-get update && apt-get install -y ...")
+
+         context_path = Path(config.build_context)
+
+         # 0. Always install uv as it's our preferred installer for pip/reqs too
+         # We install it via the official installer script for speed and isolation
+         lines.append("RUN pip install uv")
+         lines.append("ENV VIRTUAL_ENV=/app/.venv")
+         lines.append('ENV PATH="$VIRTUAL_ENV/bin:$PATH"')
+
+         # 1. Check for uv.lock
+         if (context_path / "uv.lock").exists():
+             print("📦 Detected uv based project")
+             lines.append("COPY pyproject.toml uv.lock ./")
+             # Create venv and sync
+             lines.append("RUN uv venv && uv sync --frozen --no-install-project")
+
+         # 2. Check for poetry.lock
+         elif (context_path / "poetry.lock").exists() or (context_path / "pyproject.toml").exists():
+             print("📦 Detected Poetry based project")
+             lines.append("RUN pip install poetry")
+             lines.append("COPY pyproject.toml poetry.lock* ./")
+             lines.append("RUN poetry config virtualenvs.in-project true")
+             lines.append("RUN poetry install --no-interaction --no-ansi --no-root")
+             # Add local venv to path if poetry created one
+             lines.append('ENV PATH="/app/.venv/bin:$PATH"')
+
+         # 3. Check for requirements.txt (Use uv for speed)
+         elif (context_path / "requirements.txt").exists():
+             print("📦 Detected requirements.txt")
+             lines.append("COPY requirements.txt .")
+             lines.append("RUN uv venv && uv pip install -r requirements.txt")
+
+         # 4. Check for dynamic requirements list (Use uv for speed)
+         elif config.requirements:
+             print("📦 Detected dynamic requirements list")
+             reqs_str = " ".join([f'"{r}"' for r in config.requirements])
+             lines.append(f"RUN uv venv && uv pip install {reqs_str}")
+
+         # Copy source code
+         lines.append("COPY . .")
+
+         # Install project itself if needed (for uv/poetry)
+         if (context_path / "uv.lock").exists():
+             lines.append("RUN uv sync --frozen")
+         elif (context_path / "poetry.lock").exists():
+             lines.append("RUN poetry install --no-interaction --no-ansi")
+
+         # Env vars
+         for k, v in config.env_vars.items():
+             lines.append(f"ENV {k}={v}")
+
+         return "\n".join(lines)
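
The builder above picks its dependency-install strategy by probing the build context. A minimal usage sketch follows; the `DockerConfig` constructor arguments are an assumption inferred from the attributes the builder reads (`build_context`, `base_image`, `build_args`), so the real signature may differ:

```python
# Sketch only: exercising DockerImageBuilder against a local project directory.
from flowyml.core.image_builder import DockerImageBuilder
from flowyml.stacks.components import DockerConfig

config = DockerConfig(                 # constructor kwargs assumed, not confirmed by this diff
    build_context=".",                 # scanned for uv.lock / poetry.lock / requirements.txt
    base_image="python:3.11-slim",     # becomes the FROM line of the generated Dockerfile
    build_args={"APP_ENV": "prod"},    # forwarded as --build-arg APP_ENV=prod
)

builder = DockerImageBuilder()
# Runs `docker build -t <tag> -f <dockerfile> <context>`; raises RuntimeError on failure.
image_tag = builder.build_image(config, tag="registry.example.com/my-pipeline:latest")
```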
flowyml/core/log_streamer.py ADDED
@@ -0,0 +1,227 @@
+ """Real-time log streaming utilities.
+
+ This module provides utilities for capturing and streaming logs from
+ pipeline executions to connected WebSocket clients.
+ """
+
+ import asyncio
+ import contextlib
+ import logging
+ import sys
+ import threading
+ from collections import deque
+ from datetime import datetime
+ from io import StringIO
+ from collections.abc import Callable
+
+ from flowyml.ui.backend.routers.websocket import manager as ws_manager
+
+
+ class LogBuffer:
+     """Thread-safe log buffer with configurable max size."""
+
+     def __init__(self, max_size: int = 1000):
+         self._buffer: deque[dict] = deque(maxlen=max_size)
+         self._lock = threading.Lock()
+
+     def append(self, entry: dict) -> None:
+         """Append a log entry to the buffer."""
+         with self._lock:
+             self._buffer.append(entry)
+
+     def get_recent(self, count: int = 100) -> list[dict]:
+         """Get recent log entries."""
+         with self._lock:
+             return list(self._buffer)[-count:]
+
+     def clear(self) -> None:
+         """Clear the buffer."""
+         with self._lock:
+             self._buffer.clear()
+
+
+ class LogStreamer:
+     """Captures and streams logs to WebSocket clients.
+
+     This class hooks into Python's logging system and stdout/stderr
+     to capture all output and stream it to connected WebSocket clients.
+
+     Example:
+         ```python
+         streamer = LogStreamer(run_id="abc123")
+
+         # Start capturing
+         streamer.start()
+
+         # Your pipeline code runs here...
+         print("Processing step 1...")
+         logger.info("Step 1 complete")
+
+         # Stop capturing
+         streamer.stop()
+         ```
+     """
+
+     def __init__(self, run_id: str, step_name: str = "__all__", buffer_size: int = 1000):
+         self.run_id = run_id
+         self.step_name = step_name
+         self.buffer = LogBuffer(max_size=buffer_size)
+         self._active = False
+         self._original_stdout = None
+         self._original_stderr = None
+         self._log_handler = None
+         self._loop: asyncio.AbstractEventLoop | None = None
+
+     def start(self) -> None:
+         """Start capturing logs."""
+         if self._active:
+             return
+
+         self._active = True
+
+         # Try to get the event loop
+         try:
+             self._loop = asyncio.get_running_loop()
+         except RuntimeError:
+             self._loop = None
+
+         # Install stdout/stderr hooks
+         self._original_stdout = sys.stdout
+         self._original_stderr = sys.stderr
+         sys.stdout = _StreamWrapper(self._original_stdout, self._on_output, "stdout")
+         sys.stderr = _StreamWrapper(self._original_stderr, self._on_output, "stderr")
+
+         # Install logging handler
+         self._log_handler = _StreamLoggingHandler(self._on_log)
+         logging.root.addHandler(self._log_handler)
+
+     def stop(self) -> None:
+         """Stop capturing logs."""
+         if not self._active:
+             return
+
+         self._active = False
+
+         # Restore stdout/stderr
+         if self._original_stdout:
+             sys.stdout = self._original_stdout
+         if self._original_stderr:
+             sys.stderr = self._original_stderr
+
+         # Remove logging handler
+         if self._log_handler:
+             logging.root.removeHandler(self._log_handler)
+             self._log_handler = None
+
+     def _on_output(self, text: str, stream: str) -> None:
+         """Handle stdout/stderr output."""
+         if not text.strip():
+             return
+
+         entry = {
+             "timestamp": datetime.now().isoformat(),
+             "level": "ERROR" if stream == "stderr" else "INFO",
+             "message": text.strip(),
+             "source": stream,
+         }
+
+         self.buffer.append(entry)
+         self._broadcast(entry)
+
+     def _on_log(self, record: logging.LogRecord) -> None:
+         """Handle log record."""
+         entry = {
+             "timestamp": datetime.now().isoformat(),
+             "level": record.levelname,
+             "message": record.getMessage(),
+             "source": "logging",
+             "logger": record.name,
+         }
+
+         self.buffer.append(entry)
+         self._broadcast(entry)
+
+     def _broadcast(self, entry: dict) -> None:
+         """Broadcast log entry to WebSocket clients."""
+         content = f"[{entry['timestamp'][:19]}] [{entry['level']}] {entry['message']}"
+
+         if self._loop and self._loop.is_running():
+             # Schedule the coroutine on the event loop
+             asyncio.run_coroutine_threadsafe(
+                 ws_manager.broadcast_log(self.run_id, self.step_name, content),
+                 self._loop,
+             )
+
+     def get_history(self, count: int = 100) -> list[dict]:
+         """Get recent log history."""
+         return self.buffer.get_recent(count)
+
+     def __enter__(self) -> "LogStreamer":
+         """Context manager entry."""
+         self.start()
+         return self
+
+     def __exit__(self, exc_type, exc_val, exc_tb) -> None:
+         """Context manager exit."""
+         self.stop()
+
+
+ class _StreamWrapper:
+     """Wrapper for stdout/stderr to capture output."""
+
+     def __init__(self, original: StringIO, callback: Callable, stream_name: str):
+         self._original = original
+         self._callback = callback
+         self._stream_name = stream_name
+
+     def write(self, text: str) -> int:
+         """Write to the stream and callback."""
+         # Write to original
+         result = self._original.write(text)
+         # Callback for streaming
+         self._callback(text, self._stream_name)
+         return result
+
+     def flush(self) -> None:
+         """Flush the stream."""
+         self._original.flush()
+
+     def __getattr__(self, name: str):
+         """Proxy other attributes to original stream."""
+         return getattr(self._original, name)
+
+
+ class _StreamLoggingHandler(logging.Handler):
+     """Logging handler that calls a callback for each record."""
+
+     def __init__(self, callback: Callable):
+         super().__init__()
+         self._callback = callback
+
+     def emit(self, record: logging.LogRecord) -> None:
+         """Emit a log record."""
+         with contextlib.suppress(Exception):
+             self._callback(record)
+
+
+ # Global registry of active streamers
+ _active_streamers: dict[str, LogStreamer] = {}
+
+
+ def get_streamer(run_id: str) -> LogStreamer | None:
+     """Get an active streamer by run ID."""
+     return _active_streamers.get(run_id)
+
+
+ def create_streamer(run_id: str, step_name: str = "__all__") -> LogStreamer:
+     """Create and register a new log streamer."""
+     streamer = LogStreamer(run_id, step_name)
+     _active_streamers[run_id] = streamer
+     return streamer
+
+
+ def remove_streamer(run_id: str) -> None:
+     """Remove a streamer from the registry."""
+     if run_id in _active_streamers:
+         _active_streamers[run_id].stop()
+         del _active_streamers[run_id]
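
The module-level registry keys streamers by run ID so other components (such as the WebSocket router) can look them up. A short usage sketch built only from the functions defined above:

```python
# Capture a run's output, inspect the buffer, then deregister.
from flowyml.core.log_streamer import create_streamer, get_streamer, remove_streamer

streamer = create_streamer(run_id="run-42")
with streamer:                     # __enter__ calls start(), __exit__ calls stop()
    print("training epoch 1...")  # mirrored to the real stdout and buffered

for entry in streamer.get_history(count=10):
    print(entry["level"], entry["message"])  # e.g. "INFO training epoch 1..."

assert get_streamer("run-42") is streamer
remove_streamer("run-42")          # calls stop() again (a no-op here) and deregisters
```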
flowyml/core/orchestrator.py CHANGED
@@ -68,6 +68,22 @@ class LocalOrchestrator(Orchestrator):
  result = PipelineResult(run_id, pipeline.name)
  result.attach_configs(resources, docker_config)
 
+ # Save run as "running" immediately so artifacts can reference run_id
+ # This ensures FK constraints are satisfied when artifacts are created during step execution
+ if pipeline.metadata_store:
+     try:
+         initial_metadata = {
+             "run_id": run_id,
+             "pipeline_name": pipeline.name,
+             "status": "running",
+             "start_time": result.start_time.isoformat() if result.start_time else None,
+             "project": getattr(pipeline, "_project_name", None),
+             "context": dict(pipeline.context) if pipeline.context else {},
+         }
+         pipeline.metadata_store.save_run(run_id, initial_metadata)
+     except Exception:
+         pass  # Silently continue if initial save fails
+
  # Run pipeline start hooks
  hooks = get_global_hooks()
  hooks.run_pipeline_start_hooks(pipeline)
@@ -266,8 +282,8 @@ class LocalOrchestrator(Orchestrator):
      pipeline._save_pipeline_definition()
      return result
 
- # Get context parameters for this step
- context_params = pipeline.context.inject_params(step.func)
+ # Get all context parameters for this step (to allow conditions to access any of them)
+ context_params = pipeline.context.to_dict()
 
  # Update display - step starting
  if hasattr(pipeline, "_display") and pipeline._display:
@@ -285,6 +301,7 @@ class LocalOrchestrator(Orchestrator):
      artifact_store=pipeline.stack.artifact_store if pipeline.stack else None,
      run_id=run_id,
      project_name=pipeline.name,
+     all_outputs=step_outputs,
  )
 
  # Run step end hooks
@@ -390,6 +407,7 @@ class LocalOrchestrator(Orchestrator):
  """Context object for conditional evaluation.
 
  Provides access to step outputs via ctx.steps['step_name'].outputs['output_name']
+ and context parameters via ctx.params
  """
 
  def __init__(self, result: "PipelineResult", pipeline: "Pipeline"):
@@ -397,6 +415,13 @@ class LocalOrchestrator(Orchestrator):
      self.pipeline = pipeline
      self._steps_cache = None
 
+ @property
+ def params(self):
+     """Get pipeline context parameters as a dictionary."""
+     if self.pipeline.context:
+         return self.pipeline.context._params
+     return {}
+
  @property
  def steps(self):
      """Lazy-load steps dictionary with outputs."""
@@ -627,6 +652,12 @@ class LocalOrchestrator(Orchestrator):
  if step_obj.name not in result.step_results:
      # Execute the selected step
      # The check above prevents re-execution of the same step
+     # If step has inputs defined, copy them to the step object for proper input mapping
+     if hasattr(selected_step, "_step_inputs") and selected_step._step_inputs:
+         step_obj.inputs = selected_step._step_inputs
+     elif hasattr(selected_step, "inputs"):
+         step_obj.inputs = selected_step.inputs or []
+
      self._execute_conditional_step(
          pipeline,
          step_obj,
@@ -659,10 +690,31 @@ class LocalOrchestrator(Orchestrator):
  step_inputs = {}
  sig = inspect.signature(step.func)
  params = [p for p in sig.parameters.values() if p.name not in ("self", "cls")]
-
+ assigned_params = set()
+
+ # First, try to map from declared inputs (like "model/trained" -> function param)
+ if step.inputs:
+     for i, input_name in enumerate(step.inputs):
+         if input_name not in step_outputs:
+             continue
+         val = step_outputs[input_name]
+         # Try to match input name directly to a parameter
+         param_match = next((p for p in params if p.name == input_name), None)
+         if param_match:
+             step_inputs[param_match.name] = val
+             assigned_params.add(param_match.name)
+         elif i < len(params):
+             # Positional fallback - use parameter at same position
+             target_param = params[i]
+             if target_param.name not in assigned_params:
+                 step_inputs[target_param.name] = val
+                 assigned_params.add(target_param.name)
+
+ # Then, try direct parameter name matching from step_outputs
  for param in params:
-     if param.name in step_outputs:
+     if param.name not in assigned_params and param.name in step_outputs:
          step_inputs[param.name] = step_outputs[param.name]
+         assigned_params.add(param.name)
 
  # Get context parameters
  context_params = pipeline.context.inject_params(step.func)
@@ -746,6 +798,9 @@ class LocalOrchestrator(Orchestrator):
  else:
      if step_def.outputs:
          outputs_to_process[step_def.outputs[0]] = step_result.output
+     else:
+         # Fallback: use step name as output key
+         outputs_to_process[step_result.step_name] = step_result.output
 
  # Save and update state
  for name, value in outputs_to_process.items():
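
The binding order introduced above is: declared inputs match function parameters by exact name first, then fall back to position, and any still-unassigned parameters match outputs by name. A standalone sketch of that rule, lifted from the hunk:

```python
import inspect

def map_inputs(func, declared_inputs, step_outputs):
    """Name match first, positional fallback second, direct name matching last."""
    params = [p for p in inspect.signature(func).parameters.values()
              if p.name not in ("self", "cls")]
    mapped, assigned = {}, set()
    for i, input_name in enumerate(declared_inputs):
        if input_name not in step_outputs:
            continue
        match = next((p for p in params if p.name == input_name), None)
        if match:
            mapped[match.name] = step_outputs[input_name]
            assigned.add(match.name)
        elif i < len(params) and params[i].name not in assigned:
            mapped[params[i].name] = step_outputs[input_name]
            assigned.add(params[i].name)
    for p in params:  # direct name matching for whatever is left
        if p.name not in assigned and p.name in step_outputs:
            mapped[p.name] = step_outputs[p.name]
    return mapped

def evaluate(model, threshold=0.5):
    return model, threshold

# "model/trained" names no parameter, so it binds positionally to `model`.
print(map_inputs(evaluate, ["model/trained"], {"model/trained": "weights.pt"}))
# -> {'model': 'weights.pt'}
```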
flowyml/core/pipeline.py CHANGED
@@ -70,13 +70,15 @@ class PipelineResult:
  "start_time": self.start_time.isoformat(),
  "end_time": self.end_time.isoformat() if self.end_time else None,
  "duration_seconds": self.duration_seconds,
- "resource_config": self.resource_config.to_dict()
- if hasattr(self.resource_config, "to_dict")
- else self.resource_config,
- "docker_config": self.docker_config.to_dict()
- if hasattr(self.docker_config, "to_dict")
- else self.docker_config,
- "remote_job_id": self.remote_job_id,
+ "metadata": {
+     "resources": self.resource_config.to_dict()
+     if hasattr(self.resource_config, "to_dict")
+     else self.resource_config,
+     "docker": self.docker_config.to_dict()
+     if hasattr(self.docker_config, "to_dict")
+     else self.docker_config,
+     "remote_job_id": self.remote_job_id,
+ },
  "steps": {
      name: {
          "success": result.success,
@@ -243,10 +245,17 @@ class Pipeline:
  # Metadata store for UI integration - use same store as UI
  from flowyml.storage.metadata import SQLiteMetadataStore
  from flowyml.utils.config import get_config
+ import os
 
  config = get_config()
- # Use the same metadata database path as the UI to ensure visibility
- self.metadata_store = SQLiteMetadataStore(db_path=str(config.metadata_db))
+ # Use simple environment variable check to allow connecting to shared DB
+ db_url = os.environ.get("FLOWYML_DATABASE_URL")
+
+ if db_url:
+     self.metadata_store = SQLiteMetadataStore(db_url=db_url)
+ else:
+     # Use the same metadata database path as the UI to ensure visibility
+     self.metadata_store = SQLiteMetadataStore(db_path=str(config.metadata_db))
 
  if stack:
      self._apply_stack(stack, locked=True)
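
This makes the metadata store location overridable per process. A minimal sketch, assuming a SQLAlchemy-style URL (the diff only shows the raw string being passed through as `db_url`):

```python
import os

# Assumed URL format; set before constructing any Pipeline.
os.environ["FLOWYML_DATABASE_URL"] = "sqlite:////shared/flowyml/metadata.db"

# Pipelines constructed after this point use SQLiteMetadataStore(db_url=...)
# instead of the default db_path from get_config().metadata_db.
```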
@@ -488,6 +497,21 @@ class Pipeline:
  resource_config = self._coerce_resource_config(resources)
  docker_cfg = self._coerce_docker_config(docker_config)
 
+ # Prepare Docker Image if running on a stack
+ if self.stack and docker_cfg:
+     try:
+         # This handles building/pushing or validating the URI
+         project_name = getattr(self, "project_name", None)
+         docker_cfg.image = self.stack.prepare_docker_image(
+             docker_cfg,
+             pipeline_name=self.name,
+             project_name=project_name,
+         )
+     except Exception as e:
+         # If preparation fails (e.g. build error), we should probably fail the run
+         # or at least warn. For now, we'll fail to prevent running with bad config
+         raise RuntimeError(f"Failed to prepare docker image: {e}") from e
+
  # Initialize display system for beautiful CLI output
  display = None
  try:
@@ -972,6 +996,26 @@ class Pipeline:
  if is_asset:
      # Handle flowyml Asset
      asset_type = value.__class__.__name__
+     # Get properties
+     props = (
+         self._sanitize_for_json(value.metadata.properties)
+         if hasattr(value.metadata, "properties")
+         else {}
+     )
+
+     # For Dataset assets, include the full data for visualization
+     # This enables histograms and statistics in the UI
+     data_value = None
+     if asset_type == "Dataset" and value.data is not None:
+         try:
+             # Store full data as JSON-serializable dict
+             data_value = self._sanitize_for_json(value.data)
+             props["_full_data"] = data_value
+         except Exception:
+             data_value = str(value.data)[:1000]
+     else:
+         data_value = str(value.data)[:1000] if value.data else None
+
      artifact_metadata = {
          "artifact_id": artifact_id,
          "name": value.name,
@@ -979,12 +1023,20 @@ class Pipeline:
      "run_id": result.run_id,
      "step": step_name,
      "path": None,
-     "value": str(value.data)[:1000] if value.data else None,
+     "value": data_value if isinstance(data_value, str) else None,
      "created_at": datetime.now().isoformat(),
-     "properties": self._sanitize_for_json(value.metadata.properties)
-     if hasattr(value.metadata, "properties")
-     else {},
+     "properties": props,
  }
+
+ # For Dataset, also include the data directly in the artifact
+ if asset_type == "Dataset" and isinstance(data_value, dict):
+     artifact_metadata["data"] = data_value
+
+ # Include training_history if present (for Model assets with Keras training)
+ # This enables interactive training charts in the UI
+ if hasattr(value, "training_history") and value.training_history:
+     artifact_metadata["training_history"] = value.training_history
+
  self.metadata_store.save_artifact(artifact_id, artifact_metadata)
 
  # Special handling for Metrics asset
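
Net effect for a `Dataset` asset: the sanitized data is stored both under `properties["_full_data"]` and a top-level `data` key, while `Model` assets may additionally carry `training_history`. An illustrative record (values invented):

```python
# Illustrative artifact_metadata for a Dataset asset after this change; values are made up.
{
    "artifact_id": "art-001",
    "name": "training_data",
    "type": "Dataset",
    "run_id": "run-42",
    "step": "load_data",
    "path": None,
    "value": None,  # data_value is a dict here, so "value" stays None
    "created_at": "2024-01-01T12:00:00",
    "properties": {"_full_data": {"x": [1, 2, 3]}},
    "data": {"x": [1, 2, 3]},  # duplicated for direct UI access
}
```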