flowyml 1.7.2__py3-none-any.whl → 1.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowyml/assets/base.py +15 -0
- flowyml/assets/metrics.py +5 -0
- flowyml/cli/main.py +709 -0
- flowyml/cli/stack_cli.py +138 -25
- flowyml/core/__init__.py +17 -0
- flowyml/core/executor.py +161 -26
- flowyml/core/image_builder.py +129 -0
- flowyml/core/log_streamer.py +227 -0
- flowyml/core/orchestrator.py +22 -2
- flowyml/core/pipeline.py +34 -10
- flowyml/core/routing.py +558 -0
- flowyml/core/step.py +9 -1
- flowyml/core/step_grouping.py +49 -35
- flowyml/core/types.py +407 -0
- flowyml/monitoring/alerts.py +10 -0
- flowyml/monitoring/notifications.py +104 -25
- flowyml/monitoring/slack_blocks.py +323 -0
- flowyml/plugins/__init__.py +251 -0
- flowyml/plugins/alerters/__init__.py +1 -0
- flowyml/plugins/alerters/slack.py +168 -0
- flowyml/plugins/base.py +752 -0
- flowyml/plugins/config.py +478 -0
- flowyml/plugins/deployers/__init__.py +22 -0
- flowyml/plugins/deployers/gcp_cloud_run.py +200 -0
- flowyml/plugins/deployers/sagemaker.py +306 -0
- flowyml/plugins/deployers/vertex.py +290 -0
- flowyml/plugins/integration.py +369 -0
- flowyml/plugins/manager.py +510 -0
- flowyml/plugins/model_registries/__init__.py +22 -0
- flowyml/plugins/model_registries/mlflow.py +159 -0
- flowyml/plugins/model_registries/sagemaker.py +489 -0
- flowyml/plugins/model_registries/vertex.py +386 -0
- flowyml/plugins/orchestrators/__init__.py +13 -0
- flowyml/plugins/orchestrators/sagemaker.py +443 -0
- flowyml/plugins/orchestrators/vertex_ai.py +461 -0
- flowyml/plugins/registries/__init__.py +13 -0
- flowyml/plugins/registries/ecr.py +321 -0
- flowyml/plugins/registries/gcr.py +313 -0
- flowyml/plugins/registry.py +454 -0
- flowyml/plugins/stack.py +494 -0
- flowyml/plugins/stack_config.py +537 -0
- flowyml/plugins/stores/__init__.py +13 -0
- flowyml/plugins/stores/gcs.py +460 -0
- flowyml/plugins/stores/s3.py +453 -0
- flowyml/plugins/trackers/__init__.py +11 -0
- flowyml/plugins/trackers/mlflow.py +316 -0
- flowyml/plugins/validators/__init__.py +3 -0
- flowyml/plugins/validators/deepchecks.py +119 -0
- flowyml/registry/__init__.py +2 -1
- flowyml/registry/model_environment.py +109 -0
- flowyml/registry/model_registry.py +241 -96
- flowyml/serving/__init__.py +17 -0
- flowyml/serving/model_server.py +628 -0
- flowyml/stacks/__init__.py +60 -0
- flowyml/stacks/aws.py +93 -0
- flowyml/stacks/base.py +62 -0
- flowyml/stacks/components.py +12 -0
- flowyml/stacks/gcp.py +44 -9
- flowyml/stacks/plugins.py +115 -0
- flowyml/stacks/registry.py +2 -1
- flowyml/storage/sql.py +401 -12
- flowyml/tracking/experiment.py +8 -5
- flowyml/ui/backend/Dockerfile +87 -16
- flowyml/ui/backend/auth.py +12 -2
- flowyml/ui/backend/main.py +149 -5
- flowyml/ui/backend/routers/ai_context.py +226 -0
- flowyml/ui/backend/routers/assets.py +23 -4
- flowyml/ui/backend/routers/auth.py +96 -0
- flowyml/ui/backend/routers/deployments.py +660 -0
- flowyml/ui/backend/routers/model_explorer.py +597 -0
- flowyml/ui/backend/routers/plugins.py +103 -51
- flowyml/ui/backend/routers/projects.py +91 -8
- flowyml/ui/backend/routers/runs.py +20 -1
- flowyml/ui/backend/routers/schedules.py +22 -17
- flowyml/ui/backend/routers/templates.py +319 -0
- flowyml/ui/backend/routers/websocket.py +2 -2
- flowyml/ui/frontend/Dockerfile +55 -6
- flowyml/ui/frontend/dist/assets/index-B5AsPTSz.css +1 -0
- flowyml/ui/frontend/dist/assets/index-dFbZ8wD8.js +753 -0
- flowyml/ui/frontend/dist/index.html +2 -2
- flowyml/ui/frontend/dist/logo.png +0 -0
- flowyml/ui/frontend/nginx.conf +65 -4
- flowyml/ui/frontend/package-lock.json +1404 -74
- flowyml/ui/frontend/package.json +3 -0
- flowyml/ui/frontend/public/logo.png +0 -0
- flowyml/ui/frontend/src/App.jsx +10 -7
- flowyml/ui/frontend/src/app/auth/Login.jsx +90 -0
- flowyml/ui/frontend/src/app/dashboard/page.jsx +8 -8
- flowyml/ui/frontend/src/app/deployments/page.jsx +786 -0
- flowyml/ui/frontend/src/app/model-explorer/page.jsx +1031 -0
- flowyml/ui/frontend/src/app/pipelines/page.jsx +12 -2
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectExperimentsList.jsx +19 -6
- flowyml/ui/frontend/src/app/runs/[runId]/page.jsx +36 -24
- flowyml/ui/frontend/src/app/runs/page.jsx +8 -2
- flowyml/ui/frontend/src/app/settings/page.jsx +267 -253
- flowyml/ui/frontend/src/components/AssetDetailsPanel.jsx +29 -7
- flowyml/ui/frontend/src/components/Layout.jsx +6 -0
- flowyml/ui/frontend/src/components/PipelineGraph.jsx +79 -29
- flowyml/ui/frontend/src/components/RunDetailsPanel.jsx +36 -6
- flowyml/ui/frontend/src/components/RunMetaPanel.jsx +113 -0
- flowyml/ui/frontend/src/components/ai/AIAssistantButton.jsx +71 -0
- flowyml/ui/frontend/src/components/ai/AIAssistantPanel.jsx +420 -0
- flowyml/ui/frontend/src/components/header/Header.jsx +22 -0
- flowyml/ui/frontend/src/components/plugins/PluginManager.jsx +4 -4
- flowyml/ui/frontend/src/components/plugins/{ZenMLIntegration.jsx → StackImport.jsx} +38 -12
- flowyml/ui/frontend/src/components/sidebar/Sidebar.jsx +36 -13
- flowyml/ui/frontend/src/contexts/AIAssistantContext.jsx +245 -0
- flowyml/ui/frontend/src/contexts/AuthContext.jsx +108 -0
- flowyml/ui/frontend/src/hooks/useAIContext.js +156 -0
- flowyml/ui/frontend/src/hooks/useWebGPU.js +54 -0
- flowyml/ui/frontend/src/layouts/MainLayout.jsx +6 -0
- flowyml/ui/frontend/src/router/index.jsx +47 -20
- flowyml/ui/frontend/src/services/pluginService.js +3 -1
- flowyml/ui/server_manager.py +5 -5
- flowyml/ui/utils.py +157 -39
- flowyml/utils/config.py +37 -15
- flowyml/utils/model_introspection.py +123 -0
- flowyml/utils/observability.py +30 -0
- flowyml-1.8.0.dist-info/METADATA +174 -0
- {flowyml-1.7.2.dist-info → flowyml-1.8.0.dist-info}/RECORD +123 -65
- {flowyml-1.7.2.dist-info → flowyml-1.8.0.dist-info}/WHEEL +1 -1
- flowyml/ui/frontend/dist/assets/index-B40RsQDq.css +0 -1
- flowyml/ui/frontend/dist/assets/index-CjI0zKCn.js +0 -685
- flowyml-1.7.2.dist-info/METADATA +0 -477
- {flowyml-1.7.2.dist-info → flowyml-1.8.0.dist-info}/entry_points.txt +0 -0
- {flowyml-1.7.2.dist-info → flowyml-1.8.0.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
"""Real-time log streaming utilities.
|
|
2
|
+
|
|
3
|
+
This module provides utilities for capturing and streaming logs from
|
|
4
|
+
pipeline executions to connected WebSocket clients.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import asyncio
|
|
8
|
+
import contextlib
|
|
9
|
+
import logging
|
|
10
|
+
import sys
|
|
11
|
+
import threading
|
|
12
|
+
from collections import deque
|
|
13
|
+
from datetime import datetime
|
|
14
|
+
from io import StringIO
|
|
15
|
+
from collections.abc import Callable
|
|
16
|
+
|
|
17
|
+
from flowyml.ui.backend.routers.websocket import manager as ws_manager
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class LogBuffer:
    """Thread-safe, bounded FIFO buffer of log-entry dicts."""

    def __init__(self, max_size: int = 1000):
        # deque(maxlen=...) silently evicts the oldest entry once full.
        self._lock = threading.Lock()
        self._buffer: deque[dict] = deque(maxlen=max_size)

    def append(self, entry: dict) -> None:
        """Add *entry*, evicting the oldest entry if the buffer is full."""
        with self._lock:
            self._buffer.append(entry)

    def get_recent(self, count: int = 100) -> list[dict]:
        """Return up to *count* of the most recent entries, oldest first."""
        with self._lock:
            snapshot = list(self._buffer)
        return snapshot[-count:]

    def clear(self) -> None:
        """Discard every buffered entry."""
        with self._lock:
            self._buffer.clear()
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class LogStreamer:
    """Captures and streams logs to WebSocket clients.

    This class hooks into Python's logging system and stdout/stderr
    to capture all output and stream it to connected WebSocket clients.

    Example:
        ```python
        streamer = LogStreamer(run_id="abc123")

        # Start capturing
        streamer.start()

        # Your pipeline code runs here...
        print("Processing step 1...")
        logger.info("Step 1 complete")

        # Stop capturing
        streamer.stop()
        ```
    """

    def __init__(self, run_id: str, step_name: str = "__all__", buffer_size: int = 1000):
        """Initialize the streamer.

        Args:
            run_id: Identifier of the pipeline run whose logs are captured.
            step_name: Step the logs are attributed to ("__all__" = whole run).
            buffer_size: Maximum number of entries kept in the in-memory buffer.
        """
        self.run_id = run_id
        self.step_name = step_name
        self.buffer = LogBuffer(max_size=buffer_size)
        self._active = False
        # References to the real streams, held only while our wrappers are installed.
        self._original_stdout = None
        self._original_stderr = None
        self._log_handler = None
        # Event loop used to schedule broadcasts thread-safely; captured in start().
        self._loop: asyncio.AbstractEventLoop | None = None

    def start(self) -> None:
        """Start capturing logs. Idempotent: a second call is a no-op."""
        if self._active:
            return

        self._active = True

        # Capture the running loop (if any) so _broadcast can schedule
        # coroutines from worker threads. With no running loop, entries are
        # still buffered but never broadcast.
        try:
            self._loop = asyncio.get_running_loop()
        except RuntimeError:
            self._loop = None

        # Install stdout/stderr hooks
        self._original_stdout = sys.stdout
        self._original_stderr = sys.stderr
        sys.stdout = _StreamWrapper(self._original_stdout, self._on_output, "stdout")
        sys.stderr = _StreamWrapper(self._original_stderr, self._on_output, "stderr")

        # Install a handler on the root logger so records from every logger
        # that propagates reach us.
        self._log_handler = _StreamLoggingHandler(self._on_log)
        logging.root.addHandler(self._log_handler)

    def stop(self) -> None:
        """Stop capturing and restore the original streams. Idempotent."""
        if not self._active:
            return

        self._active = False

        # Restore stdout/stderr. Compare against None (not truthiness) and
        # clear the saved references so a later stop() can never restore a
        # stale stream captured by a previous start().
        if self._original_stdout is not None:
            sys.stdout = self._original_stdout
            self._original_stdout = None
        if self._original_stderr is not None:
            sys.stderr = self._original_stderr
            self._original_stderr = None

        # Remove logging handler
        if self._log_handler is not None:
            logging.root.removeHandler(self._log_handler)
            self._log_handler = None

        # Drop the loop reference; a fresh one is captured on the next start().
        self._loop = None

    def _on_output(self, text: str, stream: str) -> None:
        """Handle a stdout/stderr write: buffer it and broadcast it.

        Blank/whitespace-only writes (e.g. the newline print() emits
        separately) are ignored.
        """
        if not text.strip():
            return

        entry = {
            "timestamp": datetime.now().isoformat(),
            # stderr output is surfaced as ERROR, everything else as INFO.
            "level": "ERROR" if stream == "stderr" else "INFO",
            "message": text.strip(),
            "source": stream,
        }

        self.buffer.append(entry)
        self._broadcast(entry)

    def _on_log(self, record: logging.LogRecord) -> None:
        """Handle a logging record: buffer it and broadcast it."""
        entry = {
            "timestamp": datetime.now().isoformat(),
            "level": record.levelname,
            "message": record.getMessage(),
            "source": "logging",
            "logger": record.name,
        }

        self.buffer.append(entry)
        self._broadcast(entry)

    def _broadcast(self, entry: dict) -> None:
        """Broadcast a log entry to WebSocket clients.

        Silently does nothing when no event loop was running at start()
        time — the entry remains available via get_history().
        """
        # [:19] trims the ISO timestamp to second precision for display.
        content = f"[{entry['timestamp'][:19]}] [{entry['level']}] {entry['message']}"

        if self._loop and self._loop.is_running():
            # Schedule the coroutine on the loop from whatever thread we are on.
            asyncio.run_coroutine_threadsafe(
                ws_manager.broadcast_log(self.run_id, self.step_name, content),
                self._loop,
            )

    def get_history(self, count: int = 100) -> list[dict]:
        """Return up to *count* recent buffered log entries."""
        return self.buffer.get_recent(count)

    def __enter__(self) -> "LogStreamer":
        """Context manager entry: start capturing."""
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        """Context manager exit: stop capturing."""
        self.stop()
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
class _StreamWrapper:
    """Wrapper for stdout/stderr that tees every write to a callback.

    Writes go to the wrapped stream first; the callback is then invoked
    with the written text and the stream name ("stdout"/"stderr"). Any
    attribute not defined here is proxied to the wrapped stream so the
    wrapper is a drop-in replacement for sys.stdout / sys.stderr.
    """

    def __init__(self, original, callback: Callable, stream_name: str):
        # *original* is the real text stream being wrapped (e.g. sys.stdout).
        self._original = original
        self._callback = callback
        self._stream_name = stream_name

    def write(self, text: str) -> int:
        """Write *text* to the wrapped stream, then notify the callback.

        A failure inside the callback must never break the caller's
        print()/write(), so callback exceptions are suppressed — matching
        the behavior of the logging-handler capture path.
        """
        # Write to original
        result = self._original.write(text)
        # Callback for streaming; never let it propagate into user code.
        with contextlib.suppress(Exception):
            self._callback(text, self._stream_name)
        return result

    def flush(self) -> None:
        """Flush the wrapped stream."""
        self._original.flush()

    def __getattr__(self, name: str):
        """Proxy any other attribute to the wrapped stream."""
        return getattr(self._original, name)
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
class _StreamLoggingHandler(logging.Handler):
    """Logging handler that forwards every record to a callback."""

    def __init__(self, callback: Callable):
        super().__init__()
        self._callback = callback

    def emit(self, record: logging.LogRecord) -> None:
        """Forward *record* to the callback, swallowing any error it raises."""
        try:
            self._callback(record)
        except Exception:
            # A broken sink must never take down the logging system.
            pass
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
# Global registry of active streamers, keyed by run_id.
# Entries are added by create_streamer() and removed by remove_streamer().
# NOTE(review): plain dict with no lock — presumably only mutated from one
# thread; confirm against callers before relying on concurrent access.
_active_streamers: dict[str, LogStreamer] = {}
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
def get_streamer(run_id: str) -> LogStreamer | None:
    """Return the streamer registered for *run_id*, or None if absent."""
    try:
        return _active_streamers[run_id]
    except KeyError:
        return None
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
def create_streamer(run_id: str, step_name: str = "__all__") -> LogStreamer:
    """Create and register a new log streamer for *run_id*.

    If a streamer is already registered for this run, it is stopped first
    so its stdout/stderr and root-logger hooks are uninstalled — silently
    overwriting it would leave those hooks installed with no way to remove
    them. stop() is a no-op on a never-started streamer, so this is safe.

    Args:
        run_id: Run identifier the streamer is registered under.
        step_name: Step to attribute captured logs to ("__all__" = whole run).

    Returns:
        The newly registered LogStreamer (not yet started).
    """
    existing = _active_streamers.get(run_id)
    if existing is not None:
        existing.stop()
    streamer = LogStreamer(run_id, step_name)
    _active_streamers[run_id] = streamer
    return streamer
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
def remove_streamer(run_id: str) -> None:
    """Stop and deregister the streamer for *run_id*; no-op if absent."""
    streamer = _active_streamers.pop(run_id, None)
    if streamer is not None:
        streamer.stop()
|
flowyml/core/orchestrator.py
CHANGED
|
@@ -68,6 +68,22 @@ class LocalOrchestrator(Orchestrator):
|
|
|
68
68
|
result = PipelineResult(run_id, pipeline.name)
|
|
69
69
|
result.attach_configs(resources, docker_config)
|
|
70
70
|
|
|
71
|
+
# Save run as "running" immediately so artifacts can reference run_id
|
|
72
|
+
# This ensures FK constraints are satisfied when artifacts are created during step execution
|
|
73
|
+
if pipeline.metadata_store:
|
|
74
|
+
try:
|
|
75
|
+
initial_metadata = {
|
|
76
|
+
"run_id": run_id,
|
|
77
|
+
"pipeline_name": pipeline.name,
|
|
78
|
+
"status": "running",
|
|
79
|
+
"start_time": result.start_time.isoformat() if result.start_time else None,
|
|
80
|
+
"project": getattr(pipeline, "_project_name", None),
|
|
81
|
+
"context": dict(pipeline.context) if pipeline.context else {},
|
|
82
|
+
}
|
|
83
|
+
pipeline.metadata_store.save_run(run_id, initial_metadata)
|
|
84
|
+
except Exception:
|
|
85
|
+
pass # Silently continue if initial save fails
|
|
86
|
+
|
|
71
87
|
# Run pipeline start hooks
|
|
72
88
|
hooks = get_global_hooks()
|
|
73
89
|
hooks.run_pipeline_start_hooks(pipeline)
|
|
@@ -266,8 +282,8 @@ class LocalOrchestrator(Orchestrator):
|
|
|
266
282
|
pipeline._save_pipeline_definition()
|
|
267
283
|
return result
|
|
268
284
|
|
|
269
|
-
# Get context parameters for this step
|
|
270
|
-
context_params = pipeline.context.
|
|
285
|
+
# Get all context parameters for this step (to allow conditions to access any of them)
|
|
286
|
+
context_params = pipeline.context.to_dict()
|
|
271
287
|
|
|
272
288
|
# Update display - step starting
|
|
273
289
|
if hasattr(pipeline, "_display") and pipeline._display:
|
|
@@ -285,6 +301,7 @@ class LocalOrchestrator(Orchestrator):
|
|
|
285
301
|
artifact_store=pipeline.stack.artifact_store if pipeline.stack else None,
|
|
286
302
|
run_id=run_id,
|
|
287
303
|
project_name=pipeline.name,
|
|
304
|
+
all_outputs=step_outputs,
|
|
288
305
|
)
|
|
289
306
|
|
|
290
307
|
# Run step end hooks
|
|
@@ -781,6 +798,9 @@ class LocalOrchestrator(Orchestrator):
|
|
|
781
798
|
else:
|
|
782
799
|
if step_def.outputs:
|
|
783
800
|
outputs_to_process[step_def.outputs[0]] = step_result.output
|
|
801
|
+
else:
|
|
802
|
+
# Fallback: use step name as output key
|
|
803
|
+
outputs_to_process[step_result.step_name] = step_result.output
|
|
784
804
|
|
|
785
805
|
# Save and update state
|
|
786
806
|
for name, value in outputs_to_process.items():
|
flowyml/core/pipeline.py
CHANGED
|
@@ -70,13 +70,15 @@ class PipelineResult:
|
|
|
70
70
|
"start_time": self.start_time.isoformat(),
|
|
71
71
|
"end_time": self.end_time.isoformat() if self.end_time else None,
|
|
72
72
|
"duration_seconds": self.duration_seconds,
|
|
73
|
-
"
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
73
|
+
"metadata": {
|
|
74
|
+
"resources": self.resource_config.to_dict()
|
|
75
|
+
if hasattr(self.resource_config, "to_dict")
|
|
76
|
+
else self.resource_config,
|
|
77
|
+
"docker": self.docker_config.to_dict()
|
|
78
|
+
if hasattr(self.docker_config, "to_dict")
|
|
79
|
+
else self.docker_config,
|
|
80
|
+
"remote_job_id": self.remote_job_id,
|
|
81
|
+
},
|
|
80
82
|
"steps": {
|
|
81
83
|
name: {
|
|
82
84
|
"success": result.success,
|
|
@@ -243,10 +245,17 @@ class Pipeline:
|
|
|
243
245
|
# Metadata store for UI integration - use same store as UI
|
|
244
246
|
from flowyml.storage.metadata import SQLiteMetadataStore
|
|
245
247
|
from flowyml.utils.config import get_config
|
|
248
|
+
import os
|
|
246
249
|
|
|
247
250
|
config = get_config()
|
|
248
|
-
# Use
|
|
249
|
-
|
|
251
|
+
# Use simple environment variable check to allow connecting to shared DB
|
|
252
|
+
db_url = os.environ.get("FLOWYML_DATABASE_URL")
|
|
253
|
+
|
|
254
|
+
if db_url:
|
|
255
|
+
self.metadata_store = SQLiteMetadataStore(db_url=db_url)
|
|
256
|
+
else:
|
|
257
|
+
# Use the same metadata database path as the UI to ensure visibility
|
|
258
|
+
self.metadata_store = SQLiteMetadataStore(db_path=str(config.metadata_db))
|
|
250
259
|
|
|
251
260
|
if stack:
|
|
252
261
|
self._apply_stack(stack, locked=True)
|
|
@@ -488,6 +497,21 @@ class Pipeline:
|
|
|
488
497
|
resource_config = self._coerce_resource_config(resources)
|
|
489
498
|
docker_cfg = self._coerce_docker_config(docker_config)
|
|
490
499
|
|
|
500
|
+
# Prepare Docker Image if running on a stack
|
|
501
|
+
if self.stack and docker_cfg:
|
|
502
|
+
try:
|
|
503
|
+
# This handles building/pushing or validating the URI
|
|
504
|
+
project_name = getattr(self, "project_name", None)
|
|
505
|
+
docker_cfg.image = self.stack.prepare_docker_image(
|
|
506
|
+
docker_cfg,
|
|
507
|
+
pipeline_name=self.name,
|
|
508
|
+
project_name=project_name,
|
|
509
|
+
)
|
|
510
|
+
except Exception as e:
|
|
511
|
+
# If preparation fails (e.g. build error), we should probably fail the run
|
|
512
|
+
# or at least warn. For now, we'll fail to prevent running with bad config
|
|
513
|
+
raise RuntimeError(f"Failed to prepare docker image: {e}") from e
|
|
514
|
+
|
|
491
515
|
# Initialize display system for beautiful CLI output
|
|
492
516
|
display = None
|
|
493
517
|
try:
|
|
@@ -982,7 +1006,7 @@ class Pipeline:
|
|
|
982
1006
|
# For Dataset assets, include the full data for visualization
|
|
983
1007
|
# This enables histograms and statistics in the UI
|
|
984
1008
|
data_value = None
|
|
985
|
-
if asset_type == "Dataset" and value.data:
|
|
1009
|
+
if asset_type == "Dataset" and value.data is not None:
|
|
986
1010
|
try:
|
|
987
1011
|
# Store full data as JSON-serializable dict
|
|
988
1012
|
data_value = self._sanitize_for_json(value.data)
|