flowyml-1.4.0-py3-none-any.whl → flowyml-1.6.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. flowyml/__init__.py +2 -1
  2. flowyml/assets/featureset.py +30 -5
  3. flowyml/assets/metrics.py +47 -4
  4. flowyml/cli/main.py +21 -0
  5. flowyml/cli/models.py +444 -0
  6. flowyml/cli/rich_utils.py +95 -0
  7. flowyml/core/checkpoint.py +6 -1
  8. flowyml/core/conditional.py +104 -0
  9. flowyml/core/display.py +525 -0
  10. flowyml/core/execution_status.py +1 -0
  11. flowyml/core/executor.py +201 -8
  12. flowyml/core/orchestrator.py +500 -7
  13. flowyml/core/pipeline.py +301 -11
  14. flowyml/core/project.py +4 -1
  15. flowyml/core/scheduler.py +225 -81
  16. flowyml/core/versioning.py +13 -4
  17. flowyml/registry/model_registry.py +1 -1
  18. flowyml/storage/sql.py +53 -13
  19. flowyml/ui/backend/main.py +2 -0
  20. flowyml/ui/backend/routers/assets.py +36 -0
  21. flowyml/ui/backend/routers/execution.py +2 -2
  22. flowyml/ui/backend/routers/runs.py +211 -0
  23. flowyml/ui/backend/routers/stats.py +2 -2
  24. flowyml/ui/backend/routers/websocket.py +121 -0
  25. flowyml/ui/frontend/dist/assets/index-By4trVyv.css +1 -0
  26. flowyml/ui/frontend/dist/assets/index-CX5RV2C9.js +630 -0
  27. flowyml/ui/frontend/dist/index.html +2 -2
  28. flowyml/ui/frontend/package-lock.json +289 -0
  29. flowyml/ui/frontend/package.json +1 -0
  30. flowyml/ui/frontend/src/app/compare/page.jsx +213 -0
  31. flowyml/ui/frontend/src/app/experiments/compare/page.jsx +289 -0
  32. flowyml/ui/frontend/src/app/experiments/page.jsx +61 -1
  33. flowyml/ui/frontend/src/app/runs/[runId]/page.jsx +418 -203
  34. flowyml/ui/frontend/src/app/runs/page.jsx +64 -3
  35. flowyml/ui/frontend/src/app/settings/page.jsx +1 -1
  36. flowyml/ui/frontend/src/app/tokens/page.jsx +8 -6
  37. flowyml/ui/frontend/src/components/ArtifactViewer.jsx +159 -0
  38. flowyml/ui/frontend/src/components/NavigationTree.jsx +26 -9
  39. flowyml/ui/frontend/src/components/PipelineGraph.jsx +69 -28
  40. flowyml/ui/frontend/src/components/RunDetailsPanel.jsx +42 -14
  41. flowyml/ui/frontend/src/router/index.jsx +4 -0
  42. flowyml/ui/server_manager.py +181 -0
  43. flowyml/ui/utils.py +63 -1
  44. flowyml/utils/config.py +7 -0
  45. {flowyml-1.4.0.dist-info → flowyml-1.6.0.dist-info}/METADATA +5 -3
  46. {flowyml-1.4.0.dist-info → flowyml-1.6.0.dist-info}/RECORD +49 -41
  47. flowyml/ui/frontend/dist/assets/index-DcYwrn2j.css +0 -1
  48. flowyml/ui/frontend/dist/assets/index-Dlz_ygOL.js +0 -592
  49. {flowyml-1.4.0.dist-info → flowyml-1.6.0.dist-info}/WHEEL +0 -0
  50. {flowyml-1.4.0.dist-info → flowyml-1.6.0.dist-info}/entry_points.txt +0 -0
  51. {flowyml-1.4.0.dist-info → flowyml-1.6.0.dist-info}/licenses/LICENSE +0 -0
flowyml/core/scheduler.py CHANGED
@@ -3,7 +3,6 @@
 import contextlib
 import json
 import logging
-import sqlite3
 import threading
 import time
 from collections.abc import Callable
@@ -12,6 +11,26 @@ from datetime import datetime, timedelta
 from pathlib import Path
 from typing import Any
 
+from sqlalchemy import (
+    create_engine,
+    MetaData,
+    Table,
+    Column,
+    String,
+    Integer,
+    Float,
+    Text,
+    Boolean,
+    DateTime,
+    ForeignKey,
+    select,
+    insert,
+    update,
+    delete,
+    func,
+)
+from sqlalchemy.pool import StaticPool
+
 from flowyml.core.scheduler_config import SchedulerConfig
 
 logger = logging.getLogger(__name__)
@@ -81,6 +100,7 @@ class ScheduleExecution:
     success: bool = False
     error: str | None = None
     duration_seconds: float | None = None
+    run_id: str | None = None  # Pipeline run_id if available
 
 
 class SchedulerMetrics:
@@ -119,54 +139,83 @@ class SchedulerMetrics:
 
 
 class SchedulerPersistence:
-    """Persist schedules to SQLite database."""
+    """Persist schedules to SQLite database using SQLAlchemy."""
 
     def __init__(self, db_path: str | None = None):
         self.db_path = db_path or str(Path.cwd() / ".flowyml_scheduler.db")
+        # Convert to absolute path for SQLite URL
+        abs_path = Path(self.db_path).resolve()
+        abs_path.parent.mkdir(parents=True, exist_ok=True)
+        self.db_url = f"sqlite:///{abs_path}"
         self._init_db()
 
     def _init_db(self):
-        """Initialize database schema."""
-        with sqlite3.connect(self.db_path) as conn:
-            conn.execute(
-                """
-                CREATE TABLE IF NOT EXISTS schedules (
-                    name TEXT PRIMARY KEY,
-                    data TEXT NOT NULL,
-                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
-                )
-                """,
-            )
-            conn.execute(
-                """
-                CREATE TABLE IF NOT EXISTS executions (
-                    id INTEGER PRIMARY KEY AUTOINCREMENT,
-                    schedule_name TEXT NOT NULL,
-                    started_at TIMESTAMP NOT NULL,
-                    completed_at TIMESTAMP,
-                    success BOOLEAN,
-                    error TEXT,
-                    duration_seconds REAL,
-                    FOREIGN KEY(schedule_name) REFERENCES schedules(name)
-                )
-                """,
-            )
+        """Initialize database schema using SQLAlchemy."""
+        # Create engine
+        self.engine = create_engine(
+            self.db_url,
+            connect_args={"check_same_thread": False},
+            poolclass=StaticPool,
+        )
+
+        self.metadata = MetaData()
+
+        # Define schedules table
+        self.schedules = Table(
+            "schedules",
+            self.metadata,
+            Column("name", String, primary_key=True),
+            Column("data", Text, nullable=False),
+            Column("updated_at", DateTime, server_default=func.current_timestamp()),
+        )
+
+        # Define executions table
+        self.executions = Table(
+            "executions",
+            self.metadata,
+            Column("id", Integer, primary_key=True, autoincrement=True),
+            Column("schedule_name", String, ForeignKey("schedules.name"), nullable=False),
+            Column("started_at", DateTime, nullable=False),
+            Column("completed_at", DateTime, nullable=True),
+            Column("success", Boolean, nullable=True),
+            Column("error", Text, nullable=True),
+            Column("duration_seconds", Float, nullable=True),
+            Column("run_id", String, nullable=True),
+        )
+
+        # Create all tables
+        self.metadata.create_all(self.engine)
 
     def save_schedule(self, schedule: Schedule) -> None:
-        """Save schedule to database."""
+        """Save schedule to database using SQLAlchemy."""
         data = schedule.to_dict()
-        with sqlite3.connect(self.db_path) as conn:
-            conn.execute(
-                "INSERT OR REPLACE INTO schedules (name, data) VALUES (?, ?)",
-                (schedule.pipeline_name, json.dumps(data)),
+        with self.engine.connect() as conn:
+            # Use INSERT OR REPLACE equivalent in SQLAlchemy
+            stmt = (
+                update(self.schedules)
+                .where(self.schedules.c.name == schedule.pipeline_name)
+                .values(data=json.dumps(data), updated_at=func.current_timestamp())
             )
+            result = conn.execute(stmt)
+            conn.commit()
+
+            # If no rows were updated, insert new record
+            if result.rowcount == 0:
+                stmt = insert(self.schedules).values(
+                    name=schedule.pipeline_name,
+                    data=json.dumps(data),
+                )
+                conn.execute(stmt)
+                conn.commit()
 
     def load_schedules(self, pipeline_funcs: dict[str, Callable]) -> dict[str, Schedule]:
-        """Load all schedules from database."""
+        """Load all schedules from database using SQLAlchemy."""
         schedules = {}
-        with sqlite3.connect(self.db_path) as conn:
-            cursor = conn.execute("SELECT name, data FROM schedules")
-            for name, data_json in cursor:
+        with self.engine.connect() as conn:
+            stmt = select(self.schedules.c.name, self.schedules.c.data)
+            result = conn.execute(stmt)
+            for row in result:
+                name, data_json = row
                 try:
                     data = json.loads(data_json)
                     if name in pipeline_funcs:
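
Note: the update-then-insert sequence in `save_schedule` above emulates SQLite's `INSERT OR REPLACE` in two statements. SQLAlchemy's SQLite dialect also offers a native single-statement upsert; a minimal standalone sketch (the table and engine here are illustrative, not flowyml's actual objects):

```python
# Minimal sketch of a single-statement SQLite upsert via SQLAlchemy's
# dialect-specific insert. Table/engine names are illustrative only.
from sqlalchemy import Column, MetaData, String, Table, Text, create_engine
from sqlalchemy.dialects.sqlite import insert as sqlite_insert

engine = create_engine("sqlite:///:memory:")
metadata = MetaData()
schedules = Table(
    "schedules",
    metadata,
    Column("name", String, primary_key=True),
    Column("data", Text, nullable=False),
)
metadata.create_all(engine)

with engine.connect() as conn:
    stmt = sqlite_insert(schedules).values(name="daily_training", data="{}")
    # On a primary-key conflict, overwrite the existing row's data instead.
    stmt = stmt.on_conflict_do_update(
        index_elements=[schedules.c.name],
        set_={"data": stmt.excluded.data},
    )
    conn.execute(stmt)
    conn.commit()
```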
@@ -176,52 +225,72 @@ class SchedulerPersistence:
         return schedules
 
     def delete_schedule(self, name: str) -> None:
-        """Delete schedule from database."""
-        with sqlite3.connect(self.db_path) as conn:
-            conn.execute("DELETE FROM schedules WHERE name = ?", (name,))
-            conn.execute("DELETE FROM executions WHERE schedule_name = ?", (name,))
+        """Delete schedule from database using SQLAlchemy."""
+        with self.engine.connect() as conn:
+            # Delete executions first (foreign key constraint)
+            stmt = delete(self.executions).where(self.executions.c.schedule_name == name)
+            conn.execute(stmt)
+
+            # Delete schedule
+            stmt = delete(self.schedules).where(self.schedules.c.name == name)
+            conn.execute(stmt)
+            conn.commit()
 
     def save_execution(self, execution: ScheduleExecution) -> None:
-        """Save execution record."""
-        with sqlite3.connect(self.db_path) as conn:
-            conn.execute(
-                """
-                INSERT INTO executions
-                (schedule_name, started_at, completed_at, success, error, duration_seconds)
-                VALUES (?, ?, ?, ?, ?, ?)
-                """,
-                (
-                    execution.schedule_name,
-                    execution.started_at,
-                    execution.completed_at,
-                    execution.success,
-                    execution.error,
-                    execution.duration_seconds,
-                ),
+        """Save execution record using SQLAlchemy."""
+        with self.engine.connect() as conn:
+            stmt = insert(self.executions).values(
+                schedule_name=execution.schedule_name,
+                started_at=execution.started_at,
+                completed_at=execution.completed_at,
+                success=execution.success,
+                error=execution.error,
+                duration_seconds=execution.duration_seconds,
+                run_id=execution.run_id,
             )
+            conn.execute(stmt)
+            conn.commit()
 
     def get_history(self, schedule_name: str, limit: int = 50) -> list[dict[str, Any]]:
-        """Get execution history for a schedule."""
+        """Get execution history for a schedule using SQLAlchemy."""
         history = []
-        with sqlite3.connect(self.db_path) as conn:
-            cursor = conn.execute(
-                """
-                SELECT started_at, completed_at, success, error, duration_seconds
-                FROM executions
-                WHERE schedule_name = ?
-                ORDER BY started_at DESC
-                LIMIT ?
-                """,
-                (schedule_name, limit),
+        with self.engine.connect() as conn:
+            stmt = (
+                select(
+                    self.executions.c.started_at,
+                    self.executions.c.completed_at,
+                    self.executions.c.success,
+                    self.executions.c.error,
+                    self.executions.c.duration_seconds,
+                    self.executions.c.run_id,
+                )
+                .where(self.executions.c.schedule_name == schedule_name)
+                .order_by(self.executions.c.started_at.desc())
+                .limit(limit)
             )
-            for row in cursor:
+            result = conn.execute(stmt)
+            for row in result:
+                # Handle datetime conversion
+                started_at = row.started_at
+                if isinstance(started_at, datetime):
+                    started_at = started_at.isoformat()
+                elif started_at is not None:
+                    started_at = str(started_at)
+
+                completed_at = row.completed_at
+                if isinstance(completed_at, datetime):
+                    completed_at = completed_at.isoformat()
+                elif completed_at is not None:
+                    completed_at = str(completed_at)
+
                 history.append(
                     {
-                        "started_at": row[0],
-                        "completed_at": row[1],
-                        "success": bool(row[2]),
-                        "error": row[3],
-                        "duration_seconds": row[4],
+                        "started_at": started_at,
+                        "completed_at": completed_at,
+                        "success": bool(row.success) if row.success is not None else False,
+                        "error": row.error,
+                        "duration_seconds": row.duration_seconds,
+                        "run_id": row.run_id,
                     },
                 )
         return history
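
For reference, each entry returned by `get_history` is a plain dict with the keys built above. A hedged usage sketch (the `db_path` value is just an example; the import path follows the file shown in this diff):

```python
# Illustrative only: print the most recent executions for one schedule,
# using the dict keys produced by get_history() above.
from flowyml.core.scheduler import SchedulerPersistence

persistence = SchedulerPersistence(db_path=".flowyml_scheduler.db")

for entry in persistence.get_history("daily_training", limit=5):
    status = "ok" if entry["success"] else "failed"
    print(
        f"{entry['started_at']} [{status}] "
        f"run_id={entry['run_id']} "
        f"duration={entry['duration_seconds']}s "
        f"error={entry['error']}"
    )
```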
@@ -474,11 +543,11 @@ class PipelineScheduler:
         """Remove all schedules."""
         self.schedules.clear()
         if self._persistence:
-            # Re-initialize DB to clear it
-            db_path = Path(self._persistence.db_path)
-            if db_path.exists():
-                db_path.unlink()
-            self._persistence._init_db()
+            # Delete all schedules and executions using SQLAlchemy
+            with self._persistence.engine.connect() as conn:
+                conn.execute(delete(self._persistence.executions))
+                conn.execute(delete(self._persistence.schedules))
+                conn.commit()
 
     def enable(self, name: str) -> None:
         """Enable a schedule."""
@@ -507,17 +576,33 @@
             started_at=datetime.now(),
         )
 
+        pipeline_result = None
         try:
             logger.info(f"Starting scheduled run: {schedule.pipeline_name}")
-            schedule.pipeline_func()
-            execution.success = True
+
+            # Execute pipeline and capture result
+            result = schedule.pipeline_func()
+
+            # Check if result is a PipelineResult object
+            from flowyml.core.pipeline import PipelineResult
+
+            if isinstance(result, PipelineResult):
+                pipeline_result = result
+                execution.success = result.success
+                # Store run_id for tracking
+                execution.run_id = result.run_id
+            else:
+                # Assume success if no exception and result is truthy
+                execution.success = bool(result)
+
             schedule.last_run = datetime.now(pytz.timezone(schedule.timezone)) if pytz else datetime.now()
 
             if self.on_success:
                 self.on_success(schedule, execution)
         except Exception as e:
-            logger.error(f"Schedule {schedule.pipeline_name} failed: {e}")
+            logger.error(f"Schedule {schedule.pipeline_name} failed: {e}", exc_info=True)
             execution.error = str(e)
+            execution.success = False
             if self.on_failure:
                 self.on_failure(schedule, execution, e)
         finally:
@@ -525,6 +610,65 @@
             execution.duration_seconds = (execution.completed_at - execution.started_at).total_seconds()
             self.metrics.update(execution)
 
+        # Ensure pipeline result is saved to metadata store for UI visibility
+        # The pipeline should have already saved via _save_run, but we ensure it's in the UI's store
+        if pipeline_result:
+            try:
+                # Get the global metadata store used by UI (same as pipeline should use)
+                from flowyml.ui.backend.dependencies import get_store
+
+                # Get the UI's metadata store
+                ui_store = get_store()
+
+                # Also ensure the pipeline's metadata store is the same instance/path
+                # If the pipeline used a different store, sync to UI store
+
+                # Check if run is already in UI store
+                existing_run = ui_store.load_run(pipeline_result.run_id)
+                if not existing_run:
+                    # Run wasn't saved to UI store, save it now
+                    # Build comprehensive metadata
+                    metadata = {
+                        "run_id": pipeline_result.run_id,
+                        "pipeline_name": pipeline_result.pipeline_name,
+                        "status": "completed" if pipeline_result.success else "failed",
+                        "start_time": pipeline_result.start_time.isoformat(),
+                        "end_time": pipeline_result.end_time.isoformat() if pipeline_result.end_time else None,
+                        "duration": pipeline_result.duration_seconds,
+                        "success": pipeline_result.success,
+                        "scheduled": True,  # Mark as scheduled run
+                        "schedule_name": schedule.pipeline_name,
+                        "steps": {
+                            name: {
+                                "success": result.success,
+                                "duration": result.duration_seconds,
+                                "cached": result.cached,
+                                "retries": result.retries,
+                                "error": result.error,
+                            }
+                            for name, result in pipeline_result.step_results.items()
+                        },
+                    }
+
+                    # Add outputs if available
+                    if pipeline_result.outputs:
+                        metadata["outputs"] = {
+                            k: str(v)[:200] if not isinstance(v, (dict, list)) else str(v)[:200]
+                            for k, v in pipeline_result.outputs.items()
+                        }
+
+                    ui_store.save_run(pipeline_result.run_id, metadata)
+                    logger.info(f"✅ Saved scheduled run {pipeline_result.run_id} to UI metadata store")
+                else:
+                    # Update existing run to mark as scheduled
+                    if not existing_run.get("scheduled"):
+                        existing_run["scheduled"] = True
+                        existing_run["schedule_name"] = schedule.pipeline_name
+                        ui_store.save_run(pipeline_result.run_id, existing_run)
+                        logger.debug(f"Updated run {pipeline_result.run_id} to mark as scheduled")
+            except Exception as e:
+                logger.warning(f"Failed to save scheduled run to UI metadata store: {e}", exc_info=True)
+
         if self.config.distributed:
             self._lock.release(schedule.pipeline_name)
 
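Taken together, the scheduler changes let success/failure callbacks correlate an execution with its pipeline run via the new `run_id` field. A sketch of wiring such callbacks (the constructor arguments and attribute assignment are assumptions; only the callback signatures `on_success(schedule, execution)` and `on_failure(schedule, execution, exc)` appear in this diff):

```python
# Illustrative callback wiring. ScheduleExecution now carries run_id.
from flowyml.core.scheduler import PipelineScheduler

def log_success(schedule, execution):
    print(f"{schedule.pipeline_name} finished, run_id={execution.run_id}")

def log_failure(schedule, execution, exc):
    print(f"{schedule.pipeline_name} failed after {execution.duration_seconds}s: {exc}")

scheduler = PipelineScheduler()     # constructor arguments assumed
scheduler.on_success = log_success  # attribute assignment assumed
scheduler.on_failure = log_failure
```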
flowyml/core/versioning.py CHANGED
@@ -27,9 +27,9 @@ class VersionedPipeline:
     Tracks changes between versions and allows comparison.
 
     Examples:
-        >>> from flowyml import VersionedPipeline, step
-        >>> pipeline = VersionedPipeline("training")
-        >>> pipeline.version = "v1.0.0"
+        >>> from flowyml import VersionedPipeline, step, context
+        >>> ctx = context(learning_rate=0.001, epochs=10)
+        >>> pipeline = VersionedPipeline("training", context=ctx, version="v1.0.0", project_name="ml_project")
         >>> pipeline.add_step(load_data)
         >>> pipeline.add_step(train_model)
         >>> pipeline.save_version()
@@ -39,6 +39,10 @@
         >>> pipeline.save_version()
         >>> # Compare versions
        >>> diff = pipeline.compare_with("v1.0.0")
+
+        # Or use Pipeline with version parameter (automatically creates VersionedPipeline)
+        >>> from flowyml import Pipeline
+        >>> pipeline = Pipeline("training", context=ctx, version="v1.0.1", project_name="ml_project")
     """
 
     def __init__(
@@ -46,12 +50,17 @@
         name: str,
         version: str = "v0.1.0",
         versions_dir: str = ".flowyml/versions",
+        context: Any | None = None,
+        **kwargs,
     ):
         from flowyml.core.pipeline import Pipeline
 
         self.name = name
         self._version = version
-        self.pipeline = Pipeline(name)
+        # Pass context and other kwargs to the internal Pipeline
+        # Remove 'version' from kwargs to avoid recursion
+        pipeline_kwargs = {k: v for k, v in kwargs.items() if k != "version"}
+        self.pipeline = Pipeline(name, context=context, **pipeline_kwargs)
 
         # Version storage
         self.versions_dir = Path(versions_dir) / name
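
The new constructor makes the docstring example above work as written; a condensed sketch (assuming the flowyml API exactly as the docstring shows):

```python
# Mirrors the updated docstring: context and project_name are forwarded to
# the inner Pipeline, while `version` stays on the VersionedPipeline wrapper
# (it is filtered out of **kwargs to avoid recursing back into this class).
from flowyml import VersionedPipeline, context

ctx = context(learning_rate=0.001, epochs=10)
pipeline = VersionedPipeline(
    "training",
    context=ctx,
    version="v1.0.0",
    project_name="ml_project",  # passed through **kwargs to Pipeline
)
```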
flowyml/registry/model_registry.py CHANGED
@@ -184,7 +184,7 @@ class ModelRegistry:
             path: Path to save to
             framework: Framework name
         """
-        from flowyml.storage.materializers import get_materializer
+        from flowyml.storage.materializers.base import get_materializer
 
         # Try to get appropriate materializer
         materializer = get_materializer(model)
flowyml/storage/sql.py CHANGED
@@ -884,26 +884,66 @@ class SQLMetadataStore(MetadataStore):
             "period_days": days,
         }
 
-    def get_statistics(self) -> dict:
+    def get_statistics(self, project: str | None = None) -> dict:
         """Get global statistics."""
         with self.engine.connect() as conn:
-            # Total runs
-            total_runs = conn.execute(select(func.count()).select_from(self.runs)).scalar()
+            # 1. Total runs
+            runs_stmt = select(func.count()).select_from(self.runs)
+            if project:
+                runs_stmt = runs_stmt.where(self.runs.c.project == project)
+            total_runs = conn.execute(runs_stmt).scalar() or 0
 
-            # Total pipelines
-            total_pipelines = conn.execute(
-                select(func.count(func.distinct(self.runs.c.pipeline_name))),
-            ).scalar()
+            # 2. Total pipelines (unique names)
+            pipelines_stmt = select(func.count(func.distinct(self.runs.c.pipeline_name)))
+            if project:
+                pipelines_stmt = pipelines_stmt.where(self.runs.c.project == project)
+            total_pipelines = conn.execute(pipelines_stmt).scalar() or 0
 
-            # Total experiments
-            total_experiments = conn.execute(select(func.count()).select_from(self.experiments)).scalar()
+            # 3. Total artifacts
+            artifacts_stmt = select(func.count()).select_from(self.artifacts)
+            if project:
+                artifacts_stmt = artifacts_stmt.where(self.artifacts.c.project == project)
+            total_artifacts = conn.execute(artifacts_stmt).scalar() or 0
 
-            # Total models (unique model names in metrics)
-            total_models = conn.execute(
-                select(func.count(func.distinct(self.model_metrics.c.model_name))),
-            ).scalar()
+            # 4. Total experiments
+            experiments_stmt = select(func.count()).select_from(self.experiments)
+            if project:
+                experiments_stmt = experiments_stmt.where(self.experiments.c.project == project)
+            total_experiments = conn.execute(experiments_stmt).scalar() or 0
+
+            # 5. Total models
+            models_stmt = select(func.count(func.distinct(self.model_metrics.c.model_name)))
+            if project:
+                models_stmt = models_stmt.where(self.model_metrics.c.project == project)
+            total_models = conn.execute(models_stmt).scalar() or 0
+
+            # 6. Status counts (completed vs failed)
+            status_stmt = select(self.runs.c.status, func.count()).group_by(self.runs.c.status)
+            if project:
+                status_stmt = status_stmt.where(self.runs.c.project == project)
+
+            status_rows = conn.execute(status_stmt).fetchall()
+            status_map = {row[0]: row[1] for row in status_rows if row[0]}
+
+            completed_runs = status_map.get("completed", 0)
+            failed_runs = status_map.get("failed", 0)
+
+            # 7. Avg duration (only completed runs)
+            dur_stmt = select(func.avg(self.runs.c.duration)).where(self.runs.c.status == "completed")
+            if project:
+                dur_stmt = dur_stmt.where(self.runs.c.project == project)
+
+            avg_duration = conn.execute(dur_stmt).scalar() or 0.0
 
         return {
+            # Frontend-friendly keys
+            "pipelines": total_pipelines,
+            "runs": total_runs,
+            "artifacts": total_artifacts,
+            "completed_runs": completed_runs,
+            "failed_runs": failed_runs,
+            "avg_duration": avg_duration,
+            # Backward compatibility
             "total_runs": total_runs,
             "total_pipelines": total_pipelines,
             "total_experiments": total_experiments,
flowyml/ui/backend/main.py CHANGED
@@ -24,6 +24,7 @@ from flowyml.ui.backend.routers import (
     metrics,
     client,
     stats,
+    websocket,
 )
 
 app = FastAPI(
@@ -77,6 +78,7 @@ app.include_router(metrics.router, prefix="/api/metrics", tags=["metrics"])
 app.include_router(plugins.router, prefix="/api", tags=["plugins"])
 app.include_router(client.router, prefix="/api/client", tags=["client"])
 app.include_router(stats.router, prefix="/api/stats", tags=["stats"])
+app.include_router(websocket.router, tags=["websocket"])
 
 
 # Static file serving for frontend
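
The new `websocket.py` router (+121 lines) is not included in this diff. For orientation only, a FastAPI WebSocket router is typically shaped like the generic sketch below; the path and payloads are invented for illustration, not flowyml's actual API:

```python
# Generic FastAPI WebSocket router sketch -- NOT flowyml's implementation.
from fastapi import APIRouter, WebSocket, WebSocketDisconnect

router = APIRouter()

@router.websocket("/ws/runs")  # hypothetical path
async def run_updates(websocket: WebSocket):
    await websocket.accept()
    try:
        while True:
            # Echo loop stands in for whatever run-status streaming
            # the real router implements.
            message = await websocket.receive_text()
            await websocket.send_json({"echo": message})
    except WebSocketDisconnect:
        pass
```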
flowyml/ui/backend/routers/assets.py CHANGED
@@ -476,6 +476,42 @@ async def download_asset(artifact_id: str):
     )
 
 
+@router.get("/{artifact_id}/content")
+async def get_asset_content(artifact_id: str):
+    """Get the artifact content for inline viewing."""
+    import mimetypes
+
+    asset, _ = _find_asset_with_store(artifact_id)
+    if not asset:
+        raise HTTPException(status_code=404, detail="Asset not found")
+
+    artifact_path = asset.get("path")
+    if not artifact_path:
+        raise HTTPException(status_code=404, detail="Artifact path not available")
+
+    # Handle relative paths for local store
+    from flowyml.utils.config import get_config
+
+    config = get_config()
+
+    file_path = Path(artifact_path)
+    if not file_path.is_absolute():
+        file_path = config.artifacts_dir / file_path
+
+    if not file_path.exists():
+        raise HTTPException(status_code=404, detail="Artifact file not found on disk")
+
+    # Guess mime type
+    mime_type, _ = mimetypes.guess_type(file_path.name)
+    if not mime_type:
+        mime_type = "text/plain"  # Default fallback
+
+    return FileResponse(
+        path=file_path,
+        media_type=mime_type,
+    )
+
+
 class ProjectUpdate(BaseModel):
     project_name: str
 
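A hedged client-side sketch of the new inline-content endpoint (the `/api/assets` mount prefix, host, and port are assumptions; only the `/{artifact_id}/content` route appears in this diff):

```python
# Illustrative only: fetch an artifact for inline viewing.
import requests  # third-party; assumed available

ARTIFACT_ID = "abc123"  # hypothetical id
resp = requests.get(f"http://localhost:8000/api/assets/{ARTIFACT_ID}/content")
resp.raise_for_status()
print(resp.headers.get("content-type"))  # guessed by mimetypes on the server
print(resp.text[:200])
```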
flowyml/ui/backend/routers/execution.py CHANGED
@@ -97,10 +97,10 @@ async def execute_pipeline(
     run_kwargs = request.parameters.copy()
 
     if request.retry_count > 0:
-        from flowyml.core.retry import OrchestratorRetryPolicy
+        from flowyml.core.retry_policy import OrchestratorRetryPolicy
 
         run_kwargs["retry_policy"] = OrchestratorRetryPolicy(
-            max_retries=min(request.retry_count, 5),  # Cap at 5
+            max_attempts=min(request.retry_count, 5),  # Cap at 5
        )
 
     result = pipeline.run(**run_kwargs)
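
For callers, the corrected import path and keyword argument look like this (a sketch; only the module path, class name, and `max_attempts` keyword are confirmed by the diff):

```python
# Illustrative only: building the retry policy as the fixed router code does.
from flowyml.core.retry_policy import OrchestratorRetryPolicy

policy = OrchestratorRetryPolicy(max_attempts=3)
# result = pipeline.run(retry_policy=policy, **parameters)  # as in execute_pipeline
```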