flowyml 1.2.0__py3-none-any.whl → 1.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowyml/__init__.py +3 -0
- flowyml/assets/base.py +10 -0
- flowyml/assets/metrics.py +6 -0
- flowyml/cli/main.py +108 -2
- flowyml/cli/run.py +9 -2
- flowyml/core/execution_status.py +52 -0
- flowyml/core/hooks.py +106 -0
- flowyml/core/observability.py +210 -0
- flowyml/core/orchestrator.py +274 -0
- flowyml/core/pipeline.py +193 -231
- flowyml/core/project.py +34 -2
- flowyml/core/remote_orchestrator.py +109 -0
- flowyml/core/resources.py +34 -17
- flowyml/core/retry_policy.py +80 -0
- flowyml/core/scheduler.py +9 -9
- flowyml/core/scheduler_config.py +2 -3
- flowyml/core/step.py +18 -1
- flowyml/core/submission_result.py +53 -0
- flowyml/integrations/keras.py +95 -22
- flowyml/monitoring/alerts.py +2 -2
- flowyml/stacks/__init__.py +15 -0
- flowyml/stacks/aws.py +599 -0
- flowyml/stacks/azure.py +295 -0
- flowyml/stacks/bridge.py +9 -9
- flowyml/stacks/components.py +24 -2
- flowyml/stacks/gcp.py +158 -11
- flowyml/stacks/local.py +5 -0
- flowyml/stacks/plugins.py +2 -2
- flowyml/stacks/registry.py +21 -0
- flowyml/storage/artifacts.py +15 -5
- flowyml/storage/materializers/__init__.py +2 -0
- flowyml/storage/materializers/base.py +33 -0
- flowyml/storage/materializers/cloudpickle.py +74 -0
- flowyml/storage/metadata.py +3 -881
- flowyml/storage/remote.py +590 -0
- flowyml/storage/sql.py +911 -0
- flowyml/ui/backend/dependencies.py +28 -0
- flowyml/ui/backend/main.py +43 -80
- flowyml/ui/backend/routers/assets.py +483 -17
- flowyml/ui/backend/routers/client.py +46 -0
- flowyml/ui/backend/routers/execution.py +13 -2
- flowyml/ui/backend/routers/experiments.py +97 -14
- flowyml/ui/backend/routers/metrics.py +168 -0
- flowyml/ui/backend/routers/pipelines.py +77 -12
- flowyml/ui/backend/routers/projects.py +33 -7
- flowyml/ui/backend/routers/runs.py +221 -12
- flowyml/ui/backend/routers/schedules.py +5 -21
- flowyml/ui/backend/routers/stats.py +14 -0
- flowyml/ui/backend/routers/traces.py +37 -53
- flowyml/ui/frontend/dist/assets/index-DcYwrn2j.css +1 -0
- flowyml/ui/frontend/dist/assets/index-Dlz_ygOL.js +592 -0
- flowyml/ui/frontend/dist/index.html +2 -2
- flowyml/ui/frontend/src/App.jsx +4 -1
- flowyml/ui/frontend/src/app/assets/page.jsx +260 -230
- flowyml/ui/frontend/src/app/dashboard/page.jsx +38 -7
- flowyml/ui/frontend/src/app/experiments/page.jsx +61 -314
- flowyml/ui/frontend/src/app/observability/page.jsx +277 -0
- flowyml/ui/frontend/src/app/pipelines/page.jsx +79 -402
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectArtifactsList.jsx +151 -0
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectExperimentsList.jsx +145 -0
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectHeader.jsx +45 -0
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectHierarchy.jsx +467 -0
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectMetricsPanel.jsx +253 -0
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectPipelinesList.jsx +105 -0
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectRelations.jsx +189 -0
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectRunsList.jsx +136 -0
- flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectTabs.jsx +95 -0
- flowyml/ui/frontend/src/app/projects/[projectId]/page.jsx +326 -0
- flowyml/ui/frontend/src/app/projects/page.jsx +13 -3
- flowyml/ui/frontend/src/app/runs/[runId]/page.jsx +79 -10
- flowyml/ui/frontend/src/app/runs/page.jsx +82 -424
- flowyml/ui/frontend/src/app/settings/page.jsx +1 -0
- flowyml/ui/frontend/src/app/tokens/page.jsx +62 -16
- flowyml/ui/frontend/src/components/AssetDetailsPanel.jsx +373 -0
- flowyml/ui/frontend/src/components/AssetLineageGraph.jsx +291 -0
- flowyml/ui/frontend/src/components/AssetStatsDashboard.jsx +302 -0
- flowyml/ui/frontend/src/components/AssetTreeHierarchy.jsx +477 -0
- flowyml/ui/frontend/src/components/ExperimentDetailsPanel.jsx +227 -0
- flowyml/ui/frontend/src/components/NavigationTree.jsx +401 -0
- flowyml/ui/frontend/src/components/PipelineDetailsPanel.jsx +239 -0
- flowyml/ui/frontend/src/components/PipelineGraph.jsx +67 -3
- flowyml/ui/frontend/src/components/ProjectSelector.jsx +115 -0
- flowyml/ui/frontend/src/components/RunDetailsPanel.jsx +298 -0
- flowyml/ui/frontend/src/components/header/Header.jsx +48 -1
- flowyml/ui/frontend/src/components/plugins/ZenMLIntegration.jsx +106 -0
- flowyml/ui/frontend/src/components/sidebar/Sidebar.jsx +52 -26
- flowyml/ui/frontend/src/components/ui/DataView.jsx +35 -17
- flowyml/ui/frontend/src/components/ui/ErrorBoundary.jsx +118 -0
- flowyml/ui/frontend/src/contexts/ProjectContext.jsx +2 -2
- flowyml/ui/frontend/src/contexts/ToastContext.jsx +116 -0
- flowyml/ui/frontend/src/layouts/MainLayout.jsx +5 -1
- flowyml/ui/frontend/src/router/index.jsx +4 -0
- flowyml/ui/frontend/src/utils/date.js +10 -0
- flowyml/ui/frontend/src/utils/downloads.js +11 -0
- flowyml/utils/config.py +6 -0
- flowyml/utils/stack_config.py +45 -3
- {flowyml-1.2.0.dist-info → flowyml-1.4.0.dist-info}/METADATA +44 -4
- flowyml-1.4.0.dist-info/RECORD +200 -0
- {flowyml-1.2.0.dist-info → flowyml-1.4.0.dist-info}/licenses/LICENSE +1 -1
- flowyml/ui/frontend/dist/assets/index-DFNQnrUj.js +0 -448
- flowyml/ui/frontend/dist/assets/index-pWI271rZ.css +0 -1
- flowyml-1.2.0.dist-info/RECORD +0 -159
- {flowyml-1.2.0.dist-info → flowyml-1.4.0.dist-info}/WHEEL +0 -0
- {flowyml-1.2.0.dist-info → flowyml-1.4.0.dist-info}/entry_points.txt +0 -0
flowyml/storage/metadata.py
CHANGED
|
@@ -1,11 +1,6 @@
|
|
|
1
1
|
"""Metadata storage backends for flowyml."""
|
|
2
2
|
|
|
3
|
-
import json
|
|
4
|
-
import sqlite3
|
|
5
3
|
from abc import ABC, abstractmethod
|
|
6
|
-
from pathlib import Path
|
|
7
|
-
import contextlib
|
|
8
|
-
import builtins
|
|
9
4
|
|
|
10
5
|
|
|
11
6
|
class MetadataStore(ABC):
|
|
@@ -52,880 +47,7 @@ class MetadataStore(ABC):
|
|
|
52
47
|
pass
|
|
53
48
|
|
|
54
49
|
|
|
55
|
-
|
|
56
|
-
"""SQLite-based metadata storage."""
|
|
50
|
+
from flowyml.storage.sql import SQLMetadataStore # noqa: E402
|
|
57
51
|
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
Args:
|
|
62
|
-
db_path: Path to SQLite database file
|
|
63
|
-
"""
|
|
64
|
-
self.db_path = Path(db_path)
|
|
65
|
-
self.db_path.parent.mkdir(parents=True, exist_ok=True)
|
|
66
|
-
self._init_db()
|
|
67
|
-
|
|
68
|
-
def _init_db(self) -> None:
|
|
69
|
-
"""Initialize database schema."""
|
|
70
|
-
conn = sqlite3.connect(self.db_path)
|
|
71
|
-
cursor = conn.cursor()
|
|
72
|
-
|
|
73
|
-
# Runs table
|
|
74
|
-
cursor.execute(
|
|
75
|
-
"""
|
|
76
|
-
CREATE TABLE IF NOT EXISTS runs (
|
|
77
|
-
run_id TEXT PRIMARY KEY,
|
|
78
|
-
pipeline_name TEXT,
|
|
79
|
-
status TEXT,
|
|
80
|
-
start_time TEXT,
|
|
81
|
-
end_time TEXT,
|
|
82
|
-
duration REAL,
|
|
83
|
-
metadata TEXT,
|
|
84
|
-
project TEXT,
|
|
85
|
-
created_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
86
|
-
)
|
|
87
|
-
""",
|
|
88
|
-
)
|
|
89
|
-
|
|
90
|
-
# Migration: Add project column if it doesn't exist
|
|
91
|
-
# Migration: Add project column if it doesn't exist
|
|
92
|
-
with contextlib.suppress(sqlite3.OperationalError):
|
|
93
|
-
cursor.execute("ALTER TABLE runs ADD COLUMN project TEXT")
|
|
94
|
-
|
|
95
|
-
# Artifacts table
|
|
96
|
-
cursor.execute(
|
|
97
|
-
"""
|
|
98
|
-
CREATE TABLE IF NOT EXISTS artifacts (
|
|
99
|
-
artifact_id TEXT PRIMARY KEY,
|
|
100
|
-
name TEXT,
|
|
101
|
-
type TEXT,
|
|
102
|
-
run_id TEXT,
|
|
103
|
-
path TEXT,
|
|
104
|
-
metadata TEXT,
|
|
105
|
-
project TEXT,
|
|
106
|
-
created_at TEXT DEFAULT CURRENT_TIMESTAMP,
|
|
107
|
-
FOREIGN KEY (run_id) REFERENCES runs(run_id)
|
|
108
|
-
)
|
|
109
|
-
""",
|
|
110
|
-
)
|
|
111
|
-
|
|
112
|
-
# Migration: Add project column to artifacts if it doesn't exist
|
|
113
|
-
# Migration: Add project column to artifacts if it doesn't exist
|
|
114
|
-
with contextlib.suppress(sqlite3.OperationalError):
|
|
115
|
-
cursor.execute("ALTER TABLE artifacts ADD COLUMN project TEXT")
|
|
116
|
-
|
|
117
|
-
# Metrics table
|
|
118
|
-
cursor.execute(
|
|
119
|
-
"""
|
|
120
|
-
CREATE TABLE IF NOT EXISTS metrics (
|
|
121
|
-
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
122
|
-
run_id TEXT,
|
|
123
|
-
name TEXT,
|
|
124
|
-
value REAL,
|
|
125
|
-
step INTEGER,
|
|
126
|
-
timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
|
|
127
|
-
FOREIGN KEY (run_id) REFERENCES runs(run_id)
|
|
128
|
-
)
|
|
129
|
-
""",
|
|
130
|
-
)
|
|
131
|
-
|
|
132
|
-
# Parameters table
|
|
133
|
-
cursor.execute(
|
|
134
|
-
"""
|
|
135
|
-
CREATE TABLE IF NOT EXISTS parameters (
|
|
136
|
-
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
137
|
-
run_id TEXT,
|
|
138
|
-
name TEXT,
|
|
139
|
-
value TEXT,
|
|
140
|
-
FOREIGN KEY (run_id) REFERENCES runs(run_id)
|
|
141
|
-
)
|
|
142
|
-
""",
|
|
143
|
-
)
|
|
144
|
-
|
|
145
|
-
# Experiments table
|
|
146
|
-
cursor.execute(
|
|
147
|
-
"""
|
|
148
|
-
CREATE TABLE IF NOT EXISTS experiments (
|
|
149
|
-
experiment_id TEXT PRIMARY KEY,
|
|
150
|
-
name TEXT,
|
|
151
|
-
description TEXT,
|
|
152
|
-
tags TEXT,
|
|
153
|
-
project TEXT,
|
|
154
|
-
created_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
155
|
-
)
|
|
156
|
-
""",
|
|
157
|
-
)
|
|
158
|
-
|
|
159
|
-
# Migration: Add project column to experiments if it doesn't exist
|
|
160
|
-
# Migration: Add project column to experiments if it doesn't exist
|
|
161
|
-
with contextlib.suppress(sqlite3.OperationalError):
|
|
162
|
-
cursor.execute("ALTER TABLE experiments ADD COLUMN project TEXT")
|
|
163
|
-
|
|
164
|
-
# Experiment Runs link table
|
|
165
|
-
cursor.execute(
|
|
166
|
-
"""
|
|
167
|
-
CREATE TABLE IF NOT EXISTS experiment_runs (
|
|
168
|
-
experiment_id TEXT,
|
|
169
|
-
run_id TEXT,
|
|
170
|
-
metrics TEXT,
|
|
171
|
-
parameters TEXT,
|
|
172
|
-
timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
|
|
173
|
-
PRIMARY KEY (experiment_id, run_id),
|
|
174
|
-
FOREIGN KEY (experiment_id) REFERENCES experiments(experiment_id),
|
|
175
|
-
FOREIGN KEY (run_id) REFERENCES runs(run_id)
|
|
176
|
-
)
|
|
177
|
-
""",
|
|
178
|
-
)
|
|
179
|
-
|
|
180
|
-
# Traces table for GenAI monitoring
|
|
181
|
-
cursor.execute(
|
|
182
|
-
"""
|
|
183
|
-
CREATE TABLE IF NOT EXISTS traces (
|
|
184
|
-
event_id TEXT PRIMARY KEY,
|
|
185
|
-
trace_id TEXT,
|
|
186
|
-
parent_id TEXT,
|
|
187
|
-
event_type TEXT,
|
|
188
|
-
name TEXT,
|
|
189
|
-
inputs TEXT,
|
|
190
|
-
outputs TEXT,
|
|
191
|
-
start_time REAL,
|
|
192
|
-
end_time REAL,
|
|
193
|
-
duration REAL,
|
|
194
|
-
status TEXT,
|
|
195
|
-
error TEXT,
|
|
196
|
-
metadata TEXT,
|
|
197
|
-
prompt_tokens INTEGER,
|
|
198
|
-
completion_tokens INTEGER,
|
|
199
|
-
total_tokens INTEGER,
|
|
200
|
-
cost REAL,
|
|
201
|
-
model TEXT,
|
|
202
|
-
project TEXT,
|
|
203
|
-
created_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
204
|
-
)
|
|
205
|
-
""",
|
|
206
|
-
)
|
|
207
|
-
|
|
208
|
-
# Migration: Add project column to traces if it doesn't exist
|
|
209
|
-
# Migration: Add project column to traces if it doesn't exist
|
|
210
|
-
with contextlib.suppress(sqlite3.OperationalError):
|
|
211
|
-
cursor.execute("ALTER TABLE traces ADD COLUMN project TEXT")
|
|
212
|
-
|
|
213
|
-
# Create indexes for better query performance
|
|
214
|
-
cursor.execute("CREATE INDEX IF NOT EXISTS idx_runs_pipeline ON runs(pipeline_name)")
|
|
215
|
-
cursor.execute("CREATE INDEX IF NOT EXISTS idx_runs_status ON runs(status)")
|
|
216
|
-
cursor.execute("CREATE INDEX IF NOT EXISTS idx_runs_project ON runs(project)")
|
|
217
|
-
cursor.execute("CREATE INDEX IF NOT EXISTS idx_artifacts_run ON artifacts(run_id)")
|
|
218
|
-
cursor.execute("CREATE INDEX IF NOT EXISTS idx_artifacts_project ON artifacts(project)")
|
|
219
|
-
cursor.execute("CREATE INDEX IF NOT EXISTS idx_metrics_run ON metrics(run_id)")
|
|
220
|
-
cursor.execute("CREATE INDEX IF NOT EXISTS idx_parameters_run ON parameters(run_id)")
|
|
221
|
-
cursor.execute("CREATE INDEX IF NOT EXISTS idx_experiments_name ON experiments(name)")
|
|
222
|
-
cursor.execute("CREATE INDEX IF NOT EXISTS idx_experiments_project ON experiments(project)")
|
|
223
|
-
cursor.execute("CREATE INDEX IF NOT EXISTS idx_traces_trace_id ON traces(trace_id)")
|
|
224
|
-
cursor.execute("CREATE INDEX IF NOT EXISTS idx_traces_type ON traces(event_type)")
|
|
225
|
-
cursor.execute("CREATE INDEX IF NOT EXISTS idx_traces_project ON traces(project)")
|
|
226
|
-
|
|
227
|
-
# Pipeline definitions for scheduling
|
|
228
|
-
cursor.execute(
|
|
229
|
-
"""
|
|
230
|
-
CREATE TABLE IF NOT EXISTS pipeline_definitions (
|
|
231
|
-
pipeline_name TEXT PRIMARY KEY,
|
|
232
|
-
definition TEXT NOT NULL,
|
|
233
|
-
created_at TEXT NOT NULL,
|
|
234
|
-
updated_at TEXT NOT NULL
|
|
235
|
-
)
|
|
236
|
-
""",
|
|
237
|
-
)
|
|
238
|
-
|
|
239
|
-
conn.commit()
|
|
240
|
-
conn.close()
|
|
241
|
-
|
|
242
|
-
def save_run(self, run_id: str, metadata: dict) -> None:
|
|
243
|
-
"""Save run metadata to database.
|
|
244
|
-
|
|
245
|
-
Args:
|
|
246
|
-
run_id: Unique run identifier
|
|
247
|
-
metadata: Run metadata dictionary
|
|
248
|
-
"""
|
|
249
|
-
conn = sqlite3.connect(self.db_path)
|
|
250
|
-
cursor = conn.cursor()
|
|
251
|
-
|
|
252
|
-
cursor.execute(
|
|
253
|
-
"""
|
|
254
|
-
INSERT OR REPLACE INTO runs
|
|
255
|
-
(run_id, pipeline_name, status, start_time, end_time, duration, metadata, project)
|
|
256
|
-
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
|
257
|
-
""",
|
|
258
|
-
(
|
|
259
|
-
run_id,
|
|
260
|
-
metadata.get("pipeline_name"),
|
|
261
|
-
metadata.get("status"),
|
|
262
|
-
metadata.get("start_time"),
|
|
263
|
-
metadata.get("end_time"),
|
|
264
|
-
metadata.get("duration"),
|
|
265
|
-
json.dumps(metadata),
|
|
266
|
-
metadata.get("project"),
|
|
267
|
-
),
|
|
268
|
-
)
|
|
269
|
-
|
|
270
|
-
# Save parameters
|
|
271
|
-
if "parameters" in metadata:
|
|
272
|
-
cursor.execute("DELETE FROM parameters WHERE run_id = ?", (run_id,))
|
|
273
|
-
for name, value in metadata["parameters"].items():
|
|
274
|
-
cursor.execute(
|
|
275
|
-
"INSERT INTO parameters (run_id, name, value) VALUES (?, ?, ?)",
|
|
276
|
-
(run_id, name, json.dumps(value)),
|
|
277
|
-
)
|
|
278
|
-
|
|
279
|
-
# Save metrics
|
|
280
|
-
if "metrics" in metadata:
|
|
281
|
-
cursor.execute("DELETE FROM metrics WHERE run_id = ?", (run_id,))
|
|
282
|
-
for name, value in metadata["metrics"].items():
|
|
283
|
-
cursor.execute(
|
|
284
|
-
"INSERT INTO metrics (run_id, name, value, step) VALUES (?, ?, ?, ?)",
|
|
285
|
-
(run_id, name, value, 0),
|
|
286
|
-
)
|
|
287
|
-
|
|
288
|
-
conn.commit()
|
|
289
|
-
conn.close()
|
|
290
|
-
|
|
291
|
-
def load_run(self, run_id: str) -> dict | None:
|
|
292
|
-
"""Load run metadata from database.
|
|
293
|
-
|
|
294
|
-
Args:
|
|
295
|
-
run_id: Unique run identifier
|
|
296
|
-
|
|
297
|
-
Returns:
|
|
298
|
-
Run metadata dictionary or None if not found
|
|
299
|
-
"""
|
|
300
|
-
conn = sqlite3.connect(self.db_path)
|
|
301
|
-
cursor = conn.cursor()
|
|
302
|
-
|
|
303
|
-
cursor.execute("SELECT metadata FROM runs WHERE run_id = ?", (run_id,))
|
|
304
|
-
row = cursor.fetchone()
|
|
305
|
-
|
|
306
|
-
conn.close()
|
|
307
|
-
|
|
308
|
-
if row:
|
|
309
|
-
data = json.loads(row[0])
|
|
310
|
-
# Ensure project is in metadata if it's in the column but not the JSON blob
|
|
311
|
-
# (This might happen if we update the column directly)
|
|
312
|
-
# Actually, let's just return what's in the blob for now,
|
|
313
|
-
# but we should probably sync them.
|
|
314
|
-
return data
|
|
315
|
-
return None
|
|
316
|
-
|
|
317
|
-
def update_run_project(self, run_id: str, project_name: str) -> None:
|
|
318
|
-
"""Update the project for a run.
|
|
319
|
-
|
|
320
|
-
Args:
|
|
321
|
-
run_id: Run identifier
|
|
322
|
-
project_name: Name of the project
|
|
323
|
-
"""
|
|
324
|
-
conn = sqlite3.connect(self.db_path)
|
|
325
|
-
cursor = conn.cursor()
|
|
326
|
-
|
|
327
|
-
# 1. Update the column
|
|
328
|
-
cursor.execute("UPDATE runs SET project = ? WHERE run_id = ?", (project_name, run_id))
|
|
329
|
-
|
|
330
|
-
# 2. Update the JSON blob
|
|
331
|
-
cursor.execute("SELECT metadata FROM runs WHERE run_id = ?", (run_id,))
|
|
332
|
-
row = cursor.fetchone()
|
|
333
|
-
if row:
|
|
334
|
-
metadata = json.loads(row[0])
|
|
335
|
-
metadata["project"] = project_name
|
|
336
|
-
cursor.execute(
|
|
337
|
-
"UPDATE runs SET metadata = ? WHERE run_id = ?",
|
|
338
|
-
(json.dumps(metadata), run_id),
|
|
339
|
-
)
|
|
340
|
-
|
|
341
|
-
conn.commit()
|
|
342
|
-
conn.close()
|
|
343
|
-
|
|
344
|
-
def list_runs(self, limit: int | None = None) -> list[dict]:
|
|
345
|
-
"""List all runs from database.
|
|
346
|
-
|
|
347
|
-
Args:
|
|
348
|
-
limit: Optional limit on number of results
|
|
349
|
-
|
|
350
|
-
Returns:
|
|
351
|
-
List of run metadata dictionaries
|
|
352
|
-
"""
|
|
353
|
-
conn = sqlite3.connect(self.db_path)
|
|
354
|
-
cursor = conn.cursor()
|
|
355
|
-
|
|
356
|
-
query = "SELECT metadata FROM runs ORDER BY created_at DESC"
|
|
357
|
-
if limit:
|
|
358
|
-
query += f" LIMIT {limit}"
|
|
359
|
-
|
|
360
|
-
cursor.execute(query)
|
|
361
|
-
rows = cursor.fetchall()
|
|
362
|
-
|
|
363
|
-
conn.close()
|
|
364
|
-
|
|
365
|
-
return [json.loads(row[0]) for row in rows]
|
|
366
|
-
|
|
367
|
-
def list_pipelines(self, project: str = None) -> list[str]:
|
|
368
|
-
"""List all unique pipeline names.
|
|
369
|
-
|
|
370
|
-
Args:
|
|
371
|
-
project: Optional project name to filter by
|
|
372
|
-
|
|
373
|
-
Returns:
|
|
374
|
-
List of pipeline names
|
|
375
|
-
"""
|
|
376
|
-
conn = sqlite3.connect(self.db_path)
|
|
377
|
-
cursor = conn.cursor()
|
|
378
|
-
|
|
379
|
-
if project:
|
|
380
|
-
cursor.execute(
|
|
381
|
-
"SELECT DISTINCT pipeline_name FROM runs WHERE project = ? ORDER BY pipeline_name",
|
|
382
|
-
(project,),
|
|
383
|
-
)
|
|
384
|
-
else:
|
|
385
|
-
cursor.execute("SELECT DISTINCT pipeline_name FROM runs ORDER BY pipeline_name")
|
|
386
|
-
|
|
387
|
-
rows = cursor.fetchall()
|
|
388
|
-
|
|
389
|
-
conn.close()
|
|
390
|
-
|
|
391
|
-
return [row[0] for row in rows if row[0]]
|
|
392
|
-
|
|
393
|
-
def save_artifact(self, artifact_id: str, metadata: dict) -> None:
|
|
394
|
-
"""Save artifact metadata to database.
|
|
395
|
-
|
|
396
|
-
Args:
|
|
397
|
-
artifact_id: Unique artifact identifier
|
|
398
|
-
metadata: Artifact metadata dictionary
|
|
399
|
-
"""
|
|
400
|
-
conn = sqlite3.connect(self.db_path)
|
|
401
|
-
cursor = conn.cursor()
|
|
402
|
-
|
|
403
|
-
cursor.execute(
|
|
404
|
-
"""
|
|
405
|
-
INSERT OR REPLACE INTO artifacts
|
|
406
|
-
(artifact_id, name, type, run_id, path, metadata, project)
|
|
407
|
-
VALUES (?, ?, ?, ?, ?, ?, ?)
|
|
408
|
-
""",
|
|
409
|
-
(
|
|
410
|
-
artifact_id,
|
|
411
|
-
metadata.get("name"),
|
|
412
|
-
metadata.get("type"),
|
|
413
|
-
metadata.get("run_id"),
|
|
414
|
-
metadata.get("path"),
|
|
415
|
-
json.dumps(metadata),
|
|
416
|
-
metadata.get("project"),
|
|
417
|
-
),
|
|
418
|
-
)
|
|
419
|
-
|
|
420
|
-
conn.commit()
|
|
421
|
-
conn.close()
|
|
422
|
-
|
|
423
|
-
def load_artifact(self, artifact_id: str) -> dict | None:
|
|
424
|
-
"""Load artifact metadata from database.
|
|
425
|
-
|
|
426
|
-
Args:
|
|
427
|
-
artifact_id: Unique artifact identifier
|
|
428
|
-
|
|
429
|
-
Returns:
|
|
430
|
-
Artifact metadata dictionary or None if not found
|
|
431
|
-
"""
|
|
432
|
-
conn = sqlite3.connect(self.db_path)
|
|
433
|
-
cursor = conn.cursor()
|
|
434
|
-
|
|
435
|
-
cursor.execute("SELECT metadata FROM artifacts WHERE artifact_id = ?", (artifact_id,))
|
|
436
|
-
row = cursor.fetchone()
|
|
437
|
-
|
|
438
|
-
conn.close()
|
|
439
|
-
|
|
440
|
-
if row:
|
|
441
|
-
return json.loads(row[0])
|
|
442
|
-
return None
|
|
443
|
-
|
|
444
|
-
def list_assets(self, limit: int | None = None, **filters) -> list[dict]:
|
|
445
|
-
"""List assets from database with optional filters.
|
|
446
|
-
|
|
447
|
-
Args:
|
|
448
|
-
limit: Optional limit on number of results
|
|
449
|
-
**filters: Filter criteria (type, run_id, etc.)
|
|
450
|
-
|
|
451
|
-
Returns:
|
|
452
|
-
List of artifact metadata dictionaries
|
|
453
|
-
"""
|
|
454
|
-
conn = sqlite3.connect(self.db_path)
|
|
455
|
-
cursor = conn.cursor()
|
|
456
|
-
|
|
457
|
-
conditions = []
|
|
458
|
-
params = []
|
|
459
|
-
|
|
460
|
-
for key, value in filters.items():
|
|
461
|
-
if value is not None:
|
|
462
|
-
conditions.append(f"{key} = ?")
|
|
463
|
-
params.append(value)
|
|
464
|
-
|
|
465
|
-
query = "SELECT metadata FROM artifacts"
|
|
466
|
-
if conditions:
|
|
467
|
-
query += " WHERE " + " AND ".join(conditions)
|
|
468
|
-
|
|
469
|
-
query += " ORDER BY created_at DESC"
|
|
470
|
-
|
|
471
|
-
if limit:
|
|
472
|
-
query += f" LIMIT {limit}"
|
|
473
|
-
|
|
474
|
-
cursor.execute(query, params)
|
|
475
|
-
rows = cursor.fetchall()
|
|
476
|
-
|
|
477
|
-
conn.close()
|
|
478
|
-
|
|
479
|
-
return [json.loads(row[0]) for row in rows]
|
|
480
|
-
|
|
481
|
-
def query(self, **filters) -> list[dict]:
|
|
482
|
-
"""Query runs with filters.
|
|
483
|
-
|
|
484
|
-
Args:
|
|
485
|
-
**filters: Filter criteria (pipeline_name, status, etc.)
|
|
486
|
-
|
|
487
|
-
Returns:
|
|
488
|
-
List of matching run metadata dictionaries
|
|
489
|
-
"""
|
|
490
|
-
conn = sqlite3.connect(self.db_path)
|
|
491
|
-
cursor = conn.cursor()
|
|
492
|
-
|
|
493
|
-
where_clauses = []
|
|
494
|
-
params = []
|
|
495
|
-
|
|
496
|
-
for key, value in filters.items():
|
|
497
|
-
where_clauses.append(f"{key} = ?")
|
|
498
|
-
params.append(value)
|
|
499
|
-
|
|
500
|
-
query = "SELECT metadata FROM runs"
|
|
501
|
-
if where_clauses:
|
|
502
|
-
query += " WHERE " + " AND ".join(where_clauses)
|
|
503
|
-
query += " ORDER BY created_at DESC"
|
|
504
|
-
|
|
505
|
-
cursor.execute(query, params)
|
|
506
|
-
rows = cursor.fetchall()
|
|
507
|
-
|
|
508
|
-
conn.close()
|
|
509
|
-
|
|
510
|
-
return [json.loads(row[0]) for row in rows]
|
|
511
|
-
|
|
512
|
-
def save_metric(self, run_id: str, name: str, value: float, step: int = 0) -> None:
|
|
513
|
-
"""Save a single metric value.
|
|
514
|
-
|
|
515
|
-
Args:
|
|
516
|
-
run_id: Run identifier
|
|
517
|
-
name: Metric name
|
|
518
|
-
value: Metric value
|
|
519
|
-
step: Training step/iteration
|
|
520
|
-
"""
|
|
521
|
-
conn = sqlite3.connect(self.db_path)
|
|
522
|
-
cursor = conn.cursor()
|
|
523
|
-
|
|
524
|
-
cursor.execute(
|
|
525
|
-
"INSERT INTO metrics (run_id, name, value, step) VALUES (?, ?, ?, ?)",
|
|
526
|
-
(run_id, name, value, step),
|
|
527
|
-
)
|
|
528
|
-
|
|
529
|
-
conn.commit()
|
|
530
|
-
conn.close()
|
|
531
|
-
|
|
532
|
-
def get_metrics(self, run_id: str, name: str | None = None) -> list[dict]:
|
|
533
|
-
"""Get metrics for a run.
|
|
534
|
-
|
|
535
|
-
Args:
|
|
536
|
-
run_id: Run identifier
|
|
537
|
-
name: Optional metric name filter
|
|
538
|
-
|
|
539
|
-
Returns:
|
|
540
|
-
List of metric dictionaries
|
|
541
|
-
"""
|
|
542
|
-
conn = sqlite3.connect(self.db_path)
|
|
543
|
-
cursor = conn.cursor()
|
|
544
|
-
|
|
545
|
-
if name:
|
|
546
|
-
cursor.execute(
|
|
547
|
-
"SELECT name, value, step, timestamp FROM metrics WHERE run_id = ? AND name = ? ORDER BY step",
|
|
548
|
-
(run_id, name),
|
|
549
|
-
)
|
|
550
|
-
else:
|
|
551
|
-
cursor.execute(
|
|
552
|
-
"SELECT name, value, step, timestamp FROM metrics WHERE run_id = ? ORDER BY step",
|
|
553
|
-
(run_id,),
|
|
554
|
-
)
|
|
555
|
-
|
|
556
|
-
rows = cursor.fetchall()
|
|
557
|
-
conn.close()
|
|
558
|
-
|
|
559
|
-
return [{"name": row[0], "value": row[1], "step": row[2], "timestamp": row[3]} for row in rows]
|
|
560
|
-
|
|
561
|
-
def save_experiment(self, experiment_id: str, name: str, description: str = "", tags: dict = None) -> None:
|
|
562
|
-
"""Save experiment metadata.
|
|
563
|
-
|
|
564
|
-
Args:
|
|
565
|
-
experiment_id: Unique experiment identifier
|
|
566
|
-
name: Experiment name
|
|
567
|
-
description: Experiment description
|
|
568
|
-
tags: Experiment tags
|
|
569
|
-
"""
|
|
570
|
-
conn = sqlite3.connect(self.db_path)
|
|
571
|
-
cursor = conn.cursor()
|
|
572
|
-
|
|
573
|
-
cursor.execute(
|
|
574
|
-
"""
|
|
575
|
-
INSERT OR REPLACE INTO experiments
|
|
576
|
-
(experiment_id, name, description, tags)
|
|
577
|
-
VALUES (?, ?, ?, ?)
|
|
578
|
-
""",
|
|
579
|
-
(
|
|
580
|
-
experiment_id,
|
|
581
|
-
name,
|
|
582
|
-
description,
|
|
583
|
-
json.dumps(tags or {}),
|
|
584
|
-
),
|
|
585
|
-
)
|
|
586
|
-
|
|
587
|
-
conn.commit()
|
|
588
|
-
conn.close()
|
|
589
|
-
|
|
590
|
-
def log_experiment_run(
|
|
591
|
-
self,
|
|
592
|
-
experiment_id: str,
|
|
593
|
-
run_id: str,
|
|
594
|
-
metrics: dict = None,
|
|
595
|
-
parameters: dict = None,
|
|
596
|
-
) -> None:
|
|
597
|
-
"""Log a run to an experiment.
|
|
598
|
-
|
|
599
|
-
Args:
|
|
600
|
-
experiment_id: Experiment identifier
|
|
601
|
-
run_id: Run identifier
|
|
602
|
-
metrics: Metrics from the run
|
|
603
|
-
parameters: Parameters used in the run
|
|
604
|
-
"""
|
|
605
|
-
conn = sqlite3.connect(self.db_path)
|
|
606
|
-
cursor = conn.cursor()
|
|
607
|
-
|
|
608
|
-
cursor.execute(
|
|
609
|
-
"""
|
|
610
|
-
INSERT OR REPLACE INTO experiment_runs
|
|
611
|
-
(experiment_id, run_id, metrics, parameters)
|
|
612
|
-
VALUES (?, ?, ?, ?)
|
|
613
|
-
""",
|
|
614
|
-
(
|
|
615
|
-
experiment_id,
|
|
616
|
-
run_id,
|
|
617
|
-
json.dumps(metrics or {}),
|
|
618
|
-
json.dumps(parameters or {}),
|
|
619
|
-
),
|
|
620
|
-
)
|
|
621
|
-
|
|
622
|
-
conn.commit()
|
|
623
|
-
conn.close()
|
|
624
|
-
|
|
625
|
-
def list_experiments(self) -> list[dict]:
|
|
626
|
-
"""List all experiments.
|
|
627
|
-
|
|
628
|
-
Returns:
|
|
629
|
-
List of experiment dictionaries
|
|
630
|
-
"""
|
|
631
|
-
conn = sqlite3.connect(self.db_path)
|
|
632
|
-
cursor = conn.cursor()
|
|
633
|
-
|
|
634
|
-
cursor.execute(
|
|
635
|
-
"SELECT experiment_id, name, description, tags, created_at FROM experiments ORDER BY created_at DESC",
|
|
636
|
-
)
|
|
637
|
-
rows = cursor.fetchall()
|
|
638
|
-
|
|
639
|
-
experiments = []
|
|
640
|
-
for row in rows:
|
|
641
|
-
# Count runs for each experiment
|
|
642
|
-
cursor.execute("SELECT COUNT(*) FROM experiment_runs WHERE experiment_id = ?", (row[0],))
|
|
643
|
-
run_count = cursor.fetchone()[0]
|
|
644
|
-
|
|
645
|
-
experiments.append(
|
|
646
|
-
{
|
|
647
|
-
"experiment_id": row[0],
|
|
648
|
-
"name": row[1],
|
|
649
|
-
"description": row[2],
|
|
650
|
-
"tags": json.loads(row[3]),
|
|
651
|
-
"created_at": row[4],
|
|
652
|
-
"run_count": run_count,
|
|
653
|
-
},
|
|
654
|
-
)
|
|
655
|
-
conn.close()
|
|
656
|
-
return experiments
|
|
657
|
-
|
|
658
|
-
def update_experiment_project(self, experiment_name: str, project_name: str) -> None:
|
|
659
|
-
"""Update the project for an experiment.
|
|
660
|
-
|
|
661
|
-
Args:
|
|
662
|
-
experiment_name: Name of the experiment
|
|
663
|
-
project_name: New project name
|
|
664
|
-
"""
|
|
665
|
-
conn = sqlite3.connect(self.db_path)
|
|
666
|
-
cursor = conn.cursor()
|
|
667
|
-
|
|
668
|
-
try:
|
|
669
|
-
cursor.execute(
|
|
670
|
-
"UPDATE experiments SET project = ? WHERE name = ?",
|
|
671
|
-
(project_name, experiment_name),
|
|
672
|
-
)
|
|
673
|
-
conn.commit()
|
|
674
|
-
finally:
|
|
675
|
-
conn.close()
|
|
676
|
-
|
|
677
|
-
def get_experiment(self, experiment_id: str) -> dict | None:
|
|
678
|
-
"""Get experiment details.
|
|
679
|
-
|
|
680
|
-
Args:
|
|
681
|
-
experiment_id: Experiment identifier
|
|
682
|
-
|
|
683
|
-
Returns:
|
|
684
|
-
Experiment dictionary or None
|
|
685
|
-
"""
|
|
686
|
-
conn = sqlite3.connect(self.db_path)
|
|
687
|
-
cursor = conn.cursor()
|
|
688
|
-
|
|
689
|
-
cursor.execute(
|
|
690
|
-
"SELECT experiment_id, name, description, tags, created_at FROM experiments WHERE experiment_id = ?",
|
|
691
|
-
(experiment_id,),
|
|
692
|
-
)
|
|
693
|
-
row = cursor.fetchone()
|
|
694
|
-
|
|
695
|
-
if not row:
|
|
696
|
-
conn.close()
|
|
697
|
-
return None
|
|
698
|
-
|
|
699
|
-
experiment = {
|
|
700
|
-
"experiment_id": row[0],
|
|
701
|
-
"name": row[1],
|
|
702
|
-
"description": row[2],
|
|
703
|
-
"tags": json.loads(row[3]),
|
|
704
|
-
"created_at": row[4],
|
|
705
|
-
}
|
|
706
|
-
|
|
707
|
-
# Get runs
|
|
708
|
-
cursor.execute(
|
|
709
|
-
"""
|
|
710
|
-
SELECT er.run_id, er.metrics, er.parameters, er.timestamp, r.status, r.duration
|
|
711
|
-
FROM experiment_runs er
|
|
712
|
-
LEFT JOIN runs r ON er.run_id = r.run_id
|
|
713
|
-
WHERE er.experiment_id = ?
|
|
714
|
-
ORDER BY er.timestamp DESC
|
|
715
|
-
""",
|
|
716
|
-
(experiment_id,),
|
|
717
|
-
)
|
|
718
|
-
|
|
719
|
-
runs = []
|
|
720
|
-
for r in cursor.fetchall():
|
|
721
|
-
runs.append(
|
|
722
|
-
{
|
|
723
|
-
"run_id": r[0],
|
|
724
|
-
"metrics": json.loads(r[1]),
|
|
725
|
-
"parameters": json.loads(r[2]),
|
|
726
|
-
"timestamp": r[3],
|
|
727
|
-
"status": r[4],
|
|
728
|
-
"duration": r[5],
|
|
729
|
-
},
|
|
730
|
-
)
|
|
731
|
-
|
|
732
|
-
experiment["runs"] = runs
|
|
733
|
-
|
|
734
|
-
conn.close()
|
|
735
|
-
return experiment
|
|
736
|
-
|
|
737
|
-
def get_statistics(self) -> dict:
|
|
738
|
-
"""Get database statistics.
|
|
739
|
-
|
|
740
|
-
Returns:
|
|
741
|
-
Dictionary with statistics
|
|
742
|
-
"""
|
|
743
|
-
conn = sqlite3.connect(self.db_path)
|
|
744
|
-
cursor = conn.cursor()
|
|
745
|
-
|
|
746
|
-
stats = {}
|
|
747
|
-
|
|
748
|
-
cursor.execute("SELECT COUNT(*) FROM runs")
|
|
749
|
-
stats["total_runs"] = cursor.fetchone()[0]
|
|
750
|
-
|
|
751
|
-
cursor.execute("SELECT COUNT(*) FROM artifacts")
|
|
752
|
-
stats["total_artifacts"] = cursor.fetchone()[0]
|
|
753
|
-
|
|
754
|
-
cursor.execute("SELECT COUNT(*) FROM metrics")
|
|
755
|
-
stats["total_metrics"] = cursor.fetchone()[0]
|
|
756
|
-
|
|
757
|
-
cursor.execute("SELECT COUNT(DISTINCT pipeline_name) FROM runs")
|
|
758
|
-
stats["total_pipelines"] = cursor.fetchone()[0]
|
|
759
|
-
|
|
760
|
-
cursor.execute("SELECT COUNT(*) FROM experiments")
|
|
761
|
-
stats["total_experiments"] = cursor.fetchone()[0]
|
|
762
|
-
|
|
763
|
-
conn.close()
|
|
764
|
-
|
|
765
|
-
return stats
|
|
766
|
-
|
|
767
|
-
def save_trace_event(self, event: dict) -> None:
|
|
768
|
-
"""Save a trace event.
|
|
769
|
-
|
|
770
|
-
Args:
|
|
771
|
-
event: Trace event dictionary
|
|
772
|
-
"""
|
|
773
|
-
conn = sqlite3.connect(self.db_path)
|
|
774
|
-
cursor = conn.cursor()
|
|
775
|
-
|
|
776
|
-
cursor.execute(
|
|
777
|
-
"""
|
|
778
|
-
INSERT OR REPLACE INTO traces
|
|
779
|
-
(event_id, trace_id, parent_id, event_type, name, inputs, outputs,
|
|
780
|
-
start_time, end_time, duration, status, error, metadata,
|
|
781
|
-
prompt_tokens, completion_tokens, total_tokens, cost, model)
|
|
782
|
-
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
783
|
-
""",
|
|
784
|
-
(
|
|
785
|
-
event["event_id"],
|
|
786
|
-
event["trace_id"],
|
|
787
|
-
event["parent_id"],
|
|
788
|
-
event["event_type"],
|
|
789
|
-
event["name"],
|
|
790
|
-
json.dumps(event.get("inputs", {})),
|
|
791
|
-
json.dumps(event.get("outputs", {})),
|
|
792
|
-
event.get("start_time"),
|
|
793
|
-
event.get("end_time"),
|
|
794
|
-
event.get("duration"),
|
|
795
|
-
event.get("status"),
|
|
796
|
-
event.get("error"),
|
|
797
|
-
json.dumps(event.get("metadata", {})),
|
|
798
|
-
event.get("prompt_tokens", 0),
|
|
799
|
-
event.get("completion_tokens", 0),
|
|
800
|
-
event.get("total_tokens", 0),
|
|
801
|
-
event.get("cost", 0.0),
|
|
802
|
-
event.get("model"),
|
|
803
|
-
),
|
|
804
|
-
)
|
|
805
|
-
|
|
806
|
-
conn.commit()
|
|
807
|
-
conn.close()
|
|
808
|
-
|
|
809
|
-
def get_trace(self, trace_id: str) -> list[dict]:
|
|
810
|
-
"""Get all events for a trace.
|
|
811
|
-
|
|
812
|
-
Args:
|
|
813
|
-
trace_id: Trace identifier
|
|
814
|
-
|
|
815
|
-
Returns:
|
|
816
|
-
List of event dictionaries
|
|
817
|
-
"""
|
|
818
|
-
conn = sqlite3.connect(self.db_path)
|
|
819
|
-
cursor = conn.cursor()
|
|
820
|
-
|
|
821
|
-
cursor.execute(
|
|
822
|
-
"""
|
|
823
|
-
SELECT * FROM traces WHERE trace_id = ? ORDER BY start_time
|
|
824
|
-
""",
|
|
825
|
-
(trace_id,),
|
|
826
|
-
)
|
|
827
|
-
|
|
828
|
-
columns = [description[0] for description in cursor.description]
|
|
829
|
-
rows = cursor.fetchall()
|
|
830
|
-
|
|
831
|
-
events = []
|
|
832
|
-
for row in rows:
|
|
833
|
-
event = dict(zip(columns, row, strict=False))
|
|
834
|
-
# Parse JSON fields
|
|
835
|
-
for field in ["inputs", "outputs", "metadata"]:
|
|
836
|
-
if event[field]:
|
|
837
|
-
with contextlib.suppress(builtins.BaseException):
|
|
838
|
-
event[field] = json.loads(event[field])
|
|
839
|
-
events.append(event)
|
|
840
|
-
|
|
841
|
-
conn.close()
|
|
842
|
-
return events
|
|
843
|
-
|
|
844
|
-
def save_pipeline_definition(self, pipeline_name: str, definition: dict) -> None:
    """Save (create or update) a pipeline definition used for scheduling.

    Args:
        pipeline_name: Unique pipeline identifier.
        definition: JSON-serializable pipeline definition.
    """
    from datetime import datetime

    conn = sqlite3.connect(self.db_path)
    try:
        cursor = conn.cursor()
        now = datetime.now().isoformat()

        # Check-then-write (rather than INSERT OR REPLACE) so that
        # created_at is preserved when an existing definition is updated.
        cursor.execute(
            "SELECT pipeline_name FROM pipeline_definitions WHERE pipeline_name = ?",
            (pipeline_name,),
        )
        exists = cursor.fetchone()

        if exists:
            cursor.execute(
                """
                UPDATE pipeline_definitions
                SET definition = ?, updated_at = ?
                WHERE pipeline_name = ?
                """,
                (json.dumps(definition), now, pipeline_name),
            )
        else:
            cursor.execute(
                """
                INSERT INTO pipeline_definitions (pipeline_name, definition, created_at, updated_at)
                VALUES (?, ?, ?, ?)
                """,
                (pipeline_name, json.dumps(definition), now, now),
            )

        conn.commit()
    finally:
        # Close the connection even if serialization or the write fails.
        conn.close()
|
|
881
|
-
|
|
882
|
-
def update_pipeline_project(self, pipeline_name: str, project_name: str) -> None:
    """Update the project for all runs of a pipeline.

    Args:
        pipeline_name: Name of the pipeline
        project_name: New project name
    """
    conn = sqlite3.connect(self.db_path)
    cursor = conn.cursor()

    try:
        # Reassign the pipeline's runs first.
        cursor.execute(
            "UPDATE runs SET project = ? WHERE pipeline_name = ?",
            (project_name, pipeline_name),
        )

        # Artifacts are linked to runs, so the run update is usually
        # sufficient; mirror the project onto the artifacts table only
        # when it actually carries a project column.
        cursor.execute("PRAGMA table_info(artifacts)")
        artifact_columns = {info[1] for info in cursor.fetchall()}
        if "project" in artifact_columns:
            cursor.execute(
                """
                UPDATE artifacts
                SET project = ?
                WHERE run_id IN (SELECT run_id FROM runs WHERE pipeline_name = ?)
                """,
                (project_name, pipeline_name),
            )

        conn.commit()
    finally:
        conn.close()
|
|
917
|
-
|
|
918
|
-
def get_pipeline_definition(self, pipeline_name: str) -> dict | None:
|
|
919
|
-
"""Retrieve pipeline definition."""
|
|
920
|
-
conn = sqlite3.connect(self.db_path)
|
|
921
|
-
cursor = conn.cursor()
|
|
922
|
-
cursor.execute(
|
|
923
|
-
"SELECT definition FROM pipeline_definitions WHERE pipeline_name = ?",
|
|
924
|
-
(pipeline_name,),
|
|
925
|
-
)
|
|
926
|
-
row = cursor.fetchone()
|
|
927
|
-
conn.close()
|
|
928
|
-
|
|
929
|
-
if row:
|
|
930
|
-
return json.loads(row[0])
|
|
931
|
-
return None
|
|
52
|
+
# Alias for backward compatibility: older callers import SQLiteMetadataStore.
# NOTE(review): assumes SQLMetadataStore is defined earlier in this file —
# confirm this line appears after that class definition.
SQLiteMetadataStore = SQLMetadataStore
|