fraclab-sdk 0.1.0-py3-none-any.whl → 0.1.1-py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- README.md +71 -7
- fraclab_sdk/specs/__init__.py +22 -0
- fraclab_sdk/specs/output.py +33 -0
- fraclab_sdk/workbench/Home.py +162 -0
- fraclab_sdk/workbench/__init__.py +4 -0
- fraclab_sdk/workbench/__main__.py +48 -0
- fraclab_sdk/workbench/pages/1_Snapshots.py +546 -0
- fraclab_sdk/workbench/pages/2_Browse.py +513 -0
- fraclab_sdk/workbench/pages/3_Selection.py +464 -0
- fraclab_sdk/workbench/pages/4_Run.py +325 -0
- fraclab_sdk/workbench/pages/5_Results.py +292 -0
- fraclab_sdk/workbench/pages/6_Algorithm_Edit.py +116 -0
- fraclab_sdk/workbench/pages/7_Schema_Edit.py +149 -0
- fraclab_sdk/workbench/pages/8_Output_Edit.py +144 -0
- fraclab_sdk/workbench/pages/9_Export_Algorithm.py +238 -0
- fraclab_sdk/workbench/pages/__init__.py +1 -0
- fraclab_sdk/workbench/ui_styles.py +91 -0
- fraclab_sdk/workbench/utils.py +43 -0
- {fraclab_sdk-0.1.0.dist-info → fraclab_sdk-0.1.1.dist-info}/METADATA +75 -8
- {fraclab_sdk-0.1.0.dist-info → fraclab_sdk-0.1.1.dist-info}/RECORD +22 -5
- {fraclab_sdk-0.1.0.dist-info → fraclab_sdk-0.1.1.dist-info}/entry_points.txt +1 -0
- {fraclab_sdk-0.1.0.dist-info → fraclab_sdk-0.1.1.dist-info}/WHEEL +0 -0
fraclab_sdk/workbench/pages/1_Snapshots.py (new file)

@@ -0,0 +1,546 @@
+"""Snapshots management page."""
+
+import json
+import re
+import shutil
+import tempfile
+from pathlib import Path
+
+import streamlit as st
+
+from fraclab_sdk.algorithm import AlgorithmLibrary
+from fraclab_sdk.config import SDKConfig
+from fraclab_sdk.errors import SnapshotError
+from fraclab_sdk.snapshot import SnapshotLibrary
+from fraclab_sdk.workbench import ui_styles
+from fraclab_sdk.workbench.utils import get_workspace_dir
+
+st.set_page_config(page_title="Snapshots", page_icon="📦", layout="wide")
+st.title("Snapshots")
+
+ui_styles.apply_global_styles()
+
+
+config = SDKConfig()
+config.ensure_dirs()
+WORKSPACE_ROOT = get_workspace_dir(config)
+snapshot_lib = SnapshotLibrary(config)
+algorithm_lib = AlgorithmLibrary(config)
+
+
+BASE_SCHEMA_UTILS = '''"""Schema base utilities for json_schema_extra helpers."""
+
+from typing import Any
+
+
+def show_when_condition(field: str, op: str = "equals", value: Any = True) -> dict[str, Any]:
+    return {"field": field, "op": op, "value": value}
+
+
+def show_when_and(*conditions: dict[str, Any]) -> dict[str, Any]:
+    return {"and": list(conditions)}
+
+
+def show_when_or(*conditions: dict[str, Any]) -> dict[str, Any]:
+    return {"or": list(conditions)}
+
+
+def schema_extra(
+    *,
+    group: str | None = None,
+    order: int | None = None,
+    unit: str | None = None,
+    step: float | None = None,
+    ui_type: str | None = None,
+    collapsible: bool | None = None,
+    show_when: dict[str, Any] | None = None,
+    enum_labels: dict[str, str] | None = None,
+    **kwargs: Any,
+) -> dict[str, Any]:
+    result: dict[str, Any] = {}
+    if group is not None:
+        result["group"] = group
+    if order is not None:
+        result["order"] = order
+    if unit is not None:
+        result["unit"] = unit
+    if step is not None:
+        result["step"] = step
+    if ui_type is not None:
+        result["ui_type"] = ui_type
+    if collapsible is not None:
+        result["collapsible"] = collapsible
+    if show_when is not None:
+        result["show_when"] = show_when
+    if enum_labels is not None:
+        result["enum_labels"] = enum_labels
+    result.update(kwargs)
+    return result
+'''
+
+
+def create_algorithm_scaffold(
+    algo_id: str,
+    code_version: str,
+    contract_version: str,
+    name: str,
+    summary: str,
+    authors: list[dict[str, str]],
+    notes: str | None = None,
+    tags: list[str] | None = None,
+    *,
+    workspace_root: Path,
+) -> Path:
+    """Create a new algorithm workspace with minimal files."""
+    ws_dir = workspace_root / algo_id / code_version
+    if ws_dir.exists():
+        raise FileExistsError(f"Algorithm workspace already exists: {ws_dir}")
+    ws_dir.mkdir(parents=True, exist_ok=True)
+
+    authors_list = [
+        {
+            "name": (a.get("name") or "").strip(),
+            "email": (a.get("email") or "").strip(),
+            "organization": (a.get("organization") or "").strip(),
+        }
+        for a in authors
+    ]
+    authors_list = [a for a in authors_list if any(v for v in a.values())] or [{"name": "unknown"}]
+
+    summary_val = summary.strip() or f"Algorithm {algo_id}"
+    manifest = {
+        "manifestVersion": "1",
+        "algorithmId": algo_id,
+        "name": name or algo_id,
+        "summary": summary_val,
+        "authors": authors_list,
+        "contractVersion": contract_version,
+        "codeVersion": code_version,
+        "notes": notes or None,
+        "tags": tags or None,
+        "files": {
+            "paramsSchemaPath": "dist/params.schema.json",
+            "drsPath": "dist/drs.json",
+            "outputContractPath": "dist/output_contract.json",
+        },
+    }
+
+    (ws_dir / "manifest.json").write_text(json.dumps(manifest, indent=2), encoding="utf-8")
+
+    dist_dir = ws_dir / "dist"
+    dist_dir.mkdir(parents=True, exist_ok=True)
+    (dist_dir / "drs.json").write_text(json.dumps({"datasets": []}, indent=2), encoding="utf-8")
+    (dist_dir / "params.schema.json").write_text(
+        json.dumps({"type": "object", "title": "Parameters", "properties": {}}, indent=2),
+        encoding="utf-8",
+    )
+
+    main_stub = '''"""Algorithm entrypoint."""
+
+from __future__ import annotations
+
+from fraclab_sdk.runtime.data_client import DataClient
+
+
+def run(client: DataClient, params: dict) -> dict:
+    """Implement algorithm logic here."""
+    # TODO: replace with real logic
+    return {"plots": [], "metrics": [], "diagnostics_zip": []}
+'''
+    (ws_dir / "main.py").write_text(main_stub, encoding="utf-8")
+
+    schema_dir = ws_dir / "schema"
+    schema_dir.mkdir(parents=True, exist_ok=True)
+    (schema_dir / "__init__.py").write_text("", encoding="utf-8")
+    (schema_dir / "base.py").write_text(BASE_SCHEMA_UTILS, encoding="utf-8")
+
+    return ws_dir
+
+
+def render_manifest_fields(
+    *,
+    name: str,
+    summary: str,
+    contract_version: str,
+    code_version: str,
+    authors: list[dict],
+    tags: list[str] | None,
+    notes: str | None,
+    key_prefix: str,
+) -> dict:
+    """Render common manifest fields and return updated values."""
+    name_val = st.text_input("Name", value=name, key=f"{key_prefix}_name")
+    summary_val = st.text_area("Summary", value=summary, key=f"{key_prefix}_summary")
+
+    c1, c2 = st.columns(2)
+    with c1:
+        contract_val = st.text_input("Contract Version", value=contract_version, key=f"{key_prefix}_contract")
+    with c2:
+        code_val = st.text_input("Code Version", value=code_version, key=f"{key_prefix}_code")
+
+    st.markdown("---")
+    st.caption("Authors Info")
+
+    authors_entries = authors or [{"name": "", "email": "", "organization": ""}]
+    author_count = st.number_input(
+        "Authors count",
+        min_value=1,
+        max_value=max(len(authors_entries), 10),
+        value=len(authors_entries),
+        step=1,
+        key=f"{key_prefix}_author_count",
+    )
+    # ensure list length matches count
+    if author_count > len(authors_entries):
+        authors_entries.extend([{"name": "", "email": "", "organization": ""}] * (author_count - len(authors_entries)))
+    elif author_count < len(authors_entries):
+        authors_entries = authors_entries[:author_count]
+
+    authors_val: list[dict] = []
+    for idx in range(int(author_count)):
+        author = authors_entries[idx]
+        cols = st.columns(3)
+        with cols[0]:
+            name_a = st.text_input(
+                f"Author {idx+1} Name",
+                value=author.get("name", ""),
+                key=f"{key_prefix}_author_name_{idx}",
+            )
+        with cols[1]:
+            email_a = st.text_input(
+                f"Author {idx+1} Email",
+                value=author.get("email", ""),
+                key=f"{key_prefix}_author_email_{idx}",
+            )
+        with cols[2]:
+            org_a = st.text_input(
+                f"Author {idx+1} Organization",
+                value=author.get("organization", ""),
+                key=f"{key_prefix}_author_org_{idx}",
+            )
+        authors_val.append({"name": name_a, "email": email_a, "organization": org_a})
+
+    st.markdown("---")
+    tags_val = st.text_input(
+        "Tags (comma-separated)",
+        value=",".join(tags or []),
+        key=f"{key_prefix}_tags",
+    )
+    notes_val = st.text_area("Notes", value=notes or "", key=f"{key_prefix}_notes")
+
+    return {
+        "name": name_val,
+        "summary": summary_val,
+        "contract_version": contract_val,
+        "code_version": code_val,
+        "authors": [a for a in authors_val if any(v.strip() for v in a.values())] or [{"name": "unknown"}],
+        "tags": [t.strip() for t in tags_val.split(",") if t.strip()] or None,
+        "notes": notes_val.strip() or None,
+    }
+
+
+# ==========================================
+# Dialogs (Modals)
+# ==========================================
+
+@st.dialog("Create New Algorithm")
+def show_create_algo_dialog():
+    with st.form("create_algo_form"):
+        algo_id = st.text_input("Algorithm ID (e.g. my-algo)", key="create_algo_id")
+        manifest_vals = render_manifest_fields(
+            name="",
+            summary="",
+            contract_version="1.0.0",
+            code_version="0.1.0",
+            authors=[{"name": "Your Name", "email": "", "organization": ""}],
+            tags=None,
+            notes=None,
+            key_prefix="create_algo",
+        )
+
+        f_c1, f_c2 = st.columns([1, 4])
+        with f_c1:
+            # Updated API
+            create_submit = st.form_submit_button("Create", type="primary", width="stretch")
+        with f_c2:
+            pass  # form layout spacer
+
+    if create_submit:
+        if not algo_id or not manifest_vals["code_version"]:
+            st.error("Algorithm ID and Code Version are required.")
+        elif not re.match(r"^[A-Za-z0-9_-]+$", algo_id):
+            st.error("Algorithm ID may only contain letters, numbers, _ or -.")
+        else:
+            try:
+                ws_dir = create_algorithm_scaffold(
+                    algo_id=algo_id,
+                    code_version=manifest_vals["code_version"],
+                    contract_version=manifest_vals["contract_version"],
+                    name=manifest_vals["name"] or algo_id,
+                    summary=manifest_vals["summary"],
+                    authors=manifest_vals["authors"],
+                    notes=manifest_vals["notes"],
+                    tags=manifest_vals["tags"],
+                    workspace_root=WORKSPACE_ROOT,
+                )
+                algo_id, version = algorithm_lib.import_algorithm(ws_dir)
+                st.success(f"Created and imported: {algo_id} v{version}")
+                st.rerun()
+            except FileExistsError as e:
+                st.error(str(e))
+            except Exception as e:
+                st.error(f"Create failed: {e}")
+
+
+@st.dialog("Edit Manifest")
+def show_edit_manifest_dialog(algo_id, version, manifest_path):
+    try:
+        manifest_data = json.loads(manifest_path.read_text(encoding="utf-8"))
+    except Exception as e:
+        st.error(f"Failed to load manifest: {e}")
+        return
+
+    files_section = manifest_data.get("files") or {}
+    default_files = {
+        "paramsSchemaPath": files_section.get("paramsSchemaPath", "dist/params.schema.json"),
+        "drsPath": files_section.get("drsPath", "dist/drs.json"),
+        "outputContractPath": files_section.get("outputContractPath", "dist/output_contract.json"),
+    }
+
+    with st.form(f"manifest_form_{algo_id}_{version}"):
+        manifest_vals = render_manifest_fields(
+            name=manifest_data.get("name", ""),
+            summary=manifest_data.get("summary", ""),
+            contract_version=manifest_data.get("contractVersion", ""),
+            code_version=manifest_data.get("codeVersion", ""),
+            authors=manifest_data.get("authors") or [{"name": ""}],
+            tags=manifest_data.get("tags"),
+            notes=manifest_data.get("notes"),
+            key_prefix=f"manifest_{algo_id}_{version}",
+        )
+        save_submit = st.form_submit_button("Save Changes", type="primary")
+
+    if save_submit:
+        try:
+            manifest_data["name"] = manifest_vals["name"]
+            manifest_data["summary"] = manifest_vals["summary"]
+            manifest_data["contractVersion"] = manifest_vals["contract_version"]
+            manifest_data["codeVersion"] = manifest_vals["code_version"]
+            manifest_data["authors"] = [
+                a for a in manifest_vals["authors"] if any(v.strip() for v in a.values())
+            ] or [{"name": "unknown"}]
+            manifest_data["notes"] = manifest_vals["notes"]
+            manifest_data["tags"] = manifest_vals["tags"]
+            manifest_data["files"] = default_files
+            manifest_path.write_text(json.dumps(manifest_data, indent=2), encoding="utf-8")
+            st.success("Manifest saved successfully")
+            st.rerun()
+        except Exception as e:
+            st.error(f"Save failed: {e}")
+
+
+# ==========================================
+# 1. Snapshot Management
+# ==========================================
+st.subheader("Import Snapshot")
+
+with st.container(border=True):
+    # 1. File Uploader
+    uploaded_snapshot = st.file_uploader(
+        "Upload Snapshot (zip file)",
+        type=["zip"],
+        label_visibility="collapsed",
+        key="snapshot_uploader",
+    )
+
+    # 2. Conditional Layout: Filename + Import Button
+    if uploaded_snapshot is not None:
+        # Column layout: filename on the left, a compact button on the right
+        c_name, c_btn = st.columns([5, 1], gap="small")
+        with c_name:
+            # Vertically center the filename text
+            st.markdown(f"<div style='padding-top: 5px; color: #444;'>📄 <b>{uploaded_snapshot.name}</b> <small>({uploaded_snapshot.size / 1024:.1f} KB)</small></div>", unsafe_allow_html=True)
+        with c_btn:
+            # Updated API
+            if st.button("Import Snapshot", type="primary", key="import_snapshot_btn", width="stretch"):
+                with st.spinner("Importing snapshot..."):
+                    try:
+                        with tempfile.NamedTemporaryFile(delete=False, suffix=".zip") as tmp_file:
+                            tmp_file.write(uploaded_snapshot.getvalue())
+                            tmp_path = Path(tmp_file.name)
+
+                        snapshot_id = snapshot_lib.import_snapshot(tmp_path)
+                        st.success(f"Imported: {snapshot_id}")
+                        tmp_path.unlink(missing_ok=True)
+                        st.rerun()
+                    except SnapshotError as e:
+                        st.error(f"Import failed: {e}")
+                    except Exception as e:
+                        st.error(f"Error: {e}")
+
+st.divider()
+
+st.subheader("Imported Snapshots")
+
+snapshots = snapshot_lib.list_snapshots()
+
+if not snapshots:
+    st.info("No snapshots imported yet")
+else:
+    for snap in snapshots:
+        with st.expander(f"📦 {snap.snapshot_id}", expanded=False):
+            with st.container(border=True):
+                c1, c2, c3 = st.columns([3, 5, 1])
+
+                with c1:
+                    st.caption("Bundle ID")
+                    st.code(snap.bundle_id, language="text")
+                    st.caption("Imported At")
+                    st.markdown(f"**{snap.imported_at}**")
+
+                with c2:
+                    st.caption("DRS (Data Requirement Specification)")
+                    # Load the full Snapshot object to read its DRS
+                    try:
+                        full_snap = snapshot_lib.get_snapshot(snap.snapshot_id)
+                        drs_data = full_snap.drs.model_dump(exclude_none=True)
+                        st.code(json.dumps(drs_data, indent=2, ensure_ascii=False), language="json")
+                    except Exception as e:
+                        st.error(f"Failed to load DRS: {e}")
+
+                with c3:
+                    st.write("")  # Spacer
+                    if st.button("Delete", key=f"del_{snap.snapshot_id}", type="secondary"):
+                        try:
+                            snapshot_lib.delete_snapshot(snap.snapshot_id)
+                            st.success(f"Deleted")
+                            st.rerun()
+                        except Exception as e:
+                            st.error(f"Delete failed: {e}")
+
+st.divider()
+
+# ==========================================
+# 2. Algorithm Management
+# ==========================================
+st.subheader("Algorithm Workspace")
+
+# Action Bar
+col_create, col_spacer = st.columns([1, 4])
+with col_create:
+    # Updated API
+    if st.button("✨ Create New Algorithm", key="create_algo_btn", width="stretch"):
+        show_create_algo_dialog()
+
+# Upload Section (Expanded by Default)
+with st.expander("📤 Import Existing Algorithm", expanded=True):
+    uploaded_algorithm = st.file_uploader(
+        "Upload Algorithm (zip or .py)",
+        type=["zip", "py"],
+        key="algorithm_uploader",
+    )
+
+    if uploaded_algorithm is not None:
+        if uploaded_algorithm.name.endswith(".zip"):
+            if st.button("Import Algorithm Zip", type="primary", key="import_algo_btn_zip"):
+                with st.spinner("Importing algorithm from zip..."):
+                    try:
+                        with tempfile.NamedTemporaryFile(delete=False, suffix=".zip") as tmp_file:
+                            tmp_file.write(uploaded_algorithm.getvalue())
+                            tmp_path = Path(tmp_file.name)
+                        algo_id, version = algorithm_lib.import_algorithm(tmp_path)
+                        st.success(f"Imported algorithm: {algo_id} v{version}")
+                        tmp_path.unlink(missing_ok=True)
+                        st.rerun()
+                    except Exception as e:
+                        st.error(f"Import failed: {e}")
+        elif uploaded_algorithm.name.endswith(".py"):
+            if st.button("Import Single Python File", type="primary", key="import_algo_btn_py"):
+                with st.spinner("Importing algorithm from .py..."):
+                    try:
+                        with tempfile.TemporaryDirectory() as tmp_dir:
+                            tmp_dir_path = Path(tmp_dir)
+                            algo_path = tmp_dir_path / "main.py"
+                            algo_path.write_bytes(uploaded_algorithm.getvalue())
+
+                            (tmp_dir_path / "drs.json").write_text(json.dumps({"datasets": []}, indent=2))
+                            (tmp_dir_path / "params.schema.json").write_text(
+                                json.dumps({"type": "object", "properties": {}}, indent=2)
+                            )
+                            algo_id = uploaded_algorithm.name.removesuffix(".py")
+                            manifest = {
+                                "manifestVersion": "1",
+                                "algorithmId": algo_id,
+                                "codeVersion": "local",
+                                "contractVersion": "1.0.0",
+                                "name": algo_id,
+                                "summary": "Imported from single python file",
+                                "authors": [{"name": "unknown"}],
+                            }
+                            (tmp_dir_path / "manifest.json").write_text(json.dumps(manifest, indent=2))
+
+                            algo_id, version = algorithm_lib.import_algorithm(tmp_dir_path)
+                        st.success(f"Imported algorithm: {algo_id} v{version}")
+                        st.rerun()
+                    except Exception as e:
+                        st.error(f"Import failed: {e}")
+
+st.divider()
+
+st.subheader("Imported Algorithms")
+
+algorithms = algorithm_lib.list_algorithms()
+
+if not algorithms:
+    st.info("No algorithms imported yet")
+else:
+    for algo in algorithms:
+        with st.expander(f"🧩 {algo.algorithm_id} (v{algo.version})", expanded=False):
+            with st.container(border=True):
+                # Header info
+                head_c1, head_c2, head_c3 = st.columns([3, 2, 2])
+                with head_c1:
+                    st.caption("Name")
+                    st.markdown(f"**{algo.name or 'N/A'}**")
+                with head_c2:
+                    st.caption("Contract")
+                    st.code(algo.contract_version or 'N/A', language="text")
+                with head_c3:
+                    st.caption("Imported")
+                    st.text(algo.imported_at)
+
+                st.markdown("---")
+
+                # Content info
+                st.caption("Summary")
+                if getattr(algo, 'summary', ''):
+                    st.info(algo.summary)
+                else:
+                    st.text("No summary provided.")
+
+                notes = getattr(algo, "notes", None)
+                if notes:
+                    st.caption("Notes")
+                    st.write(notes)
+
+                st.markdown("---")
+
+                # Actions
+                act_c1, act_c2 = st.columns([1, 5])
+
+                with act_c1:
+                    manifest_path = algorithm_lib.get_algorithm(algo.algorithm_id, algo.version).directory / "manifest.json"
+                    if manifest_path.exists():
+                        if st.button("📝 Edit Manifest", key=f"edit_manifest_btn_{algo.algorithm_id}_{algo.version}"):
+                            show_edit_manifest_dialog(algo.algorithm_id, algo.version, manifest_path)
+
+                with act_c2:
+                    if st.button("🗑️ Delete", key=f"del_algo_{algo.algorithm_id}_{algo.version}"):
+                        try:
+                            algorithm_lib.delete_algorithm(algo.algorithm_id, algo.version)
+                            ws_dir = WORKSPACE_ROOT / algo.algorithm_id / algo.version
+                            if ws_dir.exists():
+                                shutil.rmtree(ws_dir, ignore_errors=True)
+                            st.success(f"Deleted")
+                            st.rerun()
+                        except Exception as e:
+                            st.error(f"Delete failed: {e}")