alchemist-nrel: alchemist_nrel-0.2.1-py3-none-any.whl → alchemist_nrel-0.3.0-py3-none-any.whl
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as published in their public registries.
- alchemist_core/__init__.py +14 -7
- alchemist_core/acquisition/botorch_acquisition.py +14 -6
- alchemist_core/audit_log.py +594 -0
- alchemist_core/data/experiment_manager.py +69 -5
- alchemist_core/models/botorch_model.py +6 -4
- alchemist_core/models/sklearn_model.py +44 -6
- alchemist_core/session.py +600 -8
- alchemist_core/utils/doe.py +200 -0
- {alchemist_nrel-0.2.1.dist-info → alchemist_nrel-0.3.0.dist-info}/METADATA +57 -40
- alchemist_nrel-0.3.0.dist-info/RECORD +66 -0
- {alchemist_nrel-0.2.1.dist-info → alchemist_nrel-0.3.0.dist-info}/entry_points.txt +1 -0
- {alchemist_nrel-0.2.1.dist-info → alchemist_nrel-0.3.0.dist-info}/top_level.txt +1 -0
- api/main.py +19 -3
- api/models/requests.py +71 -0
- api/models/responses.py +144 -0
- api/routers/experiments.py +117 -5
- api/routers/sessions.py +329 -10
- api/routers/visualizations.py +10 -5
- api/services/session_store.py +210 -54
- api/static/NEW_ICON.ico +0 -0
- api/static/NEW_ICON.png +0 -0
- api/static/NEW_LOGO_DARK.png +0 -0
- api/static/NEW_LOGO_LIGHT.png +0 -0
- api/static/assets/api-vcoXEqyq.js +1 -0
- api/static/assets/index-C0_glioA.js +4084 -0
- api/static/assets/index-CB4V1LI5.css +1 -0
- api/static/index.html +14 -0
- api/static/vite.svg +1 -0
- run_api.py +55 -0
- ui/gpr_panel.py +7 -2
- ui/notifications.py +197 -10
- ui/ui.py +1117 -68
- ui/variables_setup.py +47 -2
- ui/visualizations.py +60 -3
- alchemist_nrel-0.2.1.dist-info/RECORD +0 -54
- {alchemist_nrel-0.2.1.dist-info → alchemist_nrel-0.3.0.dist-info}/WHEEL +0 -0
- {alchemist_nrel-0.2.1.dist-info → alchemist_nrel-0.3.0.dist-info}/licenses/LICENSE +0 -0
api/routers/sessions.py
CHANGED

```diff
@@ -2,13 +2,21 @@
 Sessions router - Session lifecycle management.
 """
 
-from fastapi import APIRouter, HTTPException, status, UploadFile, File
-from fastapi.responses import Response
-from ..models.
+from fastapi import APIRouter, HTTPException, status, UploadFile, File, Depends
+from fastapi.responses import Response, FileResponse, JSONResponse
+from ..models.requests import UpdateMetadataRequest, LockDecisionRequest
+from ..models.responses import (
+    SessionCreateResponse, SessionInfoResponse, SessionStateResponse,
+    SessionMetadataResponse, AuditLogResponse, AuditEntryResponse, LockDecisionResponse
+)
 from ..services import session_store
 from ..dependencies import get_session
+from alchemist_core.session import OptimizationSession
 from datetime import datetime
 import logging
+import json
+from pathlib import Path
+import tempfile
 
 logger = logging.getLogger(__name__)
 
@@ -50,6 +58,36 @@ async def get_session_info(session_id: str):
     return SessionInfoResponse(**info)
 
 
+@router.get("/sessions/{session_id}/state", response_model=SessionStateResponse)
+async def get_session_state(
+    session_id: str,
+    session: OptimizationSession = Depends(get_session)
+):
+    """
+    Get current session state for monitoring autonomous optimization.
+
+    Returns key metrics for dashboard displays or autonomous controllers
+    to monitor optimization progress without retrieving full session data.
+    """
+    # Get session metrics
+    n_variables = len(session.search_space.variables)
+    n_experiments = len(session.experiment_manager.df)
+    model_trained = session.model is not None
+
+    # Get last suggestion if available
+    last_suggestion = None
+    if hasattr(session, '_last_suggestion') and session._last_suggestion:
+        last_suggestion = session._last_suggestion
+
+    return SessionStateResponse(
+        session_id=session_id,
+        n_variables=n_variables,
+        n_experiments=n_experiments,
+        model_trained=model_trained,
+        last_suggestion=last_suggestion
+    )
+
+
 @router.delete("/sessions/{session_id}", status_code=status.HTTP_204_NO_CONTENT)
 async def delete_session(session_id: str):
     """
@@ -89,13 +127,30 @@ async def extend_session(session_id: str, hours: int = 24):
     }
 
 
+@router.post("/sessions/{session_id}/save", status_code=status.HTTP_200_OK)
+async def save_session_server_side(session_id: str):
+    """
+    Persist the current in-memory session to the server-side session file.
+
+    This allows the web UI to save changes directly to the session store file
+    instead of triggering a browser download.
+    """
+    success = session_store.persist_session_to_disk(session_id)
+    if not success:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"Session {session_id} not found or failed to save"
+        )
+    return {"message": "Session persisted to server storage"}
+
+
 @router.get("/sessions/{session_id}/export")
 async def export_session(session_id: str):
     """
     Export a session for download.
 
-    Downloads the complete session state as a .
-    reimported later.
+    Downloads the complete session state as a .json file that can be
+    reimported later or used in desktop application.
     """
     session_data = session_store.export_session(session_id)
     if session_data is None:
@@ -106,9 +161,9 @@ async def export_session(session_id: str):
 
     return Response(
         content=session_data,
-        media_type="application/
+        media_type="application/json",
         headers={
-            "Content-Disposition": f"attachment; filename=session_{session_id}.
+            "Content-Disposition": f"attachment; filename=session_{session_id}.json"
         }
     )
 
@@ -118,12 +173,14 @@ async def import_session(file: UploadFile = File(...)):
     """
    Import a previously exported session.
 
-    Uploads a .
-    A new session ID will be generated.
+    Uploads a .json session file and creates a new session with the imported data.
+    A new session ID will be generated. Compatible with desktop application sessions.
     """
     try:
         session_data = await file.read()
-
+        # Decode bytes to string for JSON
+        session_json = session_data.decode('utf-8')
+        session_id = session_store.import_session(session_json)
 
         if session_id is None:
             raise HTTPException(
@@ -144,3 +201,265 @@ async def import_session(file: UploadFile = File(...)):
             status_code=status.HTTP_400_BAD_REQUEST,
             detail=f"Failed to import session: {str(e)}"
         )
+
+
+# ============================================================
+# Metadata Management Endpoints
+# ============================================================
+
+@router.get("/sessions/{session_id}/metadata", response_model=SessionMetadataResponse)
+async def get_metadata(
+    session_id: str,
+    session: OptimizationSession = Depends(get_session)
+):
+    """
+    Get session metadata.
+
+    Returns the session's user-friendly name, description, tags, and timestamps.
+    """
+    return SessionMetadataResponse(
+        session_id=session.metadata.session_id,
+        name=session.metadata.name,
+        created_at=session.metadata.created_at,
+        last_modified=session.metadata.last_modified,
+        description=session.metadata.description,
+        tags=session.metadata.tags
+    )
+
+
+@router.patch("/sessions/{session_id}/metadata", response_model=SessionMetadataResponse)
+async def update_metadata(
+    session_id: str,
+    request: UpdateMetadataRequest,
+    session: OptimizationSession = Depends(get_session)
+):
+    """
+    Update session metadata.
+
+    Update the session's name, description, and/or tags. Only provided fields
+    will be updated; omitted fields remain unchanged.
+    """
+    session.update_metadata(
+        name=request.name,
+        description=request.description,
+        tags=request.tags
+    )
+
+    return SessionMetadataResponse(
+        session_id=session.metadata.session_id,
+        name=session.metadata.name,
+        created_at=session.metadata.created_at,
+        last_modified=session.metadata.last_modified,
+        description=session.metadata.description,
+        tags=session.metadata.tags
+    )
+
+
+# ============================================================
+# Audit Log Endpoints
+# ============================================================
+
+@router.get("/sessions/{session_id}/audit", response_model=AuditLogResponse)
+async def get_audit_log(
+    session_id: str,
+    entry_type: str = None,
+    session: OptimizationSession = Depends(get_session)
+):
+    """
+    Get audit log entries.
+
+    Retrieves the complete audit trail or filters by entry type.
+
+    Args:
+        session_id: Session identifier
+        entry_type: Optional filter ('data_locked', 'model_locked', 'acquisition_locked')
+    """
+    if entry_type:
+        entries = session.audit_log.get_entries(entry_type)
+    else:
+        entries = session.audit_log.get_entries()
+
+    return AuditLogResponse(
+        entries=[AuditEntryResponse(**e.to_dict()) for e in entries],
+        n_entries=len(entries)
+    )
+
+
+@router.post("/sessions/{session_id}/audit/lock", response_model=LockDecisionResponse)
+async def lock_decision(
+    session_id: str,
+    request: LockDecisionRequest,
+    session: OptimizationSession = Depends(get_session)
+):
+    """
+    Lock in a decision to the audit log.
+
+    Creates an immutable audit entry for data, model, or acquisition decisions.
+    This should be called when the user is satisfied with their configuration
+    and ready to commit the decision to the audit trail.
+
+    Args:
+        session_id: Session identifier
+        request: Lock decision request
+    """
+    try:
+        if request.lock_type == "data":
+            entry = session.lock_data(notes=request.notes or "")
+            message = "Data decision locked successfully"
+
+        elif request.lock_type == "model":
+            entry = session.lock_model(notes=request.notes or "")
+            message = "Model decision locked successfully"
+
+        elif request.lock_type == "acquisition":
+            if not request.strategy or not request.parameters or not request.suggestions:
+                raise HTTPException(
+                    status_code=status.HTTP_400_BAD_REQUEST,
+                    detail="Acquisition lock requires strategy, parameters, and suggestions"
+                )
+            entry = session.lock_acquisition(
+                strategy=request.strategy,
+                parameters=request.parameters,
+                suggestions=request.suggestions,
+                notes=request.notes or ""
+            )
+            message = "Acquisition decision locked successfully"
+
+        else:
+            raise HTTPException(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                detail=f"Invalid lock_type: {request.lock_type}"
+            )
+
+        return LockDecisionResponse(
+            success=True,
+            entry=AuditEntryResponse(**entry.to_dict()),
+            message=message
+        )
+
+    except ValueError as e:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail=str(e)
+        )
+
+
+@router.get("/sessions/{session_id}/audit/export")
+async def export_audit_markdown(
+    session_id: str,
+    session: OptimizationSession = Depends(get_session)
+):
+    """
+    Export audit log as markdown.
+
+    Returns the audit trail formatted as markdown for publication methods sections.
+    """
+    markdown = session.export_audit_markdown()
+
+    return Response(
+        content=markdown,
+        media_type="text/markdown",
+        headers={
+            "Content-Disposition": f"attachment; filename=audit_log_{session_id}.md"
+        }
+    )
+
+
+# ============================================================
+# Session File Management (JSON Format)
+# ============================================================
+
+@router.get("/sessions/{session_id}/download")
+async def download_session(
+    session_id: str,
+    session: OptimizationSession = Depends(get_session)
+):
+    """
+    Download session as JSON file.
+
+    Downloads the complete session state as a .json file with user-friendly
+    naming support. The file includes metadata, audit log, search space,
+    experiments, and configuration.
+    """
+    # Create temporary file
+    with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
+        temp_path = f.name
+
+    try:
+        # Save session to temp file
+        session.save_session(temp_path)
+
+        # Use session name for filename (sanitized)
+        filename = session.metadata.name.replace(" ", "_").replace("/", "_")
+        filename = f"{filename}.json"
+
+        return FileResponse(
+            path=temp_path,
+            media_type="application/json",
+            filename=filename,
+            headers={"Content-Disposition": f"attachment; filename={filename}"}
+        )
+    except Exception as e:
+        # Clean up temp file on error
+        Path(temp_path).unlink(missing_ok=True)
+        raise HTTPException(
+            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+            detail=f"Failed to export session: {str(e)}"
+        )
+
+
+@router.post("/sessions/upload", response_model=SessionCreateResponse, status_code=status.HTTP_201_CREATED)
+async def upload_session(file: UploadFile = File(...)):
+    """
+    Upload and restore a session from JSON file.
+
+    Uploads a .json session file and creates a new session with the restored data.
+    A new session ID will be generated for API use, but the original session ID
+    is preserved in the metadata.
+    """
+    try:
+        # Save uploaded file to temp location
+        with tempfile.NamedTemporaryFile(mode='wb', suffix='.json', delete=False) as f:
+            content = await file.read()
+            f.write(content)
+            temp_path = f.name
+
+        try:
+            # Load session from file without retraining
+            loaded_session = OptimizationSession.load_session(temp_path, retrain_on_load=False)
+
+            # Create new session in store
+            new_session_id = session_store.create()
+
+            # Replace the session object with loaded one and align metadata
+            try:
+                loaded_session.metadata.session_id = new_session_id
+            except Exception:
+                pass
+
+            session_store._sessions[new_session_id]["session"] = loaded_session
+
+            # Update last accessed
+            session_store._sessions[new_session_id]["last_accessed"] = datetime.now()
+
+            # Persist to disk
+            session_store._save_to_disk(new_session_id)
+
+            session_info = session_store.get_info(new_session_id)
+
+            return SessionCreateResponse(
+                session_id=new_session_id,
+                created_at=session_info["created_at"],
+                expires_at=session_info["expires_at"]
+            )
+
+        finally:
+            # Clean up temp file
+            Path(temp_path).unlink(missing_ok=True)
+
+    except Exception as e:
+        logger.error(f"Failed to upload session: {e}")
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail=f"Failed to upload session: {str(e)}"
+        )
```
api/routers/visualizations.py
CHANGED

```diff
@@ -192,12 +192,17 @@ async def get_contour_data(
     grid_df = pd.DataFrame(grid_points)
 
     # CRITICAL FIX: Reorder columns to match training data
-    # The model was trained with a specific column order, we must match it
+    # The model was trained with a specific column order, we must match it.
+    # Exclude metadata columns that are part of the experiments table but
+    # are not model input features (e.g., Iteration, Reason, Output, Noise).
     train_data = session.experiment_manager.get_data()
-
-
-
-
+    metadata_cols = {'Iteration', 'Reason', 'Output', 'Noise'}
+    feature_cols = [col for col in train_data.columns if col not in metadata_cols]
+
+    # Safely align the prediction grid to the model feature order.
+    # Use reindex so missing columns (shouldn't happen) are filled with the
+    # midpoint/defaults the grid already supplies; this avoids KeyError.
+    grid_df = grid_df.reindex(columns=feature_cols)
 
     # IMPORTANT: The model's predict() method handles preprocessing internally
     # (including categorical encoding), so we can pass the raw DataFrame directly
```
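The fix above leans on pandas `reindex` to put the prediction grid's columns in the training feature order, rather than selecting columns directly. A standalone sketch of that behavior (column names here are illustrative, not taken from the package):

```python
# Illustrative only: shows how reindex() aligns a prediction grid to the
# training feature order after dropping non-feature metadata columns.
import pandas as pd

train_data = pd.DataFrame({
    "Temperature": [300, 320],
    "Pressure": [1.0, 2.0],
    "Output": [0.41, 0.67],  # target column, not a model input
    "Iteration": [1, 2],     # metadata, not a model input
})
grid_df = pd.DataFrame({"Pressure": [1.5, 1.5], "Temperature": [305, 315]})

metadata_cols = {"Iteration", "Reason", "Output", "Noise"}
feature_cols = [c for c in train_data.columns if c not in metadata_cols]

# reindex reorders to the training order; a genuinely missing feature would
# appear as a NaN column instead of raising a KeyError.
grid_df = grid_df.reindex(columns=feature_cols)
print(grid_df.columns.tolist())  # ['Temperature', 'Pressure']
```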
|