alchemist-nrel 0.3.1__py3-none-any.whl → 0.3.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32) hide show
  1. alchemist_core/__init__.py +2 -2
  2. alchemist_core/acquisition/botorch_acquisition.py +83 -126
  3. alchemist_core/data/experiment_manager.py +181 -12
  4. alchemist_core/models/botorch_model.py +292 -63
  5. alchemist_core/models/sklearn_model.py +145 -13
  6. alchemist_core/session.py +3330 -31
  7. alchemist_core/utils/__init__.py +3 -1
  8. alchemist_core/utils/acquisition_utils.py +60 -0
  9. alchemist_core/visualization/__init__.py +45 -0
  10. alchemist_core/visualization/helpers.py +130 -0
  11. alchemist_core/visualization/plots.py +1449 -0
  12. {alchemist_nrel-0.3.1.dist-info → alchemist_nrel-0.3.2.dist-info}/METADATA +13 -13
  13. {alchemist_nrel-0.3.1.dist-info → alchemist_nrel-0.3.2.dist-info}/RECORD +31 -26
  14. {alchemist_nrel-0.3.1.dist-info → alchemist_nrel-0.3.2.dist-info}/WHEEL +1 -1
  15. api/main.py +1 -1
  16. api/models/requests.py +52 -0
  17. api/models/responses.py +79 -2
  18. api/routers/experiments.py +333 -8
  19. api/routers/sessions.py +84 -9
  20. api/routers/visualizations.py +6 -4
  21. api/routers/websocket.py +2 -2
  22. api/services/session_store.py +295 -71
  23. api/static/assets/index-B6Cf6s_b.css +1 -0
  24. api/static/assets/{index-DWfIKU9j.js → index-B7njvc9r.js} +201 -196
  25. api/static/index.html +2 -2
  26. ui/gpr_panel.py +11 -5
  27. ui/target_column_dialog.py +299 -0
  28. ui/ui.py +52 -5
  29. api/static/assets/index-sMIa_1hV.css +0 -1
  30. {alchemist_nrel-0.3.1.dist-info → alchemist_nrel-0.3.2.dist-info}/entry_points.txt +0 -0
  31. {alchemist_nrel-0.3.1.dist-info → alchemist_nrel-0.3.2.dist-info}/licenses/LICENSE +0 -0
  32. {alchemist_nrel-0.3.1.dist-info → alchemist_nrel-0.3.2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,1449 @@
1
+ """
2
+ Pure plotting functions for ALchemist visualizations.
3
+
4
+ All functions are framework-agnostic and return matplotlib Figure/Axes objects.
5
+ They accept optional axes for embedding in existing figures.
6
+ """
7
+
8
+ from typing import Optional, Dict, Tuple, Union, List, Any
9
+ import numpy as np
10
+ import matplotlib.pyplot as plt
11
+ from matplotlib.figure import Figure
12
+ from matplotlib.axes import Axes
13
+
14
+
15
def create_parity_plot(
    y_true: np.ndarray,
    y_pred: np.ndarray,
    y_std: Optional[np.ndarray] = None,
    sigma_multiplier: float = 1.96,
    show_error_bars: bool = True,
    show_metrics: bool = True,
    figsize: Tuple[float, float] = (8, 6),
    dpi: int = 100,
    title: Optional[str] = None,
    ax: Optional[Axes] = None
) -> Tuple[Figure, Axes]:
    """
    Create parity plot of actual vs predicted values.

    Pure plotting function with no session or model dependencies.

    Args:
        y_true: Actual experimental values
        y_pred: Model predicted values
        y_std: Prediction uncertainties (optional)
        sigma_multiplier: Error bar size multiplier (1.96 = 95% CI)
        show_error_bars: Display uncertainty error bars
        show_metrics: Include RMSE/MAE/R² in title
        figsize: Figure size (width, height) in inches
        dpi: Resolution in dots per inch
        title: Custom title (auto-generated if None and show_metrics=True)
        ax: Existing axes to plot on (creates new if None)

    Returns:
        Tuple of (Figure, Axes) objects

    Example:
        >>> fig, ax = create_parity_plot(y_true, y_pred, y_std)
        >>> fig.savefig('parity.png', dpi=300, bbox_inches='tight')
    """
    # If ax provided, use its figure; otherwise create new.
    # tight_layout is only applied to figures we own, so embedding callers
    # keep control of their own layout.
    if ax is None:
        fig, ax = plt.subplots(figsize=figsize, dpi=dpi)
        should_tight_layout = True
    else:
        fig = ax.figure
        should_tight_layout = False

    # Calculate metrics if requested
    if show_metrics:
        from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
        rmse = np.sqrt(mean_squared_error(y_true, y_pred))
        mae = mean_absolute_error(y_true, y_pred)
        # R² is undefined for a single observation and r2_score can raise on
        # degenerate inputs (e.g. constant y_true), so fall back to NaN.
        # Narrowed from a bare `except:`, which would also have swallowed
        # KeyboardInterrupt/SystemExit.
        try:
            r2 = r2_score(y_true, y_pred) if len(y_true) > 1 else np.nan
        except Exception:
            r2 = np.nan

    # Plot data with optional error bars
    if show_error_bars and y_std is not None:
        yerr = sigma_multiplier * y_std
        ax.errorbar(y_true, y_pred, yerr=yerr,
                    fmt='o', alpha=0.7, capsize=3, capthick=1,
                    elinewidth=1, markersize=5)
    else:
        ax.scatter(y_true, y_pred, alpha=0.7)

    # Add parity line (y=x) spanning the combined data range
    min_val = min(np.min(y_true), np.min(y_pred))
    max_val = max(np.max(y_true), np.max(y_pred))
    ax.plot([min_val, max_val], [min_val, max_val], 'r--', label='Parity line')

    # Set labels
    ax.set_xlabel("Actual Values")
    ax.set_ylabel("Predicted Values")

    # Create title with metrics
    if title is None and show_metrics:
        # Human-readable CI labels for common sigma multipliers
        ci_labels = {
            1.0: "68% CI",
            1.96: "95% CI",
            2.0: "95.4% CI",
            2.58: "99% CI",
            3.0: "99.7% CI"
        }
        ci_label = ci_labels.get(sigma_multiplier, f"{sigma_multiplier}σ")

        if show_error_bars and y_std is not None:
            title = (f"Cross-Validation Parity Plot\n"
                     f"RMSE: {rmse:.4f}, MAE: {mae:.4f}, R²: {r2:.4f}\n"
                     f"Error bars: ±{sigma_multiplier}σ ({ci_label})")
        else:
            title = (f"Cross-Validation Parity Plot\n"
                     f"RMSE: {rmse:.4f}, MAE: {mae:.4f}, R²: {r2:.4f}")

    if title:
        ax.set_title(title)

    ax.legend()

    if should_tight_layout:
        fig.tight_layout()

    return fig, ax
115
+
116
+
117
def create_contour_plot(
    x_grid: np.ndarray,
    y_grid: np.ndarray,
    predictions_grid: np.ndarray,
    x_var: str,
    y_var: str,
    exp_x: Optional[np.ndarray] = None,
    exp_y: Optional[np.ndarray] = None,
    suggest_x: Optional[np.ndarray] = None,
    suggest_y: Optional[np.ndarray] = None,
    cmap: str = 'viridis',
    use_log_scale: bool = False,
    figsize: Tuple[float, float] = (8, 6),
    dpi: int = 100,
    title: str = "Contour Plot of Model Predictions",
    ax: Optional[Axes] = None
) -> Tuple[Figure, Axes, Any]:
    """
    Create 2D contour plot of model predictions.

    Args:
        x_grid: X-axis meshgrid values (2D array)
        y_grid: Y-axis meshgrid values (2D array)
        predictions_grid: Model predictions on grid (2D array)
        x_var: X variable name for axis label
        y_var: Y variable name for axis label
        exp_x: Experimental X values to overlay (optional)
        exp_y: Experimental Y values to overlay (optional)
        suggest_x: Suggested X values to overlay (optional)
        suggest_y: Suggested Y values to overlay (optional)
        cmap: Matplotlib colormap name
        use_log_scale: Use logarithmic color scale for values spanning orders of magnitude
        figsize: Figure size (width, height) in inches
        dpi: Resolution
        title: Plot title
        ax: Existing axes (creates new if None)

    Returns:
        Tuple of (Figure, Axes, Colorbar) - includes colorbar reference for management

    Example:
        >>> X, Y = np.meshgrid(x_range, y_range)
        >>> Z = model_predictions.reshape(X.shape)
        >>> fig, ax, cbar = create_contour_plot(X, Y, Z, 'temperature', 'pressure')
    """
    if ax is None:
        fig, ax = plt.subplots(figsize=figsize, dpi=dpi)
        should_tight_layout = True
    else:
        fig = ax.figure
        should_tight_layout = False

    # Contour plot with optional log scaling
    if use_log_scale:
        from matplotlib.colors import LogNorm, SymLogNorm

        min_val = predictions_grid.min()
        max_val = predictions_grid.max()

        # For predominantly negative values (like LogEI), negate to make positive
        if max_val <= 0:
            # All negative: flip sign so LogNorm applies, then relabel the colorbar
            plot_grid = -predictions_grid
            vmin_pos = -max_val  # Most negative original value (worst)
            vmax_pos = -min_val  # Closest to zero original value (best)

            # Log-spaced levels in the positive space
            levels = np.logspace(np.log10(vmin_pos), np.log10(vmax_pos), 50)
            contour = ax.contourf(x_grid, y_grid, plot_grid, levels=levels, cmap=cmap,
                                  norm=LogNorm(vmin=vmin_pos, vmax=vmax_pos))

            cbar = fig.colorbar(contour, ax=ax)

            # Custom formatter so tick labels show the original (negative) values
            def fmt(x, pos):
                return f'{-x:.0e}'

            import matplotlib.ticker as ticker
            cbar.ax.yaxis.set_major_formatter(ticker.FuncFormatter(fmt))
        elif min_val >= 0:
            # All positive: use directly (epsilon guards log10(0))
            plot_grid = predictions_grid
            levels = np.logspace(np.log10(min_val + 1e-10), np.log10(max_val), 50)
            contour = ax.contourf(x_grid, y_grid, plot_grid, levels=levels, cmap=cmap, norm=LogNorm())
            cbar = fig.colorbar(contour, ax=ax)
        else:
            # Mixed signs: SymLogNorm handles a linear region around zero
            linthresh = max(abs(min_val), abs(max_val)) * 0.01
            contour = ax.contourf(x_grid, y_grid, predictions_grid, levels=50, cmap=cmap,
                                  norm=SymLogNorm(linthresh=linthresh, vmin=min_val, vmax=max_val))
            cbar = fig.colorbar(contour, ax=ax)
    else:
        # Use explicit vmin/vmax to ensure colorbar spans full data range
        min_val = predictions_grid.min()
        max_val = predictions_grid.max()

        # Generate levels that better handle extreme outliers:
        # percentile-based levels put more contours in the dense region
        percentiles = np.percentile(predictions_grid.ravel(), [0, 1, 5, 10, 25, 50, 75, 90, 95, 99, 100])

        # If there's a large gap between top percentiles, use adaptive levels
        if (percentiles[-1] - percentiles[-2]) > 2 * (percentiles[-2] - percentiles[-3]):
            # Extreme outliers detected: dense levels in the main data range,
            # sparse levels in the outlier range
            main_levels = np.linspace(percentiles[1], percentiles[-2], 40)
            outlier_levels = np.linspace(percentiles[-2], max_val, 10)
            levels = np.concatenate([main_levels, outlier_levels])
        else:
            # Normal distribution of values - use uniform levels
            levels = np.linspace(min_val, max_val, 50)

        contour = ax.contourf(x_grid, y_grid, predictions_grid, levels=levels, cmap=cmap,
                              vmin=min_val, vmax=max_val)
        cbar = fig.colorbar(contour, ax=ax)

    # The original code branched here, but both branches set the identical
    # label - collapsed to a single unconditional call.
    cbar.set_label('Predicted Output')

    # Overlay experimental points
    if exp_x is not None and exp_y is not None and len(exp_x) > 0:
        ax.scatter(exp_x, exp_y, c='white', edgecolors='black',
                   s=80, marker='o', label='Experiments', zorder=5)

    # Overlay suggestion points
    if suggest_x is not None and suggest_y is not None and len(suggest_x) > 0:
        ax.scatter(suggest_x, suggest_y, c='black',
                   s=120, marker='*', label='Suggestions', zorder=6)

    # Labels and title
    ax.set_xlabel(x_var)
    ax.set_ylabel(y_var)
    ax.set_title(title)

    # Legend only when we drew an overlay
    if (exp_x is not None and len(exp_x) > 0) or (suggest_x is not None and len(suggest_x) > 0):
        ax.legend()

    if should_tight_layout:
        fig.tight_layout()

    return fig, ax, cbar
261
+
262
+
263
def create_slice_plot(
    x_values: np.ndarray,
    predictions: np.ndarray,
    x_var: str,
    std: Optional[np.ndarray] = None,
    sigma_bands: Optional[List[float]] = None,
    exp_x: Optional[np.ndarray] = None,
    exp_y: Optional[np.ndarray] = None,
    figsize: Tuple[float, float] = (8, 6),
    dpi: int = 100,
    title: Optional[str] = None,
    ax: Optional[Axes] = None,
    prediction_label: str = 'Prediction',
    line_color: Optional[str] = None,
    line_width: Optional[float] = None
) -> Tuple[Figure, Axes]:
    """
    Create 1D slice plot with uncertainty bands.

    Plots mean model predictions along one variable (all others held fixed),
    optionally surrounded by shaded uncertainty bands at several sigma levels
    and overlaid with experimental observations.

    Args:
        x_values: X-axis values (1D array)
        predictions: Mean predictions (1D array)
        x_var: Variable name for X-axis label
        std: Standard deviations (1D array, optional)
        sigma_bands: List of sigma values for uncertainty bands (e.g., [1.0, 2.0])
        exp_x: Experimental X values to overlay (optional)
        exp_y: Experimental Y values to overlay (optional)
        figsize: Figure size (width, height) in inches
        dpi: Resolution
        title: Custom title (optional)
        ax: Existing axes (creates new if None)
        prediction_label: Label for the prediction line in legend (default: 'Prediction')
        line_color: Color for the prediction line (default: dark blue)
        line_width: Width of the prediction line (default: 2.6)

    Returns:
        Tuple of (Figure, Axes)

    Example:
        >>> x = np.linspace(20, 100, 100)
        >>> y_pred, y_std = model.predict(X_grid, return_std=True)
        >>> fig, ax = create_slice_plot(x, y_pred, 'temperature',
        ...                             std=y_std, sigma_bands=[1.0, 2.0])
    """
    owns_figure = ax is None
    if owns_figure:
        fig, ax = plt.subplots(figsize=figsize, dpi=dpi)
    else:
        fig = ax.figure

    # Shared styling: dark blue-teal mean line, colorblind-safe orange points
    mean_color = "#0B3C5D"
    exp_face = "#E07A00"
    grid_alpha = 0.25
    to_rgba = plt.matplotlib.colors.to_rgba

    # Draw bands first so the mean line stacks on top of them
    if std is not None and sigma_bands:
        ordered = sorted(sigma_bands, reverse=True)  # widest band at the back
        band_count = len(ordered)
        blues = plt.get_cmap("Blues")

        for idx, sigma in enumerate(ordered):
            # Fraction through the band list: 0 for widest, 1 for narrowest
            frac = idx / max(1, band_count - 1)

            # Narrower bands get darker fill (0.30-0.60 of the Blues ramp)
            fill_rgba = blues(0.3 + 0.3 * frac)
            edge_rgba = to_rgba(mean_color, 0.55)

            # Sigmoid opacity: smaller sigma renders more opaque
            band_alpha = 1.0 - 1.0 / (1.0 + np.exp(-sigma + 2.0))

            half_width = sigma * std
            ax.fill_between(
                x_values,
                predictions - half_width,
                predictions + half_width,
                facecolor=to_rgba(fill_rgba, band_alpha),
                edgecolor=edge_rgba,
                linewidth=0.9,
                label=f'±{sigma:.1f}σ',
                zorder=1
            )

    # Mean prediction line drawn above the bands
    ax.plot(
        x_values,
        predictions,
        color=mean_color if line_color is None else line_color,
        linewidth=2.6 if line_width is None else line_width,
        label=prediction_label,
        zorder=3
    )

    # Experimental observations on top of everything
    have_points = exp_x is not None and exp_y is not None and len(exp_x) > 0
    if have_points:
        ax.scatter(
            exp_x,
            exp_y,
            s=70,
            facecolor=exp_face,
            edgecolor='black',
            linewidth=0.9,
            alpha=0.9,
            zorder=4,
            label=f'Experiments (n={len(exp_x)})'
        )

    # Axis decoration
    ax.set_xlabel(x_var)
    ax.set_ylabel('Predicted Output')
    ax.grid(True, alpha=grid_alpha)
    ax.set_axisbelow(True)

    if title:
        ax.set_title(title)

    # Legend ordered as: prediction, bands (narrow to wide), experiments
    handles, labels = ax.get_legend_handles_labels()
    if handles:
        from alchemist_core.visualization.helpers import sort_legend_items
        order = sort_legend_items(labels)
        ax.legend([handles[i] for i in order], [labels[i] for i in order])

    if owns_figure:
        fig.tight_layout()

    return fig, ax
399
+
400
+
401
def create_voxel_plot(
    x_grid: np.ndarray,
    y_grid: np.ndarray,
    z_grid: np.ndarray,
    predictions_grid: np.ndarray,
    x_var: str,
    y_var: str,
    z_var: str,
    exp_x: Optional[np.ndarray] = None,
    exp_y: Optional[np.ndarray] = None,
    exp_z: Optional[np.ndarray] = None,
    suggest_x: Optional[np.ndarray] = None,
    suggest_y: Optional[np.ndarray] = None,
    suggest_z: Optional[np.ndarray] = None,
    cmap: str = 'viridis',
    alpha: float = 0.5,
    use_log_scale: bool = False,
    figsize: Tuple[float, float] = (10, 8),
    dpi: int = 100,
    title: str = "3D Voxel Plot of Model Predictions",
    ax: Optional[Any] = None  # 3D axes
) -> Tuple[Figure, Any]:
    """
    Create 3D voxel plot of model predictions over a variable space.

    Visualizes the model's predicted response surface by varying three variables
    while holding others constant. Rendered as a dense 3D scatter (one marker
    per grid point) colored by the prediction value.

    Args:
        x_grid: X-axis meshgrid values (3D array)
        y_grid: Y-axis meshgrid values (3D array)
        z_grid: Z-axis meshgrid values (3D array)
        predictions_grid: Model predictions on grid (3D array)
        x_var: X variable name for axis label
        y_var: Y variable name for axis label
        z_var: Z variable name for axis label
        exp_x: Experimental X values to overlay (optional)
        exp_y: Experimental Y values to overlay (optional)
        exp_z: Experimental Z values to overlay (optional)
        suggest_x: Suggested X values to overlay (optional)
        suggest_y: Suggested Y values to overlay (optional)
        suggest_z: Suggested Z values to overlay (optional)
        cmap: Matplotlib colormap name
        alpha: Transparency level (0=transparent, 1=opaque)
        use_log_scale: Use logarithmic color scale (requires strictly positive
            predictions; silently falls back to linear otherwise)
        figsize: Figure size (width, height) in inches
        dpi: Resolution
        title: Plot title
        ax: Existing 3D axes (creates new if None)

    Returns:
        Tuple of (Figure, Axes3D) objects

    Example:
        >>> x = np.linspace(0, 10, 20)
        >>> y = np.linspace(0, 10, 20)
        >>> z = np.linspace(0, 10, 20)
        >>> X, Y, Z = np.meshgrid(x, y, z)
        >>> predictions = model.predict(grid)
        >>> fig, ax = create_voxel_plot(X, Y, Z, predictions, 'temp', 'pressure', 'flow')

    Note:
        - Requires 3D arrays for x_grid, y_grid, z_grid, predictions_grid
        - Use alpha to control transparency (lower values show interior structure)
        - Computationally expensive for high-resolution grids
    """
    # Axes3D import registers the '3d' projection on older matplotlib versions
    from mpl_toolkits.mplot3d import Axes3D
    from matplotlib.colors import Normalize, LogNorm

    # Create figure and 3D axes if not provided
    if ax is None:
        fig = plt.figure(figsize=figsize, dpi=dpi)
        ax = fig.add_subplot(111, projection='3d')
        should_tight_layout = True
    else:
        fig = ax.figure
        should_tight_layout = False

    # Normalize predictions for colormapping
    min_val = predictions_grid.min()
    max_val = predictions_grid.max()

    # LogNorm is only valid for strictly positive data
    if use_log_scale and min_val > 0:
        norm = LogNorm(vmin=min_val, vmax=max_val)
    else:
        norm = Normalize(vmin=min_val, vmax=max_val)

    # (Removed dead code: a full RGBA color array was computed here with
    # plt.get_cmap but never used - scatter maps colors itself from
    # c=pred_flat with cmap/norm below.)

    # Flatten the 3D grids for scatter plotting
    x_flat = x_grid.ravel()
    y_flat = y_grid.ravel()
    z_flat = z_grid.ravel()
    pred_flat = predictions_grid.ravel()

    # Calculate marker size based on grid density: denser grids get smaller
    # markers (cube-root scaling since points fill a volume)
    n_points = len(x_flat)
    marker_size = max(10, 1000 / (n_points ** (1/3)))

    # Plot as 3D scatter with colors
    scatter = ax.scatter(
        x_flat, y_flat, z_flat,
        c=pred_flat,
        cmap=cmap,
        norm=norm,
        alpha=alpha,
        s=marker_size,
        marker='o',
        edgecolors='none'
    )

    # Add colorbar
    cbar = fig.colorbar(scatter, ax=ax, pad=0.1, shrink=0.8)
    cbar.set_label('Predicted Output', rotation=270, labelpad=20)

    # Overlay experimental points if provided
    if exp_x is not None and exp_y is not None and exp_z is not None and len(exp_x) > 0:
        ax.scatter(
            exp_x, exp_y, exp_z,
            c='white',
            edgecolors='black',
            s=100,
            marker='o',
            label='Experiments',
            linewidths=2,
            depthshade=True
        )

    # Overlay suggestion points if provided
    if suggest_x is not None and suggest_y is not None and suggest_z is not None and len(suggest_x) > 0:
        ax.scatter(
            suggest_x, suggest_y, suggest_z,
            c='black',
            s=150,
            marker='*',
            label='Suggestions',
            linewidths=2,
            depthshade=True
        )

    # Set labels and title
    ax.set_xlabel(x_var)
    ax.set_ylabel(y_var)
    ax.set_zlabel(z_var)
    ax.set_title(title)

    # Add legend if we have overlays
    if (exp_x is not None and len(exp_x) > 0) or (suggest_x is not None and len(suggest_x) > 0):
        ax.legend(loc='upper left')

    if should_tight_layout:
        fig.tight_layout()

    return fig, ax
565
+
566
+
567
def create_metrics_plot(
    training_sizes: np.ndarray,
    metric_values: np.ndarray,
    metric_name: str,
    figsize: Tuple[float, float] = (8, 6),
    dpi: int = 100,
    ax: Optional[Axes] = None
) -> Tuple[Figure, Axes]:
    """
    Create learning curve plot showing metric vs training size.

    Displays how model performance evolves as experimental observations
    accumulate.

    Args:
        training_sizes: X-axis values (number of observations)
        metric_values: Y-axis values (metric at each training size)
        metric_name: Metric name ('rmse', 'mae', 'r2', 'mape')
        figsize: Figure size (width, height) in inches
        dpi: Resolution
        ax: Existing axes (creates new if None)

    Returns:
        Tuple of (Figure, Axes)

    Example:
        >>> sizes = np.array([5, 6, 7, 8, 9, 10])
        >>> rmse = np.array([0.15, 0.12, 0.10, 0.08, 0.07, 0.06])
        >>> fig, ax = create_metrics_plot(sizes, rmse, 'rmse')
    """
    owns_figure = ax is None
    if owns_figure:
        fig, ax = plt.subplots(figsize=figsize, dpi=dpi)
    else:
        fig = ax.figure

    # Single learning-curve line with point markers
    ax.plot(training_sizes, metric_values, marker='o', linewidth=2,
            markersize=6, color='#2E86AB')

    # Map known metric keys to display labels; unknown names are uppercased
    display_names = {
        'rmse': 'RMSE',
        'mae': 'MAE',
        'r2': 'R²',
        'mape': 'MAPE (%)'
    }
    label = display_names.get(metric_name.lower(), metric_name.upper())

    ax.set_xlabel("Number of Observations")
    ax.set_ylabel(label)
    ax.set_title(f"{label} vs Number of Observations")
    ax.grid(True, alpha=0.3)

    if owns_figure:
        fig.tight_layout()

    return fig, ax
625
+
626
+
627
def create_qq_plot(
    z_scores: np.ndarray,
    figsize: Tuple[float, float] = (8, 6),
    dpi: int = 100,
    show_confidence_bands: bool = True,
    title: str = "Q-Q Plot: Standardized Residuals",
    ax: Optional[Axes] = None
) -> Tuple[Figure, Axes]:
    """
    Create Q-Q plot of standardized residuals.

    Compares the empirical distribution of standardized residuals against a
    standard normal. Points hugging the diagonal indicate well-calibrated
    uncertainty estimates.

    Args:
        z_scores: Standardized residuals (z-scores)
        figsize: Figure size (width, height) in inches
        dpi: Resolution
        show_confidence_bands: Add approximate 95% CI bands for small samples
        title: Plot title
        ax: Existing axes (creates new if None)

    Returns:
        Tuple of (Figure, Axes)

    Example:
        >>> z = (y_true - y_pred) / y_std
        >>> fig, ax = create_qq_plot(z)
    """
    from scipy import stats

    owns_figure = ax is None
    if owns_figure:
        fig, ax = plt.subplots(figsize=figsize, dpi=dpi)
    else:
        fig = ax.figure

    # Empirical quantiles: the sorted residuals
    sample_quantiles = np.sort(z_scores)

    # Theoretical quantiles from the standard normal.
    # NOTE(review): uses evenly spaced probabilities on [0.01, 0.99] rather
    # than conventional plotting positions like (i - 0.5)/n - presumably
    # intentional to avoid infinite tail quantiles; verify with authors.
    theoretical_quantiles = stats.norm.ppf(np.linspace(0.01, 0.99, len(z_scores)))

    ax.scatter(theoretical_quantiles, sample_quantiles, alpha=0.7, s=30,
               edgecolors='k', linewidth=0.5)

    # Reference line y = x for perfect calibration
    lo = min(theoretical_quantiles.min(), sample_quantiles.min())
    hi = max(theoretical_quantiles.max(), sample_quantiles.max())
    ax.plot([lo, hi], [lo, hi], 'r--', linewidth=2, label='Perfect calibration')

    # Rough CI band, only meaningful for small samples
    if show_confidence_bands and len(z_scores) < 100:
        half_band = 1.96 / np.sqrt(len(z_scores))
        ax.fill_between([lo, hi],
                        [lo - half_band, hi - half_band],
                        [lo + half_band, hi + half_band],
                        alpha=0.2, color='red', label='Approximate 95% CI')

    ax.set_xlabel("Theoretical Quantiles (Standard Normal)")
    ax.set_ylabel("Sample Quantiles (Standardized Residuals)")
    ax.set_title(title)
    ax.legend()
    ax.grid(True, alpha=0.3)

    if owns_figure:
        fig.tight_layout()

    return fig, ax
701
+
702
+
703
def create_calibration_plot(
    nominal_probs: np.ndarray,
    empirical_coverage: np.ndarray,
    figsize: Tuple[float, float] = (8, 6),
    dpi: int = 100,
    title: str = "Calibration Curve",
    ax: Optional[Axes] = None
) -> Tuple[Figure, Axes]:
    """
    Create calibration curve (reliability diagram).

    Compares predicted confidence-interval coverage against observed coverage.
    Points on the diagonal are well calibrated; above it, predictions are
    overconfident (intervals too narrow); below it, underconfident (intervals
    too wide).

    Args:
        nominal_probs: Expected coverage probabilities (X-axis)
        empirical_coverage: Observed coverage fractions (Y-axis)
        figsize: Figure size (width, height) in inches
        dpi: Resolution
        title: Plot title
        ax: Existing axes (creates new if None)

    Returns:
        Tuple of (Figure, Axes)

    Example:
        >>> nominal = np.array([0.68, 0.95, 0.99])
        >>> empirical = np.array([0.72, 0.94, 0.98])
        >>> fig, ax = create_calibration_plot(nominal, empirical)
    """
    owns_figure = ax is None
    if owns_figure:
        fig, ax = plt.subplots(figsize=figsize, dpi=dpi)
    else:
        fig = ax.figure

    # Observed coverage curve
    ax.plot(nominal_probs, empirical_coverage, 'o-', linewidth=2,
            markersize=6, label='Empirical coverage', color='steelblue')

    # Diagonal y = x marks perfect calibration
    ax.plot([0, 1], [0, 1], 'r--', linewidth=2, label='Perfect calibration')

    ax.set_xlabel("Nominal Coverage Probability")
    ax.set_ylabel("Empirical Coverage")
    ax.set_title(title)
    ax.legend()
    ax.grid(True, alpha=0.3)

    # Coverage is a probability - pin both axes to [0, 1]
    ax.set_xlim([0, 1])
    ax.set_ylim([0, 1])

    if owns_figure:
        fig.tight_layout()

    return fig, ax
762
+
763
+
764
def create_regret_plot(
    iterations: np.ndarray,
    observed_values: np.ndarray,
    show_cumulative: bool = False,
    goal: str = 'maximize',
    predicted_means: Optional[np.ndarray] = None,
    predicted_stds: Optional[np.ndarray] = None,
    sigma_bands: Optional[List[float]] = None,
    figsize: Tuple[float, float] = (8, 6),
    dpi: int = 100,
    title: Optional[str] = None,
    ax: Optional[Axes] = None
) -> Tuple[Figure, Axes]:
    """
    Create regret curve (best observed value vs iteration).

    Shows the cumulative best result achieved over the course of optimization.
    A flattening curve indicates convergence (no further improvements found).

    This is also known as "simple regret" or "incumbent trajectory" in the
    Bayesian optimization literature.

    Args:
        iterations: Iteration numbers (typically 0, 1, 2, ... or experiment indices)
        observed_values: Actual experimental outputs at each iteration
        show_cumulative: If True, overlay the running best-so-far line
        goal: 'maximize' or 'minimize' - determines how "best" is computed
        predicted_means: Optional array of max(posterior mean) at each iteration
        predicted_stds: Optional array of std at max(posterior mean) at each iteration
        sigma_bands: List of sigma values for uncertainty bands (e.g., [1.0, 2.0])
        figsize: Figure size (width, height) in inches
        dpi: Resolution
        title: Custom title (auto-generated if None)
        ax: Existing axes (creates new if None)

    Returns:
        Tuple of (Figure, Axes)

    Example:
        >>> iterations = np.arange(len(outputs))
        >>> fig, ax = create_regret_plot(iterations, outputs, goal='maximize')

    Notes:
        For maximization, plots cumulative maximum (best so far).
        For minimization, plots cumulative minimum (best so far).
        Curve should increase/decrease monotonically and flatten at convergence.
        If predicted_means/stds provided, also shows model's predicted best with uncertainty.
    """
    if ax is None:
        fig, ax = plt.subplots(figsize=figsize, dpi=dpi)
        should_tight_layout = True
    else:
        fig = ax.figure
        should_tight_layout = False

    # Styling constants (matching slice plots)
    mean_color = "#0B3C5D"  # dark blue-teal, prints well
    exp_face = "#E07A00"    # orange (colorblind-friendly vs blue)
    grid_alpha = 0.25

    ylabel = 'Objective Function Value'

    # Plot cumulative best as line. The accumulate is only needed (and now
    # only computed) when show_cumulative is requested.
    if show_cumulative:
        if goal.lower() == 'maximize':
            cumulative_best = np.maximum.accumulate(observed_values)
        else:
            cumulative_best = np.minimum.accumulate(observed_values)
        ax.plot(iterations, cumulative_best, linewidth=2.5,
                color='#E07A00', label='Best observed', zorder=3)

    # Overlay actual observations as scatter points (same orange as slice plots)
    ax.scatter(iterations, observed_values, s=70, alpha=0.9,
               facecolor=exp_face, edgecolors='black', linewidth=0.9,
               label='All observations', zorder=2)

    # Plot predicted best (max posterior mean) with uncertainty if provided
    if predicted_means is not None:
        if predicted_stds is not None and sigma_bands:
            # Sort largest to smallest for proper layering
            sigma_bands_sorted = sorted(sigma_bands, reverse=True)
            n = len(sigma_bands_sorted)

            # Sequential colormap: same hue, different lightness (matching slice plots)
            cmap = plt.get_cmap("Blues")

            for i, sigma in enumerate(sigma_bands_sorted):
                # i=0 is largest sigma (most transparent), i=n-1 is smallest (most opaque)
                t = i / max(1, n - 1)  # 0..1 ratio

                # Lighter tones for larger sigma, darker for smaller
                face = cmap(0.3 + 0.3 * t)  # 0.30 to 0.60 in Blues colormap
                edge = plt.matplotlib.colors.to_rgba(mean_color, 0.55)

                # Sigmoid-based alpha: smaller sigma → higher alpha (more opaque)
                alpha = 1.0 - 1.0 / (1.0 + np.exp(-sigma + 2.0))

                ax.fill_between(
                    iterations,
                    predicted_means - sigma * predicted_stds,
                    predicted_means + sigma * predicted_stds,
                    facecolor=plt.matplotlib.colors.to_rgba(face, alpha),
                    edgecolor=edge,
                    linewidth=0.9,
                    label=f'±{sigma:.1f}σ',
                    zorder=1
                )

        ax.plot(iterations, predicted_means, linewidth=2.6,
                color=mean_color, linestyle='-',
                label='Max posterior mean', zorder=3)

    # Labels and title
    ax.set_xlabel("Experiment Number")
    ax.set_ylabel(ylabel)

    if title is None:
        title = f"Optimization Progress ({'maximization' if goal.lower() == 'maximize' else 'minimization'})"
    ax.set_title(title)

    ax.grid(True, alpha=grid_alpha)
    ax.set_axisbelow(True)
    ax.legend()

    if should_tight_layout:
        fig.tight_layout()

    return fig, ax
893
+
894
+
895
def create_probability_of_improvement_plot(
    iterations: np.ndarray,
    max_pi_values: np.ndarray,
    figsize: Tuple[float, float] = (8, 6),
    dpi: int = 100,
    title: Optional[str] = None,
    ax: Optional[Axes] = None
) -> Tuple[Figure, Axes]:
    """
    Plot the probability-of-improvement convergence curve.

    At each iteration the maximum probability of improvement (PI) available
    anywhere in the search space is plotted. As the optimizer exhausts the
    promising regions, max(PI) should trend toward zero, which makes this
    curve a useful stopping-criterion diagnostic.

    The values are typically computed retroactively by refitting the GP on
    observations 0:i, evaluating PI over the search space, and taking the max.

    Args:
        iterations: Iteration numbers where PI was evaluated
        max_pi_values: Maximum PI value in search space at each iteration
        figsize: Figure size (width, height) in inches
        dpi: Resolution
        title: Custom title (auto-generated if None)
        ax: Existing axes (creates new if None)

    Returns:
        Tuple of (Figure, Axes)

    Notes:
        - PI values are probabilities, so they lie in [0, 1]
        - A decreasing trend indicates convergence
        - Values near 0 suggest little room for improvement
    """
    # Only call tight_layout on figures this function created itself.
    owns_figure = ax is None
    if owns_figure:
        fig, ax = plt.subplots(figsize=figsize, dpi=dpi)
    else:
        fig = ax.figure

    # Max-PI trajectory with a marker at every evaluated iteration.
    ax.plot(iterations, max_pi_values, linewidth=2.5,
            marker='o', markersize=6, color='#5B9BD5',
            label='Max PI in search space')

    # Horizontal reference levels to help judge convergence at a glance.
    for level, line_color in ((0.5, 'gray'), (0.1, 'orange')):
        ax.axhline(y=level, color=line_color, linestyle='--',
                   linewidth=1, alpha=0.4, label=f'PI = {level}')

    ax.set_xlabel("Iteration")
    ax.set_ylabel("Maximum Probability of Improvement")
    ax.set_title("Probability of Improvement Convergence" if title is None else title)

    # PI is a probability; leave a little headroom above 1.
    ax.set_ylim([0, 1.05])
    ax.legend(loc='best')
    ax.grid(True, alpha=0.3)

    if owns_figure:
        fig.tight_layout()

    return fig, ax
970
+
971
+
972
+ # ==============================================================================
973
+ # VISUALIZATION ARCHITECTURE NOTES
974
+ # ==============================================================================
975
+ #
976
+ # This module implements a systematic framework for visualizing Gaussian Process
977
+ # models in the context of Bayesian optimization. The visualization space can be
978
+ # organized along two axes:
979
+ #
980
+ # 1. WHAT TO VISUALIZE (3 fundamental quantities):
981
+ # ------------------------------------------------
982
+ # a) Posterior Mean
983
+ # - Model's best estimate of the objective function
984
+ # - ✅ IMPLEMENTED: 1D slice plots, 2D contour plots, 3D voxel plots
985
+ #
986
+ # b) Posterior Uncertainty
987
+ # - Model's confidence/uncertainty in predictions
988
+ # - ✅ IMPLEMENTED: 1D slice plots (as bands around mean)
989
+ #      - ✅ IMPLEMENTED: 2D contour plots of uncertainty (create_uncertainty_contour_plot), 3D voxel plots (create_uncertainty_voxel_plot)
990
+ #
991
+ # c) Acquisition Function
992
+ # - Decision-making criteria under uncertainty (EI, PI, UCB, etc.)
993
+ # - Shows where the optimization algorithm will sample next
994
+ # - Same dimensionality as posterior mean/uncertainty
995
+ # - ✅ IMPLEMENTED: 1D slice plots, 2D contour plots (via session API)
996
+ #      - ✅ IMPLEMENTED: 3D voxel plots (create_acquisition_voxel_plot)
997
+ #
998
+ # 2. HOW TO VISUALIZE (dimensionality of visualization):
999
+ # -----------------------------------------------------
1000
+ # a) 1D Slice - Fix all but 1 variable (✅ FULLY IMPLEMENTED)
1001
+ # - create_slice_plot(): Shows posterior mean + uncertainty bands
1002
+ # - Experimental points can be overlaid
1003
+ # - Custom sigma bands: [1.0, 2.0, 3.0] for ±1σ, ±2σ, ±3σ
1004
+ # - Used by: session.plot_slice(), session.plot_acquisition_slice()
1005
+ #
1006
+ # b) 2D Contour - Fix all but 2 variables (✅ MOSTLY IMPLEMENTED)
1007
+ # - create_contour_plot(): Shows posterior mean as colored contours
1008
+ # - Experimental points and suggestions can be overlaid
1009
+ # - Used by: session.plot_contour(), session.plot_acquisition_contour()
1010
+ #      - ✅ IMPLEMENTED: dedicated uncertainty contour via create_uncertainty_contour_plot
1011
+ #
1012
+ # c) 3D Voxel - Fix all but 3 variables (✅ NEWLY IMPLEMENTED - Jan 2026)
1013
+ # - create_voxel_plot(): 3D scatter visualization for response surfaces
1014
+ # - Uses matplotlib 3D scatter with color mapping and transparency
1015
+ # - Adjustable alpha parameter for seeing interior structure
1016
+ # - Experimental points and suggestions can be overlaid
1017
+ # - Used by: session.plot_voxel()
1018
+ # - Requires 3+ continuous (real/integer) variables
1019
+ #      - ✅ IMPLEMENTED: uncertainty via create_uncertainty_voxel_plot, acquisition via create_acquisition_voxel_plot
1020
+ # - Note: Computationally expensive (O(N³) evaluations)
1021
+ #
1022
+ # IMPLEMENTATION STATUS MATRIX (Jan 22, 2026):
1023
+ # -----------------------------------------
1024
+ # 1D Slice 2D Contour 3D Voxel
1025
+ # Posterior Mean ✅ ✅ ✅
1026
+ # Posterior Uncertainty ✅ ✅ ✅
1027
+ # Acquisition Function ✅ ✅ ✅
1028
+ #
1029
+ # COMPLETE! All 9 visualization combinations implemented.
1030
+ #
1031
+ # API CONSISTENCY NOTES:
1032
+ # ----------------------
1033
+ # Current naming conventions:
1034
+ # - plot_slice: show_uncertainty (Union[bool, List[float]])
1035
+ # - plot_parity: show_error_bars (bool)
1036
+ # - All: show_experiments (bool)
1037
+ # - plot_slice: n_points (int) - 1D sampling
1038
+ # - plot_contour: grid_resolution (int) - 2D grid (N×N)
1039
+ # - plot_voxel: grid_resolution (int) - 3D grid (N×N×N, default: 15)
1040
+ # - plot_voxel: alpha (float) - transparency (0-1, default: 0.5)
1041
+ #
1042
+ # These differences are intentional (slice shows bands, parity shows error bars;
1043
+ # 1D/2D/3D sampling densities), but maintain consistency within dimensionality.
1044
+ #
1045
+ # ==============================================================================
1046
+
1047
+
1048
def create_uncertainty_contour_plot(
    x_grid: np.ndarray,
    y_grid: np.ndarray,
    uncertainty_grid: np.ndarray,
    x_var: str,
    y_var: str,
    exp_x: Optional[np.ndarray] = None,
    exp_y: Optional[np.ndarray] = None,
    suggest_x: Optional[np.ndarray] = None,
    suggest_y: Optional[np.ndarray] = None,
    cmap: str = 'Reds',
    figsize: Tuple[float, float] = (8, 6),
    dpi: int = 100,
    title: str = "Posterior Uncertainty (Standard Deviation)",
    ax: Optional[Axes] = None
) -> Tuple[Figure, Axes, Any]:
    """
    Create 2D contour plot of posterior uncertainty (standard deviation).

    Visualizes where the model is most uncertain about predictions, showing
    regions that may benefit from additional sampling. Higher values indicate
    greater uncertainty.

    Args:
        x_grid: X-axis meshgrid values (2D array)
        y_grid: Y-axis meshgrid values (2D array)
        uncertainty_grid: Posterior standard deviations on grid (2D array)
        x_var: X variable name for axis label
        y_var: Y variable name for axis label
        exp_x: Experimental X values to overlay (optional)
        exp_y: Experimental Y values to overlay (optional)
        suggest_x: Suggested X values to overlay (optional)
        suggest_y: Suggested Y values to overlay (optional)
        cmap: Matplotlib colormap name (default: 'Reds' - darker = more uncertain)
        figsize: Figure size (width, height) in inches
        dpi: Resolution
        title: Plot title
        ax: Existing axes (creates new if None)

    Returns:
        Tuple of (Figure, Axes, Colorbar)

    Example:
        >>> X, Y = np.meshgrid(x_range, y_range)
        >>> _, std = model.predict(grid, return_std=True)
        >>> uncertainty = std.reshape(X.shape)
        >>> fig, ax, cbar = create_uncertainty_contour_plot(X, Y, uncertainty, 'temp', 'pressure')

    Note:
        - Useful for identifying under-explored regions
        - Typically used with 'Reds' or 'YlOrRd' colormaps
        - High uncertainty near data gaps is expected
        - Can guide where to sample next for exploration
    """
    if ax is None:
        fig, ax = plt.subplots(figsize=figsize, dpi=dpi)
        should_tight_layout = True
    else:
        fig = ax.figure
        should_tight_layout = False

    # Contour plot of uncertainty
    min_val = uncertainty_grid.min()
    max_val = uncertainty_grid.max()

    # Guard against a degenerate (constant) uncertainty field: contourf
    # requires strictly increasing levels, so pad the range slightly when
    # min == max instead of raising a ValueError.
    if max_val <= min_val:
        pad = abs(float(min_val)) * 1e-3 or 1e-6
        min_val, max_val = min_val - pad, max_val + pad

    levels = np.linspace(min_val, max_val, 50)
    contour = ax.contourf(x_grid, y_grid, uncertainty_grid, levels=levels,
                          cmap=cmap, vmin=min_val, vmax=max_val)

    cbar = fig.colorbar(contour, ax=ax)
    cbar.set_label('Posterior Standard Deviation', rotation=270, labelpad=20)

    # Overlay experimental points
    if exp_x is not None and exp_y is not None and len(exp_x) > 0:
        ax.scatter(exp_x, exp_y, c='white', edgecolors='black',
                   s=80, marker='o', label='Experiments', zorder=5)

    # Overlay suggestion points
    if suggest_x is not None and suggest_y is not None and len(suggest_x) > 0:
        ax.scatter(suggest_x, suggest_y, c='black',
                   s=120, marker='*', label='Suggestions', zorder=6)

    # Labels and title
    ax.set_xlabel(x_var)
    ax.set_ylabel(y_var)
    ax.set_title(title)

    # Legend only when at least one overlay was drawn
    if (exp_x is not None and len(exp_x) > 0) or (suggest_x is not None and len(suggest_x) > 0):
        ax.legend()

    if should_tight_layout:
        fig.tight_layout()

    return fig, ax, cbar
1143
+
1144
+
1145
def create_uncertainty_voxel_plot(
    x_grid: np.ndarray,
    y_grid: np.ndarray,
    z_grid: np.ndarray,
    uncertainty_grid: np.ndarray,
    x_var: str,
    y_var: str,
    z_var: str,
    exp_x: Optional[np.ndarray] = None,
    exp_y: Optional[np.ndarray] = None,
    exp_z: Optional[np.ndarray] = None,
    suggest_x: Optional[np.ndarray] = None,
    suggest_y: Optional[np.ndarray] = None,
    suggest_z: Optional[np.ndarray] = None,
    cmap: str = 'Reds',
    alpha: float = 0.5,
    figsize: Tuple[float, float] = (10, 8),
    dpi: int = 100,
    title: str = "3D Posterior Uncertainty",
    ax: Optional[Any] = None
) -> Tuple[Figure, Any]:
    """
    Create 3D voxel plot of posterior uncertainty over variable space.

    Visualizes where the model is most uncertain in 3D, helping identify
    under-explored regions that may benefit from additional sampling.

    Args:
        x_grid: X-axis meshgrid values (3D array)
        y_grid: Y-axis meshgrid values (3D array)
        z_grid: Z-axis meshgrid values (3D array)
        uncertainty_grid: Posterior standard deviations on grid (3D array)
        x_var: X variable name for axis label
        y_var: Y variable name for axis label
        z_var: Z variable name for axis label
        exp_x: Experimental X values to overlay (optional)
        exp_y: Experimental Y values to overlay (optional)
        exp_z: Experimental Z values to overlay (optional)
        suggest_x: Suggested X values to overlay (optional)
        suggest_y: Suggested Y values to overlay (optional)
        suggest_z: Suggested Z values to overlay (optional)
        cmap: Matplotlib colormap name (default: 'Reds')
        alpha: Transparency level (0=transparent, 1=opaque)
        figsize: Figure size (width, height) in inches
        dpi: Resolution
        title: Plot title
        ax: Existing 3D axes (creates new if None)

    Returns:
        Tuple of (Figure, Axes3D) objects

    Example:
        >>> X, Y, Z = np.meshgrid(x, y, z, indexing='ij')
        >>> _, std = model.predict(grid, return_std=True)
        >>> uncertainty = std.reshape(X.shape)
        >>> fig, ax = create_uncertainty_voxel_plot(X, Y, Z, uncertainty, 'temp', 'press', 'flow')

    Note:
        - Higher values = greater uncertainty
        - Useful for planning exploration strategies
        - Shows data-sparse regions in 3D
        - Computationally expensive (O(N³) evaluations)
    """
    # Axes3D is not referenced directly, but importing it registers the
    # '3d' projection on older matplotlib versions — keep the import.
    from mpl_toolkits.mplot3d import Axes3D
    from matplotlib.colors import Normalize

    if ax is None:
        fig = plt.figure(figsize=figsize, dpi=dpi)
        ax = fig.add_subplot(111, projection='3d')
        should_tight_layout = True
    else:
        fig = ax.figure
        should_tight_layout = False

    # Normalize uncertainty for colormapping
    min_val = uncertainty_grid.min()
    max_val = uncertainty_grid.max()
    norm = Normalize(vmin=min_val, vmax=max_val)

    # Flatten grids into point clouds for the 3D scatter
    x_flat = x_grid.ravel()
    y_flat = y_grid.ravel()
    z_flat = z_grid.ravel()
    uncertainty_flat = uncertainty_grid.ravel()

    # Shrink markers as the grid gets denser (cube-root of point count)
    n_points = len(x_flat)
    marker_size = max(10, 1000 / (n_points ** (1 / 3)))

    # Plot as 3D scatter with colors (cmap string passed directly; no
    # colormap object needs to be materialized here)
    scatter = ax.scatter(
        x_flat, y_flat, z_flat,
        c=uncertainty_flat,
        cmap=cmap,
        norm=norm,
        alpha=alpha,
        s=marker_size,
        marker='o',
        edgecolors='none'
    )

    # Add colorbar
    cbar = fig.colorbar(scatter, ax=ax, pad=0.1, shrink=0.8)
    cbar.set_label('Posterior Standard Deviation', rotation=270, labelpad=20)

    # Overlay experimental points
    if exp_x is not None and exp_y is not None and exp_z is not None and len(exp_x) > 0:
        ax.scatter(
            exp_x, exp_y, exp_z,
            c='white',
            edgecolors='black',
            s=100,
            marker='o',
            label='Experiments',
            linewidths=2,
            depthshade=True
        )

    # Overlay suggestion points
    if suggest_x is not None and suggest_y is not None and suggest_z is not None and len(suggest_x) > 0:
        ax.scatter(
            suggest_x, suggest_y, suggest_z,
            c='blue',
            edgecolors='black',
            s=150,
            marker='*',
            label='Suggestions',
            linewidths=2,
            depthshade=True
        )

    # Set labels and title
    ax.set_xlabel(x_var)
    ax.set_ylabel(y_var)
    ax.set_zlabel(z_var)
    ax.set_title(title)

    # Add legend if we have overlays
    if (exp_x is not None and len(exp_x) > 0) or (suggest_x is not None and len(suggest_x) > 0):
        ax.legend(loc='upper left')

    if should_tight_layout:
        fig.tight_layout()

    return fig, ax
1293
+
1294
+
1295
def create_acquisition_voxel_plot(
    x_grid: np.ndarray,
    y_grid: np.ndarray,
    z_grid: np.ndarray,
    acquisition_grid: np.ndarray,
    x_var: str,
    y_var: str,
    z_var: str,
    exp_x: Optional[np.ndarray] = None,
    exp_y: Optional[np.ndarray] = None,
    exp_z: Optional[np.ndarray] = None,
    suggest_x: Optional[np.ndarray] = None,
    suggest_y: Optional[np.ndarray] = None,
    suggest_z: Optional[np.ndarray] = None,
    cmap: str = 'hot',
    alpha: float = 0.5,
    use_log_scale: bool = False,
    figsize: Tuple[float, float] = (10, 8),
    dpi: int = 100,
    title: str = "3D Acquisition Function",
    ax: Optional[Any] = None
) -> Tuple[Figure, Any]:
    """
    Create 3D voxel plot of acquisition function over variable space.

    Visualizes the acquisition function in 3D, showing "hot spots" where
    the optimization algorithm believes the next experiment should be conducted.
    Higher values indicate more promising regions.

    Args:
        x_grid: X-axis meshgrid values (3D array)
        y_grid: Y-axis meshgrid values (3D array)
        z_grid: Z-axis meshgrid values (3D array)
        acquisition_grid: Acquisition function values on grid (3D array)
        x_var: X variable name for axis label
        y_var: Y variable name for axis label
        z_var: Z variable name for axis label
        exp_x: Experimental X values to overlay (optional)
        exp_y: Experimental Y values to overlay (optional)
        exp_z: Experimental Z values to overlay (optional)
        suggest_x: Suggested X values to overlay (optional)
        suggest_y: Suggested Y values to overlay (optional)
        suggest_z: Suggested Z values to overlay (optional)
        cmap: Matplotlib colormap name (default: 'hot')
        alpha: Transparency level (0=transparent, 1=opaque)
        use_log_scale: Use logarithmic color scale (only applied when all
            acquisition values are strictly positive)
        figsize: Figure size (width, height) in inches
        dpi: Resolution
        title: Plot title
        ax: Existing 3D axes (creates new if None)

    Returns:
        Tuple of (Figure, Axes3D) objects

    Example:
        >>> X, Y, Z = np.meshgrid(x, y, z, indexing='ij')
        >>> acq_values = evaluate_acquisition(model, grid, acq_func='ei')
        >>> acq_grid = acq_values.reshape(X.shape)
        >>> fig, ax = create_acquisition_voxel_plot(X, Y, Z, acq_grid, 'temp', 'press', 'flow')

    Note:
        - Higher values = more promising for next experiment
        - Use with EI, PI, UCB, or other acquisition functions
        - Helps visualize exploration-exploitation tradeoff
        - Suggestions should align with high-value regions
        - Computationally expensive (O(N³) evaluations)
    """
    # Axes3D is not referenced directly, but importing it registers the
    # '3d' projection on older matplotlib versions — keep the import.
    from mpl_toolkits.mplot3d import Axes3D
    from matplotlib.colors import Normalize, LogNorm

    if ax is None:
        fig = plt.figure(figsize=figsize, dpi=dpi)
        ax = fig.add_subplot(111, projection='3d')
        should_tight_layout = True
    else:
        fig = ax.figure
        should_tight_layout = False

    # Normalize acquisition values for colormapping; LogNorm is only valid
    # for strictly positive data, so fall back to linear otherwise.
    min_val = acquisition_grid.min()
    max_val = acquisition_grid.max()

    if use_log_scale and min_val > 0:
        norm = LogNorm(vmin=min_val, vmax=max_val)
    else:
        norm = Normalize(vmin=min_val, vmax=max_val)

    # Flatten grids into point clouds for the 3D scatter
    x_flat = x_grid.ravel()
    y_flat = y_grid.ravel()
    z_flat = z_grid.ravel()
    acq_flat = acquisition_grid.ravel()

    # Shrink markers as the grid gets denser (cube-root of point count)
    n_points = len(x_flat)
    marker_size = max(10, 1000 / (n_points ** (1 / 3)))

    # Plot as 3D scatter with colors (cmap string passed directly; no
    # colormap object needs to be materialized here)
    scatter = ax.scatter(
        x_flat, y_flat, z_flat,
        c=acq_flat,
        cmap=cmap,
        norm=norm,
        alpha=alpha,
        s=marker_size,
        marker='o',
        edgecolors='none'
    )

    # Add colorbar
    cbar = fig.colorbar(scatter, ax=ax, pad=0.1, shrink=0.8)
    cbar.set_label('Acquisition Function Value', rotation=270, labelpad=20)

    # Overlay experimental points
    if exp_x is not None and exp_y is not None and exp_z is not None and len(exp_x) > 0:
        ax.scatter(
            exp_x, exp_y, exp_z,
            c='cyan',
            edgecolors='black',
            s=100,
            marker='o',
            label='Experiments',
            linewidths=2,
            depthshade=True
        )

    # Overlay suggestion points (should be in high-acquisition regions)
    if suggest_x is not None and suggest_y is not None and suggest_z is not None and len(suggest_x) > 0:
        ax.scatter(
            suggest_x, suggest_y, suggest_z,
            c='black',
            s=150,
            marker='*',
            label='Suggestions',
            linewidths=2,
            depthshade=True
        )

    # Set labels and title
    ax.set_xlabel(x_var)
    ax.set_ylabel(y_var)
    ax.set_zlabel(z_var)
    ax.set_title(title)

    # Add legend if we have overlays
    if (exp_x is not None and len(exp_x) > 0) or (suggest_x is not None and len(suggest_x) > 0):
        ax.legend(loc='upper left')

    if should_tight_layout:
        fig.tight_layout()

    return fig, ax