spotoptim 0.0.2__tar.gz → 0.0.7__tar.gz
This diff shows the content of publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
Potentially problematic release. This version of spotoptim might be problematic.
- spotoptim-0.0.7/PKG-INFO +229 -0
- spotoptim-0.0.7/README.md +217 -0
- {spotoptim-0.0.2 → spotoptim-0.0.7}/pyproject.toml +8 -5
- spotoptim-0.0.7/src/spotoptim/.DS_Store +0 -0
- spotoptim-0.0.7/src/spotoptim/SpotOptim.py +782 -0
- spotoptim-0.0.7/src/spotoptim/__init__.py +11 -0
- spotoptim-0.0.7/src/spotoptim/surrogate/README.md +149 -0
- spotoptim-0.0.7/src/spotoptim/surrogate/__init__.py +5 -0
- spotoptim-0.0.7/src/spotoptim/surrogate/kriging.py +360 -0
- spotoptim-0.0.2/PKG-INFO +0 -15
- spotoptim-0.0.2/README.md +0 -0
- spotoptim-0.0.2/src/spotoptim/SpotOptim.py +0 -319
- spotoptim-0.0.2/src/spotoptim/__init__.py +0 -2
- {spotoptim-0.0.2 → spotoptim-0.0.7}/src/spotoptim/py.typed +0 -0
spotoptim-0.0.7/PKG-INFO
ADDED

@@ -0,0 +1,229 @@

Metadata-Version: 2.3
Name: spotoptim
Version: 0.0.7
Summary: Add your description here
Author: bartzbeielstein
Author-email: bartzbeielstein <32470350+bartzbeielstein@users.noreply.github.com>
Requires-Dist: numpy>=1.24.3
Requires-Dist: scipy>=1.10.1
Requires-Dist: scikit-learn>=1.3.0
Requires-Python: >=3.10
Description-Content-Type: text/markdown

(Lines 13-229 of PKG-INFO are the package long description and are identical to the new README.md shown below.)
spotoptim-0.0.7/README.md
ADDED

@@ -0,0 +1,217 @@

# SpotOptim

Sequential Parameter Optimization with Bayesian Optimization.

## Features

- **Bayesian Optimization**: Uses surrogate models to efficiently optimize expensive black-box functions
- **Multiple Acquisition Functions**: Expected Improvement (EI), Predicted Mean (y), Probability of Improvement (PI); EI is sketched below
- **Flexible Surrogates**: Default Gaussian Process or custom Kriging surrogate
- **Variable Types**: Support for continuous, integer, and mixed variable types
- **scipy-compatible**: Returns OptimizeResult objects compatible with scipy.optimize
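For reference, the Expected Improvement acquisition for minimization can be written in a few lines. This is the textbook formula, shown as a generic sketch; the helper name `expected_improvement` and the `xi` exploration offset are illustrative, not SpotOptim's internal API:

```python
import numpy as np
from scipy.stats import norm

def expected_improvement(mu, sigma, y_best, xi=0.01):
    """Textbook EI for minimization (generic sketch, not SpotOptim's internals).

    mu, sigma: surrogate mean and standard deviation at candidate points.
    y_best:    best objective value observed so far.
    xi:        optional exploration offset (illustrative default).
    """
    sigma = np.maximum(sigma, 1e-12)  # guard against zero predicted uncertainty
    z = (y_best - mu - xi) / sigma
    return (y_best - mu - xi) * norm.cdf(z) + sigma * norm.pdf(z)
```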
## Installation

```bash
pip install spotoptim
```

## Quick Start

```python
import numpy as np
from spotoptim import SpotOptim

# Define objective function
def rosenbrock(X):
    X = np.atleast_2d(X)
    x, y = X[:, 0], X[:, 1]
    return (1 - x)**2 + 100 * (y - x**2)**2

# Set up optimization
bounds = [(-2, 2), (-2, 2)]

optimizer = SpotOptim(
    fun=rosenbrock,
    bounds=bounds,
    max_iter=50,
    n_initial=10,
    seed=42
)

# Run optimization
result = optimizer.optimize()

print(f"Best point: {result.x}")
print(f"Best value: {result.fun}")
```

## Using Kriging Surrogate

SpotOptim includes a simplified Kriging (Gaussian Process) surrogate as an alternative to scikit-learn's GaussianProcessRegressor:

```python
from spotoptim import SpotOptim, Kriging

# Create Kriging surrogate
kriging = Kriging(
    noise=1e-6,
    min_theta=-3.0,
    max_theta=2.0,
    seed=42
)

# Use with SpotOptim
optimizer = SpotOptim(
    fun=rosenbrock,
    bounds=bounds,
    surrogate=kriging,  # Use Kriging instead of default GP
    seed=42
)

result = optimizer.optimize()
```

## API Reference

### SpotOptim

**Parameters:**
- `fun` (callable): Objective function to minimize
- `bounds` (list of tuples): Bounds for each dimension as [(low, high), ...]
- `max_iter` (int, default=20): Maximum number of optimization iterations
- `n_initial` (int, default=10): Number of initial design points
- `surrogate` (object, optional): Surrogate model (default: GaussianProcessRegressor)
- `acquisition` (str, default='ei'): Acquisition function ('ei', 'y', 'pi'); see the sketch after this list
- `var_type` (list of str, optional): Variable types for each dimension
- `tolerance_x` (float, optional): Minimum distance between points
- `seed` (int, optional): Random seed for reproducibility
- `verbose` (bool, default=False): Print progress information
- `max_surrogate_points` (int, optional): Maximum number of points for surrogate fitting (default: None, use all points)
- `selection_method` (str, default='distant'): Point selection method ('distant' or 'best')

**Methods:**
- `optimize(X0=None)`: Run optimization, optionally with initial design points
- `plot_surrogate(i=0, j=1, show=True, **kwargs)`: Visualize the fitted surrogate model
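A minimal configuration sketch exercising the `acquisition` and `tolerance_x` parameters from the list above; the objective and bounds are the ones from the Quick Start, and the specific values are illustrative rather than recommendations:

```python
from spotoptim import SpotOptim

optimizer = SpotOptim(
    fun=rosenbrock,              # objective from the Quick Start example
    bounds=[(-2, 2), (-2, 2)],
    max_iter=30,
    n_initial=10,
    acquisition='pi',            # one of 'ei' (default), 'y', 'pi'
    tolerance_x=1e-6,            # minimum distance between evaluated points
    seed=0,
    verbose=True,
)
result = optimizer.optimize()
print(result.x, result.fun)
```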
## Point Selection for Surrogate Training

When optimizing expensive functions with many iterations, the number of evaluated points can become large, making surrogate model training computationally expensive. SpotOptim implements an automatic point selection mechanism to address this:

### Usage

```python
optimizer = SpotOptim(
    fun=expensive_function,
    bounds=bounds,
    max_iter=100,
    n_initial=20,
    max_surrogate_points=50,     # Use only 50 points for surrogate training
    selection_method='distant',  # or 'best'
    verbose=True
)
```

### Selection Methods

1. **'distant' (default)**: Uses K-means clustering to select points that are maximally distant from each other, ensuring good space-filling properties (idea sketched below).

2. **'best'**: Clusters points and selects all points from the cluster with the best (lowest) mean objective function value, focusing on promising regions.
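To make the 'distant' strategy concrete, here is a generic sketch of K-means-based subset selection; `select_distant` is a hypothetical helper written for illustration, not SpotOptim's actual implementation:

```python
import numpy as np
from sklearn.cluster import KMeans

def select_distant(X, n_points, seed=0):
    """Pick a space-filling subset: one representative point per K-means cluster."""
    X = np.asarray(X)
    if len(X) <= n_points:
        return np.arange(len(X))
    km = KMeans(n_clusters=n_points, n_init=10, random_state=seed).fit(X)
    idx = []
    for c, center in enumerate(km.cluster_centers_):
        members = np.where(km.labels_ == c)[0]
        # Keep the evaluated point closest to this cluster's centroid.
        idx.append(members[np.argmin(np.linalg.norm(X[members] - center, axis=1))])
    return np.array(idx)
```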
### Benefits

- **Reduced computational cost**: Surrogate training scales with the number of points
- **Maintained accuracy**: Carefully selected points preserve model quality
- **Scalability**: Enables optimization with hundreds or thousands of function evaluations

See `examples/point_selection_example.py` for a complete demonstration.

### Kriging

**Parameters:**
- `noise` (float, optional): Regularization parameter
- `kernel` (str, default='gauss'): Kernel type
- `n_theta` (int, optional): Number of theta parameters
- `min_theta` (float, default=-3.0): Minimum log10(theta) bound
- `max_theta` (float, default=2.0): Maximum log10(theta) bound
- `seed` (int, optional): Random seed

**Methods:**
- `fit(X, y)`: Fit the model to training data
- `predict(X, return_std=False)`: Predict at new points (standalone usage sketched below)
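The Kriging surrogate can also be used on its own through the `fit`/`predict` methods listed above. A small sketch on synthetic 1D data, assuming `predict(..., return_std=True)` returns a `(mean, std)` tuple in the scikit-learn style:

```python
import numpy as np
from spotoptim import Kriging

# Synthetic 1D training data (made up for illustration).
rng = np.random.default_rng(42)
X_train = rng.uniform(-3, 3, size=(20, 1))
y_train = np.sin(X_train).ravel() + 0.01 * rng.standard_normal(20)

model = Kriging(noise=1e-6, seed=42)
model.fit(X_train, y_train)

X_test = np.linspace(-3, 3, 50).reshape(-1, 1)
y_mean, y_std = model.predict(X_test, return_std=True)  # predictions and uncertainty
```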
## Visualizing Results

SpotOptim includes a `plot_surrogate()` method to visualize the fitted surrogate model:

```python
# After running optimization
optimizer.plot_surrogate(
    i=0, j=1,               # Dimensions to plot
    var_name=['x1', 'x2'],  # Variable names
    add_points=True,        # Show evaluated points
    cmap='viridis',         # Colormap
    show=True
)
```

The plot shows:
- **Top left**: 3D surface of predictions
- **Top right**: 3D surface of prediction uncertainty
- **Bottom left**: Contour plot of predictions with evaluated points
- **Bottom right**: Contour plot of prediction uncertainty

For higher-dimensional problems, the method visualizes a 2D slice by fixing other dimensions at their mean values, as illustrated below.
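For example, after optimizing a 6-dimensional problem you could inspect the slice spanned by two chosen dimensions (a sketch; the dimension indices are arbitrary and `optimizer` is assumed to have already been run on a 6D problem):

```python
# Plot the slice over dimensions 0 and 3; the remaining dimensions
# are fixed at their mean values by plot_surrogate().
optimizer.plot_surrogate(i=0, j=3, add_points=True, show=True)
```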
## Examples

### Notebooks

See `notebooks/demos.ipynb` for interactive examples:
1. 2D Rosenbrock function optimization
2. 6D Rosenbrock with budget constraints
3. Using the Kriging surrogate vs. the default GP
4. Visualizing surrogate models with `plot_surrogate()`

### Real-World Applications

The `examples/` directory contains detailed tutorials:

**Aircraft Wing Weight Optimization (AWWE)**
- `awwe.qmd` - Comprehensive Quarto tutorial teaching surrogate-based optimization
- `awwe_optimization.py` - Standalone Python script demonstrating the complete workflow
- 9-dimensional optimization problem from engineering design
- Includes a homework exercise for a 10-dimensional extension

Run the example:
```bash
cd examples
python awwe_optimization.py
```

See `examples/README.md` for more details and additional examples.

## Development

```bash
# Clone repository
git clone https://github.com/sequential-parameter-optimization/spotoptim.git
cd spotoptim

# Install with uv
uv pip install -e .

# Run tests
uv run pytest tests/

# Build package
uv build
```

## License

See the LICENSE file.

## References

Based on the SPOT (Sequential Parameter Optimization Toolbox) methodology.
{spotoptim-0.0.2 → spotoptim-0.0.7}/pyproject.toml

```diff
@@ -1,6 +1,6 @@
 [project]
 name = "spotoptim"
-version = "0.0.2"
+version = "0.0.7"
 description = "Add your description here"
 readme = "README.md"
 authors = [
@@ -8,12 +8,15 @@ authors = [
 ]
 requires-python = ">=3.10"
 dependencies = [
-    "fastapi>=0.121.1",
     "numpy>=1.24.3",
     "scipy>=1.10.1",
-    "
-
-
+    "scikit-learn>=1.3.0",
+]
+
+[dependency-groups]
+dev = [
+    "pytest>=7.4.0",
+    "pytest-cov>=4.1.0",
 ]
 
 [build-system]
```
spotoptim-0.0.7/src/spotoptim/.DS_Store

Binary file (contents not shown).