spotoptim 0.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,9 @@
1
+ Metadata-Version: 2.3
2
+ Name: spotoptim
3
+ Version: 0.0.1
4
+ Summary: Add your description here
5
+ Author: bartzbeielstein
6
+ Author-email: bartzbeielstein <32470350+bartzbeielstein@users.noreply.github.com>
7
+ Requires-Python: >=3.10
8
+ Description-Content-Type: text/markdown
9
+
File without changes
@@ -0,0 +1,14 @@
1
+ [project]
2
+ name = "spotoptim"
3
+ version = "0.0.1"
4
+ description = "Add your description here"
5
+ readme = "README.md"
6
+ authors = [
7
+ { name = "bartzbeielstein", email = "32470350+bartzbeielstein@users.noreply.github.com" }
8
+ ]
9
+ requires-python = ">=3.10"
10
+ dependencies = []
11
+
12
+ [build-system]
13
+ requires = ["uv_build>=0.9.8,<0.10.0"]
14
+ build-backend = "uv_build"
@@ -0,0 +1,352 @@
1
+ import numpy as np
2
+ from typing import Callable, Optional, Tuple
3
+ from scipy.optimize import OptimizeResult, differential_evolution
4
+ from sklearn.base import BaseEstimator
5
+ from sklearn.gaussian_process import GaussianProcessRegressor
6
+ from sklearn.gaussian_process.kernels import Matern, ConstantKernel
7
+ from spotpython.design.spacefilling import SpaceFilling
8
+ from spotpython.utils.repair import repair_non_numeric
9
+ from spotpython.utils.compare import selectNew
10
+
11
+
12
class SpotOptim(BaseEstimator):
    """
    SPOT (Sequential Parameter Optimization) optimizer compatible with the
    scipy.optimize interface.

    Fits a surrogate model (default: Gaussian Process) to evaluated points and
    sequentially proposes new points by optimizing an acquisition function.

    Parameters
    ----------
    fun : callable
        Objective function to minimize. Should accept an array of shape
        (n_samples, n_features) and return one value per row.
    bounds : list of tuple
        Bounds for each dimension as [(low, high), ...].
    max_iter : int, default=20
        Maximum number of optimization iterations.
    n_initial : int, default=10
        Number of initial design points.
    surrogate : object, optional
        Surrogate model (default: Gaussian Process with Matern kernel).
    acquisition : str, default='ei'
        Acquisition function ('ei', 'y', 'pi').
    var_type : list of str, optional
        Variable types for each dimension ('num', 'int', 'float', 'factor').
    tolerance_x : float, default=1e-6
        Minimum distance between points.
    seed : int, optional
        Random seed for reproducibility.
    verbose : bool, default=False
        Print progress information.

    Attributes
    ----------
    X_ : ndarray of shape (n_samples, n_features)
        All evaluated points.
    y_ : ndarray of shape (n_samples,)
        Function values at X_.
    best_x_ : ndarray of shape (n_features,)
        Best point found.
    best_y_ : float
        Best function value found.
    n_iter_ : int
        Number of iterations performed.
    """

    def __init__(
        self,
        fun: Callable,
        bounds: list,
        max_iter: int = 20,
        n_initial: int = 10,
        surrogate: Optional[object] = None,
        acquisition: str = 'ei',
        var_type: Optional[list] = None,
        tolerance_x: float = 1e-6,
        seed: Optional[int] = None,
        verbose: bool = False
    ):
        self.fun = fun
        self.bounds = bounds
        self.max_iter = max_iter
        self.n_initial = n_initial
        self.surrogate = surrogate
        self.acquisition = acquisition
        self.var_type = var_type
        self.tolerance_x = tolerance_x
        self.seed = seed
        self.verbose = verbose

        # Derived attributes.
        # NOTE(review): mutating constructor params below (var_type, surrogate)
        # deviates from sklearn's BaseEstimator convention of storing params
        # untouched; kept for backward compatibility with existing callers.
        self.n_dim = len(bounds)
        self.lower = np.array([b[0] for b in bounds])
        self.upper = np.array([b[1] for b in bounds])

        # Default variable types: all numeric.
        if self.var_type is None:
            self.var_type = ['num'] * self.n_dim

        # Default surrogate: GP with Matern(nu=2.5) kernel, a common choice
        # for smooth-but-not-infinitely-differentiable objectives.
        if self.surrogate is None:
            kernel = ConstantKernel(1.0, (1e-3, 1e3)) * Matern(
                length_scale=1.0,
                length_scale_bounds=(1e-2, 1e2),
                nu=2.5
            )
            self.surrogate = GaussianProcessRegressor(
                kernel=kernel,
                n_restarts_optimizer=10,
                normalize_y=True,
                random_state=self.seed
            )

        # Space-filling design generator for the initial sample.
        self.design = SpaceFilling(k=self.n_dim, seed=self.seed)

        # Storage for results (filled in by optimize()).
        self.X_ = None
        self.y_ = None
        self.best_x_ = None
        self.best_y_ = None
        self.n_iter_ = 0

    def _evaluate_function(self, X: np.ndarray) -> np.ndarray:
        """Evaluate the objective at points X and return a flat 1-D array.

        Accepts scalar, list, 0-d, 1-d, or 2-d return values from ``fun``.
        """
        # Ensure X is 2D so fun always sees (n_samples, n_features).
        X = np.atleast_2d(X)

        y = self.fun(X)

        # Bug fix: the original wrapped non-ndarray returns as np.array([y]),
        # which turns a plain Python list of n values into shape (1, n)
        # instead of (n,), and left 0-d ndarrays unflattened (breaking
        # y_next[0] later). asarray(...).ravel() normalizes every case.
        return np.asarray(y).ravel()

    def _generate_initial_design(self) -> np.ndarray:
        """Generate the initial space-filling (Latin hypercube) design."""
        X0 = self.design.scipy_lhd(
            n=self.n_initial,
            repeats=1,
            lower=self.lower,
            upper=self.upper
        )
        # Snap int/factor dimensions to valid values.
        return repair_non_numeric(X0, self.var_type)

    def _fit_surrogate(self, X: np.ndarray, y: np.ndarray) -> None:
        """Fit the surrogate model to the evaluated data."""
        self.surrogate.fit(X, y)

    def _acquisition_function(self, x: np.ndarray) -> float:
        """
        Compute the acquisition function value (to be minimized).

        Parameters
        ----------
        x : ndarray of shape (n_features,)
            Point to evaluate.

        Returns
        -------
        float
            Acquisition value: predicted mean for 'y', negative expected
            improvement for 'ei', negative probability of improvement
            for 'pi'.

        Raises
        ------
        ValueError
            If ``self.acquisition`` is not one of 'y', 'ei', 'pi'.
        """
        x = x.reshape(1, -1)

        if self.acquisition == 'y':
            # Pure exploitation: minimize the predicted mean.
            return self.surrogate.predict(x)[0]

        if self.acquisition in ('ei', 'pi'):
            # Shared setup for the improvement-based criteria.
            from scipy.stats import norm
            mu, sigma = self.surrogate.predict(x, return_std=True)
            mu = mu[0]
            sigma = sigma[0]

            # Degenerate posterior: no predicted uncertainty, no improvement.
            if sigma < 1e-10:
                return 0.0

            y_best = np.min(self.y_)
            improvement = y_best - mu
            Z = improvement / sigma

            if self.acquisition == 'ei':
                # Expected Improvement (negated so minimizers maximize EI).
                ei = improvement * norm.cdf(Z) + sigma * norm.pdf(Z)
                return -ei
            # Probability of Improvement (negated likewise).
            return -norm.cdf(Z)

        raise ValueError(f"Unknown acquisition function: {self.acquisition}")

    def _suggest_next_point(self) -> np.ndarray:
        """
        Suggest the next point to evaluate by global optimization of the
        acquisition function.

        Returns
        -------
        ndarray of shape (n_features,)
            Next point to evaluate, repaired to respect variable types.
        """
        result = differential_evolution(
            func=self._acquisition_function,
            bounds=self.bounds,
            seed=self.seed,
            maxiter=1000
        )

        x_next = result.x

        # Reject proposals closer than tolerance_x to any evaluated point;
        # duplicates would make the GP fit ill-conditioned.
        x_next_2d = x_next.reshape(1, -1)
        x_new, _ = selectNew(A=x_next_2d, X=self.X_, tolerance=self.tolerance_x)

        if x_new.shape[0] == 0:
            # Too close to an existing point: fall back to a random
            # space-filling sample to keep exploring.
            if self.verbose:
                print("Proposed point too close, generating random point")
            x_next = self.design.scipy_lhd(
                n=1,
                repeats=1,
                lower=self.lower,
                upper=self.upper
            )[0]

        return repair_non_numeric(x_next.reshape(1, -1), self.var_type)[0]

    def optimize(self, X0: Optional[np.ndarray] = None) -> OptimizeResult:
        """
        Run the optimization process.

        Parameters
        ----------
        X0 : ndarray of shape (n_initial, n_features), optional
            Initial design points. If None, generates a space-filling design.

        Returns
        -------
        OptimizeResult
            Optimization result with fields:
            - x : best point found
            - fun : best function value
            - nfev : number of function evaluations
            - nit : number of iterations performed
            - success : whether optimization succeeded
            - message : termination message
            - X : all evaluated points
            - y : all function values
        """
        # Generate or adopt the initial design.
        if X0 is None:
            X0 = self._generate_initial_design()
        else:
            X0 = np.atleast_2d(X0)
            X0 = repair_non_numeric(X0, self.var_type)

        # Evaluate initial design.
        y0 = self._evaluate_function(X0)

        # Initialize storage.
        self.X_ = X0.copy()
        self.y_ = y0.copy()
        self.n_iter_ = 0

        # Initial incumbent.
        best_idx = np.argmin(self.y_)
        self.best_x_ = self.X_[best_idx].copy()
        self.best_y_ = self.y_[best_idx]

        if self.verbose:
            print(f"Initial best: f(x) = {self.best_y_:.6f}")

        # Main optimization loop: fit surrogate, propose, evaluate, update.
        for iteration in range(self.max_iter):
            self.n_iter_ = iteration + 1

            self._fit_surrogate(self.X_, self.y_)

            x_next = self._suggest_next_point()

            y_next = self._evaluate_function(x_next.reshape(1, -1))

            self.X_ = np.vstack([self.X_, x_next])
            self.y_ = np.append(self.y_, y_next)

            # Update incumbent if improved.
            if y_next[0] < self.best_y_:
                self.best_x_ = x_next.copy()
                self.best_y_ = y_next[0]

                if self.verbose:
                    print(f"Iteration {iteration+1}: New best f(x) = {self.best_y_:.6f}")
            elif self.verbose:
                print(f"Iteration {iteration+1}: f(x) = {y_next[0]:.6f}")

        # Return scipy-style result.
        return OptimizeResult(
            x=self.best_x_,
            fun=self.best_y_,
            nfev=len(self.y_),
            nit=self.n_iter_,
            success=True,
            message="Optimization finished successfully",
            X=self.X_,
            y=self.y_
        )
313
+
314
+
315
# Example usage
if __name__ == "__main__":
    def rosenbrock(X):
        """Rosenbrock function for optimization."""
        X = np.atleast_2d(X)
        first, second = X[:, 0], X[:, 1]
        return (1 - first)**2 + 100 * (second - first**2)**2

    # 2-D search box, symmetric around the origin.
    search_bounds = [(-2, 2), (-2, 2)]

    # Configure a short demo run with expected-improvement acquisition.
    optimizer = SpotOptim(
        fun=rosenbrock,
        bounds=search_bounds,
        max_iter=5,
        n_initial=5,
        acquisition='ei',
        seed=42,
        verbose=True
    )

    result = optimizer.optimize()

    # Report the outcome in a small banner.
    banner = "=" * 50
    print("\n" + banner)
    print("Optimization Results")
    print(banner)
    print(f"Best point found: {result.x}")
    print(f"Best function value: {result.fun:.6f}")
    print(f"Number of function evaluations: {result.nfev}")
    print(f"Number of iterations: {result.nit}")
    print(f"Success: {result.success}")
    print(f"Message: {result.message}")
    print("\nTrue optimum: [1, 1] with f(x) = 0")
@@ -0,0 +1,2 @@
1
def hello() -> str:
    """Return the package greeting string."""
    greeting = "Hello from spotoptim!"
    return greeting
File without changes