ez-optimize 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ez_optimize-0.1.0/LICENSE +21 -0
- ez_optimize-0.1.0/PKG-INFO +117 -0
- ez_optimize-0.1.0/README.md +87 -0
- ez_optimize-0.1.0/pyproject.toml +58 -0
- ez_optimize-0.1.0/setup.cfg +4 -0
- ez_optimize-0.1.0/src/ez_optimize/__init__.py +15 -0
- ez_optimize-0.1.0/src/ez_optimize/constants.py +35 -0
- ez_optimize-0.1.0/src/ez_optimize/minimize.py +72 -0
- ez_optimize-0.1.0/src/ez_optimize/optimization_problem.py +281 -0
- ez_optimize-0.1.0/src/ez_optimize/utilities.py +191 -0
- ez_optimize-0.1.0/src/ez_optimize.egg-info/PKG-INFO +117 -0
- ez_optimize-0.1.0/src/ez_optimize.egg-info/SOURCES.txt +15 -0
- ez_optimize-0.1.0/src/ez_optimize.egg-info/dependency_links.txt +1 -0
- ez_optimize-0.1.0/src/ez_optimize.egg-info/requires.txt +5 -0
- ez_optimize-0.1.0/src/ez_optimize.egg-info/top_level.txt +1 -0
- ez_optimize-0.1.0/tests/test_minimize.py +122 -0
- ez_optimize-0.1.0/tests/test_optimization_problem.py +134 -0
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Quinn Marsh
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: ez-optimize
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A drop-in for scipy optimize that allows keyword args (x0={'x': 1, 'y': 2}) and more
|
|
5
|
+
Author-email: Quinn Marsh <quinnmarsh@hotmail.com>
|
|
6
|
+
Maintainer-email: Quinn Marsh <quinnmarsh@hotmail.com>
|
|
7
|
+
License: MIT License
|
|
8
|
+
Project-URL: Homepage, https://github.com/qthedoc/ez-optimize
|
|
9
|
+
Project-URL: Repository, https://github.com/qthedoc/ez-optimize
|
|
10
|
+
Project-URL: Issues, https://github.com/qthedoc/ez-optimize/issues
|
|
11
|
+
Project-URL: Say Thanks!, http://quinnmarsh.com
|
|
12
|
+
Keywords: optimize,optimizer,minimize,minimizer,maximize,maximizer,keyword,keyword optimize,keyword optimizer,keyword minimizer,keyword maximizer,scipy,scipy optimize,scipy minimize,scipy root,quality of life
|
|
13
|
+
Classifier: Development Status :: 4 - Beta
|
|
14
|
+
Classifier: Intended Audience :: Science/Research
|
|
15
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
20
|
+
Classifier: Topic :: Scientific/Engineering
|
|
21
|
+
Classifier: Topic :: Scientific/Engineering :: Mathematics
|
|
22
|
+
Requires-Python: >=3.10
|
|
23
|
+
Description-Content-Type: text/markdown
|
|
24
|
+
License-File: LICENSE
|
|
25
|
+
Requires-Dist: numpy
|
|
26
|
+
Requires-Dist: scipy>=1.15
|
|
27
|
+
Provides-Extra: tests
|
|
28
|
+
Requires-Dist: pytest; extra == "tests"
|
|
29
|
+
Dynamic: license-file
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# ez-optimize
|
|
33
|
+
|
|
34
|
+
**Author**: Quinn Marsh\
|
|
35
|
+
**GitHub**: https://github.com/qthedoc/ez-optimize/ \
|
|
36
|
+
**PyPI**: https://pypi.org/project/ez-optimize/
|
|
37
|
+
|
|
38
|
+
`ez-optimize` makes optimization easy and intuitive. It is a lightweight wrapper for SciPy's `optimize` that offers a drop-in replacement for SciPy's `minimize` function, with enhanced features like keyword-based parameter definitions and quick switching between minimization and maximization.
|
|
39
|
+
|
|
40
|
+
The Ironman suit for optimization.
|
|
41
|
+
|
|
42
|
+
## Why ez-optimize?
|
|
43
|
+
|
|
44
|
+
### 1. Keyword-Based Optimization (e.g.: `x0={'x': 1, 'y': 2}`)
|
|
45
|
+
By default, optimization uses arrays `x0=[1, 2]`. However sometimes it's more intuitive to use named parameters `x0={'x': 1, 'y': 2}`. `ez-optimize` allows you to define parameters as dictionaries. Then under the hood, `ez-optimize` automatically flattens parameters (and wraps your function) for SciPy while restoring the original structure in results. Keyword-based optimization is especially useful in complex systems like aerospace or energy models where parameters have meaningful names representing physical quantities.
|
|
46
|
+
|
|
47
|
+
### 2. Switch to Maximize with `direction='max'`
|
|
48
|
+
By default, optimization minimizes the objective function. To maximize, you typically need to write a negated version of your function. With `ez-optimize`, simply set `direction='max'` and the library will automatically negate your function under the hood.
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
## Examples
|
|
52
|
+
|
|
53
|
+
### Example 1: Minimizing the Rosenbrock Function with Keyword (Dict) Mode
|
|
54
|
+
|
|
55
|
+
```python
|
|
56
|
+
import numpy as np
|
|
57
|
+
from ez_optimize import minimize
|
|
58
|
+
|
|
59
|
+
def rosenbrock_2d(x, y, a, b):
|
|
60
|
+
return (a - x)**2 + b * (y - x**2)**2
|
|
61
|
+
|
|
62
|
+
x0 = {'x': 1.3, 'y': 0.7}
|
|
63
|
+
|
|
64
|
+
result = minimize(rosenbrock_2d, x0, method='BFGS')
|
|
65
|
+
|
|
66
|
+
print(f"Optimal x: {result.x_original}")
|
|
67
|
+
print(f"Optimal value: {result.fun}")
|
|
68
|
+
```
|
|
69
|
+
```
|
|
70
|
+
Optimal x: {'x': 1.0, 'y': 1.0}
|
|
71
|
+
Optimal value: 0.0
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
### Example 2: Using OptimizationProblem for Advanced Manual Control
|
|
75
|
+
|
|
76
|
+
For more control, use the `OptimizationProblem` class directly. This also serves as a look under the hood at how `minimize` works:
|
|
77
|
+
|
|
78
|
+
```python
|
|
79
|
+
from ez_optimize import OptimizationProblem
|
|
80
|
+
from scipy.optimize import minimize as scipy_minimize
|
|
81
|
+
|
|
82
|
+
def objective(a, b, c):
|
|
83
|
+
return a**2 + b**2 + c**2
|
|
84
|
+
|
|
85
|
+
x0 = {'a': 1.0, 'b': 2.0, 'c': 3.0}
|
|
86
|
+
bounds = {'a': (0, 5), 'b': (0, 5), 'c': (0, 5)}
|
|
87
|
+
|
|
88
|
+
# Define the optimization problem
|
|
89
|
+
problem = OptimizationProblem(objective, x0, method='SLSQP', bounds=bounds)
|
|
90
|
+
|
|
91
|
+
# Run SciPy method directly, passing in the arguments prepared by the OptimizationProblem
|
|
92
|
+
scipy_result = scipy_minimize(**problem.scipy.get_minimize_args())
|
|
93
|
+
|
|
94
|
+
# Use the OptimizationProblem to interpret the result back into our structured format
|
|
95
|
+
result = problem.scipy.interpret_result(scipy_result)
|
|
96
|
+
|
|
97
|
+
print(f"Optimal parameters: {result.x_original}")
|
|
98
|
+
print(f"Optimal value: {result.fun}")
|
|
99
|
+
```
|
|
100
|
+
```
|
|
101
|
+
Optimal parameters: {'a': 0.0, 'b': 0.0, 'c': 0.0}
|
|
102
|
+
Optimal value: 0.0
|
|
103
|
+
```
|
|
104
|
+
|
|
105
|
+
## Fundamentally Why?
|
|
106
|
+
Let's be honest, there is a good reason optimization typically uses arrays and always minimizes... it makes the math simple and efficient. For example, optimizing in a vector space allows the Hessian to be represented as a matrix. However, this level of rigor isn't always necessary — for example, with black-box functions that have no gradient or Hessian. In those cases, the convenience of defining keyword-based parameters and easily switching between min/max can outweigh the mathematical perfection of array-based optimization.
|
|
107
|
+
|
|
108
|
+
## Acknowledgments
|
|
109
|
+
|
|
110
|
+
Inspired by [better_optimize](https://github.com/jessegrabowski/better_optimize) by Jesse Grabowski, licensed under MIT.
|
|
111
|
+
|
|
112
|
+
## Contributing
|
|
113
|
+
|
|
114
|
+
Contributions Welcome! Report bugs, request features, or improve documentation via GitHub issues or pull requests.
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
|
|
2
|
+
# ez-optimize
|
|
3
|
+
|
|
4
|
+
**Author**: Quinn Marsh\
|
|
5
|
+
**GitHub**: https://github.com/qthedoc/ez-optimize/ \
|
|
6
|
+
**PyPI**: https://pypi.org/project/ez-optimize/
|
|
7
|
+
|
|
8
|
+
`ez-optimize` makes optimization easy and intuitive. It is a lightweight wrapper for SciPy's `optimize` that offers a drop-in replacement for SciPy's `minimize` function, with enhanced features like keyword-based parameter definitions and quick switching between minimization and maximization.
|
|
9
|
+
|
|
10
|
+
The Ironman suit for optimization.
|
|
11
|
+
|
|
12
|
+
## Why ez-optimize?
|
|
13
|
+
|
|
14
|
+
### 1. Keyword-Based Optimization (e.g.: `x0={'x': 1, 'y': 2}`)
|
|
15
|
+
By default, optimization uses arrays `x0=[1, 2]`. However sometimes it's more intuitive to use named parameters `x0={'x': 1, 'y': 2}`. `ez-optimize` allows you to define parameters as dictionaries. Then under the hood, `ez-optimize` automatically flattens parameters (and wraps your function) for SciPy while restoring the original structure in results. Keyword-based optimization is especially useful in complex systems like aerospace or energy models where parameters have meaningful names representing physical quantities.
|
|
16
|
+
|
|
17
|
+
### 2. Switch to Maximize with `direction='max'`
|
|
18
|
+
By default, optimization minimizes the objective function. To maximize, you typically need to write a negated version of your function. With `ez-optimize`, simply set `direction='max'` and the library will automatically negate your function under the hood.
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
## Examples
|
|
22
|
+
|
|
23
|
+
### Example 1: Minimizing the Rosenbrock Function with Keyword (Dict) Mode
|
|
24
|
+
|
|
25
|
+
```python
|
|
26
|
+
import numpy as np
|
|
27
|
+
from ez_optimize import minimize
|
|
28
|
+
|
|
29
|
+
def rosenbrock_2d(x, y, a, b):
|
|
30
|
+
return (a - x)**2 + b * (y - x**2)**2
|
|
31
|
+
|
|
32
|
+
x0 = {'x': 1.3, 'y': 0.7}
|
|
33
|
+
|
|
34
|
+
result = minimize(rosenbrock_2d, x0, method='BFGS')
|
|
35
|
+
|
|
36
|
+
print(f"Optimal x: {result.x_original}")
|
|
37
|
+
print(f"Optimal value: {result.fun}")
|
|
38
|
+
```
|
|
39
|
+
```
|
|
40
|
+
Optimal x: {'x': 1.0, 'y': 1.0}
|
|
41
|
+
Optimal value: 0.0
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
### Example 2: Using OptimizationProblem for Advanced Manual Control
|
|
45
|
+
|
|
46
|
+
For more control, use the `OptimizationProblem` class directly. This also serves as a look under the hood at how `minimize` works:
|
|
47
|
+
|
|
48
|
+
```python
|
|
49
|
+
from ez_optimize import OptimizationProblem
|
|
50
|
+
from scipy.optimize import minimize as scipy_minimize
|
|
51
|
+
|
|
52
|
+
def objective(a, b, c):
|
|
53
|
+
return a**2 + b**2 + c**2
|
|
54
|
+
|
|
55
|
+
x0 = {'a': 1.0, 'b': 2.0, 'c': 3.0}
|
|
56
|
+
bounds = {'a': (0, 5), 'b': (0, 5), 'c': (0, 5)}
|
|
57
|
+
|
|
58
|
+
# Define the optimization problem
|
|
59
|
+
problem = OptimizationProblem(objective, x0, method='SLSQP', bounds=bounds)
|
|
60
|
+
|
|
61
|
+
# Run SciPy method directly, passing in the arguments prepared by the OptimizationProblem
|
|
62
|
+
scipy_result = scipy_minimize(**problem.scipy.get_minimize_args())
|
|
63
|
+
|
|
64
|
+
# Use the OptimizationProblem to interpret the result back into our structured format
|
|
65
|
+
result = problem.scipy.interpret_result(scipy_result)
|
|
66
|
+
|
|
67
|
+
print(f"Optimal parameters: {result.x_original}")
|
|
68
|
+
print(f"Optimal value: {result.fun}")
|
|
69
|
+
```
|
|
70
|
+
```
|
|
71
|
+
Optimal parameters: {'a': 0.0, 'b': 0.0, 'c': 0.0}
|
|
72
|
+
Optimal value: 0.0
|
|
73
|
+
```
|
|
74
|
+
|
|
75
|
+
## Fundamentally Why?
|
|
76
|
+
Let's be honest, there is a good reason optimization typically uses arrays and always minimizes... it makes the math simple and efficient. For example, optimizing in a vector space allows the Hessian to be represented as a matrix. However, this level of rigor isn't always necessary — for example, with black-box functions that have no gradient or Hessian. In those cases, the convenience of defining keyword-based parameters and easily switching between min/max can outweigh the mathematical perfection of array-based optimization.
|
|
77
|
+
|
|
78
|
+
## Acknowledgments
|
|
79
|
+
|
|
80
|
+
Inspired by [better_optimize](https://github.com/jessegrabowski/better_optimize) by Jesse Grabowski, licensed under MIT.
|
|
81
|
+
|
|
82
|
+
## Contributing
|
|
83
|
+
|
|
84
|
+
Contributions Welcome! Report bugs, request features, or improve documentation via GitHub issues or pull requests.
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[tool.setuptools.packages.find]
|
|
6
|
+
where = ["src"]
|
|
7
|
+
include = ["ez_optimize*"]
|
|
8
|
+
|
|
9
|
+
[project]
|
|
10
|
+
name = 'ez-optimize'
|
|
11
|
+
version = "0.1.0"
|
|
12
|
+
requires-python = ">=3.10"
|
|
13
|
+
authors = [{ name = "Quinn Marsh", email = "quinnmarsh@hotmail.com" }]
|
|
14
|
+
maintainers = [{ name = "Quinn Marsh", email = "quinnmarsh@hotmail.com" }]
|
|
15
|
+
description = "A drop-in for scipy optimize that allows keyword args (x0={'x': 1, 'y': 2}) and more"
|
|
16
|
+
readme = "README.md"
|
|
17
|
+
license = {text = "MIT License"}
|
|
18
|
+
keywords = [
|
|
19
|
+
"optimize", "optimizer",
|
|
20
|
+
"minimize", "minimizer",
|
|
21
|
+
"maximize", "maximizer",
|
|
22
|
+
"keyword", "keyword optimize", "keyword optimizer",
|
|
23
|
+
"keyword minimizer", "keyword maximizer",
|
|
24
|
+
"scipy", "scipy optimize",
|
|
25
|
+
"scipy minimize", "scipy root",
|
|
26
|
+
"quality of life"
|
|
27
|
+
]
|
|
28
|
+
classifiers =[
|
|
29
|
+
"Development Status :: 4 - Beta",
|
|
30
|
+
"Intended Audience :: Science/Research",
|
|
31
|
+
"License :: OSI Approved :: MIT License",
|
|
32
|
+
"Programming Language :: Python :: 3.10",
|
|
33
|
+
"Programming Language :: Python :: 3.11",
|
|
34
|
+
"Programming Language :: Python :: 3.12",
|
|
35
|
+
"Programming Language :: Python :: 3.13",
|
|
36
|
+
"Topic :: Scientific/Engineering",
|
|
37
|
+
"Topic :: Scientific/Engineering :: Mathematics"
|
|
38
|
+
]
|
|
39
|
+
|
|
40
|
+
dependencies = [
|
|
41
|
+
'numpy',
|
|
42
|
+
'scipy>=1.15',
|
|
43
|
+
]
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
[project.optional-dependencies]
|
|
48
|
+
tests = [
|
|
49
|
+
'pytest',
|
|
50
|
+
]
|
|
51
|
+
|
|
52
|
+
[project.urls]
|
|
53
|
+
Homepage = "https://github.com/qthedoc/ez-optimize"
|
|
54
|
+
Repository = "https://github.com/qthedoc/ez-optimize"
|
|
55
|
+
Issues = "https://github.com/qthedoc/ez-optimize/issues"
|
|
56
|
+
"Say Thanks!" = "http://quinnmarsh.com"
|
|
57
|
+
|
|
58
|
+
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import logging

from ez_optimize.minimize import minimize
from ez_optimize.optimization_problem import OptimizationProblem

# Package-level logger.
_log = logging.getLogger(__name__)

# Attach a default StreamHandler only when the host application has not
# configured logging itself (root logger has no handlers).
# NOTE(review): libraries conventionally attach logging.NullHandler rather
# than a StreamHandler, so they never emit output unless the application
# opts in — consider switching; behavior left unchanged here.
if not logging.root.handlers:
    _log.setLevel(logging.INFO)
    # Guard against stacking duplicate handlers on repeated import/reload.
    if len(_log.handlers) == 0:
        handler = logging.StreamHandler()
        _log.addHandler(handler)

# __version__ = get_versions()["version"]
# Public API of the package.
__all__ = ["minimize", "OptimizationProblem"]
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
from typing import Literal, get_args

# Solver names accepted by scipy.optimize.minimize's `method` argument.
# Used both for static type checking and (via MINIMIZE_METHODS below)
# for runtime validation.
MinimizeMethod = Literal[
    "nelder-mead",
    "powell",
    "CG",
    "BFGS",
    "Newton-CG",
    "L-BFGS-B",
    "TNC",
    "COBYLA",
    "SLSQP",
    "trust-constr",
    "dogleg",
    "trust-ncg",
    "trust-exact",
    "trust-krylov",
]

# Runtime tuple mirroring MinimizeMethod, for membership checks.
MINIMIZE_METHODS = tuple(get_args(MinimizeMethod))

# Solver names accepted by scipy.optimize.root's `method` argument.
RootMethod = Literal[
    "hybr",
    "lm",
    "broyden1",
    "broyden2",
    "anderson",
    "linearmixing",
    "diagbroyden",
    "excitingmixing",
    "krylov",
    "df-sane",
]

# Runtime tuple mirroring RootMethod, for membership checks.
ROOT_METHODS = tuple(get_args(RootMethod))
|
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Union
|
|
4
|
+
|
|
5
|
+
import numpy as np
|
|
6
|
+
|
|
7
|
+
from ez_optimize.optimization_problem import OptimizationProblem
|
|
8
|
+
from ez_optimize.utilities import EzOptimizeResult
|
|
9
|
+
from ez_optimize.constants import MinimizeMethod
|
|
10
|
+
|
|
11
|
+
def minimize(
    fun: Callable,
    x0: Union[np.ndarray, Dict[str, Any]],
    method: Optional[MinimizeMethod] = None,
    direction: Literal["min", "max"] = "min",
    bounds: Optional[Union[List[Tuple[float, float]], Dict[str, Tuple[float, float]], Dict[str, List[Tuple[float, float]]]]] = None,
    x_mode: Optional[Literal["array", "dict"]] = None,
    **kwargs,  # constraints, args, options, etc. forwarded to the backend
) -> EzOptimizeResult:
    """
    Minimize (or maximize) a scalar function of one or more variables.

    A thin convenience wrapper: it constructs an ``OptimizationProblem``
    (which handles array/dict parameter flattening, bounds preparation and
    the min/max direction) and immediately runs it via SciPy's ``minimize``.

    Parameters
    ----------
    fun : callable
        Objective to optimize. Called as ``fun(x)`` in array mode, or
        ``fun(**params)`` in dict mode.
    x0 : array_like or dict
        Initial guess: a 1-D array of size (n,), or a dict mapping
        parameter names to scalars / 1-D arrays.
    method : str, optional
        SciPy solver name (e.g. ``'BFGS'``, ``'SLSQP'``).
    direction : {'min', 'max'}, optional
        Direction of optimization. Default is ``'min'``.
    bounds : sequence or dict, optional
        Array mode: list of ``(min, max)`` pairs. Dict mode: dict with the
        same keys as ``x0``; values are ``(min, max)`` or lists of pairs.
    x_mode : {'array', 'dict'}, optional
        Parameter handling mode. Inferred from ``x0`` when omitted.
    **kwargs
        Additional keyword arguments forwarded to the optimizer
        (constraints, tol, options, args, ...).

    Returns
    -------
    EzOptimizeResult
        SciPy-style result object with the solution restored to the
        original structure. Important attributes include ``x`` (flat
        solution), ``success`` and ``message``; see EzOptimizeResult for
        the remaining attributes.

    Notes
    -----
    Internally delegates to ``OptimizationProblem.optimize()``, which calls
    ``scipy.optimize.minimize`` on the flattened problem.
    """
    # Collect the full problem definition first, then hand it over in one go.
    problem_spec = dict(
        fun=fun,
        x0=x0,
        method=method,
        direction=direction,
        bounds=bounds,
        x_mode=x_mode,
    )
    problem_spec.update(kwargs)
    return OptimizationProblem(**problem_spec).optimize()
|
|
@@ -0,0 +1,281 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import warnings
|
|
4
|
+
from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Union
|
|
5
|
+
|
|
6
|
+
import numpy as np
|
|
7
|
+
|
|
8
|
+
from ez_optimize.utilities import EzOptimizeResult, wrap_reconstruct_args, wrap_negate_if_max
|
|
9
|
+
from ez_optimize.constants import MINIMIZE_METHODS, MinimizeMethod
|
|
10
|
+
from scipy.optimize import minimize as scipy_minimize, OptimizeResult # lazy import
|
|
11
|
+
|
|
12
|
+
class OptimizationProblem:
    """
    Defines an optimization problem in a backend-agnostic way.

    Handles:
    - named / dict parameters
    - flattening of structures (scalars and 1-D arrays)
    - min / max direction
    - bounds validation and flattening

    SciPy-specific functionality is isolated in the `.scipy` namespace.
    """

    def __init__(
        self,
        fun: Callable,
        x0: Union[np.ndarray, Dict[str, Any]],
        method: Optional[MinimizeMethod] = None,
        direction: Literal["min", "max"] = "min",
        bounds: Optional[Union[List[Tuple[float, float]], Dict[str, Tuple[float, float]], Dict[str, List[Tuple[float, float]]]]] = None,
        x_mode: Optional[Literal["array", "dict"]] = None,
        **kwargs,  # constraints, args, options, etc. stored for later use
    ):
        """
        Parameters
        ----------
        fun : callable
            Objective; called as ``fun(x)`` (array mode) or ``fun(**params)``
            (dict mode).
        x0 : array_like or dict
            Initial guess; scalars and 1-D arrays only.
        method : str, optional
            SciPy solver name. ``None`` defers to SciPy's default solver.
        direction : {'min', 'max'}
            Optimization direction; 'max' negates the objective internally.
        bounds : list or dict, optional
            Bounds matching the structure of ``x0``.
        x_mode : {'array', 'dict'}, optional
            Parameter handling mode; inferred from ``x0`` when omitted.
        **kwargs
            Forwarded verbatim to the backend optimizer.
        """
        # Mode & flattening setup (sets x_mode, x0_flat, x_map, x_keys, x_to_original)
        self._prepare_parameters(x0, x_mode)

        # Validate/flatten bounds against the flattened parameter vector
        self._prepare_bounds(bounds)

        # FIX: `method` is declared Optional with default None, but validation
        # previously raised on None, making the documented default unusable.
        # None is now passed through so SciPy selects its own default method.
        self.method = self._prepare_method(method)

        self.user_fun = fun
        self.direction = direction.lower()
        if self.direction not in ("min", "max"):
            raise ValueError("direction must be 'min' or 'max'")

        # Pass-through kwargs (constraints, tol, options, args, etc.)
        self.kwargs = kwargs

    # ────────────────────────────────────────────────────────────────
    # Public API – backend agnostic
    # ────────────────────────────────────────────────────────────────

    @property
    def scipy(self):
        """SciPy-specific interface (a fresh adapter view over this problem)."""
        return OptimizationProblem.SciPyInterface(self)

    def optimize(self) -> EzOptimizeResult:
        """
        Convenience: run optimization using the SciPy backend and return the
        interpreted (structure-restored, sign-corrected) result.
        """
        args = self.scipy.get_minimize_args()
        res = scipy_minimize(**args)
        return self.scipy.interpret_result(res)

    # ────────────────────────────────────────────────────────────────
    # Internal helpers – shared across backends
    # ────────────────────────────────────────────────────────────────

    def _prepare_parameters(self, x0, x_mode: Optional[str] = None):
        """Determine mode, flatten x0 and store mapping back to original structure."""

        # Determine mode: array vs dict (inferred from x0 when not given)
        self.x_mode = x_mode.lower() if x_mode is not None else ("dict" if isinstance(x0, dict) else "array")
        if self.x_mode not in ("array", "dict"):
            raise ValueError("x_mode must be 'array' or 'dict'")

        # Array mode: flatten x0 to 1D and remember the shape for restoration
        if self.x_mode == "array":
            x0_array = np.atleast_1d(np.asarray(x0, dtype=float))
            if x0_array.ndim > 1:
                raise ValueError("x0 must be 1D or scalar for array mode; multi-dimensional arrays are not supported")
            self.x0_flat = x0_array.flatten()
            self.x_map = x0_array.shape
            self.x_keys = None
            self.x_to_original = lambda x: x.reshape(self.x_map)
            return

        # Dict mode
        if not isinstance(x0, dict):
            raise ValueError("x0 must be a dict when x_mode='dict'")

        self.x_keys = list(x0.keys())
        self.x_map = {}  # key -> original shape, used to slice the flat vector back
        flat_parts = []
        for k in self.x_keys:
            val = np.asarray(x0[k], dtype=float)
            if val.ndim > 1:
                raise ValueError(f"x0['{k}'] must be 1D or scalar for dict mode; multi-dimensional arrays are not supported")
            self.x_map[k] = val.shape
            flat_parts.append(val.ravel())
        self.x0_flat = np.concatenate(flat_parts) if flat_parts else np.array([])
        self.x_to_original = self._reconstruct_dict

    def _reconstruct_dict(self, flat: np.ndarray) -> Dict[str, Any]:
        """Slice a flat parameter vector back into the original keyed structure."""
        result = {}
        idx = 0
        for k in self.x_keys:
            shape = self.x_map[k]
            size = np.prod(shape, dtype=int)
            result[k] = flat[idx : idx + size].reshape(shape)
            idx += size
        return result

    def _prepare_bounds(self, bounds: Optional[Union[List[Tuple[float, float]], Dict[str, Tuple[float, float]], Dict[str, List[Tuple[float, float]]]]]):
        """
        Validates bounds against x_mode and x_map, and flattens bounds if in dict mode.
        """
        if bounds is None:
            self.bounds_flat = None
            return

        if self.x_mode == "array":
            # Expect bounds as list of (min, max) tuples matching x0 length
            if not isinstance(bounds, list) or len(bounds) != self.x0_flat.size or any(not isinstance(b, tuple) or len(b) != 2 for b in bounds):
                raise ValueError(f"Bounds must be a list of (min, max) tuples matching the size of x0 ({self.x0_flat.size}) in array mode.")
            self.bounds_flat = bounds
            return

        # Dict mode: expect bounds as dict with same keys as x0, values are (min, max) tuples or lists of tuples
        if not isinstance(bounds, dict):
            raise ValueError("Bounds must be a dict when x_mode='dict'")

        flat_bounds = []
        for k in self.x_keys:
            if k not in bounds:
                # fill in with (None, None) (unbounded) if key is missing
                flat_bounds.extend([(None, None)] * np.prod(self.x_map[k], dtype=int))
                continue

            b = bounds[k]
            shape = self.x_map[k]
            if isinstance(b, tuple) and len(shape) == 0:
                # Scalar parameter with (min, max)
                flat_bounds.append(b)
            elif isinstance(b, list) and len(b) == np.prod(shape) and all(isinstance(t, tuple) and len(t) == 2 for t in b):
                # Array parameter: bounds provided as list of (min, max) tuples
                flat_bounds.extend(b)
            else:
                raise ValueError(f"Bounds for key '{k}' must be a tuple for scalar parameters or a list of tuples matching the size of the parameter in dict mode.")

        if len(flat_bounds) != self.x0_flat.size:
            raise ValueError(f"Internal error preparing bounds: Number of bounds ({len(flat_bounds)}) does not match number of parameters ({self.x0_flat.size})")

        self.bounds_flat = flat_bounds

    @staticmethod
    def _prepare_method(method: Optional[str]) -> Optional[str]:
        """Validate the solver name; None is allowed and defers to SciPy's default."""
        if method is None:
            return None
        if not isinstance(method, str):
            raise ValueError("method must be a str")
        if method not in MINIMIZE_METHODS:
            raise ValueError(f"Unsupported method '{method}'. Supported: {', '.join(MINIMIZE_METHODS)}")
        return method

    class SciPyInterface:
        """
        Everything related to scipy.optimize.minimize is isolated here.
        """

        def __init__(self, parent: "OptimizationProblem"):
            self.parent = parent

        def get_minimize_args(
            self,
        ) -> dict:
            """Build the keyword arguments for scipy.optimize.minimize."""

            # Validate method; None is allowed (SciPy then selects a default).
            method = self.parent.method
            if method is not None and method not in MINIMIZE_METHODS:
                raise ValueError(f"Unsupported method '{method}'. Supported: {', '.join(MINIMIZE_METHODS)}")

            # Core arguments
            args = {
                "fun": self._wrap_fun(),
                "x0": self.parent.x0_flat,
                "method": method,
                "bounds": self.parent.bounds_flat,
            }

            # Merge any extra kwargs (constraints, tol, options, args, ...)
            args.update(self.parent.kwargs)

            return args

        def interpret_result(self, scipy_result: OptimizeResult) -> EzOptimizeResult:
            """Convert a SciPy result into an EzOptimizeResult with restored structure."""

            # Check success and warn (not raise) if not successful
            if not scipy_result.success:
                warnings.warn(f"Optimization did not converge: {scipy_result.message}", RuntimeWarning)

            result_dict = dict(scipy_result)

            # Undo the internal negation so 'fun' reports the user's objective value
            if self.parent.direction == "max":
                if "fun" in result_dict and result_dict["fun"] is not None:
                    # FIX: removed redundant .get(..., 0.0) — the guard above
                    # already ensures the key exists and is not None.
                    result_dict["fun"] = -result_dict["fun"]
                # TODO: 'jac' sign correction for maximization once jac wrapping lands

            return EzOptimizeResult(
                scipy_result=scipy_result,
                x_mode=self.parent.x_mode,
                x_map=self.parent.x_map,
                x_to_original=self.parent.x_to_original,
                direction=self.parent.direction,
                **result_dict,
            )

        # ─── SciPy-specific wrappers ────────────────────────────────────────

        def _wrap_fun(self) -> Callable:
            """
            Wrap the user's objective into a scipy-ready f(x_flat) callable.
            """
            fun = self.parent.user_fun

            # Restore the original argument structure before calling the user fn
            fun = wrap_reconstruct_args(
                fun=fun,
                x_mode=self.parent.x_mode,
                x_to_original=self.parent.x_to_original
            )

            # Negate for maximization so SciPy can always minimize
            fun = wrap_negate_if_max(fun, self.parent.direction)

            return fun

        # TODO: jac/hess/hessp wrapping and fused-derivative extraction are
        # not yet implemented; earlier sketches lived here as commented code.
|
|
281
|
+
|
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
|
|
3
|
+
import inspect
|
|
4
|
+
from collections.abc import Callable
|
|
5
|
+
from typing import Any, Dict, Optional, Union
|
|
6
|
+
import warnings
|
|
7
|
+
from scipy.optimize import OptimizeResult as ScipyOptimizeResult
|
|
8
|
+
|
|
9
|
+
import numpy as np
|
|
10
|
+
|
|
11
|
+
_log = logging.getLogger(__name__)
|
|
12
|
+
|
|
13
|
+
def call_with_kwargs(func: Callable, kwargs: Dict[str, Any]) -> Any:
    '''
    Calls a function with matching kwargs, passing all provided kwargs if the function has **kwargs,
    while checking for missing required arguments. Functions with *args are not supported and log a warning.

    Args:
        func: The function to call.
        kwargs: Dictionary of keyword arguments to pass.

    Returns:
        The result of the function call.

    Raises:
        ValueError: If required arguments (without defaults) are missing.
    '''
    # Get the function signature
    parameters = inspect.signature(func).parameters

    # Check for *args (VAR_POSITIONAL): only keyword arguments are forwarded.
    if any(param.kind is inspect.Parameter.VAR_POSITIONAL for param in parameters.values()):
        _log.warning(
            "Function '%s' has a *args parameter, which may not be fully supported by call_with_kwargs. "
            "Consider using keyword arguments or **kwargs instead.",
            func.__name__,
        )

    # Check for missing required arguments: no default, not satisfiable via
    # *args/**kwargs, and not supplied by the caller.
    # NOTE: compare against Parameter.empty with `is`, not `==` — a default
    # value with a custom __eq__ (e.g. a numpy array) would otherwise return
    # a non-boolean and break this check.
    missing_args = [
        name
        for name, param in parameters.items()
        if param.default is inspect.Parameter.empty
        and param.kind not in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD)
        and name not in kwargs
    ]

    # Raise an error if required arguments are missing
    if missing_args:
        missing_args_str = [f"'{arg}'" for arg in missing_args]
        raise ValueError(
            f"Missing required parameters for function '{func.__name__}': {', '.join(missing_args_str)}"
        )

    # If the function has a **kwargs parameter it accepts everything we have,
    # including extras beyond the named parameters.
    if any(param.kind is inspect.Parameter.VAR_KEYWORD for param in parameters.values()):
        return func(**kwargs)

    # Otherwise pass only kwargs that match the function's parameters; log if extras.
    extra_kwargs = set(kwargs) - set(parameters)
    if extra_kwargs:
        _log.warning(
            "Extra kwargs provided to '%s' without **kwargs support: %s. They will be ignored.",
            func.__name__,
            extra_kwargs,
        )
    matched_kwargs = {name: kwargs[name] for name in kwargs if name in parameters}
    return func(**matched_kwargs)
|
|
71
|
+
|
|
72
|
+
def wrap_reconstruct_args(
    fun: Optional[Callable],
    x_mode: str,
    x_to_original: Callable[[np.ndarray], Any],
    pos_arg_names: Optional[list[str]] = None,  # e.g. ['p'] for hessp
) -> Optional[Callable]:
    """
    Returns a wrapped version of a function that:
    - takes flat x (and optionally other args like p) for SciPy compatibility
    - reconstructs the original parameter structure
    - calls the user function

    Args:
        fun: The user-provided callable (objective, jac, hess, hessp, ...)
        x_mode: "array" or "dict"
        x_to_original: Function that turns flat array → original x (dict or array)
        pos_arg_names: Names to map positional args to (e.g. ['p'] for hessp)

    Returns:
        The wrapped callable, or None if `fun` is None.
    """
    if fun is None:
        return None

    pos_arg_names = pos_arg_names or []

    def wrapped(x_flat: np.ndarray, *extra_args) -> Any:
        # Reconstruct original parameters from SciPy's flat array
        x = x_to_original(x_flat)

        # Map positional extra args (e.g. p for hessp) onto their keyword names
        extra_kwargs = {}
        for name, value in zip(pos_arg_names, extra_args):
            if name in extra_kwargs:
                raise ValueError(f"Duplicate argument: {name}")
            extra_kwargs[name] = value

        if x_mode == "dict":
            # Keyword mode: the reconstructed dict's entries plus extras become kwargs
            return call_with_kwargs(fun, {**x, **extra_kwargs})

        # Array mode: pass reconstructed x followed by extras positionally
        return fun(x, *extra_args)

    return wrapped
|
|
121
|
+
|
|
122
|
+
def wrap_negate_if_max(fun: Callable, direction: str) -> Callable:
    """
    Return a version of `fun` whose output is negated when maximizing.

    Args:
        fun: The original function to wrap.
        direction: 'min' or 'max'. If 'max', the output of `fun` is negated.
    Returns:
        `fun` itself for 'min'; otherwise a wrapper returning `-fun(...)`.
    """
    if direction != 'min':
        def negated(*call_args, **call_kwargs):
            return -fun(*call_args, **call_kwargs)

        return negated

    return fun
|
|
139
|
+
|
|
140
|
+
class EzOptimizeResult():
    """
    Enhanced result object for optimizations performed via ez-optimize.

    Adds support for:
    - Named/dict mode results (x_original)
    - Restored original shapes/structures (x_original)
    - Automatic sign correction when direction='max'

    Unknown attributes (e.g. ``fun``, ``success``, ``x``) are delegated to
    the wrapped SciPy result, so this object can be used as a drop-in for
    ``scipy.optimize.OptimizeResult``.
    """
    def __init__(
        self,
        scipy_result: ScipyOptimizeResult,
        x_mode: str = 'array',
        x_map: Optional[list[str]] = None,
        x_to_original: Optional[Callable[[np.ndarray], Any]] = None,
        direction: str = 'min',
        **extra_attrs
    ):
        self.scipy_result = scipy_result
        self._x_mode = x_mode  # Store mode ('array' or 'dict')
        self._x_map = x_map  # Store sorted keys from x0 dict
        self.x_flat = scipy_result.x  # flat optimized parameters
        self._x_to_original = x_to_original
        self._direction = direction

        # Attach any extra attributes; these live in the instance __dict__
        # and therefore shadow delegated SciPy fields of the same name.
        for k, v in extra_attrs.items():
            setattr(self, k, v)

    def __getattr__(self, name: str) -> Any:
        """Delegate attributes not found on this object (e.g. ``fun``) to the SciPy result."""
        # __getattr__ is only invoked after normal lookup fails; guard against
        # recursion while 'scipy_result' itself is not yet set (during __init__).
        if name == 'scipy_result':
            raise AttributeError(name)
        try:
            return getattr(self.scipy_result, name)
        except AttributeError:
            raise AttributeError(
                f"'{type(self).__name__}' object has no attribute '{name}'"
            ) from None

    @property
    def x_original(self) -> Union[np.ndarray, Dict[str, float]]:
        '''
        x_original property returns the optimized parameters as a numpy array or dictionary based on the mode.
        '''
        return self._restore_original_x()

    def _restore_original_x(self) -> Union[np.ndarray, Dict[str, Any]]:
        """Convert flat optimized x back to original form (dict or shaped array)."""
        if self._x_to_original is None:
            return self.x_flat

        try:
            restored = self._x_to_original(self.x_flat)
        except Exception as e:
            # Best effort: fall back to the flat array instead of failing the
            # whole result object over a reshape problem.
            warnings.warn(f"Failed to restore original shape: {e}", RuntimeWarning)
            restored = self.x_flat

        return restored
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: ez-optimize
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A drop-in for scipy optimize that allows keyword args (x0={'x': 1, 'y': 2}) and more
|
|
5
|
+
Author-email: Quinn Marsh <quinnmarsh@hotmail.com>
|
|
6
|
+
Maintainer-email: Quinn Marsh <quinnmarsh@hotmail.com>
|
|
7
|
+
License: MIT License
|
|
8
|
+
Project-URL: Homepage, https://github.com/qthedoc/ez-optimize
|
|
9
|
+
Project-URL: Repository, https://github.com/qthedoc/ez-optimize
|
|
10
|
+
Project-URL: Issues, https://github.com/qthedoc/ez-optimize/issues
|
|
11
|
+
Project-URL: Say Thanks!, http://quinnmarsh.com
|
|
12
|
+
Keywords: optimize,optimizer,minimize,minimizer,maximize,maximizer,keyword,keyword optimize,keyword optimizer,keyword minimizer,keyword maximizer,scipy,scipy optimize,scipy minimize,scipy root,quality of life
|
|
13
|
+
Classifier: Development Status :: 4 - Beta
|
|
14
|
+
Classifier: Intended Audience :: Science/Research
|
|
15
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
16
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.13
|
|
20
|
+
Classifier: Topic :: Scientific/Engineering
|
|
21
|
+
Classifier: Topic :: Scientific/Engineering :: Mathematics
|
|
22
|
+
Requires-Python: >=3.10
|
|
23
|
+
Description-Content-Type: text/markdown
|
|
24
|
+
License-File: LICENSE
|
|
25
|
+
Requires-Dist: numpy
|
|
26
|
+
Requires-Dist: scipy>=1.15
|
|
27
|
+
Provides-Extra: tests
|
|
28
|
+
Requires-Dist: pytest; extra == "tests"
|
|
29
|
+
Dynamic: license-file
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# ez-optimize
|
|
33
|
+
|
|
34
|
+
**Author**: Quinn Marsh\
|
|
35
|
+
**GitHub**: https://github.com/qthedoc/ez-optimize/ \
|
|
36
|
+
**PyPI**: https://pypi.org/project/ez-optimize/
|
|
37
|
+
|
|
38
|
+
`ez-optimize` makes optimization easy and intuitive. It is a lightweight wrapper for SciPy's `optimize` that offers a drop-in replacement for SciPy's `minimize` function with enhanced features like keyword-based parameter definitions and quick switching between minimization and maximization.
|
|
39
|
+
|
|
40
|
+
The Ironman suit for optimization.
|
|
41
|
+
|
|
42
|
+
## Why ez-optimize?
|
|
43
|
+
|
|
44
|
+
### 1. Keyword-Based Optimization (e.g.: `x0={'x': 1, 'y': 2}`)
|
|
45
|
+
By default, optimization uses arrays `x0=[1, 2]`. However sometimes it's more intuitive to use named parameters `x0={'x': 1, 'y': 2}`. `ez-optimize` allows you to define parameters as dictionaries. Then under the hood, `ez-optimize` automatically flattens parameters (and wraps your function) for SciPy while restoring the original structure in results. Keyword-based optimization is especially useful in complex systems like aerospace or energy models where parameters have meaningful names representing physical quantities.
|
|
46
|
+
|
|
47
|
+
### 2. Switch to Maximize with `direction='max'`
|
|
48
|
+
By default, optimization minimizes the objective function. To maximize, you typically need to write a negated version of your function. With `ez-optimize`, simply set `direction='max'` and the library automatically negates your function under the hood.
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
## Examples
|
|
52
|
+
|
|
53
|
+
### Example 1: Minimizing the Rosenbrock Function with Keyword Mode
|
|
54
|
+
|
|
55
|
+
```python
|
|
56
|
+
import numpy as np
|
|
57
|
+
from ez_optimize import minimize
|
|
58
|
+
|
|
59
|
+
def rosenbrock_2d(x, y, a, b):
|
|
60
|
+
return (a - x)**2 + b * (y - x**2)**2
|
|
61
|
+
|
|
62
|
+
x0 = {'x': 1.3, 'y': 0.7}
|
|
63
|
+
|
|
64
|
+
result = minimize(rosenbrock_2d, x0, method='BFGS')
|
|
65
|
+
|
|
66
|
+
print(f"Optimal x: {result.x_original}")
|
|
67
|
+
print(f"Optimal value: {result.fun}")
|
|
68
|
+
```
|
|
69
|
+
```
|
|
70
|
+
Optimal x: {'x': 1.0, 'y': 1.0}
|
|
71
|
+
Optimal value: 0.0
|
|
72
|
+
```
|
|
73
|
+
|
|
74
|
+
### Example 2: Using OptimizationProblem for Advanced Manual Control
|
|
75
|
+
|
|
76
|
+
For more control, use the `OptimizationProblem` class directly. This also serves as a look under the hood at how `minimize` works:
|
|
77
|
+
|
|
78
|
+
```python
|
|
79
|
+
from ez_optimize import OptimizationProblem
|
|
80
|
+
from scipy.optimize import minimize as scipy_minimize
|
|
81
|
+
|
|
82
|
+
def objective(a, b, c):
|
|
83
|
+
return a**2 + b**2 + c**2
|
|
84
|
+
|
|
85
|
+
x0 = {'a': 1.0, 'b': 2.0, 'c': 3.0}
|
|
86
|
+
bounds = {'a': (0, 5), 'b': (0, 5), 'c': (0, 5)}
|
|
87
|
+
|
|
88
|
+
# Define the optimization problem
|
|
89
|
+
problem = OptimizationProblem(objective, x0, method='SLSQP', bounds=bounds)
|
|
90
|
+
|
|
91
|
+
# Run SciPy method directly, passing in the arguments prepared by the OptimizationProblem
|
|
92
|
+
scipy_result = scipy_minimize(**problem.scipy.get_minimize_args())
|
|
93
|
+
|
|
94
|
+
# Use the OptimizationProblem to interpret the result back into our structured format
|
|
95
|
+
result = problem.scipy.interpret_result(scipy_result)
|
|
96
|
+
|
|
97
|
+
print(f"Optimal parameters: {result.x_original}")
|
|
98
|
+
print(f"Optimal value: {result.fun}")
|
|
99
|
+
```
|
|
100
|
+
```
|
|
101
|
+
Optimal parameters: {'a': 0.0, 'b': 0.0, 'c': 0.0}
|
|
102
|
+
Optimal value: 0.0
|
|
103
|
+
```
|
|
104
|
+
|
|
105
|
+
## Fundamentally Why?
|
|
106
|
+
Let's be honest, there is good reason optimization typically uses arrays and always minimizes... it makes the math simple and efficient. For example, optimizing in a vector space allows the Hessian to be represented as a matrix. However, this level of optimization isn't always necessary, as with black-box functions that have no gradient or Hessian. In those cases, the convenience of defining keyword-based parameters and easy switching between min/max can outweigh the mathematical perfection of array-based optimization.
|
|
107
|
+
|
|
108
|
+
## Acknowledgments
|
|
109
|
+
|
|
110
|
+
Inspired by [better_optimize](https://github.com/jessegrabowski/better_optimize) by Jesse Grabowski, licensed under MIT.
|
|
111
|
+
|
|
112
|
+
## Contributing
|
|
113
|
+
|
|
114
|
+
Contributions Welcome! Report bugs, request features, or improve documentation via GitHub issues or pull requests.
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
LICENSE
|
|
2
|
+
README.md
|
|
3
|
+
pyproject.toml
|
|
4
|
+
src/ez_optimize/__init__.py
|
|
5
|
+
src/ez_optimize/constants.py
|
|
6
|
+
src/ez_optimize/minimize.py
|
|
7
|
+
src/ez_optimize/optimization_problem.py
|
|
8
|
+
src/ez_optimize/utilities.py
|
|
9
|
+
src/ez_optimize.egg-info/PKG-INFO
|
|
10
|
+
src/ez_optimize.egg-info/SOURCES.txt
|
|
11
|
+
src/ez_optimize.egg-info/dependency_links.txt
|
|
12
|
+
src/ez_optimize.egg-info/requires.txt
|
|
13
|
+
src/ez_optimize.egg-info/top_level.txt
|
|
14
|
+
tests/test_minimize.py
|
|
15
|
+
tests/test_optimization_problem.py
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
ez_optimize
|
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from functools import partial
|
|
3
|
+
|
|
4
|
+
import numpy as np
|
|
5
|
+
|
|
6
|
+
from numpy.testing import assert_allclose
|
|
7
|
+
|
|
8
|
+
from ez_optimize.minimize import minimize
|
|
9
|
+
from ez_optimize.utilities import EzOptimizeResult
|
|
10
|
+
|
|
11
|
+
no_grad_methods = ["nelder-mead", "powell", "CG", "BFGS", "L-BFGS-B"]
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def rosen(x, a, b) -> float:
    """The Rosenbrock function"""
    head, tail = x[:-1], x[1:]
    return sum(a * (tail - head ** 2.0) ** 2.0 + (1 - head) ** 2.0) + b
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def rosen_2d_kw(x, y, a, b) -> float:
    """The 2D Rosenbrock function in keyword mode"""
    point = np.array([x, y])
    return rosen(point, a, b)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@pytest.mark.parametrize("method", no_grad_methods, ids=no_grad_methods)
def test_array_5d(method: str):
    """5-D Rosenbrock in plain array mode converges to the all-ones optimum."""
    start = np.array([1.3, 0.7, 0.8, 1.9, 1.2])

    result = minimize(rosen, start, method=method, args=(100, 0), tol=1e-8)

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(5), atol=1e-4, rtol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8, rtol=1e-8)


@pytest.mark.parametrize("method", no_grad_methods, ids=no_grad_methods)
def test_kw_2d(method: str):
    """2-D Rosenbrock in keyword (dict) mode restores a dict-shaped result."""
    start = {'x': 1.3, 'y': 0.7}

    result = minimize(partial(rosen_2d_kw, a=100, b=0), start, method=method, tol=1e-8)

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(2), atol=1e-4, rtol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8, rtol=1e-8)
    assert isinstance(result.x_original, dict)
    assert_allclose(list(result.x_original.values()), np.ones(2), atol=1e-4, rtol=1e-4)


@pytest.mark.parametrize("method", no_grad_methods, ids=no_grad_methods)
def test_kw_array(method: str):
    """Dict mode with an array-valued entry round-trips the array shape."""
    start = {'x': np.array([1.3, 0.7, 0.8, 1.9, 1.2])}

    result = minimize(partial(rosen, a=100, b=0), start, method=method, tol=1e-8)

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(5), atol=1e-4, rtol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8, rtol=1e-8)
    assert isinstance(result.x_original, dict)
    assert_allclose(result.x_original['x'], np.ones(5), atol=1e-4, rtol=1e-4)


def test_array_direction_max():
    """direction='max' maximizes a concave parabola in array mode."""
    def objective(x):
        return - (x - 1)**2

    result = minimize(objective, np.array([0.]), method='SLSQP', direction='max', bounds=[(0, 2)], tol=1e-8)

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, 1.0, atol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8)


def test_kw_direction_max():
    """direction='max' also works in keyword mode."""
    def objective(x):
        return - (x - 1)**2

    result = minimize(objective, {'x': 0.}, method='SLSQP', direction='max', tol=1e-8)

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(1), atol=1e-4, rtol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8, rtol=1e-8)
    assert isinstance(result.x_original, dict)
    assert_allclose(result.x_original['x'], np.ones(1), atol=1e-4, rtol=1e-4)


@pytest.mark.parametrize("method", ["SLSQP", "L-BFGS-B"], ids=["SLSQP", "L-BFGS-B"])
def test_array_with_bounds(method: str):
    """Bounded array-mode optimization still reaches the interior optimum."""
    start = np.array([1.3, 0.7, 0.8, 1.9, 1.2])
    box = [(0, 2)] * 5

    result = minimize(rosen, start, method=method, args=(100, 0), bounds=box, tol=1e-8)

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(5), atol=1e-4, rtol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8, rtol=1e-8)


@pytest.mark.parametrize("method", ["SLSQP", "L-BFGS-B"], ids=["SLSQP", "L-BFGS-B"])
def test_kw_with_bounds(method: str):
    """Dict-mode bounds map parameter names to (lo, hi) pairs."""
    start = {'x': 1.3, 'y': 0.7}
    box = {'x': (0, 2), 'y': (0, 2)}

    result = minimize(partial(rosen_2d_kw, a=100, b=0), start, method=method, bounds=box, tol=1e-8)

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(2), atol=1e-2, rtol=1e-2)
    assert_allclose(result.fun, 0.0, atol=1e-5, rtol=1e-5)
    assert isinstance(result.x_original, dict)
    assert_allclose(list(result.x_original.values()), np.ones(2), atol=1e-2, rtol=1e-2)


@pytest.mark.parametrize("method", ["SLSQP", "L-BFGS-B"], ids=["SLSQP", "L-BFGS-B"])
def test_kw_array_with_bounds(method: str):
    """Array-valued dict entries take a list of per-element bounds."""
    start = {'x': np.array([1.3, 0.7, 0.8, 1.9, 1.2])}
    box = {'x': [(0, 2)] * 5}

    result = minimize(partial(rosen, a=100, b=0), start, method=method, bounds=box, tol=1e-8)

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(5), atol=1e-4, rtol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8, rtol=1e-8)
    assert isinstance(result.x_original, dict)
    assert_allclose(result.x_original['x'], np.ones(5), atol=1e-4, rtol=1e-4)
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
from functools import partial
|
|
3
|
+
|
|
4
|
+
import numpy as np
|
|
5
|
+
|
|
6
|
+
from numpy.testing import assert_allclose
|
|
7
|
+
|
|
8
|
+
from ez_optimize.optimization_problem import OptimizationProblem
|
|
9
|
+
from ez_optimize.utilities import EzOptimizeResult
|
|
10
|
+
|
|
11
|
+
no_grad_methods = ["nelder-mead", "powell", "CG", "BFGS", "L-BFGS-B"]
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def rosen(x, a, b) -> float:
    """The Rosenbrock function"""
    head, tail = x[:-1], x[1:]
    return sum(a * (tail - head ** 2.0) ** 2.0 + (1 - head) ** 2.0) + b
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def rosen_2d_kw(x, y, a, b) -> float:
    """The 2D Rosenbrock function in keyword mode"""
    point = np.array([x, y])
    return rosen(point, a, b)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
@pytest.mark.parametrize("method", no_grad_methods, ids=no_grad_methods)
def test_array_5d(method: str):
    """5-D Rosenbrock via OptimizationProblem in plain array mode."""
    start = np.array([1.3, 0.7, 0.8, 1.9, 1.2])

    problem = OptimizationProblem(rosen, start, method=method, args=(100, 0), tol=1e-8)

    result = problem.optimize()

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(5), atol=1e-4, rtol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8, rtol=1e-8)


@pytest.mark.parametrize("method", no_grad_methods, ids=no_grad_methods)
def test_kw_2d(method: str):
    """2-D Rosenbrock via OptimizationProblem in keyword (dict) mode."""
    start = {'x': 1.3, 'y': 0.7}

    problem = OptimizationProblem(partial(rosen_2d_kw, a=100, b=0), start, method=method, tol=1e-8)

    result = problem.optimize()

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(2), atol=1e-4, rtol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8, rtol=1e-8)
    assert isinstance(result.x_original, dict)
    assert_allclose(list(result.x_original.values()), np.ones(2), atol=1e-4, rtol=1e-4)


@pytest.mark.parametrize("method", no_grad_methods, ids=no_grad_methods)
def test_kw_array(method: str):
    """Dict mode with an array-valued entry round-trips through OptimizationProblem."""
    start = {'x': np.array([1.3, 0.7, 0.8, 1.9, 1.2])}

    problem = OptimizationProblem(partial(rosen, a=100, b=0), start, method=method, tol=1e-8)

    result = problem.optimize()

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(5), atol=1e-4, rtol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8, rtol=1e-8)
    assert isinstance(result.x_original, dict)
    assert_allclose(result.x_original['x'], np.ones(5), atol=1e-4, rtol=1e-4)


def test_array_direction_max():
    """direction='max' maximizes a concave parabola in array mode."""
    def objective(x):
        return - (x - 1)**2

    problem = OptimizationProblem(objective, np.array([0.]), method='SLSQP', direction='max', bounds=[(0, 2)], tol=1e-8)

    result = problem.optimize()

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, 1.0, atol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8)


def test_kw_direction_max():
    """direction='max' also works in keyword mode."""
    def objective(x):
        return - (x - 1)**2

    problem = OptimizationProblem(objective, {'x': 0.}, method='SLSQP', direction='max', tol=1e-8)

    result = problem.optimize()

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(1), atol=1e-4, rtol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8, rtol=1e-8)
    assert isinstance(result.x_original, dict)
    assert_allclose(result.x_original['x'], np.ones(1), atol=1e-4, rtol=1e-4)


@pytest.mark.parametrize("method", ["SLSQP", "L-BFGS-B"], ids=["SLSQP", "L-BFGS-B"])
def test_array_with_bounds(method: str):
    """Bounded array-mode optimization still reaches the interior optimum."""
    start = np.array([1.3, 0.7, 0.8, 1.9, 1.2])
    box = [(0, 2)] * 5

    problem = OptimizationProblem(rosen, start, method=method, args=(100, 0), bounds=box, tol=1e-8)

    result = problem.optimize()

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(5), atol=1e-4, rtol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8, rtol=1e-8)


@pytest.mark.parametrize("method", ["SLSQP", "L-BFGS-B"], ids=["SLSQP", "L-BFGS-B"])
def test_kw_with_bounds(method: str):
    """Dict-mode bounds map parameter names to (lo, hi) pairs."""
    start = {'x': 1.3, 'y': 0.7}
    box = {'x': (0, 2), 'y': (0, 2)}

    problem = OptimizationProblem(partial(rosen_2d_kw, a=100, b=0), start, method=method, bounds=box, tol=1e-8)

    result = problem.optimize()

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(2), atol=1e-2, rtol=1e-2)
    assert_allclose(result.fun, 0.0, atol=1e-5, rtol=1e-5)
    assert isinstance(result.x_original, dict)
    assert_allclose(list(result.x_original.values()), np.ones(2), atol=1e-2, rtol=1e-2)


@pytest.mark.parametrize("method", ["SLSQP", "L-BFGS-B"], ids=["SLSQP", "L-BFGS-B"])
def test_kw_array_with_bounds(method: str):
    """Array-valued dict entries take a list of per-element bounds."""
    start = {'x': np.array([1.3, 0.7, 0.8, 1.9, 1.2])}
    box = {'x': [(0, 2)] * 5}

    problem = OptimizationProblem(partial(rosen, a=100, b=0), start, method=method, bounds=box, tol=1e-8)

    result = problem.optimize()

    assert isinstance(result, EzOptimizeResult)
    assert_allclose(result.x, np.ones(5), atol=1e-4, rtol=1e-4)
    assert_allclose(result.fun, 0.0, atol=1e-8, rtol=1e-8)
    assert isinstance(result.x_original, dict)
    assert_allclose(result.x_original['x'], np.ones(5), atol=1e-4, rtol=1e-4)
|