gemseo_multi_fidelity-0.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gemseo_multi_fidelity/__init__.py +17 -0
- gemseo_multi_fidelity/core/MFMapperAdapter_input.json +22 -0
- gemseo_multi_fidelity/core/MFMapperAdapter_output.json +22 -0
- gemseo_multi_fidelity/core/MFMapperLinker_input.json +22 -0
- gemseo_multi_fidelity/core/MFMapperLinker_output.json +22 -0
- gemseo_multi_fidelity/core/MFScenarioAdapter_input.json +39 -0
- gemseo_multi_fidelity/core/MFScenarioAdapter_output.json +23 -0
- gemseo_multi_fidelity/core/__init__.py +16 -0
- gemseo_multi_fidelity/core/boxed_domain.py +242 -0
- gemseo_multi_fidelity/core/corr_function.py +411 -0
- gemseo_multi_fidelity/core/criticality.py +124 -0
- gemseo_multi_fidelity/core/ds_mapper.py +307 -0
- gemseo_multi_fidelity/core/errors.py +42 -0
- gemseo_multi_fidelity/core/eval_mapper.py +188 -0
- gemseo_multi_fidelity/core/id_mapper_adapter.py +61 -0
- gemseo_multi_fidelity/core/mapper_adapter.py +126 -0
- gemseo_multi_fidelity/core/mapper_linker.py +72 -0
- gemseo_multi_fidelity/core/mf_formulation.py +635 -0
- gemseo_multi_fidelity/core/mf_logger.py +216 -0
- gemseo_multi_fidelity/core/mf_opt_problem.py +480 -0
- gemseo_multi_fidelity/core/mf_scenario.py +205 -0
- gemseo_multi_fidelity/core/noise_criterion.py +94 -0
- gemseo_multi_fidelity/core/projpolytope.out +0 -0
- gemseo_multi_fidelity/core/scenario_adapter.py +568 -0
- gemseo_multi_fidelity/core/stop_criteria.py +201 -0
- gemseo_multi_fidelity/core/strict_chain.py +75 -0
- gemseo_multi_fidelity/core/utils_model_quality.py +74 -0
- gemseo_multi_fidelity/corrections/__init__.py +16 -0
- gemseo_multi_fidelity/corrections/add_corr_function.py +80 -0
- gemseo_multi_fidelity/corrections/correction_factory.py +65 -0
- gemseo_multi_fidelity/corrections/mul_corr_function.py +86 -0
- gemseo_multi_fidelity/drivers/__init__.py +16 -0
- gemseo_multi_fidelity/drivers/mf_algo_factory.py +38 -0
- gemseo_multi_fidelity/drivers/mf_driver_lib.py +462 -0
- gemseo_multi_fidelity/drivers/refinement.py +234 -0
- gemseo_multi_fidelity/drivers/settings/__init__.py +16 -0
- gemseo_multi_fidelity/drivers/settings/base_mf_driver_settings.py +59 -0
- gemseo_multi_fidelity/drivers/settings/mf_refine_settings.py +50 -0
- gemseo_multi_fidelity/formulations/__init__.py +16 -0
- gemseo_multi_fidelity/formulations/refinement.py +144 -0
- gemseo_multi_fidelity/mapping/__init__.py +16 -0
- gemseo_multi_fidelity/mapping/identity_mapper.py +74 -0
- gemseo_multi_fidelity/mapping/interp_mapper.py +422 -0
- gemseo_multi_fidelity/mapping/mapper_factory.py +70 -0
- gemseo_multi_fidelity/mapping/mapping_errors.py +46 -0
- gemseo_multi_fidelity/mapping/subset_mapper.py +122 -0
- gemseo_multi_fidelity/mf_rosenbrock/__init__.py +16 -0
- gemseo_multi_fidelity/mf_rosenbrock/delayed_disc.py +136 -0
- gemseo_multi_fidelity/mf_rosenbrock/refact_rosen_testcase.py +46 -0
- gemseo_multi_fidelity/mf_rosenbrock/rosen_mf_case.py +284 -0
- gemseo_multi_fidelity/mf_rosenbrock/rosen_mf_funcs.py +350 -0
- gemseo_multi_fidelity/models/__init__.py +16 -0
- gemseo_multi_fidelity/models/fake_updater.py +112 -0
- gemseo_multi_fidelity/models/model_updater.py +91 -0
- gemseo_multi_fidelity/models/rbf/__init__.py +16 -0
- gemseo_multi_fidelity/models/rbf/kernel_factory.py +66 -0
- gemseo_multi_fidelity/models/rbf/kernels/__init__.py +16 -0
- gemseo_multi_fidelity/models/rbf/kernels/gaussian.py +93 -0
- gemseo_multi_fidelity/models/rbf/kernels/matern_3_2.py +101 -0
- gemseo_multi_fidelity/models/rbf/kernels/matern_5_2.py +101 -0
- gemseo_multi_fidelity/models/rbf/kernels/rbf_kernel.py +172 -0
- gemseo_multi_fidelity/models/rbf/rbf_model.py +422 -0
- gemseo_multi_fidelity/models/sparse_rbf_updater.py +96 -0
- gemseo_multi_fidelity/models/taylor/__init__.py +16 -0
- gemseo_multi_fidelity/models/taylor/taylor.py +212 -0
- gemseo_multi_fidelity/models/taylor_updater.py +66 -0
- gemseo_multi_fidelity/models/updater_factory.py +62 -0
- gemseo_multi_fidelity/settings/__init__.py +16 -0
- gemseo_multi_fidelity/settings/drivers.py +22 -0
- gemseo_multi_fidelity/settings/formulations.py +16 -0
- gemseo_multi_fidelity-0.0.1.dist-info/METADATA +99 -0
- gemseo_multi_fidelity-0.0.1.dist-info/RECORD +76 -0
- gemseo_multi_fidelity-0.0.1.dist-info/WHEEL +5 -0
- gemseo_multi_fidelity-0.0.1.dist-info/entry_points.txt +2 -0
- gemseo_multi_fidelity-0.0.1.dist-info/licenses/LICENSE.txt +165 -0
- gemseo_multi_fidelity-0.0.1.dist-info/top_level.txt +1 -0
gemseo_multi_fidelity/core/mf_scenario.py (new file)
@@ -0,0 +1,205 @@

# Copyright 2021 IRT Saint Exupéry, https://www.irt-saintexupery.com
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License version 3 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

# Copyright (c) 2019 AIRBUS OPERATIONS

#
# Contributors:
# INITIAL AUTHORS - API and implementation and/or documentation
# :author: Romain Olivanti
# OTHER AUTHORS - MACROSCOPIC CHANGES
"""Multi-fidelity MDO scenario."""

from __future__ import annotations

import logging
from pathlib import Path
from typing import TYPE_CHECKING
from typing import Any
from typing import ClassVar

from gemseo.algos.database import Database

# from gemseo.algos.opt.factory import OptimizationLibraryFactory
from gemseo.algos.optimization_problem import OptimizationProblem
from gemseo.scenarios.mdo_scenario import MDOScenario

from gemseo_multi_fidelity.core.mf_opt_problem import MFOptimizationProblem
from gemseo_multi_fidelity.drivers.mf_algo_factory import MFAlgoFactory

if TYPE_CHECKING:
    from collections.abc import Sequence

    from gemseo.algos.opt.factory import OptimizationLibraryFactory
    from gemseo.core.discipline.discipline import Discipline

    from gemseo_multi_fidelity.core.mf_formulation import MFFormulation

LOGGER = logging.getLogger(__name__)


class MFMDOScenario(MDOScenario):
    """Multi-fidelity MDO scenario class."""

    _ALGO_FACTORY_CLASS: ClassVar[type[OptimizationLibraryFactory]] = MFAlgoFactory
    """Algorithm factory class."""

    formulation: MFFormulation

    def __init__(
        self,
        disciplines: Sequence[MDOScenario],
        formulation_name: str,
        name: str | None = None,
        **formulation_options: Any,
    ):
        """Constructor.

        Args:
            disciplines: The single-fidelity scenarios, the high-fidelity one first;
                its design space and objective name are reused for the
                multi-fidelity scenario.
            formulation_name: The name of the multi-fidelity formulation.
            name: The name of the scenario.
            **formulation_options: The options of the formulation.
        """
        hifi_scenario = disciplines[0]
        design_space = hifi_scenario.design_space
        objective_name = hifi_scenario.formulation.optimization_problem.objective_name
        super().__init__(
            disciplines=disciplines,
            formulation_name=formulation_name,
            objective_name=objective_name,
            design_space=design_space,
            name=name,
            **formulation_options,
        )
        # self._algo_factory = MFAlgoFactory()

    def _execute(self) -> None:
        self.optimization_result = self._algo_factory.execute(
            self.formulation.mf_opt_problem,
            algo_name=self._settings.algo_name,
            **self._settings.algo_settings,
        )

    @property
    def disciplines(self) -> tuple[Discipline, ...]:
        """The disciplines."""
        return self.formulation.disciplines

    def save_optimization_history(
        self,
        file_path,
        file_format=OptimizationProblem.HistoryFileFormat.HDF5,
        append=False,
        hdf_node_path="",
    ) -> None:
        """Save the optimization history of the scenario to a file.

        Args:
            file_path: The path to the file to save the history.
            file_format: The format of the file, either ``hdf5`` or ``ggobi``
                (default value = ``hdf5``).
            append: If True, data is appended to the file if not empty
                (default value = ``False``).
            hdf_node_path: The path of the HDF node in which the problem should be
                exported, ``None`` to select the root node.
        """
        if file_format != OptimizationProblem.HistoryFileFormat.HDF5:
            raise ValueError(
                "Cannot export optimization history to file format: " + str(file_format)
            )
        self.formulation.mf_opt_problem.to_hdf(
            file_path, append=append, hdf_node_path=hdf_node_path
        )

    def set_optimization_history_backup(
        self,
        file_path: str | Path,
        at_each_iteration: bool = False,
        at_each_function_call: bool = True,
        erase: bool = False,
        load: bool = False,
        plot: bool = False,
    ) -> None:
        """Set the backup file to store the evaluations of the functions during the run.

        Args:
            file_path: The backup file path.
            at_each_iteration: Whether the backup file is updated at every iteration of
                the optimization.
            at_each_function_call: Whether the backup file is updated at every function
                call.
            erase: Whether the backup file is erased before the run.
            load: Whether the backup file is loaded before the run, useful after a crash.
            plot: Whether to plot the optimization history view at each iteration. The
                plots will be generated only after the first two iterations.

        Raises:
            ValueError: If both ``erase`` and ``load`` are ``True``.
        """
        opt_pb = self.formulation.optimization_problem
        self.__history_backup_is_set = True
        self._opt_hist_backup_path = Path(file_path)

        if self._opt_hist_backup_path.exists():
            if erase and load:
                msg = (
                    "Conflicting options for history backup, "
                    "cannot pre-load optimization history and erase it!"
                )
                raise ValueError(msg)
            if erase:
                LOGGER.warning(
                    "Erasing optimization history in %s",
                    self._opt_hist_backup_path,
                )
                self._opt_hist_backup_path.unlink()
            elif load:
                opt_pb.database.update_from_hdf(self._opt_hist_backup_path)
                max_iteration = len(opt_pb.database)
                if max_iteration != 0:
                    opt_pb.evaluation_counter.current = max_iteration

        opt_pb.add_listener(
            self._execute_backup_callback,
            at_each_iteration=at_each_iteration,
            at_each_function_call=at_each_function_call,
        )

        if plot:
            opt_pb.add_listener(
                self._execute_plot_callback,
                at_each_iteration=True,
                at_each_function_call=False,
            )

        elif load:
            # Load only the reference databases.
            ref_databases = self.formulation.get_reference_databases()
            for i, _database in enumerate(ref_databases):
                if i == 0:
                    hdf_node = ""
                else:
                    dtb_group = MFOptimizationProblem.MF_DTB_GROUP
                    hdf_node = f"{dtb_group}_{i:d}"
                self.formulation._databases[i] = Database().from_hdf(
                    file_path=file_path, hdf_node_path=hdf_node
                )

        def backup_callback(x_vect=None) -> None:
            """A callback function to back up the optimization history."""
            self.save_optimization_history(file_path, append=True)

        # Add the callback for each sub-problem.
        mf_opt_prob = self.formulation.mf_opt_problem
        for sub_prob in mf_opt_prob.sub_opt_problems:
            sub_prob.add_listener(
                backup_callback,
                at_each_iteration=at_each_iteration,
                at_each_function_call=at_each_function_call,
            )
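For readability, here is a minimal, hypothetical usage sketch of this class; it is not taken from the package's documentation. It assumes two single-fidelity `MDOScenario` objects have already been built elsewhere, and the formulation name `"Refinement"`, the algorithm name `"MF_REFINE"`, the helper `run_mf_study` and the file names are placeholders that this diff does not confirm. What the source above does guarantee is that the high-fidelity scenario must come first in `disciplines`, since its design space and objective name are reused by the constructor.

```python
# Hypothetical usage sketch; names marked "placeholder" are not verified
# against the package's registered formulations or algorithms.
from __future__ import annotations

from gemseo.scenarios.mdo_scenario import MDOScenario

from gemseo_multi_fidelity.core.mf_scenario import MFMDOScenario


def run_mf_study(hifi_scenario: MDOScenario, lofi_scenario: MDOScenario) -> None:
    """Drive a multi-fidelity study from two pre-built single-fidelity scenarios."""
    scenario = MFMDOScenario(
        disciplines=[hifi_scenario, lofi_scenario],  # high-fidelity scenario first
        formulation_name="Refinement",  # placeholder formulation name
        name="mf_study",
    )
    # Back up the multi-fidelity history after each function call.
    scenario.set_optimization_history_backup("mf_backup.h5", erase=True)
    scenario.execute(algo_name="MF_REFINE", max_iter=50)  # placeholder algorithm name
    scenario.save_optimization_history("mf_history.h5")
```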
gemseo_multi_fidelity/core/noise_criterion.py (new file)
@@ -0,0 +1,94 @@

# Copyright 2021 IRT Saint Exupéry, https://www.irt-saintexupery.com
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License version 3 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

# Copyright (c) 2019 AIRBUS OPERATIONS

#
# Contributors:
# INITIAL AUTHORS - API and implementation and/or documentation
# :author: Romain Olivanti
# OTHER AUTHORS - MACROSCOPIC CHANGES
"""Noise criterion."""

from __future__ import annotations

from typing import TYPE_CHECKING

from numpy import asarray
from numpy import inner
from numpy.linalg import norm

if TYPE_CHECKING:
    from numpy.typing import NDArray


def check_gradient_by_step(
    step: float | NDArray,
    delta_func: float,
    grad_orig: float | NDArray,
    thr_step_check: float = 1e-5,
    mul_step_thr: float = 0.05,
) -> tuple[bool, str]:
    """Check gradient by step."""
    step = asarray(step)
    grad_orig = asarray(grad_orig)
    step_norm = norm(step)
    thr_fail = mul_step_thr * step_norm

    # Do not check above the threshold.
    if step_norm > thr_step_check:
        msg = (
            f"No gradient check allowed: ||step|| = {step_norm:.6g}, "
            f"thr = {thr_step_check:.6g}"
        )
        is_consistent = True
    else:
        var_err = delta_func - inner(grad_orig, step).item()
        is_consistent = abs(var_err) <= thr_fail
        msg = f"Stepwise error: {var_err:.6g}, consistent: {is_consistent}"
    return is_consistent, msg


if __name__ == "__main__":
    from numpy import array
    from numpy import sum as np_sum
    from numpy.random import default_rng

    def func(x_vect: NDArray) -> float:
        """Sum of squares."""
        return np_sum(x_vect**2)

    def grad(x_vect: NDArray, noise_per: float = 0.0) -> NDArray:
        """Gradient, optionally perturbed by random noise."""
        real_grad = 2.0 * x_vect
        if noise_per != 0.0:
            grad_norm = norm(real_grad)
            real_grad += grad_norm * noise_per * 1e-2 * default_rng().standard_normal(
                x_vect.shape[0]
            )
        return real_grad

    x_grad = array([2.0])
    step = 1e-3
    delta_func = func(x_grad + step) - func(x_grad)
    grad_val = grad(x_grad, noise_per=0.0)

    is_grad_correct, msg = check_gradient_by_step(
        step, delta_func, grad_val, thr_step_check=1e-3, mul_step_thr=0.05
    )
    assert is_grad_correct, msg

    grad_val = grad(x_grad, noise_per=5.0)
    is_grad_correct, msg = check_gradient_by_step(
        step, delta_func, grad_val, thr_step_check=1e-3, mul_step_thr=0.05
    )
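`check_gradient_by_step` compares a measured variation of the function with the first-order prediction of the gradient, i.e. it tests whether f(x + s) − f(x) ≈ ∇f(x)·s within a tolerance of `mul_step_thr * ||s||`, and skips the check entirely when `||s||` exceeds `thr_step_check`. The snippet below is an illustrative call on a two-variable quadratic with an exact gradient; the values are chosen here and are not taken from the package's tests.

```python
from numpy import array

from gemseo_multi_fidelity.core.noise_criterion import check_gradient_by_step

# f(x) = x1**2 + x2**2 evaluated around x = (1, 2) with a small step.
x = array([1.0, 2.0])
step = array([1.0e-4, -2.0e-4])
grad = 2.0 * x  # exact gradient of f
delta_f = float(((x + step) ** 2).sum() - (x**2).sum())

is_consistent, message = check_gradient_by_step(
    step, delta_f, grad, thr_step_check=1.0e-3
)
print(is_consistent, message)  # an exact gradient should be reported as consistent
```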
gemseo_multi_fidelity/core/projpolytope.out: file without changes.