mdo-lib 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mdo_lib/__init__.py +45 -0
- mdo_lib/core/__init__.py +18 -0
- mdo_lib/core/constraint.py +29 -0
- mdo_lib/core/evaluator.py +60 -0
- mdo_lib/core/objective.py +27 -0
- mdo_lib/core/parameter.py +41 -0
- mdo_lib/core/problem.py +24 -0
- mdo_lib/core/result.py +39 -0
- mdo_lib/doe/__init__.py +13 -0
- mdo_lib/doe/base.py +22 -0
- mdo_lib/doe/central_composite.py +61 -0
- mdo_lib/doe/full_factorial.py +39 -0
- mdo_lib/doe/latin_hypercube.py +43 -0
- mdo_lib/examples/__init__.py +13 -0
- mdo_lib/examples/basic_optimization.py +51 -0
- mdo_lib/examples/reliability_analysis.py +61 -0
- mdo_lib/examples/sensitivity_analysis.py +62 -0
- mdo_lib/examples/surrogate_modeling.py +58 -0
- mdo_lib/optimization/__init__.py +15 -0
- mdo_lib/optimization/base.py +22 -0
- mdo_lib/optimization/bayesian.py +117 -0
- mdo_lib/optimization/genetic.py +184 -0
- mdo_lib/optimization/gradient.py +88 -0
- mdo_lib/optimization/pso.py +132 -0
- mdo_lib/reliability/__init__.py +13 -0
- mdo_lib/reliability/base.py +22 -0
- mdo_lib/reliability/form.py +90 -0
- mdo_lib/reliability/monte_carlo.py +73 -0
- mdo_lib/reliability/sorm.py +103 -0
- mdo_lib/sensitivity/__init__.py +13 -0
- mdo_lib/sensitivity/base.py +22 -0
- mdo_lib/sensitivity/fast.py +67 -0
- mdo_lib/sensitivity/morris.py +74 -0
- mdo_lib/sensitivity/sobol.py +79 -0
- mdo_lib/surrogate/__init__.py +13 -0
- mdo_lib/surrogate/base.py +39 -0
- mdo_lib/surrogate/kriging.py +116 -0
- mdo_lib/surrogate/polynomial.py +56 -0
- mdo_lib/surrogate/rbf.py +74 -0
- mdo_lib/tests/__init__.py +1 -0
- mdo_lib/uncertainty/__init__.py +11 -0
- mdo_lib/uncertainty/base.py +24 -0
- mdo_lib/uncertainty/reliability_based.py +114 -0
- mdo_lib/uncertainty/robust.py +93 -0
- mdo_lib/utils/__init__.py +14 -0
- mdo_lib/utils/decorators.py +40 -0
- mdo_lib/utils/parallel.py +23 -0
- mdo_lib/utils/visualization.py +110 -0
- mdo_lib-0.1.0.dist-info/LICENSE +21 -0
- mdo_lib-0.1.0.dist-info/METADATA +231 -0
- mdo_lib-0.1.0.dist-info/RECORD +53 -0
- mdo_lib-0.1.0.dist-info/WHEEL +5 -0
- mdo_lib-0.1.0.dist-info/top_level.txt +1 -0
mdo_lib/__init__.py
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
"""MDO (Multi-Disciplinary Optimization) library"""
|
|
2
|
+
|
|
3
|
+
__version__ = "0.1.0"
|
|
4
|
+
|
|
5
|
+
from .core import Problem, Parameter, Objective, Constraint, Evaluator, Sample, Result
|
|
6
|
+
from .doe import DOE, FullFactorial, LatinHypercube, CentralComposite
|
|
7
|
+
from .surrogate import SurrogateModel, PolynomialRegression, Kriging, RBF
|
|
8
|
+
from .sensitivity import SensitivityAnalysis, SobolIndices, MorrisMethod, FAST
|
|
9
|
+
from .optimization import Optimizer, GradientDescent, GeneticAlgorithm, ParticleSwarmOptimization, BayesianOptimization
|
|
10
|
+
from .reliability import ReliabilityAnalysis, MonteCarlo, FORM, SORM
|
|
11
|
+
from .uncertainty import UncertaintyOptimizer, RobustOptimization, ReliabilityBasedOptimization
|
|
12
|
+
|
|
13
|
+
__all__ = [
|
|
14
|
+
"Problem",
|
|
15
|
+
"Parameter",
|
|
16
|
+
"Objective",
|
|
17
|
+
"Constraint",
|
|
18
|
+
"Evaluator",
|
|
19
|
+
"Sample",
|
|
20
|
+
"Result",
|
|
21
|
+
"DOE",
|
|
22
|
+
"FullFactorial",
|
|
23
|
+
"LatinHypercube",
|
|
24
|
+
"CentralComposite",
|
|
25
|
+
"SurrogateModel",
|
|
26
|
+
"PolynomialRegression",
|
|
27
|
+
"Kriging",
|
|
28
|
+
"RBF",
|
|
29
|
+
"SensitivityAnalysis",
|
|
30
|
+
"SobolIndices",
|
|
31
|
+
"MorrisMethod",
|
|
32
|
+
"FAST",
|
|
33
|
+
"Optimizer",
|
|
34
|
+
"GradientDescent",
|
|
35
|
+
"GeneticAlgorithm",
|
|
36
|
+
"ParticleSwarmOptimization",
|
|
37
|
+
"BayesianOptimization",
|
|
38
|
+
"ReliabilityAnalysis",
|
|
39
|
+
"MonteCarlo",
|
|
40
|
+
"FORM",
|
|
41
|
+
"SORM",
|
|
42
|
+
"UncertaintyOptimizer",
|
|
43
|
+
"RobustOptimization",
|
|
44
|
+
"ReliabilityBasedOptimization",
|
|
45
|
+
]
|
mdo_lib/core/__init__.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"""Core module for MDO library"""
|
|
2
|
+
|
|
3
|
+
from .parameter import Parameter
|
|
4
|
+
from .objective import Objective
|
|
5
|
+
from .constraint import Constraint
|
|
6
|
+
from .problem import Problem
|
|
7
|
+
from .evaluator import Evaluator
|
|
8
|
+
from .result import Sample, Result
|
|
9
|
+
|
|
10
|
+
__all__ = [
|
|
11
|
+
"Parameter",
|
|
12
|
+
"Objective",
|
|
13
|
+
"Constraint",
|
|
14
|
+
"Problem",
|
|
15
|
+
"Evaluator",
|
|
16
|
+
"Sample",
|
|
17
|
+
"Result",
|
|
18
|
+
]
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
"""Constraint module"""
|
|
2
|
+
|
|
3
|
+
class Constraint:
    """A single constraint of an optimization problem.

    This is an abstract definition: concrete constraints either subclass
    it and override ``evaluate`` or patch ``evaluate`` on an instance.
    """

    def __init__(self, name, type='inequality', upper_bound=0.0, lower_bound=None):
        """Create a constraint definition.

        Args:
            name (str): Constraint name.
            type (str, optional): 'inequality' or 'equality'. Defaults to 'inequality'.
            upper_bound (float, optional): Upper bound for an inequality
                constraint. Defaults to 0.0.
            lower_bound (float, optional): Lower bound for an equality
                constraint. Defaults to None.
        """
        self.name = name
        self.type = type
        self.lower_bound = lower_bound
        self.upper_bound = upper_bound

    def evaluate(self, x):
        """Compute the constraint value at the design point ``x``.

        Args:
            x (list): Design variable values.

        Returns:
            float: Constraint value.

        Raises:
            NotImplementedError: Always; subclasses must override.
        """
        raise NotImplementedError("Subclass must implement evaluate method")
|
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
"""Sample evaluation module"""
|
|
2
|
+
|
|
3
|
+
from concurrent.futures import ThreadPoolExecutor
|
|
4
|
+
|
|
5
|
+
class Evaluator:
    """Evaluates Sample objects against a Problem's objectives and constraints."""

    def __init__(self, problem, parallel=False):
        """Initialize an evaluator.

        Args:
            problem (Problem): Problem whose objectives and constraints
                are evaluated.
            parallel (bool, optional): If True, evaluate samples concurrently
                with a thread pool. NOTE(review): threads only overlap work
                when the objective/constraint evaluation releases the GIL
                (I/O or native code) — confirm for the intended workloads.
                Defaults to False.
        """
        self.problem = problem
        self.parallel = parallel

    def evaluate(self, samples):
        """Evaluate a batch of samples.

        Args:
            samples (list): List of Sample objects.

        Returns:
            list: Result objects, in the same order as ``samples``
                (``Executor.map`` preserves input order).
        """
        # Fixed: the original also imported Result here, unused — the
        # import is only needed in _evaluate_sample.
        if self.parallel:
            with ThreadPoolExecutor() as executor:
                return list(executor.map(self._evaluate_sample, samples))
        return [self._evaluate_sample(sample) for sample in samples]

    def _evaluate_sample(self, sample):
        """Evaluate a single sample.

        Args:
            sample (Sample): Sample to evaluate.

        Returns:
            Result: Objective and constraint values for the sample.
        """
        # Local import — presumably to avoid a circular import within
        # the core package; TODO confirm.
        from .result import Result

        objectives = [obj.evaluate(sample.values) for obj in self.problem.objectives]
        constraints = [con.evaluate(sample.values) for con in self.problem.constraints]
        return Result(sample, objectives, constraints)
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
"""Objective function module"""
|
|
2
|
+
|
|
3
|
+
class Objective:
    """A single objective of an optimization problem.

    Concrete objectives either subclass this and override ``evaluate``
    or patch ``evaluate`` on an instance.
    """

    def __init__(self, name, sense='minimize', weight=1.0):
        """Create an objective definition.

        Args:
            name (str): Objective name.
            sense (str, optional): 'minimize' or 'maximize'. Defaults to 'minimize'.
            weight (float, optional): Relative weight of this objective.
                Defaults to 1.0.
        """
        self.name = name
        self.weight = weight
        self.sense = sense

    def evaluate(self, x):
        """Compute the objective value at the design point ``x``.

        Args:
            x (list): Design variable values.

        Returns:
            float: Objective value.

        Raises:
            NotImplementedError: Always; subclasses must override.
        """
        raise NotImplementedError("Subclass must implement evaluate method")
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
"""Parameter management module"""
|
|
2
|
+
|
|
3
|
+
class Parameter:
    """Design parameter (variable) of an optimization problem."""

    def __init__(self, name, value, bounds=None, is_continuous=True, is_integer=False):
        """Initialize a parameter.

        Args:
            name (str): Parameter name.
            value (float): Initial value.
            bounds (tuple, optional): (lower, upper) bounds. Defaults to None
                (unbounded).
            is_continuous (bool, optional): Whether the parameter is
                continuous. Defaults to True.
            is_integer (bool, optional): Whether the parameter must take
                integer values. Defaults to False.
        """
        self.name = name
        self.value = value
        self.bounds = bounds
        self.is_continuous = is_continuous
        self.is_integer = is_integer

    def validate(self, value):
        """Check whether ``value`` is admissible for this parameter.

        Args:
            value (float): Candidate value.

        Returns:
            bool: True if the value lies within bounds (when set) and
            satisfies the integrality requirement (when ``is_integer``).
        """
        # Explicit None check: any 2-tuple is truthy, but this avoids
        # relying on container truthiness for the sentinel.
        if self.bounds is not None:
            lower, upper = self.bounds
            if not (lower <= value <= upper):
                return False

        # Integrality check. The original called value.is_integer(),
        # which raises AttributeError for non-float numerics (e.g.
        # Decimal or numpy integer types); comparing against int(value)
        # works for any real numeric type.
        if self.is_integer and not isinstance(value, int):
            try:
                if value != int(value):
                    return False
            except (TypeError, ValueError):
                # Not convertible to an integer at all -> invalid.
                return False

        return True
|
mdo_lib/core/problem.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"""Problem definition module"""
|
|
2
|
+
|
|
3
|
+
class Problem:
    """Definition of an optimization problem: parameters, objectives, constraints."""

    def __init__(self, parameters, objectives, constraints=None):
        """Create a problem definition.

        Args:
            parameters (list): Parameter objects (the design variables).
            objectives (list): Objective objects.
            constraints (list, optional): Constraint objects. Defaults to
                None, which is stored as an empty list.
        """
        self.parameters = parameters
        self.objectives = objectives
        # Falsy (None or empty) constraint arguments become a fresh list.
        self.constraints = constraints if constraints else []

    def get_design_space(self):
        """Collect the bounds of every parameter.

        Returns:
            list: One ``bounds`` entry per parameter, in parameter order
            (``None`` for unbounded parameters).
        """
        return [p.bounds for p in self.parameters]
|
mdo_lib/core/result.py
ADDED
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"""Result management module"""
|
|
2
|
+
|
|
3
|
+
class Sample:
    """One point in the design space.

    Pairs the Parameter definitions with the concrete values taken at
    this point.
    """

    def __init__(self, parameters, values):
        """Create a sample.

        Args:
            parameters (list): Parameter objects describing the variables.
            values (list): Value of each parameter at this point.
        """
        self.values = values
        self.parameters = parameters
|
|
15
|
+
|
|
16
|
+
class Result:
    """Evaluation outcome for a single sample."""

    def __init__(self, sample, objectives, constraints):
        """Create a result record.

        Args:
            sample (Sample): The sample that was evaluated.
            objectives (list): Objective values at the sample.
            constraints (list): Constraint values at the sample.
        """
        self.sample = sample
        self.objectives = objectives
        self.constraints = constraints

    def get_best(self):
        """Return the best result.

        Returns:
            Result: This result.

        Note:
            Placeholder implementation — comparing results by their
            objectives and constraints is not implemented yet, so the
            instance simply returns itself.
        """
        return self
|
mdo_lib/doe/__init__.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
"""DOE (Design of Experiments) module"""
|
|
2
|
+
|
|
3
|
+
from .base import DOE
|
|
4
|
+
from .full_factorial import FullFactorial
|
|
5
|
+
from .latin_hypercube import LatinHypercube
|
|
6
|
+
from .central_composite import CentralComposite
|
|
7
|
+
|
|
8
|
+
__all__ = [
|
|
9
|
+
"DOE",
|
|
10
|
+
"FullFactorial",
|
|
11
|
+
"LatinHypercube",
|
|
12
|
+
"CentralComposite",
|
|
13
|
+
]
|
mdo_lib/doe/base.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"""Base DOE class"""
|
|
2
|
+
|
|
3
|
+
class DOE:
    """Abstract base class for Design-of-Experiments sampling plans."""

    def __init__(self, problem, n_samples):
        """Create a sampling plan.

        Args:
            problem (Problem): Problem whose design space is sampled.
            n_samples (int): Requested number of samples.
        """
        self.n_samples = n_samples
        self.problem = problem

    def generate(self):
        """Produce the sampling plan.

        Returns:
            list: Sample objects.

        Raises:
            NotImplementedError: Always; subclasses implement the plan.
        """
        raise NotImplementedError("Subclass must implement generate method")
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
"""Central composite design"""
|
|
2
|
+
|
|
3
|
+
import numpy as np
|
|
4
|
+
from .base import DOE
|
|
5
|
+
from ..core.result import Sample
|
|
6
|
+
|
|
7
|
+
class CentralComposite(DOE):
    """Central composite design"""

    def generate(self):
        """Generate samples using central composite design

        Builds the classic CCD point set: 2**k factorial corners,
        2*k axial ("star") points at distance alpha, and the remaining
        sample budget as center points.

        Returns:
            list: List of Sample objects
        """
        n_params = len(self.problem.parameters)

        # Calculate the number of factorial points (all corners of the k-cube)
        factorial_points = 2 ** n_params

        # Calculate the number of axial points (two per axis)
        axial_points = 2 * n_params

        # Calculate the number of center points from the remaining budget;
        # at least one center point is always included.
        # NOTE(review): when n_samples < 2**k + 2k the total point count
        # exceeds n_samples — confirm this overshoot is intended.
        center_points = self.n_samples - factorial_points - axial_points
        if center_points < 1:
            center_points = 1

        # Generate factorial points: the binary representations of
        # 0..2**k-1 enumerate every 0/1 corner combination.
        factorial = np.array([list(np.binary_repr(i, width=n_params)) for i in range(factorial_points)], dtype=int)
        factorial = 2 * factorial - 1  # Convert to {-1, 1}

        # Generate axial points: one +alpha and one -alpha point per axis.
        axial = np.zeros((axial_points, n_params))
        alpha = 2 ** (n_params / 4)  # Rotatable design: alpha = (2**k)**(1/4)
        for i in range(n_params):
            axial[2*i, i] = alpha
            axial[2*i + 1, i] = -alpha

        # Generate center points (all-zero rows in coded units)
        center = np.zeros((center_points, n_params))

        # Combine all points; rows are in coded units, factorial first,
        # then axial, then center.
        all_points = np.vstack([factorial, axial, center])

        # Scale to parameter bounds
        samples = np.zeros_like(all_points)
        for i, param in enumerate(self.problem.parameters):
            if param.bounds:
                lower, upper = param.bounds
                # Scale from [-1, 1] to [lower, upper]
                # NOTE(review): axial coordinates sit at +/-alpha with
                # alpha > 1, so after this affine map they fall OUTSIDE
                # [lower, upper] — confirm that is the intent.
                samples[:, i] = (all_points[:, i] + 1) / 2 * (upper - lower) + lower
            else:
                samples[:, i] = all_points[:, i]

        # Convert to Sample objects
        result = []
        for sample in samples:
            result.append(Sample(self.problem.parameters, sample.tolist()))

        return result
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"""Full factorial design"""
|
|
2
|
+
|
|
3
|
+
import numpy as np
|
|
4
|
+
from .base import DOE
|
|
5
|
+
from ..core.result import Sample
|
|
6
|
+
|
|
7
|
+
class FullFactorial(DOE):
    """Full factorial sampling plan: a regular grid over the design space."""

    def generate(self):
        """Generate samples on a regular grid.

        The per-parameter level count is chosen so that the grid size
        (levels ** n_params) approximates ``n_samples``; the resulting
        total may therefore differ from the requested count.

        Returns:
            list: List of Sample objects.
        """
        dim = len(self.problem.parameters)
        # Level count per axis whose dim-th power is closest to n_samples.
        levels = int(np.round(self.n_samples ** (1 / dim)))

        # One evenly spaced axis per parameter; [0, 1] when unbounded.
        axes = []
        for param in self.problem.parameters:
            lo, hi = param.bounds if param.bounds else (0, 1)
            axes.append(np.linspace(lo, hi, levels))

        # Cartesian product of the axes, one grid point per row.
        grids = np.meshgrid(*axes)
        points = np.vstack([g.ravel() for g in grids]).T

        return [Sample(self.problem.parameters, row.tolist()) for row in points]
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
"""Latin hypercube design"""
|
|
2
|
+
|
|
3
|
+
import numpy as np
|
|
4
|
+
from .base import DOE
|
|
5
|
+
from ..core.result import Sample
|
|
6
|
+
|
|
7
|
+
class LatinHypercube(DOE):
    """Latin hypercube sampling plan."""

    def generate(self):
        """Generate samples using Latin hypercube sampling.

        Each dimension of the unit hypercube is split into ``n_samples``
        equal intervals; exactly one point is drawn uniformly at random
        from each interval, with the interval-to-sample assignment
        shuffled independently per dimension. Uses the global NumPy RNG,
        so results are not reproducible unless the caller seeds it.

        Returns:
            list: List of Sample objects.
        """
        n_params = len(self.problem.parameters)
        samples = np.zeros((self.n_samples, n_params))

        # Interval edges on [0, 1] are identical for every dimension —
        # hoisted out of the loop (the original rebuilt them per axis).
        edges = np.linspace(0, 1, self.n_samples + 1)

        for i in range(n_params):
            # Shuffle which interval each sample occupies in this dimension.
            permuted = np.random.permutation(self.n_samples)
            # One uniform draw inside each shuffled interval; vectorized
            # via array-valued low/high instead of a per-sample loop.
            samples[:, i] = np.random.uniform(edges[permuted], edges[permuted + 1])

        # Map from the unit hypercube onto each parameter's bounds.
        for i, param in enumerate(self.problem.parameters):
            if param.bounds:
                lower, upper = param.bounds
                samples[:, i] = samples[:, i] * (upper - lower) + lower

        return [Sample(self.problem.parameters, row.tolist()) for row in samples]
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
"""Examples module"""
|
|
2
|
+
|
|
3
|
+
from .basic_optimization import run_basic_optimization
|
|
4
|
+
from .surrogate_modeling import run_surrogate_modeling
|
|
5
|
+
from .sensitivity_analysis import run_sensitivity_analysis
|
|
6
|
+
from .reliability_analysis import run_reliability_analysis
|
|
7
|
+
|
|
8
|
+
__all__ = [
|
|
9
|
+
"run_basic_optimization",
|
|
10
|
+
"run_surrogate_modeling",
|
|
11
|
+
"run_sensitivity_analysis",
|
|
12
|
+
"run_reliability_analysis",
|
|
13
|
+
]
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"""Basic optimization example"""
|
|
2
|
+
|
|
3
|
+
import sys
|
|
4
|
+
import os
|
|
5
|
+
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
|
|
6
|
+
|
|
7
|
+
from mdo_lib import Problem, Parameter, Objective, Constraint
|
|
8
|
+
from mdo_lib.optimization import GeneticAlgorithm
|
|
9
|
+
|
|
10
|
+
def run_basic_optimization():
    """Run basic optimization example.

    Minimizes f(x) = (x1 - 0.5)**2 + (x2 - 0.5)**2 subject to
    x1 + x2 - 1 <= 0 with a genetic algorithm, then prints the best
    point found.
    """
    # Define parameters: two design variables on [0, 1]
    x1 = Parameter('x1', 0.5, bounds=[0, 1])
    x2 = Parameter('x2', 0.5, bounds=[0, 1])

    # Define objective function: quadratic bowl centered at (0.5, 0.5)
    def objective_function(x):
        return (x[0] - 0.5)**2 + (x[1] - 0.5)**2

    # Objective.evaluate is patched per-instance instead of subclassing
    obj = Objective('f', 'minimize')
    obj.evaluate = objective_function

    # Define constraint function: feasible when x1 + x2 <= 1
    def constraint_function(x):
        return x[0] + x[1] - 1.0

    con = Constraint('g', 'inequality', upper_bound=0.0)
    con.evaluate = constraint_function

    # Create problem
    problem = Problem([x1, x2], [obj], [con])

    # Create optimizer; GA hyperparameters are passed as a plain dict
    optimizer = GeneticAlgorithm(problem, {
        'population_size': 50,
        'max_generations': 100,
        'mutation_rate': 0.1,
        'crossover_rate': 0.8
    })

    # Run optimization
    result = optimizer.optimize()

    # Print results
    print("Optimization Results:")
    print(f"Best point: {result.sample.values}")
    print(f"Objective value: {result.objectives[0]}")
    print(f"Constraint value: {result.constraints[0]}")

if __name__ == "__main__":
    run_basic_optimization()
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
"""Reliability analysis example"""
|
|
2
|
+
|
|
3
|
+
import sys
|
|
4
|
+
import os
|
|
5
|
+
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
|
|
6
|
+
|
|
7
|
+
import numpy as np
|
|
8
|
+
from mdo_lib import Problem, Parameter, Objective
|
|
9
|
+
from mdo_lib.doe import LatinHypercube
|
|
10
|
+
from mdo_lib.surrogate import Kriging
|
|
11
|
+
from mdo_lib.reliability import MonteCarlo, FORM
|
|
12
|
+
from mdo_lib.core import Evaluator
|
|
13
|
+
from mdo_lib.utils import plot_reliability
|
|
14
|
+
|
|
15
|
+
def run_reliability_analysis():
    """Run reliability analysis example.

    Builds a Kriging surrogate of a limit state function from a Latin
    hypercube DOE, then runs both Monte Carlo and FORM reliability
    analyses on the surrogate and prints/plots their results.
    """
    # Define parameters
    x1 = Parameter('x1', 0.5, bounds=[0, 1])
    x2 = Parameter('x2', 0.5, bounds=[0, 1])

    # Define limit state function (failure when > 0)
    # NOTE(review): the sign convention "failure when g > 0" should be
    # confirmed against the reliability module's convention.
    def limit_state_function(x):
        return (x[0] - 0.7)**2 + (x[1] - 0.7)**2 - 0.1

    # The limit state is carried as the problem's single objective
    obj = Objective('g', 'minimize')
    obj.evaluate = limit_state_function

    # Create problem (no explicit constraints)
    problem = Problem([x1, x2], [obj])

    # Generate samples via Latin hypercube DOE
    doe = LatinHypercube(problem, n_samples=50)
    samples = doe.generate()

    # Evaluate samples (serial evaluation)
    evaluator = Evaluator(problem)
    results = evaluator.evaluate(samples)

    # Extract training inputs X and responses y for the surrogate
    X = [sample.values for sample in samples]
    y = [result.objectives[0] for result in results]

    # Train surrogate model
    model = Kriging()
    model.fit(X, y)

    # Perform Monte Carlo reliability analysis on the surrogate
    print("Monte Carlo Reliability Analysis:")
    monte_carlo = MonteCarlo(problem, model, n_samples=10000)
    mc_results = monte_carlo.analyze()
    print(mc_results)
    plot_reliability(mc_results)

    # Perform FORM (first-order reliability method) analysis
    print("\nFORM Reliability Analysis:")
    form = FORM(problem, model)
    form_results = form.analyze()
    print(form_results)

if __name__ == "__main__":
    run_reliability_analysis()
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
"""Sensitivity analysis example"""
|
|
2
|
+
|
|
3
|
+
import sys
|
|
4
|
+
import os
|
|
5
|
+
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
|
|
6
|
+
|
|
7
|
+
import numpy as np
|
|
8
|
+
from mdo_lib import Problem, Parameter, Objective
|
|
9
|
+
from mdo_lib.doe import LatinHypercube
|
|
10
|
+
from mdo_lib.surrogate import Kriging
|
|
11
|
+
from mdo_lib.sensitivity import SobolIndices, MorrisMethod
|
|
12
|
+
from mdo_lib.core import Evaluator
|
|
13
|
+
from mdo_lib.utils import plot_sensitivity
|
|
14
|
+
|
|
15
|
+
def run_sensitivity_analysis():
    """Run sensitivity analysis example.

    Fits a Kriging surrogate to a quadratic test function of three
    variables, then ranks parameter influence with Sobol indices and
    the Morris screening method.
    """
    # Define parameters
    x1 = Parameter('x1', 0.5, bounds=[0, 1])
    x2 = Parameter('x2', 0.5, bounds=[0, 1])
    x3 = Parameter('x3', 0.5, bounds=[0, 1])

    # Define objective function: increasing weights (1, 2, 3) make x3
    # the most influential variable — a known ground truth for the demo
    def objective_function(x):
        return (x[0] - 0.5)**2 + 2*(x[1] - 0.5)**2 + 3*(x[2] - 0.5)**2

    # Objective.evaluate is patched per-instance instead of subclassing
    obj = Objective('f', 'minimize')
    obj.evaluate = objective_function

    # Create problem
    problem = Problem([x1, x2, x3], [obj])

    # Generate samples
    doe = LatinHypercube(problem, n_samples=100)
    samples = doe.generate()

    # Evaluate samples
    evaluator = Evaluator(problem)
    results = evaluator.evaluate(samples)

    # Extract training inputs X and responses y for the surrogate
    X = [sample.values for sample in samples]
    y = [result.objectives[0] for result in results]

    # Train surrogate model
    model = Kriging()
    model.fit(X, y)

    # Perform Sobol indices analysis on the surrogate
    print("Sobol Indices Analysis:")
    sobol = SobolIndices(model, problem, n_samples=1000)
    sobol_results = sobol.analyze()
    print(sobol_results)
    # NOTE(review): assumes analyze() returns a mapping containing a
    # 'parameter_names' key — confirm against the sensitivity module.
    plot_sensitivity(sobol_results, sobol_results['parameter_names'])

    # Perform Morris method screening
    print("\nMorris Method Analysis:")
    morris = MorrisMethod(model, problem, n_levels=4, n_trajectories=10)
    morris_results = morris.analyze()
    print(morris_results)

if __name__ == "__main__":
    run_sensitivity_analysis()
|