morphml-1.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of morphml might be problematic.
- morphml/__init__.py +14 -0
- morphml/api/__init__.py +26 -0
- morphml/api/app.py +326 -0
- morphml/api/auth.py +193 -0
- morphml/api/client.py +338 -0
- morphml/api/models.py +132 -0
- morphml/api/rate_limit.py +192 -0
- morphml/benchmarking/__init__.py +36 -0
- morphml/benchmarking/comparison.py +430 -0
- morphml/benchmarks/__init__.py +56 -0
- morphml/benchmarks/comparator.py +409 -0
- morphml/benchmarks/datasets.py +280 -0
- morphml/benchmarks/metrics.py +199 -0
- morphml/benchmarks/openml_suite.py +201 -0
- morphml/benchmarks/problems.py +289 -0
- morphml/benchmarks/suite.py +318 -0
- morphml/cli/__init__.py +5 -0
- morphml/cli/commands/experiment.py +329 -0
- morphml/cli/main.py +457 -0
- morphml/cli/quickstart.py +312 -0
- morphml/config.py +278 -0
- morphml/constraints/__init__.py +19 -0
- morphml/constraints/handler.py +205 -0
- morphml/constraints/predicates.py +285 -0
- morphml/core/__init__.py +3 -0
- morphml/core/crossover.py +449 -0
- morphml/core/dsl/README.md +359 -0
- morphml/core/dsl/__init__.py +72 -0
- morphml/core/dsl/ast_nodes.py +364 -0
- morphml/core/dsl/compiler.py +318 -0
- morphml/core/dsl/layers.py +368 -0
- morphml/core/dsl/lexer.py +336 -0
- morphml/core/dsl/parser.py +455 -0
- morphml/core/dsl/search_space.py +386 -0
- morphml/core/dsl/syntax.py +199 -0
- morphml/core/dsl/type_system.py +361 -0
- morphml/core/dsl/validator.py +386 -0
- morphml/core/graph/__init__.py +40 -0
- morphml/core/graph/edge.py +124 -0
- morphml/core/graph/graph.py +507 -0
- morphml/core/graph/mutations.py +409 -0
- morphml/core/graph/node.py +196 -0
- morphml/core/graph/serialization.py +361 -0
- morphml/core/graph/visualization.py +431 -0
- morphml/core/objectives/__init__.py +20 -0
- morphml/core/search/__init__.py +33 -0
- morphml/core/search/individual.py +252 -0
- morphml/core/search/parameters.py +453 -0
- morphml/core/search/population.py +375 -0
- morphml/core/search/search_engine.py +340 -0
- morphml/distributed/__init__.py +76 -0
- morphml/distributed/fault_tolerance.py +497 -0
- morphml/distributed/health_monitor.py +348 -0
- morphml/distributed/master.py +709 -0
- morphml/distributed/proto/README.md +224 -0
- morphml/distributed/proto/__init__.py +74 -0
- morphml/distributed/proto/worker.proto +170 -0
- morphml/distributed/proto/worker_pb2.py +79 -0
- morphml/distributed/proto/worker_pb2_grpc.py +423 -0
- morphml/distributed/resource_manager.py +416 -0
- morphml/distributed/scheduler.py +567 -0
- morphml/distributed/storage/__init__.py +33 -0
- morphml/distributed/storage/artifacts.py +381 -0
- morphml/distributed/storage/cache.py +366 -0
- morphml/distributed/storage/checkpointing.py +329 -0
- morphml/distributed/storage/database.py +459 -0
- morphml/distributed/worker.py +549 -0
- morphml/evaluation/__init__.py +5 -0
- morphml/evaluation/heuristic.py +237 -0
- morphml/exceptions.py +55 -0
- morphml/execution/__init__.py +5 -0
- morphml/execution/local_executor.py +350 -0
- morphml/integrations/__init__.py +28 -0
- morphml/integrations/jax_adapter.py +206 -0
- morphml/integrations/pytorch_adapter.py +530 -0
- morphml/integrations/sklearn_adapter.py +206 -0
- morphml/integrations/tensorflow_adapter.py +230 -0
- morphml/logging_config.py +93 -0
- morphml/meta_learning/__init__.py +66 -0
- morphml/meta_learning/architecture_similarity.py +277 -0
- morphml/meta_learning/experiment_database.py +240 -0
- morphml/meta_learning/knowledge_base/__init__.py +19 -0
- morphml/meta_learning/knowledge_base/embedder.py +179 -0
- morphml/meta_learning/knowledge_base/knowledge_base.py +313 -0
- morphml/meta_learning/knowledge_base/meta_features.py +265 -0
- morphml/meta_learning/knowledge_base/vector_store.py +271 -0
- morphml/meta_learning/predictors/__init__.py +27 -0
- morphml/meta_learning/predictors/ensemble.py +221 -0
- morphml/meta_learning/predictors/gnn_predictor.py +552 -0
- morphml/meta_learning/predictors/learning_curve.py +231 -0
- morphml/meta_learning/predictors/proxy_metrics.py +261 -0
- morphml/meta_learning/strategy_evolution/__init__.py +27 -0
- morphml/meta_learning/strategy_evolution/adaptive_optimizer.py +226 -0
- morphml/meta_learning/strategy_evolution/bandit.py +276 -0
- morphml/meta_learning/strategy_evolution/portfolio.py +230 -0
- morphml/meta_learning/transfer.py +581 -0
- morphml/meta_learning/warm_start.py +286 -0
- morphml/optimizers/__init__.py +74 -0
- morphml/optimizers/adaptive_operators.py +399 -0
- morphml/optimizers/bayesian/__init__.py +52 -0
- morphml/optimizers/bayesian/acquisition.py +387 -0
- morphml/optimizers/bayesian/base.py +319 -0
- morphml/optimizers/bayesian/gaussian_process.py +635 -0
- morphml/optimizers/bayesian/smac.py +534 -0
- morphml/optimizers/bayesian/tpe.py +411 -0
- morphml/optimizers/differential_evolution.py +220 -0
- morphml/optimizers/evolutionary/__init__.py +61 -0
- morphml/optimizers/evolutionary/cma_es.py +416 -0
- morphml/optimizers/evolutionary/differential_evolution.py +556 -0
- morphml/optimizers/evolutionary/encoding.py +426 -0
- morphml/optimizers/evolutionary/particle_swarm.py +449 -0
- morphml/optimizers/genetic_algorithm.py +486 -0
- morphml/optimizers/gradient_based/__init__.py +22 -0
- morphml/optimizers/gradient_based/darts.py +550 -0
- morphml/optimizers/gradient_based/enas.py +585 -0
- morphml/optimizers/gradient_based/operations.py +474 -0
- morphml/optimizers/gradient_based/utils.py +601 -0
- morphml/optimizers/hill_climbing.py +169 -0
- morphml/optimizers/multi_objective/__init__.py +56 -0
- morphml/optimizers/multi_objective/indicators.py +504 -0
- morphml/optimizers/multi_objective/nsga2.py +647 -0
- morphml/optimizers/multi_objective/visualization.py +427 -0
- morphml/optimizers/nsga2.py +308 -0
- morphml/optimizers/random_search.py +172 -0
- morphml/optimizers/simulated_annealing.py +181 -0
- morphml/plugins/__init__.py +35 -0
- morphml/plugins/custom_evaluator_example.py +81 -0
- morphml/plugins/custom_optimizer_example.py +63 -0
- morphml/plugins/plugin_system.py +454 -0
- morphml/reports/__init__.py +30 -0
- morphml/reports/generator.py +362 -0
- morphml/tracking/__init__.py +7 -0
- morphml/tracking/experiment.py +309 -0
- morphml/tracking/logger.py +301 -0
- morphml/tracking/reporter.py +357 -0
- morphml/utils/__init__.py +6 -0
- morphml/utils/checkpoint.py +189 -0
- morphml/utils/comparison.py +390 -0
- morphml/utils/export.py +407 -0
- morphml/utils/progress.py +392 -0
- morphml/utils/validation.py +392 -0
- morphml/version.py +7 -0
- morphml/visualization/__init__.py +50 -0
- morphml/visualization/analytics.py +423 -0
- morphml/visualization/architecture_diagrams.py +353 -0
- morphml/visualization/architecture_plot.py +223 -0
- morphml/visualization/convergence_plot.py +174 -0
- morphml/visualization/crossover_viz.py +386 -0
- morphml/visualization/graph_viz.py +338 -0
- morphml/visualization/pareto_plot.py +149 -0
- morphml/visualization/plotly_dashboards.py +422 -0
- morphml/visualization/population.py +309 -0
- morphml/visualization/progress.py +260 -0
- morphml-1.0.0.dist-info/METADATA +434 -0
- morphml-1.0.0.dist-info/RECORD +158 -0
- morphml-1.0.0.dist-info/WHEEL +4 -0
- morphml-1.0.0.dist-info/entry_points.txt +3 -0
- morphml-1.0.0.dist-info/licenses/LICENSE +21 -0
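
The wheel is a standard zip archive, so the file listing above can be reproduced from a locally downloaded copy. A minimal sketch using only the Python standard library; the local filename is an assumption:

import zipfile

# Hypothetical local path to the downloaded wheel.
wheel_path = "morphml-1.0.0-py3-none-any.whl"

with zipfile.ZipFile(wheel_path) as whl:
    # Print each archived file with its uncompressed size in bytes.
    for info in whl.infolist():
        print(f"{info.filename}  ({info.file_size} bytes)")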
morphml/optimizers/multi_objective/indicators.py
@@ -0,0 +1,504 @@
+"""Quality indicators for multi-objective optimization.
+
+This module provides metrics to assess the quality of Pareto fronts including:
+- Hypervolume (S-metric): Volume of objective space dominated by Pareto front
+- Inverted Generational Distance (IGD): Average distance to reference Pareto front
+- Spacing: Distribution uniformity of solutions
+- Spread: Extent of Pareto front coverage
+
+Author: Eshan Roy <eshanized@proton.me>
+Organization: TONMOY INFRASTRUCTURE & VISION
+"""
+
+from typing import List, Optional
+
+import numpy as np
+
+from morphml.logging_config import get_logger
+from morphml.optimizers.multi_objective.nsga2 import MultiObjectiveIndividual
+
+logger = get_logger(__name__)
+
+
+class QualityIndicators:
+    """
+    Quality indicators for multi-objective optimization.
+
+    Provides metrics to evaluate and compare Pareto fronts.
+
+    Example:
+        >>> indicators = QualityIndicators()
+        >>> hv = indicators.hypervolume(pareto_front, reference_point)
+        >>> print(f"Hypervolume: {hv:.4f}")
+    """
+
+    @staticmethod
+    def hypervolume(
+        pareto_front: List[MultiObjectiveIndividual],
+        reference_point: Optional[np.ndarray] = None,
+        objective_names: Optional[List[str]] = None,
+    ) -> float:
+        """
+        Calculate hypervolume (S-metric) indicator.
+
+        Hypervolume measures the volume of objective space dominated by the
+        Pareto front. Higher is better.
+
+        Args:
+            pareto_front: List of Pareto-optimal individuals
+            reference_point: Reference point (nadir point). If None, uses worst values.
+            objective_names: List of objective names to consider
+
+        Returns:
+            Hypervolume value
+
+        Note:
+            For 2D/3D, uses efficient algorithms. For >3D, uses Monte Carlo approximation.
+
+        Example:
+            >>> hv = QualityIndicators.hypervolume(
+            ...     pareto_front,
+            ...     reference_point=np.array([0.0, 100.0, 1e7])
+            ... )
+        """
+        if not pareto_front:
+            return 0.0
+
+        # Extract objective values
+        if objective_names is None:
+            objective_names = list(pareto_front[0].objectives.keys())
+
+        points = np.array(
+            [[ind.objectives[name] for name in objective_names] for ind in pareto_front]
+        )
+
+        n_objectives = points.shape[1]
+
+        # Set reference point if not provided
+        if reference_point is None:
+            reference_point = np.min(points, axis=0) - 1.0
+
+        # Different algorithms based on dimensionality
+        if n_objectives == 2:
+            hv = QualityIndicators._hypervolume_2d(points, reference_point)
+        elif n_objectives == 3:
+            hv = QualityIndicators._hypervolume_3d(points, reference_point)
+        else:
+            hv = QualityIndicators._hypervolume_monte_carlo(points, reference_point)
+
+        return hv
+
+    @staticmethod
+    def _hypervolume_2d(points: np.ndarray, reference: np.ndarray) -> float:
+        """
+        Efficient 2D hypervolume calculation.
+
+        Sorts points by first objective and calculates area.
+        Complexity: O(n log n)
+        """
+        # Sort by first objective (descending for maximization)
+        sorted_indices = np.argsort(-points[:, 0])
+        sorted_points = points[sorted_indices]
+
+        hv = 0.0
+        prev_y = reference[1]
+
+        for point in sorted_points:
+            width = point[0] - reference[0]
+            height = point[1] - prev_y
+
+            if width > 0 and height > 0:
+                hv += width * height
+
+            prev_y = max(prev_y, point[1])
+
+        return abs(hv)
+
+    @staticmethod
+    def _hypervolume_3d(points: np.ndarray, reference: np.ndarray) -> float:
+        """
+        3D hypervolume calculation using WFG algorithm.
+
+        Complexity: O(n² log n)
+        """
+        # Simplified 3D calculation
+        # For production, use pymoo or pygmo for exact WFG implementation
+
+        # Monte Carlo approximation for now
+        return QualityIndicators._hypervolume_monte_carlo(points, reference, n_samples=10000)
+
+    @staticmethod
+    def _hypervolume_monte_carlo(
+        points: np.ndarray, reference: np.ndarray, n_samples: int = 100000
+    ) -> float:
+        """
+        Monte Carlo approximation of hypervolume for high dimensions.
+
+        Samples random points in objective space and checks domination.
+
+        Args:
+            points: Pareto front points
+            reference: Reference point
+            n_samples: Number of Monte Carlo samples
+
+        Returns:
+            Approximated hypervolume
+        """
+        # Define bounds
+        upper_bound = np.max(points, axis=0)
+        lower_bound = reference
+
+        # Volume of bounding box
+        box_volume = np.prod(upper_bound - lower_bound)
+
+        if box_volume <= 0:
+            return 0.0
+
+        # Sample random points
+        random_points = np.random.uniform(
+            lower_bound, upper_bound, size=(n_samples, len(lower_bound))
+        )
+
+        # Count dominated points
+        dominated_count = 0
+        for sample in random_points:
+            # Check if any Pareto point dominates this sample
+            dominated = np.any(np.all(points >= sample, axis=1))
+            if dominated:
+                dominated_count += 1
+
+        # Estimate hypervolume
+        hv = (dominated_count / n_samples) * box_volume
+        return hv
+
+    @staticmethod
+    def inverted_generational_distance(
+        pareto_front: List[MultiObjectiveIndividual],
+        reference_pareto: List[MultiObjectiveIndividual],
+        objective_names: Optional[List[str]] = None,
+    ) -> float:
+        """
+        Calculate Inverted Generational Distance (IGD).
+
+        IGD measures average distance from reference Pareto front to
+        obtained front. Lower is better.
+
+        Args:
+            pareto_front: Obtained Pareto front
+            reference_pareto: True/reference Pareto front
+            objective_names: Objectives to consider
+
+        Returns:
+            IGD value
+
+        Example:
+            >>> igd = QualityIndicators.inverted_generational_distance(
+            ...     obtained_front,
+            ...     true_pareto_front
+            ... )
+        """
+        if not pareto_front or not reference_pareto:
+            return float("inf")
+
+        # Extract objective values
+        if objective_names is None:
+            objective_names = list(pareto_front[0].objectives.keys())
+
+        obtained_points = np.array(
+            [[ind.objectives[name] for name in objective_names] for ind in pareto_front]
+        )
+
+        reference_points = np.array(
+            [[ind.objectives[name] for name in objective_names] for ind in reference_pareto]
+        )
+
+        # For each reference point, find minimum distance to obtained front
+        distances = []
+        for ref_point in reference_points:
+            min_dist = np.min(np.linalg.norm(obtained_points - ref_point, axis=1))
+            distances.append(min_dist)
+
+        igd = np.mean(distances)
+        return igd
+
+    @staticmethod
+    def spacing(
+        pareto_front: List[MultiObjectiveIndividual], objective_names: Optional[List[str]] = None
+    ) -> float:
+        """
+        Calculate spacing metric.
+
+        Spacing measures the uniformity of distribution of solutions
+        in the Pareto front. Lower is better (more uniform).
+
+        Args:
+            pareto_front: Pareto front
+            objective_names: Objectives to consider
+
+        Returns:
+            Spacing value
+        """
+        if len(pareto_front) < 2:
+            return 0.0
+
+        # Extract objective values
+        if objective_names is None:
+            objective_names = list(pareto_front[0].objectives.keys())
+
+        points = np.array(
+            [[ind.objectives[name] for name in objective_names] for ind in pareto_front]
+        )
+
+        # Calculate pairwise distances
+        n = len(points)
+        distances = []
+
+        for i in range(n):
+            min_dist = float("inf")
+            for j in range(n):
+                if i != j:
+                    dist = np.linalg.norm(points[i] - points[j])
+                    min_dist = min(min_dist, dist)
+            distances.append(min_dist)
+
+        # Spacing = standard deviation of distances
+        mean_dist = np.mean(distances)
+        spacing_value = np.sqrt(np.mean((np.array(distances) - mean_dist) ** 2))
+
+        return spacing_value
+
+    @staticmethod
+    def spread(
+        pareto_front: List[MultiObjectiveIndividual], objective_names: Optional[List[str]] = None
+    ) -> float:
+        """
+        Calculate spread (delta) metric.
+
+        Spread measures the extent of coverage of the Pareto front.
+        Lower is better (better coverage).
+
+        Args:
+            pareto_front: Pareto front
+            objective_names: Objectives to consider
+
+        Returns:
+            Spread value
+        """
+        if len(pareto_front) < 2:
+            return 0.0
+
+        # Extract objective values
+        if objective_names is None:
+            objective_names = list(pareto_front[0].objectives.keys())
+
+        points = np.array(
+            [[ind.objectives[name] for name in objective_names] for ind in pareto_front]
+        )
+
+        # Sort by first objective
+        sorted_indices = np.argsort(points[:, 0])
+        sorted_points = points[sorted_indices]
+
+        # Calculate consecutive distances
+        distances = []
+        for i in range(len(sorted_points) - 1):
+            dist = np.linalg.norm(sorted_points[i + 1] - sorted_points[i])
+            distances.append(dist)
+
+        if not distances:
+            return 0.0
+
+        # Extreme distances (to ideal corners)
+        d_f = np.linalg.norm(sorted_points[0] - np.max(points, axis=0))
+        d_l = np.linalg.norm(sorted_points[-1] - np.max(points, axis=0))
+
+        # Mean distance
+        d_mean = np.mean(distances)
+
+        # Spread metric
+        numerator = d_f + d_l + np.sum(np.abs(np.array(distances) - d_mean))
+        denominator = d_f + d_l + len(distances) * d_mean
+
+        if denominator == 0:
+            return 0.0
+
+        spread_value = numerator / denominator
+        return spread_value
+
+    @staticmethod
+    def generational_distance(
+        pareto_front: List[MultiObjectiveIndividual],
+        reference_pareto: List[MultiObjectiveIndividual],
+        objective_names: Optional[List[str]] = None,
+        p: int = 2,
+    ) -> float:
+        """
+        Calculate Generational Distance (GD).
+
+        GD measures average distance from obtained front to reference front.
+        Lower is better (closer to true Pareto front).
+
+        Args:
+            pareto_front: Obtained Pareto front
+            reference_pareto: Reference Pareto front
+            objective_names: Objectives to consider
+            p: Distance metric exponent (typically 2 for Euclidean)
+
+        Returns:
+            GD value
+        """
+        if not pareto_front or not reference_pareto:
+            return float("inf")
+
+        # Extract objective values
+        if objective_names is None:
+            objective_names = list(pareto_front[0].objectives.keys())
+
+        obtained_points = np.array(
+            [[ind.objectives[name] for name in objective_names] for ind in pareto_front]
+        )
+
+        reference_points = np.array(
+            [[ind.objectives[name] for name in objective_names] for ind in reference_pareto]
+        )
+
+        # For each obtained point, find minimum distance to reference front
+        distances = []
+        for point in obtained_points:
+            min_dist = np.min(np.linalg.norm(reference_points - point, axis=1))
+            distances.append(min_dist**p)
+
+        gd = (np.sum(distances) / len(distances)) ** (1.0 / p)
+        return gd
+
+    @staticmethod
+    def epsilon_indicator(
+        pareto_front: List[MultiObjectiveIndividual],
+        reference_pareto: List[MultiObjectiveIndividual],
+        objective_names: Optional[List[str]] = None,
+    ) -> float:
+        """
+        Calculate additive epsilon indicator.
+
+        Measures minimum epsilon by which obtained front must be translated
+        to dominate reference front.
+
+        Args:
+            pareto_front: Obtained Pareto front
+            reference_pareto: Reference Pareto front
+            objective_names: Objectives to consider
+
+        Returns:
+            Epsilon value
+        """
+        if not pareto_front or not reference_pareto:
+            return float("inf")
+
+        # Extract objective values
+        if objective_names is None:
+            objective_names = list(pareto_front[0].objectives.keys())
+
+        obtained_points = np.array(
+            [[ind.objectives[name] for name in objective_names] for ind in pareto_front]
+        )
+
+        reference_points = np.array(
+            [[ind.objectives[name] for name in objective_names] for ind in reference_pareto]
+        )
+
+        # For each reference point, find minimum epsilon
+        epsilons = []
+        for ref_point in reference_points:
+            min_epsilon = float("inf")
+            for obt_point in obtained_points:
+                epsilon = np.max(ref_point - obt_point)
+                min_epsilon = min(min_epsilon, epsilon)
+            epsilons.append(min_epsilon)
+
+        # Maximum of minimum epsilons
+        epsilon_indicator = np.max(epsilons)
+        return epsilon_indicator
+
+
+def calculate_all_indicators(
+    pareto_front: List[MultiObjectiveIndividual],
+    reference_pareto: Optional[List[MultiObjectiveIndividual]] = None,
+    reference_point: Optional[np.ndarray] = None,
+) -> dict:
+    """
+    Calculate all quality indicators for a Pareto front.
+
+    Args:
+        pareto_front: Obtained Pareto front
+        reference_pareto: Optional reference Pareto front for IGD/GD/epsilon
+        reference_point: Optional reference point for hypervolume
+
+    Returns:
+        Dictionary of indicator values
+
+    Example:
+        >>> indicators = calculate_all_indicators(pareto_front)
+        >>> print(f"Hypervolume: {indicators['hypervolume']:.4f}")
+        >>> print(f"Spacing: {indicators['spacing']:.4f}")
+    """
+    qi = QualityIndicators()
+
+    results = {
+        "hypervolume": qi.hypervolume(pareto_front, reference_point),
+        "spacing": qi.spacing(pareto_front),
+        "spread": qi.spread(pareto_front),
+        "pareto_size": len(pareto_front),
+    }
+
+    if reference_pareto is not None:
+        results["igd"] = qi.inverted_generational_distance(pareto_front, reference_pareto)
+        results["gd"] = qi.generational_distance(pareto_front, reference_pareto)
+        results["epsilon"] = qi.epsilon_indicator(pareto_front, reference_pareto)
+
+    return results
+
+
+def compare_pareto_fronts(
+    front1: List[MultiObjectiveIndividual],
+    front2: List[MultiObjectiveIndividual],
+    front1_name: str = "Front 1",
+    front2_name: str = "Front 2",
+) -> None:
+    """
+    Compare two Pareto fronts and print results.
+
+    Args:
+        front1: First Pareto front
+        front2: Second Pareto front
+        front1_name: Name for first front
+        front2_name: Name for second front
+
+    Example:
+        >>> compare_pareto_fronts(ga_front, nsga2_front, "GA", "NSGA-II")
+    """
+    qi = QualityIndicators()
+
+    # Calculate indicators
+    hv1 = qi.hypervolume(front1)
+    hv2 = qi.hypervolume(front2)
+
+    spacing1 = qi.spacing(front1)
+    spacing2 = qi.spacing(front2)
+
+    spread1 = qi.spread(front1)
+    spread2 = qi.spread(front2)
+
+    # Print comparison
+    print("\n" + "=" * 60)
+    print("Pareto Front Comparison")
+    print("=" * 60)
+
+    print(f"\n{front1_name:20s} | {front2_name:20s}")
+    print("-" * 60)
+
+    print(f"Size: {len(front1):14d} | {len(front2):14d}")
+    print(f"Hypervolume: {hv1:12.4f} | {hv2:12.4f} {'✓' if hv1 > hv2 else ''}")
+    print(f"Spacing: {spacing1:16.4f} | {spacing2:16.4f} {'✓' if spacing1 < spacing2 else ''}")
+    print(f"Spread: {spread1:17.4f} | {spread2:17.4f} {'✓' if spread1 < spread2 else ''}")
+
+    print("=" * 60 + "\n")
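
For orientation, a minimal usage sketch of the indicators module shown in this diff. It is illustrative only: real code would pass MultiObjectiveIndividual instances from morphml.optimizers.multi_objective.nsga2, whose constructor is not part of this diff, so a simple stand-in object exposing the same `objectives` mapping is used here, and the reference point is an assumed nadir for the toy objectives.

from types import SimpleNamespace

import numpy as np

from morphml.optimizers.multi_objective.indicators import (
    QualityIndicators,
    calculate_all_indicators,
)

# Toy two-objective front (both objectives treated as maximized); each entry
# only needs an `objectives` dict, which is all the indicator code reads.
front = [
    SimpleNamespace(objectives={"accuracy": 0.92, "throughput": 150.0}),
    SimpleNamespace(objectives={"accuracy": 0.88, "throughput": 210.0}),
]

reference = np.array([0.0, 0.0])  # assumed nadir point for the toy objectives

hv = QualityIndicators.hypervolume(front, reference_point=reference)
print(f"Hypervolume: {hv:.4f}")

# Aggregate report: hypervolume, spacing, spread, and front size.
print(calculate_all_indicators(front, reference_point=reference))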