desdeo 2.0.0__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126) hide show
  1. desdeo/adm/ADMAfsar.py +551 -0
  2. desdeo/adm/ADMChen.py +414 -0
  3. desdeo/adm/BaseADM.py +119 -0
  4. desdeo/adm/__init__.py +11 -0
  5. desdeo/api/__init__.py +6 -6
  6. desdeo/api/app.py +38 -28
  7. desdeo/api/config.py +65 -44
  8. desdeo/api/config.toml +23 -12
  9. desdeo/api/db.py +10 -8
  10. desdeo/api/db_init.py +12 -6
  11. desdeo/api/models/__init__.py +220 -20
  12. desdeo/api/models/archive.py +16 -27
  13. desdeo/api/models/emo.py +128 -0
  14. desdeo/api/models/enautilus.py +69 -0
  15. desdeo/api/models/gdm/gdm_aggregate.py +139 -0
  16. desdeo/api/models/gdm/gdm_base.py +69 -0
  17. desdeo/api/models/gdm/gdm_score_bands.py +114 -0
  18. desdeo/api/models/gdm/gnimbus.py +138 -0
  19. desdeo/api/models/generic.py +104 -0
  20. desdeo/api/models/generic_states.py +401 -0
  21. desdeo/api/models/nimbus.py +158 -0
  22. desdeo/api/models/preference.py +44 -6
  23. desdeo/api/models/problem.py +274 -64
  24. desdeo/api/models/session.py +4 -1
  25. desdeo/api/models/state.py +419 -52
  26. desdeo/api/models/user.py +7 -6
  27. desdeo/api/models/utopia.py +25 -0
  28. desdeo/api/routers/_EMO.backup +309 -0
  29. desdeo/api/routers/_NIMBUS.py +6 -3
  30. desdeo/api/routers/emo.py +497 -0
  31. desdeo/api/routers/enautilus.py +237 -0
  32. desdeo/api/routers/gdm/gdm_aggregate.py +234 -0
  33. desdeo/api/routers/gdm/gdm_base.py +420 -0
  34. desdeo/api/routers/gdm/gdm_score_bands/gdm_score_bands_manager.py +398 -0
  35. desdeo/api/routers/gdm/gdm_score_bands/gdm_score_bands_routers.py +377 -0
  36. desdeo/api/routers/gdm/gnimbus/gnimbus_manager.py +698 -0
  37. desdeo/api/routers/gdm/gnimbus/gnimbus_routers.py +591 -0
  38. desdeo/api/routers/generic.py +233 -0
  39. desdeo/api/routers/nimbus.py +705 -0
  40. desdeo/api/routers/problem.py +201 -4
  41. desdeo/api/routers/reference_point_method.py +20 -44
  42. desdeo/api/routers/session.py +50 -26
  43. desdeo/api/routers/user_authentication.py +180 -26
  44. desdeo/api/routers/utils.py +187 -0
  45. desdeo/api/routers/utopia.py +230 -0
  46. desdeo/api/schema.py +10 -4
  47. desdeo/api/tests/conftest.py +94 -2
  48. desdeo/api/tests/test_enautilus.py +330 -0
  49. desdeo/api/tests/test_models.py +550 -72
  50. desdeo/api/tests/test_routes.py +902 -43
  51. desdeo/api/utils/_database.py +263 -0
  52. desdeo/api/utils/database.py +28 -266
  53. desdeo/api/utils/emo_database.py +40 -0
  54. desdeo/core.py +7 -0
  55. desdeo/emo/__init__.py +154 -24
  56. desdeo/emo/hooks/archivers.py +18 -2
  57. desdeo/emo/methods/EAs.py +128 -5
  58. desdeo/emo/methods/bases.py +9 -56
  59. desdeo/emo/methods/templates.py +111 -0
  60. desdeo/emo/operators/crossover.py +544 -42
  61. desdeo/emo/operators/evaluator.py +10 -14
  62. desdeo/emo/operators/generator.py +127 -24
  63. desdeo/emo/operators/mutation.py +212 -41
  64. desdeo/emo/operators/scalar_selection.py +202 -0
  65. desdeo/emo/operators/selection.py +956 -214
  66. desdeo/emo/operators/termination.py +124 -16
  67. desdeo/emo/options/__init__.py +108 -0
  68. desdeo/emo/options/algorithms.py +435 -0
  69. desdeo/emo/options/crossover.py +164 -0
  70. desdeo/emo/options/generator.py +131 -0
  71. desdeo/emo/options/mutation.py +260 -0
  72. desdeo/emo/options/repair.py +61 -0
  73. desdeo/emo/options/scalar_selection.py +66 -0
  74. desdeo/emo/options/selection.py +127 -0
  75. desdeo/emo/options/templates.py +383 -0
  76. desdeo/emo/options/termination.py +143 -0
  77. desdeo/gdm/__init__.py +22 -0
  78. desdeo/gdm/gdmtools.py +45 -0
  79. desdeo/gdm/score_bands.py +114 -0
  80. desdeo/gdm/voting_rules.py +50 -0
  81. desdeo/mcdm/__init__.py +23 -1
  82. desdeo/mcdm/enautilus.py +338 -0
  83. desdeo/mcdm/gnimbus.py +484 -0
  84. desdeo/mcdm/nautilus_navigator.py +7 -6
  85. desdeo/mcdm/reference_point_method.py +70 -0
  86. desdeo/problem/__init__.py +5 -1
  87. desdeo/problem/external/__init__.py +18 -0
  88. desdeo/problem/external/core.py +356 -0
  89. desdeo/problem/external/pymoo_provider.py +266 -0
  90. desdeo/problem/external/runtime.py +44 -0
  91. desdeo/problem/infix_parser.py +2 -2
  92. desdeo/problem/pyomo_evaluator.py +25 -6
  93. desdeo/problem/schema.py +69 -48
  94. desdeo/problem/simulator_evaluator.py +65 -15
  95. desdeo/problem/testproblems/__init__.py +26 -11
  96. desdeo/problem/testproblems/benchmarks_server.py +120 -0
  97. desdeo/problem/testproblems/cake_problem.py +185 -0
  98. desdeo/problem/testproblems/dmitry_forest_problem_discrete.py +71 -0
  99. desdeo/problem/testproblems/forest_problem.py +77 -69
  100. desdeo/problem/testproblems/multi_valued_constraints.py +119 -0
  101. desdeo/problem/testproblems/{river_pollution_problem.py → river_pollution_problems.py} +28 -22
  102. desdeo/problem/testproblems/single_objective.py +289 -0
  103. desdeo/problem/testproblems/zdt_problem.py +4 -1
  104. desdeo/tools/__init__.py +39 -21
  105. desdeo/tools/desc_gen.py +22 -0
  106. desdeo/tools/generics.py +22 -2
  107. desdeo/tools/group_scalarization.py +3090 -0
  108. desdeo/tools/indicators_binary.py +107 -1
  109. desdeo/tools/indicators_unary.py +3 -16
  110. desdeo/tools/message.py +33 -2
  111. desdeo/tools/non_dominated_sorting.py +4 -3
  112. desdeo/tools/patterns.py +9 -7
  113. desdeo/tools/pyomo_solver_interfaces.py +48 -35
  114. desdeo/tools/reference_vectors.py +118 -351
  115. desdeo/tools/scalarization.py +340 -1413
  116. desdeo/tools/score_bands.py +491 -328
  117. desdeo/tools/utils.py +117 -49
  118. desdeo/tools/visualizations.py +67 -0
  119. desdeo/utopia_stuff/utopia_problem.py +1 -1
  120. desdeo/utopia_stuff/utopia_problem_old.py +1 -1
  121. {desdeo-2.0.0.dist-info → desdeo-2.1.0.dist-info}/METADATA +46 -28
  122. desdeo-2.1.0.dist-info/RECORD +180 -0
  123. {desdeo-2.0.0.dist-info → desdeo-2.1.0.dist-info}/WHEEL +1 -1
  124. desdeo-2.0.0.dist-info/RECORD +0 -120
  125. /desdeo/api/utils/{logger.py → _logger.py} +0 -0
  126. {desdeo-2.0.0.dist-info → desdeo-2.1.0.dist-info/licenses}/LICENSE +0 -0
@@ -1,20 +1,7 @@
1
- from enum import Enum
2
- from itertools import combinations, product
3
- from typing import Sequence
1
+ from itertools import combinations
4
2
 
5
3
  import numpy as np
6
4
  from scipy.special import comb
7
- from scipy.stats.qmc import LatinHypercube
8
-
9
- from desdeo.problem import Problem
10
- from desdeo.tools.message import (
11
- DictMessage,
12
- Message,
13
- PolarsDataFrameMessage,
14
- ReferenceVectorMessageTopics,
15
- TerminatorMessageTopics,
16
- )
17
- from desdeo.tools.patterns import Publisher, Subscriber
18
5
 
19
6
 
20
7
  def normalize(vectors):
@@ -119,344 +106,124 @@ def rotate_toward(initial_vector, final_vector, other_vectors, degrees: float =
119
106
  return (rotate(initial_vector, rotated_vector, other_vectors), False)
120
107
 
121
108
 
122
- class VectorCreationOptions(Enum):
123
- """Enum class for reference vector creation methods."""
124
-
125
- SIMPLEX = "Uniform"
126
- """Uniformly distributed reference vectors created using simplex lattice design.
127
- This method is generates distributions with specific numbers of reference vectors.
128
- Check: https://www.itl.nist.gov/div898/handbook/pri/section5/pri542.htm for more information."""
129
- S_ENERGY = "s-energy"
130
- """Reference vectors created using Riesz s-energy criterion. This method is used to distribute
131
- an arbitrary number of reference vectors in the objective space while minimizing the s-energy.
132
- Currently not implemented."""
133
-
134
-
135
- class VectorTypeOptions(Enum):
136
- """Enum class for reference vector normalization methods."""
137
-
138
- SPHERICAL = "Spherical"
139
- """Normalize the reference vectors to a hypersphere, i.e. the second norm is equal to 1."""
140
- PLANAR = "Planar"
141
- """Normalize the reference vectors to a plane, i.e. the first norm is equal to 1."""
142
-
143
-
144
- class ReferenceVectors(Subscriber):
145
- """Class object for reference vectors."""
146
-
147
- def __init__(
148
- self,
149
- problem: Problem,
150
- publisher: Publisher,
151
- adaptation_frequency: int = 0,
152
- verbosity: int = 2,
153
- lattice_resolution: int | None = None,
154
- number_of_vectors: int | None = None,
155
- creation_type: VectorCreationOptions = VectorCreationOptions.SIMPLEX,
156
- ):
157
- """Create a Reference vectors object.
158
-
159
- Parameters
160
- ----------
161
- problem : Problem
162
- Problem object.
163
- publisher : Publisher
164
- Publisher object.
165
- adaptation_frequency : int, optional
166
- The number of generations in between reference vector adaptation. By default 0, i.e. no adaptation.
167
- verbosity : int, optional
168
- Verbosity level. By default 2.
169
- lattice_resolution : int
170
- Number of divisions along an axis when creating the simplex lattice. If not specified, the lattice resolution
171
- is calculated based on the desired number of vectors.
172
- number_of_vectors : int
173
- Number of reference vectors to be created. If not specified, the number of vectors is calculated based on
174
- the lattice resolution. By default None.
175
- creation_type : VectorCreationOptions
176
- Method for creating reference vectors. By default VectorCreationOptions.SIMPLEX. Currently only
177
- VectorCreationOptions.SIMPLEX is implemented. Future versions will include VectorCreationOptions.S_ENERGY.
178
- """
179
- interested_topics = [TerminatorMessageTopics.GENERATION]
180
- provided_topics: list[ReferenceVectorMessageTopics] = []
181
- match verbosity:
182
- case 0:
183
- provided_topics: list[ReferenceVectorMessageTopics] = []
184
- case 1:
185
- provided_topics = [ReferenceVectorMessageTopics.STATE]
186
- case 2:
187
- provided_topics = [
188
- ReferenceVectorMessageTopics.STATE,
189
- ReferenceVectorMessageTopics.REFERENCE_VECTORS_SPHERICAL,
190
- ReferenceVectorMessageTopics.REFERENCE_VECTORS_PLANAR,
191
- ]
192
-
193
- super().__init__(
194
- publisher, interested_topics=interested_topics, provided_topics=provided_topics, verbosity=verbosity
195
- )
196
- self.number_of_objectives = number_of_objectives
197
- self.lattice_resolution = lattice_resolution
198
- self.number_of_vectors = number_of_vectors
199
- self.adaptation_frequency = adaptation_frequency
200
- self.generation_at_last_adaptation = 0
201
-
202
- if creation_type == VectorCreationOptions.S_ENERGY:
203
- raise NotImplementedError("Riesz s-energy criterion not implemented.")
204
- if number_of_vectors is None:
205
- raise ValueError("Number of vectors must be specified for Riesz s-energy criterion.")
206
- if not (lattice_resolution or number_of_vectors):
207
- raise ValueError("Either lattice_resolution or number_of_vectors must be specified.")
208
-
209
- if number_of_vectors is not None:
210
- temp_lattice_resolution = 0
211
- while True:
212
- temp_lattice_resolution += 1
213
- temp_number_of_vectors = comb(
214
- temp_lattice_resolution + self.number_of_objectives - 1,
215
- self.number_of_objectives - 1,
216
- exact=True,
217
- )
218
- if temp_number_of_vectors > number_of_vectors:
219
- break
220
- self.lattice_resolution = temp_lattice_resolution - 1
221
-
222
- self.creation_type = creation_type
223
- self.values: np.ndarray = None
224
- self.values_planar: np.ndarray = None
225
- if self.creation_type == VectorCreationOptions.SIMPLEX:
226
- self._create_simplex()
227
- self.initial_values = np.copy(self.values)
228
- self.initial_values_planar = np.copy(self.values_planar)
229
- self.neighbouring_angles()
230
-
231
- def _create_simplex(self):
232
- """Create the reference vectors using simplex lattice design."""
233
- if self.lattice_resolution is None:
234
- raise ValueError("Lattice resolution must be specified.")
235
-
236
- number_of_vectors: int = comb(
237
- self.lattice_resolution + self.number_of_objectives - 1,
238
- self.number_of_objectives - 1,
109
def approx_lattice_resolution(number_of_vectors: int, num_dims: int) -> int:
    """
    Approximate the lattice resolution based on the number of vectors and dimensions.

    Incrementally tests lattice resolutions until the simplex lattice size
    C(H + num_dims - 1, num_dims - 1) first exceeds the desired vector count,
    then returns the previous resolution.

    Args:
        number_of_vectors (int): Desired number of reference vectors.
        num_dims (int): Number of objectives (dimensions). Must be at least 2.

    Returns:
        int: The largest lattice resolution whose lattice has at most the
            desired number of vectors (may be 0 if even resolution 1 exceeds it).

    Raises:
        ValueError: If num_dims < 2. With one dimension the lattice size is
            always 1, so the search below would never terminate.
    """
    if num_dims < 2:
        raise ValueError("num_dims must be at least 2.")
    temp_lattice_resolution = 0
    while True:
        temp_lattice_resolution += 1
        temp_number_of_vectors = comb(
            temp_lattice_resolution + num_dims - 1,
            num_dims - 1,
            exact=True,
        )
        if temp_number_of_vectors > number_of_vectors:
            break
    return temp_lattice_resolution - 1
131
+
132
+
133
def create_simplex(
    number_of_objectives: int,
    lattice_resolution: int | None = None,
    number_of_vectors: int | None = None,
) -> np.ndarray:
    """
    Create reference vectors using the simplex lattice design.

    Args:
        number_of_objectives (int): Number of objectives (dimensions).
        lattice_resolution (int | None): Lattice resolution to use. If None,
            it is determined from number_of_vectors.
        number_of_vectors (int | None): Desired number of reference vectors.
            Used only when lattice_resolution is None.

    Returns:
        np.ndarray: Array of normalized reference vectors.

    Raises:
        ValueError: If both lattice_resolution and number_of_vectors are None.
    """
    if lattice_resolution is None and number_of_vectors is None:
        raise ValueError(
            "Either lattice resolution or number of vectors must be specified."
        )

    if lattice_resolution is None:
        # approx_lattice_resolution returns 0 when fewer vectors than
        # dimensions are requested; clamp to 1 so the division by
        # lattice_resolution below cannot divide by zero.
        lattice_resolution = max(
            approx_lattice_resolution(number_of_vectors, number_of_objectives), 1
        )

    number_of_vectors = comb(
        lattice_resolution + number_of_objectives - 1,
        number_of_objectives - 1,
        exact=True,
    )

    # Enumerate all compositions of lattice_resolution into
    # number_of_objectives parts (stars-and-bars construction).
    temp1 = range(1, number_of_objectives + lattice_resolution)
    temp1 = np.array(list(combinations(temp1, number_of_objectives - 1)))
    temp2 = np.array([range(number_of_objectives - 1)] * number_of_vectors)
    temp = temp1 - temp2 - 1
    weight = np.zeros((number_of_vectors, number_of_objectives), dtype=int)
    weight[:, 0] = temp[:, 0]
    for i in range(1, number_of_objectives - 1):
        weight[:, i] = temp[:, i] - temp[:, i - 1]
    weight[:, -1] = lattice_resolution - temp[:, -1]
    values = weight / lattice_resolution
    return normalize(values)
179
+
180
+
181
def normalize(values: np.ndarray) -> np.ndarray:
    """
    Project a set of vectors onto the unit hypersphere.

    Args:
        values (np.ndarray): Two-dimensional array; each row is one vector.

    Returns:
        np.ndarray: Array of the same shape with each row scaled to unit 2-norm.
    """
    lengths = np.linalg.norm(values, axis=1, keepdims=True)
    # Guard all-zero rows against division by zero.
    lengths = np.where(lengths == 0, np.finfo(float).eps, lengths)
    return values / lengths
195
+
196
+
197
def neighbouring_angles(values: np.ndarray) -> np.ndarray:
    """
    Calculate the angles to the nearest neighbor for each reference vector.

    Args:
        values (np.ndarray): Array of normalized reference vectors, one per row.

    Returns:
        np.ndarray: Array of angles (in radians) to the nearest neighbor for each vector.
    """
    cosvv = np.dot(values, values.transpose())
    cosvv.sort(axis=1)
    cosvv = np.flip(cosvv, 1)
    # Clip to the arccos domain on BOTH sides: floating point error can push
    # cosines slightly outside [-1, 1]. The previous one-sided clamp
    # (cosvv[cosvv > 1] = 1) still let values below -1 produce NaN angles.
    cosvv = np.clip(cosvv, -1.0, 1.0)
    # For unit vectors, column 0 holds each vector's cosine with itself;
    # column 1 is the largest cosine with a different vector, i.e. the
    # nearest neighbour.
    acosvv = np.arccos(cosvv[:, 1])
    return acosvv
213
+
214
+
215
def add_edge_vectors(values: np.ndarray) -> np.ndarray:
    """
    Append the axis-aligned (edge) unit vectors to a set of reference vectors.

    Guarantees that every coordinate-axis direction is represented in the set.

    Args:
        values (np.ndarray): Array of reference vectors, one per row.

    Returns:
        np.ndarray: The input vectors plus one unit vector per axis, normalized.
    """
    num_dims = values.shape[1]
    augmented = np.concatenate((values, np.eye(num_dims)), axis=0)
    return normalize(augmented)