desdeo 1.2__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. desdeo/__init__.py +8 -8
  2. desdeo/api/README.md +73 -0
  3. desdeo/api/__init__.py +15 -0
  4. desdeo/api/app.py +40 -0
  5. desdeo/api/config.py +69 -0
  6. desdeo/api/config.toml +53 -0
  7. desdeo/api/db.py +25 -0
  8. desdeo/api/db_init.py +79 -0
  9. desdeo/api/db_models.py +164 -0
  10. desdeo/api/malaga_db_init.py +27 -0
  11. desdeo/api/models/__init__.py +66 -0
  12. desdeo/api/models/archive.py +34 -0
  13. desdeo/api/models/preference.py +90 -0
  14. desdeo/api/models/problem.py +507 -0
  15. desdeo/api/models/reference_point_method.py +18 -0
  16. desdeo/api/models/session.py +46 -0
  17. desdeo/api/models/state.py +96 -0
  18. desdeo/api/models/user.py +51 -0
  19. desdeo/api/routers/_NAUTILUS.py +245 -0
  20. desdeo/api/routers/_NAUTILUS_navigator.py +233 -0
  21. desdeo/api/routers/_NIMBUS.py +762 -0
  22. desdeo/api/routers/__init__.py +5 -0
  23. desdeo/api/routers/problem.py +110 -0
  24. desdeo/api/routers/reference_point_method.py +117 -0
  25. desdeo/api/routers/session.py +76 -0
  26. desdeo/api/routers/test.py +16 -0
  27. desdeo/api/routers/user_authentication.py +366 -0
  28. desdeo/api/schema.py +94 -0
  29. desdeo/api/tests/__init__.py +0 -0
  30. desdeo/api/tests/conftest.py +59 -0
  31. desdeo/api/tests/test_models.py +701 -0
  32. desdeo/api/tests/test_routes.py +216 -0
  33. desdeo/api/utils/database.py +274 -0
  34. desdeo/api/utils/logger.py +29 -0
  35. desdeo/core.py +27 -0
  36. desdeo/emo/__init__.py +29 -0
  37. desdeo/emo/hooks/archivers.py +172 -0
  38. desdeo/emo/methods/EAs.py +418 -0
  39. desdeo/emo/methods/__init__.py +0 -0
  40. desdeo/emo/methods/bases.py +59 -0
  41. desdeo/emo/operators/__init__.py +1 -0
  42. desdeo/emo/operators/crossover.py +780 -0
  43. desdeo/emo/operators/evaluator.py +118 -0
  44. desdeo/emo/operators/generator.py +356 -0
  45. desdeo/emo/operators/mutation.py +1053 -0
  46. desdeo/emo/operators/selection.py +1036 -0
  47. desdeo/emo/operators/termination.py +178 -0
  48. desdeo/explanations/__init__.py +6 -0
  49. desdeo/explanations/explainer.py +100 -0
  50. desdeo/explanations/utils.py +90 -0
  51. desdeo/mcdm/__init__.py +19 -0
  52. desdeo/mcdm/nautili.py +345 -0
  53. desdeo/mcdm/nautilus.py +477 -0
  54. desdeo/mcdm/nautilus_navigator.py +655 -0
  55. desdeo/mcdm/nimbus.py +417 -0
  56. desdeo/mcdm/pareto_navigator.py +269 -0
  57. desdeo/mcdm/reference_point_method.py +116 -0
  58. desdeo/problem/__init__.py +79 -0
  59. desdeo/problem/evaluator.py +561 -0
  60. desdeo/problem/gurobipy_evaluator.py +562 -0
  61. desdeo/problem/infix_parser.py +341 -0
  62. desdeo/problem/json_parser.py +944 -0
  63. desdeo/problem/pyomo_evaluator.py +468 -0
  64. desdeo/problem/schema.py +1808 -0
  65. desdeo/problem/simulator_evaluator.py +298 -0
  66. desdeo/problem/sympy_evaluator.py +244 -0
  67. desdeo/problem/testproblems/__init__.py +73 -0
  68. desdeo/problem/testproblems/binh_and_korn_problem.py +88 -0
  69. desdeo/problem/testproblems/dtlz2_problem.py +102 -0
  70. desdeo/problem/testproblems/forest_problem.py +275 -0
  71. desdeo/problem/testproblems/knapsack_problem.py +163 -0
  72. desdeo/problem/testproblems/mcwb_problem.py +831 -0
  73. desdeo/problem/testproblems/mixed_variable_dimenrions_problem.py +83 -0
  74. desdeo/problem/testproblems/momip_problem.py +172 -0
  75. desdeo/problem/testproblems/nimbus_problem.py +143 -0
  76. desdeo/problem/testproblems/pareto_navigator_problem.py +89 -0
  77. desdeo/problem/testproblems/re_problem.py +492 -0
  78. desdeo/problem/testproblems/river_pollution_problem.py +434 -0
  79. desdeo/problem/testproblems/rocket_injector_design_problem.py +140 -0
  80. desdeo/problem/testproblems/simple_problem.py +351 -0
  81. desdeo/problem/testproblems/simulator_problem.py +92 -0
  82. desdeo/problem/testproblems/spanish_sustainability_problem.py +945 -0
  83. desdeo/problem/testproblems/zdt_problem.py +271 -0
  84. desdeo/problem/utils.py +245 -0
  85. desdeo/tools/GenerateReferencePoints.py +181 -0
  86. desdeo/tools/__init__.py +102 -0
  87. desdeo/tools/generics.py +145 -0
  88. desdeo/tools/gurobipy_solver_interfaces.py +258 -0
  89. desdeo/tools/indicators_binary.py +11 -0
  90. desdeo/tools/indicators_unary.py +375 -0
  91. desdeo/tools/interaction_schema.py +38 -0
  92. desdeo/tools/intersection.py +54 -0
  93. desdeo/tools/iterative_pareto_representer.py +99 -0
  94. desdeo/tools/message.py +234 -0
  95. desdeo/tools/ng_solver_interfaces.py +199 -0
  96. desdeo/tools/non_dominated_sorting.py +133 -0
  97. desdeo/tools/patterns.py +281 -0
  98. desdeo/tools/proximal_solver.py +99 -0
  99. desdeo/tools/pyomo_solver_interfaces.py +464 -0
  100. desdeo/tools/reference_vectors.py +462 -0
  101. desdeo/tools/scalarization.py +3138 -0
  102. desdeo/tools/scipy_solver_interfaces.py +454 -0
  103. desdeo/tools/score_bands.py +464 -0
  104. desdeo/tools/utils.py +320 -0
  105. desdeo/utopia_stuff/__init__.py +0 -0
  106. desdeo/utopia_stuff/data/1.json +15 -0
  107. desdeo/utopia_stuff/data/2.json +13 -0
  108. desdeo/utopia_stuff/data/3.json +15 -0
  109. desdeo/utopia_stuff/data/4.json +17 -0
  110. desdeo/utopia_stuff/data/5.json +15 -0
  111. desdeo/utopia_stuff/from_json.py +40 -0
  112. desdeo/utopia_stuff/reinit_user.py +38 -0
  113. desdeo/utopia_stuff/utopia_db_init.py +212 -0
  114. desdeo/utopia_stuff/utopia_problem.py +403 -0
  115. desdeo/utopia_stuff/utopia_problem_old.py +415 -0
  116. desdeo/utopia_stuff/utopia_reference_solutions.py +79 -0
  117. desdeo-2.0.0.dist-info/LICENSE +21 -0
  118. desdeo-2.0.0.dist-info/METADATA +168 -0
  119. desdeo-2.0.0.dist-info/RECORD +120 -0
  120. {desdeo-1.2.dist-info → desdeo-2.0.0.dist-info}/WHEEL +1 -1
  121. desdeo-1.2.dist-info/METADATA +0 -16
  122. desdeo-1.2.dist-info/RECORD +0 -4
@@ -0,0 +1,462 @@
1
+ from enum import Enum
2
+ from itertools import combinations, product
3
+ from typing import Sequence
4
+
5
+ import numpy as np
6
+ from scipy.special import comb
7
+ from scipy.stats.qmc import LatinHypercube
8
+
9
+ from desdeo.problem import Problem
10
+ from desdeo.tools.message import (
11
+ DictMessage,
12
+ Message,
13
+ PolarsDataFrameMessage,
14
+ ReferenceVectorMessageTopics,
15
+ TerminatorMessageTopics,
16
+ )
17
+ from desdeo.tools.patterns import Publisher, Subscriber
18
+
19
+
20
def normalize(vectors):
    """Scale vectors to unit Euclidean length.

    Accepts either a single vector (1-D input) or a stack of row
    vectors (2-D input). Zero-length vectors are not supported.

    Parameters
    ----------
    vectors : np.ndarray
        Set of vectors of any length, except zero.
    """
    arr = np.asarray(vectors)
    if arr.ndim == 1:
        # Single vector: divide by its scalar norm.
        return vectors / np.linalg.norm(vectors)
    # Stack of row vectors: divide each row by its own norm.
    lengths = np.linalg.norm(vectors, axis=1)
    return vectors / lengths[:, np.newaxis]
35
+
36
+
37
def shear(vectors, degrees: float = 5):
    """Shear a set of vectors lying on the plane z=0 towards the z-axis.

    The resulting unit vectors are 'degrees' angle away from the z axis.

    Parameters
    ----------
    vectors : numpy.ndarray
        The final element of each vector should be zero.
    degrees : float, optional
        The angle that the resultant vectors make with the z axis.
        Unit is degrees (the original docstring said radians, but the
        code converts from degrees). The default is 5.
    """
    # Work on a float copy so the caller's array is not mutated in place
    # (the original implementation modified `vectors` as a side effect).
    vectors = np.array(vectors, dtype=float)
    angle = degrees * np.pi / 180
    # Raising the z-component to norm/tan(angle) makes each vector's angle
    # with the z-axis exactly `angle`.
    m = 1 / np.tan(angle)
    norm = np.linalg.norm(vectors, axis=1)
    vectors[:, -1] += norm * m
    return normalize(vectors)
55
+
56
+
57
def rotate(initial_vector, rotated_vector, other_vectors):
    """Rotate ``other_vectors`` by the rotation taking ``initial_vector`` to ``rotated_vector``.

    The rotation matrix is built as the composition of two Householder
    reflections: one through the bisector of the initial and target
    directions, and one through the target direction itself.
    """
    source = normalize(initial_vector)
    target = normalize(np.asarray(rotated_vector))
    # Reflecting through the bisector and then through the target direction
    # composes into a proper rotation from `source` to `target`.
    bisector = normalize(source + target)
    first_mirror = householder(source - bisector)
    second_mirror = householder(bisector - target)
    rotation = np.matmul(second_mirror, first_mirror)
    # Apply the rotation to every row vector at once.
    return np.matmul(other_vectors, np.transpose(rotation))
73
+
74
+
75
def householder(vector):
    """Return the reflection matrix for ``vector`` via Householder transformation.

    Computes H = I - 2 * v v^T / (v^T v), which reflects points through
    the hyperplane orthogonal to ``vector``. The input must be nonzero.
    """
    row = vector[np.newaxis]
    scale = np.matmul(row, row.T)   # v^T v, a 1x1 array
    outer = np.matmul(row.T, row)   # v v^T, an n x n array
    return np.eye(len(vector)) - (2 * outer / scale)
83
+
84
+
85
def rotate_toward(initial_vector, final_vector, other_vectors, degrees: float = 5):
    """Rotate other_vectors (with the centre at initial_vector) towards final_vector by an angle degrees.

    Parameters
    ----------
    initial_vector : np.ndarray
        Centre of the vectors to be rotated.
    final_vector : np.ndarray
        The final position of the center of other_vectors.
    other_vectors : np.ndarray
        The array of vectors to be rotated.
    degrees : float, optional
        The amount of rotation (the default is 5).

    Returns:
    -------
    rotated_vectors : np.ndarray
        The rotated vectors.
    reached : bool
        True if final_vector has been reached.
    """
    final_vector = normalize(final_vector)
    initial_vector = normalize(initial_vector)
    step = degrees * np.pi / 180
    cos_gap = np.dot(initial_vector, final_vector)
    gap = np.arccos(cos_gap)
    if gap < step:
        # Less than one step away: jump straight to the final position.
        return (rotate(initial_vector, final_vector, other_vectors), True)
    # Find weights w so that w0*initial + w1*final points at an angle
    # `step` from initial_vector (and gap-step from final_vector).
    coeff_matrix = np.asarray([[cos_gap, 1], [1, cos_gap]])
    rhs = np.asarray([np.cos(gap - step), np.cos(step)])
    weights = np.linalg.solve(coeff_matrix, rhs)
    intermediate = weights[0] * initial_vector + weights[1] * final_vector
    return (rotate(initial_vector, intermediate, other_vectors), False)
120
+
121
+
122
class VectorCreationOptions(Enum):
    """Enum class for reference vector creation methods."""

    SIMPLEX = "Uniform"
    """Uniformly distributed reference vectors created using simplex lattice design.
    This method generates distributions with specific numbers of reference vectors.
    Check: https://www.itl.nist.gov/div898/handbook/pri/section5/pri542.htm for more information."""
    S_ENERGY = "s-energy"
    """Reference vectors created using the Riesz s-energy criterion. This method is used to distribute
    an arbitrary number of reference vectors in the objective space while minimizing the s-energy.
    Currently not implemented."""
133
+
134
+
135
class VectorTypeOptions(Enum):
    """Enum class for reference vector normalization methods."""

    SPHERICAL = "Spherical"
    """Normalize the reference vectors to a hypersphere, i.e. the second (Euclidean) norm is equal to 1."""
    PLANAR = "Planar"
    """Normalize the reference vectors to a plane, i.e. the first (L1) norm is equal to 1."""
142
+
143
+
144
+ class ReferenceVectors(Subscriber):
145
+ """Class object for reference vectors."""
146
+
147
+ def __init__(
148
+ self,
149
+ problem: Problem,
150
+ publisher: Publisher,
151
+ adaptation_frequency: int = 0,
152
+ verbosity: int = 2,
153
+ lattice_resolution: int | None = None,
154
+ number_of_vectors: int | None = None,
155
+ creation_type: VectorCreationOptions = VectorCreationOptions.SIMPLEX,
156
+ ):
157
+ """Create a Reference vectors object.
158
+
159
+ Parameters
160
+ ----------
161
+ problem : Problem
162
+ Problem object.
163
+ publisher : Publisher
164
+ Publisher object.
165
+ adaptation_frequency : int, optional
166
+ The number of generations in between reference vector adaptation. By default 0, i.e. no adaptation.
167
+ verbosity : int, optional
168
+ Verbosity level. By default 2.
169
+ lattice_resolution : int
170
+ Number of divisions along an axis when creating the simplex lattice. If not specified, the lattice resolution
171
+ is calculated based on the desired number of vectors.
172
+ number_of_vectors : int
173
+ Number of reference vectors to be created. If not specified, the number of vectors is calculated based on
174
+ the lattice resolution. By default None.
175
+ creation_type : VectorCreationOptions
176
+ Method for creating reference vectors. By default VectorCreationOptions.SIMPLEX. Currently only
177
+ VectorCreationOptions.SIMPLEX is implemented. Future versions will include VectorCreationOptions.S_ENERGY.
178
+ """
179
+ interested_topics = [TerminatorMessageTopics.GENERATION]
180
+ provided_topics: list[ReferenceVectorMessageTopics] = []
181
+ match verbosity:
182
+ case 0:
183
+ provided_topics: list[ReferenceVectorMessageTopics] = []
184
+ case 1:
185
+ provided_topics = [ReferenceVectorMessageTopics.STATE]
186
+ case 2:
187
+ provided_topics = [
188
+ ReferenceVectorMessageTopics.STATE,
189
+ ReferenceVectorMessageTopics.REFERENCE_VECTORS_SPHERICAL,
190
+ ReferenceVectorMessageTopics.REFERENCE_VECTORS_PLANAR,
191
+ ]
192
+
193
+ super().__init__(
194
+ publisher, interested_topics=interested_topics, provided_topics=provided_topics, verbosity=verbosity
195
+ )
196
+ self.number_of_objectives = number_of_objectives
197
+ self.lattice_resolution = lattice_resolution
198
+ self.number_of_vectors = number_of_vectors
199
+ self.adaptation_frequency = adaptation_frequency
200
+ self.generation_at_last_adaptation = 0
201
+
202
+ if creation_type == VectorCreationOptions.S_ENERGY:
203
+ raise NotImplementedError("Riesz s-energy criterion not implemented.")
204
+ if number_of_vectors is None:
205
+ raise ValueError("Number of vectors must be specified for Riesz s-energy criterion.")
206
+ if not (lattice_resolution or number_of_vectors):
207
+ raise ValueError("Either lattice_resolution or number_of_vectors must be specified.")
208
+
209
+ if number_of_vectors is not None:
210
+ temp_lattice_resolution = 0
211
+ while True:
212
+ temp_lattice_resolution += 1
213
+ temp_number_of_vectors = comb(
214
+ temp_lattice_resolution + self.number_of_objectives - 1,
215
+ self.number_of_objectives - 1,
216
+ exact=True,
217
+ )
218
+ if temp_number_of_vectors > number_of_vectors:
219
+ break
220
+ self.lattice_resolution = temp_lattice_resolution - 1
221
+
222
+ self.creation_type = creation_type
223
+ self.values: np.ndarray = None
224
+ self.values_planar: np.ndarray = None
225
+ if self.creation_type == VectorCreationOptions.SIMPLEX:
226
+ self._create_simplex()
227
+ self.initial_values = np.copy(self.values)
228
+ self.initial_values_planar = np.copy(self.values_planar)
229
+ self.neighbouring_angles()
230
+
231
+ def _create_simplex(self):
232
+ """Create the reference vectors using simplex lattice design."""
233
+ if self.lattice_resolution is None:
234
+ raise ValueError("Lattice resolution must be specified.")
235
+
236
+ number_of_vectors: int = comb(
237
+ self.lattice_resolution + self.number_of_objectives - 1,
238
+ self.number_of_objectives - 1,
239
+ exact=True,
240
+ )
241
+ self.number_of_vectors = number_of_vectors
242
+ temp1 = range(1, self.number_of_objectives + self.lattice_resolution)
243
+ temp1 = np.array(list(combinations(temp1, self.number_of_objectives - 1)))
244
+ temp2 = np.array([range(self.number_of_objectives - 1)] * self.number_of_vectors)
245
+ temp = temp1 - temp2 - 1
246
+ weight = np.zeros((self.number_of_vectors, self.number_of_objectives), dtype=int)
247
+ weight[:, 0] = temp[:, 0]
248
+ for i in range(1, self.number_of_objectives - 1):
249
+ weight[:, i] = temp[:, i] - temp[:, i - 1]
250
+ weight[:, -1] = self.lattice_resolution - temp[:, -1]
251
+ self.values = weight / self.lattice_resolution
252
+ self.values_planar = np.copy(self.values)
253
+ self.normalize()
254
+
255
+ def normalize(self):
256
+ """Normalize the reference vectors to a unit hypersphere."""
257
+ self.number_of_vectors = self.values.shape[0]
258
+ norm_2 = np.linalg.norm(self.values, axis=1).reshape(-1, 1)
259
+ norm_1 = np.sum(self.values_planar, axis=1).reshape(-1, 1)
260
+ norm_2[norm_2 == 0] = np.finfo(float).eps
261
+ self.values = np.divide(self.values, norm_2)
262
+ self.values_planar = np.divide(self.values_planar, norm_1)
263
+
264
+ def neighbouring_angles(self) -> np.ndarray:
265
+ """Calculate neighbouring angles for normalization."""
266
+ cosvv = np.dot(self.values, self.values.transpose())
267
+ cosvv.sort(axis=1)
268
+ cosvv = np.flip(cosvv, 1)
269
+ cosvv[cosvv > 1] = 1
270
+ acosvv = np.arccos(cosvv[:, 1])
271
+ self.neighbouring_angles_current = acosvv
272
+ return acosvv
273
+
274
+ def adapt(self, fitness: np.ndarray):
275
+ """Adapt reference vectors. Then normalize.
276
+
277
+ Parameters
278
+ ----------
279
+ fitness : np.ndarray
280
+ """
281
+ max_val = np.amax(fitness, axis=0)
282
+ min_val = np.amin(fitness, axis=0)
283
+ self.values = self.initial_values * (max_val - min_val)
284
+
285
+ self.normalize()
286
+
287
+ def interactive_adapt_1(self, z: np.ndarray, translation_param: float = 0.2) -> None:
288
+ """Adapt reference vectors using the information about prefererred solution(s) selected by the Decision maker.
289
+
290
+ Args:
291
+ z (np.ndarray): Preferred solution(s).
292
+ translation_param (float): Parameter determining how close the reference vectors are to the central vector
293
+ **v** defined by using the selected solution(s) z.
294
+ """
295
+ if z.shape[0] == 1:
296
+ # single preferred solution
297
+ # calculate new reference vectors
298
+ self.values = translation_param * self.initial_values + ((1 - translation_param) * z)
299
+ self.values_planar = translation_param * self.initial_values_planar + ((1 - translation_param) * z)
300
+
301
+ else:
302
+ # multiple preferred solutions
303
+ # calculate new reference vectors for each preferred solution
304
+ values = [translation_param * self.initial_values + ((1 - translation_param) * z_i) for z_i in z]
305
+ values_planar = [
306
+ translation_param * self.initial_values_planar + ((1 - translation_param) * z_i) for z_i in z
307
+ ]
308
+
309
+ # combine arrays of reference vectors into a single array and update reference vectors
310
+ self.values = np.concatenate(values)
311
+ self.values_planar = np.concatenate(values_planar)
312
+
313
+ self.normalize()
314
+
315
+ def interactive_adapt_2(self, z: np.ndarray, predefined_distance: float = 0.2) -> None:
316
+ """Adapt reference vectors by using the information about non-preferred solution(s) selected by the Decision maker.
317
+
318
+ After the Decision maker has specified non-preferred solution(s), Euclidian distance between normalized solution
319
+ vector(s) and each of the reference vectors are calculated. Those reference vectors that are **closer** than a
320
+ predefined distance are either **removed** or **re-positioned** somewhere else.
321
+
322
+ Note:
323
+ At the moment, only the **removal** of reference vectors is supported. Repositioning of the reference
324
+ vectors is **not** supported.
325
+
326
+ Note:
327
+ In case the Decision maker specifies multiple non-preferred solutions, the reference vector(s) for which the
328
+ distance to **any** of the non-preferred solutions is less than predefined distance are removed.
329
+
330
+ Note:
331
+ Future developer should implement a way for a user to say: "Remove some percentage of
332
+ objecive space/reference vectors" rather than giving a predefined distance value.
333
+
334
+ Args:
335
+ z (np.ndarray): Non-preferred solution(s).
336
+ predefined_distance (float): The reference vectors that are closer than this distance are either removed or
337
+ re-positioned somewhere else.
338
+ Default value: 0.2
339
+ """
340
+ # calculate L1 norm of non-preferred solution(s)
341
+ z = np.atleast_2d(z)
342
+ norm = np.linalg.norm(z, ord=1, axis=1).reshape(np.shape(z)[0], 1)
343
+
344
+ # non-preferred solutions normalized
345
+ v_c = np.divide(z, norm)
346
+
347
+ # distances from non-preferred solution(s) to each reference vector
348
+ distances = np.array(
349
+ [
350
+ list(
351
+ map(
352
+ lambda solution: np.linalg.norm(solution - value, ord=2),
353
+ v_c,
354
+ )
355
+ )
356
+ for value in self.values_planar
357
+ ]
358
+ )
359
+
360
+ # find out reference vectors that are not closer than threshold value to any non-preferred solution
361
+ mask = [all(d >= predefined_distance) for d in distances]
362
+
363
+ # set those reference vectors that met previous condition as new reference vectors, drop others
364
+ self.values = self.values[mask]
365
+ self.values_planar = self.values_planar[mask]
366
+
367
+ def iteractive_adapt_3(self, ref_point, translation_param=0.2):
368
+ """Adapt reference vectors linearly towards a reference point. Then normalize.
369
+
370
+ The details can be found in the following paper: Hakanen, Jussi &
371
+ Chugh, Tinkle & Sindhya, Karthik & Jin, Yaochu & Miettinen, Kaisa.
372
+ (2016). Connections of Reference Vectors and Different Types of
373
+ Preference Information in Interactive Multiobjective Evolutionary
374
+ Algorithms.
375
+
376
+ Parameters
377
+ ----------
378
+ ref_point :
379
+
380
+ translation_param :
381
+ (Default value = 0.2)
382
+
383
+ """
384
+ self.values = self.initial_values * translation_param + ((1 - translation_param) * ref_point)
385
+ self.values_planar = self.initial_values_planar * translation_param + ((1 - translation_param) * ref_point)
386
+ self.normalize()
387
+
388
+ def interactive_adapt_4(self, preferred_ranges: np.ndarray) -> None:
389
+ """Adapt reference vectors by using the information about the Decision maker's preferred range for each of the objective.
390
+
391
+ Using these ranges, Latin hypercube sampling is applied to generate m number of samples between
392
+ within these ranges, where m is the number of reference vectors. Normalized vectors constructed of these samples
393
+ are then set as new reference vectors.
394
+
395
+ Args:
396
+ preferred_ranges (np.ndarray): Preferred lower and upper bound for each of the objective function values.
397
+ """
398
+ # bounds
399
+ lower_limits = np.array([ranges[0] for ranges in preferred_ranges])
400
+ upper_limits = np.array([ranges[1] for ranges in preferred_ranges])
401
+
402
+ # generate samples using Latin hypercube sampling
403
+ lhs = LatinHypercube(d=self.number_of_objectives)
404
+ w = lhs.random(n=self.number_of_vectors)
405
+
406
+ # scale between bounds
407
+ w = w * (upper_limits - lower_limits) + lower_limits
408
+
409
+ # set new reference vectors and normalize them
410
+ self.values = w
411
+ self.values_planar = w
412
+ self.normalize()
413
+
414
+ def add_edge_vectors(self):
415
+ """Add edge vectors to the list of reference vectors.
416
+
417
+ Used to cover the entire orthant when preference information is
418
+ provided.
419
+
420
+ """
421
+ edge_vectors = np.eye(self.values.shape[1])
422
+ self.values = np.vstack([self.values, edge_vectors])
423
+ self.values_planar = np.vstack([self.values_planar, edge_vectors])
424
+ self.number_of_vectors = self.values.shape[0]
425
+ self.normalize()
426
+
427
+ def state(self) -> Sequence[Message]:
428
+ """Return the current state of the reference vectors."""
429
+ if self.verbosity == 0:
430
+ return []
431
+ if self.verbosity == 1:
432
+ return [
433
+ DictMessage(
434
+ topic=ReferenceVectorMessageTopics.STATE,
435
+ value={},
436
+ source=self.__class__.__name__,
437
+ )
438
+ ]
439
+ if self.verbosity == 2:
440
+ return [
441
+ DictMessage(
442
+ topic=ReferenceVectorMessageTopics.STATE,
443
+ value={
444
+ "number_of_vectors": self.number_of_vectors,
445
+ "number_of_objectives": self.number_of_objectives,
446
+ "lattice_resolution": self.lattice_resolution,
447
+ "creation_type": self.creation_type,
448
+ },
449
+ source=self.__class__.__name__,
450
+ ),
451
+ PolarsDataFrameMessage(
452
+ topic=ReferenceVectorMessageTopics.REFERENCE_VECTORS_SPHERICAL,
453
+ value=self.values,
454
+ source=self.__class__.__name__,
455
+ ),
456
+ PolarsDataFrameMessage(
457
+ topic=ReferenceVectorMessageTopics.REFERENCE_VECTORS_PLANAR,
458
+ value=self.values_planar,
459
+ source=self.__class__.__name__,
460
+ ),
461
+ ]
462
+ raise ValueError(f"Verbosity level {self.verbosity} is not allowed.")