desdeo 1.2-py3-none-any.whl → 2.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (182)
  1. desdeo/__init__.py +8 -8
  2. desdeo/adm/ADMAfsar.py +551 -0
  3. desdeo/adm/ADMChen.py +414 -0
  4. desdeo/adm/BaseADM.py +119 -0
  5. desdeo/adm/__init__.py +11 -0
  6. desdeo/api/README.md +73 -0
  7. desdeo/api/__init__.py +15 -0
  8. desdeo/api/app.py +50 -0
  9. desdeo/api/config.py +90 -0
  10. desdeo/api/config.toml +64 -0
  11. desdeo/api/db.py +27 -0
  12. desdeo/api/db_init.py +85 -0
  13. desdeo/api/db_models.py +164 -0
  14. desdeo/api/malaga_db_init.py +27 -0
  15. desdeo/api/models/__init__.py +266 -0
  16. desdeo/api/models/archive.py +23 -0
  17. desdeo/api/models/emo.py +128 -0
  18. desdeo/api/models/enautilus.py +69 -0
  19. desdeo/api/models/gdm/gdm_aggregate.py +139 -0
  20. desdeo/api/models/gdm/gdm_base.py +69 -0
  21. desdeo/api/models/gdm/gdm_score_bands.py +114 -0
  22. desdeo/api/models/gdm/gnimbus.py +138 -0
  23. desdeo/api/models/generic.py +104 -0
  24. desdeo/api/models/generic_states.py +401 -0
  25. desdeo/api/models/nimbus.py +158 -0
  26. desdeo/api/models/preference.py +128 -0
  27. desdeo/api/models/problem.py +717 -0
  28. desdeo/api/models/reference_point_method.py +18 -0
  29. desdeo/api/models/session.py +49 -0
  30. desdeo/api/models/state.py +463 -0
  31. desdeo/api/models/user.py +52 -0
  32. desdeo/api/models/utopia.py +25 -0
  33. desdeo/api/routers/_EMO.backup +309 -0
  34. desdeo/api/routers/_NAUTILUS.py +245 -0
  35. desdeo/api/routers/_NAUTILUS_navigator.py +233 -0
  36. desdeo/api/routers/_NIMBUS.py +765 -0
  37. desdeo/api/routers/__init__.py +5 -0
  38. desdeo/api/routers/emo.py +497 -0
  39. desdeo/api/routers/enautilus.py +237 -0
  40. desdeo/api/routers/gdm/gdm_aggregate.py +234 -0
  41. desdeo/api/routers/gdm/gdm_base.py +420 -0
  42. desdeo/api/routers/gdm/gdm_score_bands/gdm_score_bands_manager.py +398 -0
  43. desdeo/api/routers/gdm/gdm_score_bands/gdm_score_bands_routers.py +377 -0
  44. desdeo/api/routers/gdm/gnimbus/gnimbus_manager.py +698 -0
  45. desdeo/api/routers/gdm/gnimbus/gnimbus_routers.py +591 -0
  46. desdeo/api/routers/generic.py +233 -0
  47. desdeo/api/routers/nimbus.py +705 -0
  48. desdeo/api/routers/problem.py +307 -0
  49. desdeo/api/routers/reference_point_method.py +93 -0
  50. desdeo/api/routers/session.py +100 -0
  51. desdeo/api/routers/test.py +16 -0
  52. desdeo/api/routers/user_authentication.py +520 -0
  53. desdeo/api/routers/utils.py +187 -0
  54. desdeo/api/routers/utopia.py +230 -0
  55. desdeo/api/schema.py +100 -0
  56. desdeo/api/tests/__init__.py +0 -0
  57. desdeo/api/tests/conftest.py +151 -0
  58. desdeo/api/tests/test_enautilus.py +330 -0
  59. desdeo/api/tests/test_models.py +1179 -0
  60. desdeo/api/tests/test_routes.py +1075 -0
  61. desdeo/api/utils/_database.py +263 -0
  62. desdeo/api/utils/_logger.py +29 -0
  63. desdeo/api/utils/database.py +36 -0
  64. desdeo/api/utils/emo_database.py +40 -0
  65. desdeo/core.py +34 -0
  66. desdeo/emo/__init__.py +159 -0
  67. desdeo/emo/hooks/archivers.py +188 -0
  68. desdeo/emo/methods/EAs.py +541 -0
  69. desdeo/emo/methods/__init__.py +0 -0
  70. desdeo/emo/methods/bases.py +12 -0
  71. desdeo/emo/methods/templates.py +111 -0
  72. desdeo/emo/operators/__init__.py +1 -0
  73. desdeo/emo/operators/crossover.py +1282 -0
  74. desdeo/emo/operators/evaluator.py +114 -0
  75. desdeo/emo/operators/generator.py +459 -0
  76. desdeo/emo/operators/mutation.py +1224 -0
  77. desdeo/emo/operators/scalar_selection.py +202 -0
  78. desdeo/emo/operators/selection.py +1778 -0
  79. desdeo/emo/operators/termination.py +286 -0
  80. desdeo/emo/options/__init__.py +108 -0
  81. desdeo/emo/options/algorithms.py +435 -0
  82. desdeo/emo/options/crossover.py +164 -0
  83. desdeo/emo/options/generator.py +131 -0
  84. desdeo/emo/options/mutation.py +260 -0
  85. desdeo/emo/options/repair.py +61 -0
  86. desdeo/emo/options/scalar_selection.py +66 -0
  87. desdeo/emo/options/selection.py +127 -0
  88. desdeo/emo/options/templates.py +383 -0
  89. desdeo/emo/options/termination.py +143 -0
  90. desdeo/explanations/__init__.py +6 -0
  91. desdeo/explanations/explainer.py +100 -0
  92. desdeo/explanations/utils.py +90 -0
  93. desdeo/gdm/__init__.py +22 -0
  94. desdeo/gdm/gdmtools.py +45 -0
  95. desdeo/gdm/score_bands.py +114 -0
  96. desdeo/gdm/voting_rules.py +50 -0
  97. desdeo/mcdm/__init__.py +41 -0
  98. desdeo/mcdm/enautilus.py +338 -0
  99. desdeo/mcdm/gnimbus.py +484 -0
  100. desdeo/mcdm/nautili.py +345 -0
  101. desdeo/mcdm/nautilus.py +477 -0
  102. desdeo/mcdm/nautilus_navigator.py +656 -0
  103. desdeo/mcdm/nimbus.py +417 -0
  104. desdeo/mcdm/pareto_navigator.py +269 -0
  105. desdeo/mcdm/reference_point_method.py +186 -0
  106. desdeo/problem/__init__.py +83 -0
  107. desdeo/problem/evaluator.py +561 -0
  108. desdeo/problem/external/__init__.py +18 -0
  109. desdeo/problem/external/core.py +356 -0
  110. desdeo/problem/external/pymoo_provider.py +266 -0
  111. desdeo/problem/external/runtime.py +44 -0
  112. desdeo/problem/gurobipy_evaluator.py +562 -0
  113. desdeo/problem/infix_parser.py +341 -0
  114. desdeo/problem/json_parser.py +944 -0
  115. desdeo/problem/pyomo_evaluator.py +487 -0
  116. desdeo/problem/schema.py +1829 -0
  117. desdeo/problem/simulator_evaluator.py +348 -0
  118. desdeo/problem/sympy_evaluator.py +244 -0
  119. desdeo/problem/testproblems/__init__.py +88 -0
  120. desdeo/problem/testproblems/benchmarks_server.py +120 -0
  121. desdeo/problem/testproblems/binh_and_korn_problem.py +88 -0
  122. desdeo/problem/testproblems/cake_problem.py +185 -0
  123. desdeo/problem/testproblems/dmitry_forest_problem_discrete.py +71 -0
  124. desdeo/problem/testproblems/dtlz2_problem.py +102 -0
  125. desdeo/problem/testproblems/forest_problem.py +283 -0
  126. desdeo/problem/testproblems/knapsack_problem.py +163 -0
  127. desdeo/problem/testproblems/mcwb_problem.py +831 -0
  128. desdeo/problem/testproblems/mixed_variable_dimenrions_problem.py +83 -0
  129. desdeo/problem/testproblems/momip_problem.py +172 -0
  130. desdeo/problem/testproblems/multi_valued_constraints.py +119 -0
  131. desdeo/problem/testproblems/nimbus_problem.py +143 -0
  132. desdeo/problem/testproblems/pareto_navigator_problem.py +89 -0
  133. desdeo/problem/testproblems/re_problem.py +492 -0
  134. desdeo/problem/testproblems/river_pollution_problems.py +440 -0
  135. desdeo/problem/testproblems/rocket_injector_design_problem.py +140 -0
  136. desdeo/problem/testproblems/simple_problem.py +351 -0
  137. desdeo/problem/testproblems/simulator_problem.py +92 -0
  138. desdeo/problem/testproblems/single_objective.py +289 -0
  139. desdeo/problem/testproblems/spanish_sustainability_problem.py +945 -0
  140. desdeo/problem/testproblems/zdt_problem.py +274 -0
  141. desdeo/problem/utils.py +245 -0
  142. desdeo/tools/GenerateReferencePoints.py +181 -0
  143. desdeo/tools/__init__.py +120 -0
  144. desdeo/tools/desc_gen.py +22 -0
  145. desdeo/tools/generics.py +165 -0
  146. desdeo/tools/group_scalarization.py +3090 -0
  147. desdeo/tools/gurobipy_solver_interfaces.py +258 -0
  148. desdeo/tools/indicators_binary.py +117 -0
  149. desdeo/tools/indicators_unary.py +362 -0
  150. desdeo/tools/interaction_schema.py +38 -0
  151. desdeo/tools/intersection.py +54 -0
  152. desdeo/tools/iterative_pareto_representer.py +99 -0
  153. desdeo/tools/message.py +265 -0
  154. desdeo/tools/ng_solver_interfaces.py +199 -0
  155. desdeo/tools/non_dominated_sorting.py +134 -0
  156. desdeo/tools/patterns.py +283 -0
  157. desdeo/tools/proximal_solver.py +99 -0
  158. desdeo/tools/pyomo_solver_interfaces.py +477 -0
  159. desdeo/tools/reference_vectors.py +229 -0
  160. desdeo/tools/scalarization.py +2065 -0
  161. desdeo/tools/scipy_solver_interfaces.py +454 -0
  162. desdeo/tools/score_bands.py +627 -0
  163. desdeo/tools/utils.py +388 -0
  164. desdeo/tools/visualizations.py +67 -0
  165. desdeo/utopia_stuff/__init__.py +0 -0
  166. desdeo/utopia_stuff/data/1.json +15 -0
  167. desdeo/utopia_stuff/data/2.json +13 -0
  168. desdeo/utopia_stuff/data/3.json +15 -0
  169. desdeo/utopia_stuff/data/4.json +17 -0
  170. desdeo/utopia_stuff/data/5.json +15 -0
  171. desdeo/utopia_stuff/from_json.py +40 -0
  172. desdeo/utopia_stuff/reinit_user.py +38 -0
  173. desdeo/utopia_stuff/utopia_db_init.py +212 -0
  174. desdeo/utopia_stuff/utopia_problem.py +403 -0
  175. desdeo/utopia_stuff/utopia_problem_old.py +415 -0
  176. desdeo/utopia_stuff/utopia_reference_solutions.py +79 -0
  177. desdeo-2.1.0.dist-info/METADATA +186 -0
  178. desdeo-2.1.0.dist-info/RECORD +180 -0
  179. {desdeo-1.2.dist-info → desdeo-2.1.0.dist-info}/WHEEL +1 -1
  180. desdeo-2.1.0.dist-info/licenses/LICENSE +21 -0
  181. desdeo-1.2.dist-info/METADATA +0 -16
  182. desdeo-1.2.dist-info/RECORD +0 -4
desdeo/api/models/problem.py (new file)
@@ -0,0 +1,717 @@
+ """Database models for representing multiobjective optimization problems in the DESDEO web API."""
+
+ import json
+ from pathlib import Path
+ from types import UnionType
+ from typing import TYPE_CHECKING
+
+ from fastapi import UploadFile
+ from pydantic import BaseModel, ConfigDict, create_model
+ from sqlalchemy.types import JSON, String, TypeDecorator
+ from sqlmodel import Column, Field, Relationship, SQLModel
+
+ from desdeo.problem.schema import (
+     Constant,
+     Constraint,
+     DiscreteRepresentation,
+     ExtraFunction,
+     Objective,
+     Problem,
+     ScalarizationFunction,
+     Simulator,
+     Tensor,
+     TensorConstant,
+     TensorVariable,
+     Url,
+     Variable,
+     VariableDomainTypeEnum,
+     VariableType,
+ )
+
+ from desdeo.tools.utils import available_solvers
+
+ if TYPE_CHECKING:
+     from .archive import UserSavedSolutionDB
+     from .preference import PreferenceDB
+     from .user import User
+
+
+ class ProblemBase(SQLModel):
+     """The base model for `ProblemDB` and related requests/responses."""
+
+     model_config = ConfigDict(from_attributes=True)
+
+     # Model fields
+     name: str | None = Field()
+     description: str | None = Field()
+     is_convex: bool | None = Field(nullable=True, default=None)
+     is_linear: bool | None = Field(nullable=True, default=None)
+     is_twice_differentiable: bool | None = Field(nullable=True, default=None)
+     scenario_keys: list[str] | None = Field(sa_column=Column(JSON, nullable=True), default=None)
+     variable_domain: VariableDomainTypeEnum | None = Field()
+
+
+ class ProblemGetRequest(SQLModel):
+     """Model to deal with problem fetching requests."""
+
+     problem_id: int
+
+
+ class ProblemSelectSolverRequest(SQLModel):
+     """Model to request a specific solver for a problem."""
+
+     problem_id: int = Field(
+         description="ID of the problem that the solver is assigned to."
+     )
+     solver_string_representation: str = Field(
+         description=f"One of the following: {[x for x, _ in available_solvers.items()]}"
+     )
+
+
+ class ProblemAddFromJSONRequest(SQLModel):
+     """Model to request addition of a problem based on the contents of a JSON file."""
+
+     json_file: UploadFile
+
+
+ class ProblemInfo(ProblemBase):
+     """Problem info request return data."""
+
+     id: int
+     user_id: int
+
+     name: str
+     description: str
+     is_convex: bool | None
+     is_linear: bool | None
+     is_twice_differentiable: bool | None
+     scenario_keys: list[str] | None
+     variable_domain: VariableDomainTypeEnum
+
+     constants: list["ConstantDB"] | None
+     tensor_constants: list["TensorConstantDB"] | None
+     variables: list["VariableDB"] | None
+     tensor_variables: list["TensorVariableDB"] | None
+     objectives: list["ObjectiveDB"]
+     constraints: list["ConstraintDB"] | None
+     scalarization_funcs: list["ScalarizationFunctionDB"] | None
+     extra_funcs: list["ExtraFunctionDB"] | None
+     discrete_representation: "DiscreteRepresentationDB | None"
+     simulators: list["SimulatorDB"] | None
+
+     problem_metadata: "ProblemMetaDataPublic | None"
+
+
+ class ProblemInfoSmall(ProblemBase):
+     """Problem info request return data, but smaller."""
+
+     id: int
+     user_id: int
+
+     name: str
+     description: str
+     is_convex: bool | None
+     is_linear: bool | None
+     is_twice_differentiable: bool | None
+     scenario_keys: list[str] | None
+     variable_domain: VariableDomainTypeEnum
+
+     problem_metadata: "ProblemMetaDataPublic | None"
+
+
+ class ProblemDB(ProblemBase, table=True):
+     """The table model to represent the `Problem` class in the database."""
+
+     model_config = ConfigDict(from_attributes=True)
+
+     # Database specific
+     id: int | None = Field(primary_key=True, default=None)
+     user_id: int | None = Field(foreign_key="user.id", default=None)
+
+     # Model fields
+     name: str = Field()
+     description: str = Field()
+     is_convex: bool | None = Field(nullable=True, default=None)
+     is_linear: bool | None = Field(nullable=True, default=None)
+     is_twice_differentiable: bool | None = Field(nullable=True, default=None)
+     scenario_keys: list[str] | None = Field(sa_column=Column(JSON, nullable=True), default=None)
+     variable_domain: VariableDomainTypeEnum = Field()
+
+     # Back populates
+     user: "User" = Relationship(back_populates="problems")
+     solutions: list["UserSavedSolutionDB"] = Relationship(back_populates="problem")
+     preferences: list["PreferenceDB"] = Relationship(back_populates="problem")
+
+     # Populated by other models
+     constants: list["ConstantDB"] = Relationship(back_populates="problem")
+     tensor_constants: list["TensorConstantDB"] = Relationship(back_populates="problem")
+     variables: list["VariableDB"] = Relationship(back_populates="problem")
+     tensor_variables: list["TensorVariableDB"] = Relationship(back_populates="problem")
+     objectives: list["ObjectiveDB"] = Relationship(back_populates="problem")
+     constraints: list["ConstraintDB"] = Relationship(back_populates="problem")
+     scalarization_funcs: list["ScalarizationFunctionDB"] = Relationship(back_populates="problem")
+     extra_funcs: list["ExtraFunctionDB"] = Relationship(back_populates="problem")
+     discrete_representation: "DiscreteRepresentationDB" = Relationship(back_populates="problem")
+     simulators: list["SimulatorDB"] = Relationship(back_populates="problem")
+     problem_metadata: "ProblemMetaDataDB" = Relationship(back_populates="problem")
+
+     @classmethod
+     def from_problem(cls, problem_instance: Problem, user: "User") -> "ProblemDB":
+         """Initialize the model from an instance of `Problem`.
+
+         Args:
+             problem_instance (Problem): the `Problem` instance from which to initialize
+                 a `ProblemDB` model.
+             user (User): the user the instance of `ProblemDB` is assigned to.
+
+         Returns:
+             ProblemDB: the new instance of `ProblemDB`.
+         """
+         scalar_constants = (
+             [const for const in problem_instance.constants if isinstance(const, Constant)]
+             if problem_instance.constants is not None
+             else []
+         )
+         tensor_constants = (
+             [const for const in problem_instance.constants if isinstance(const, TensorConstant)]
+             if problem_instance.constants is not None
+             else []
+         )
+         scalar_variables = [var for var in problem_instance.variables if isinstance(var, Variable)]
+         tensor_variables = [var for var in problem_instance.variables if isinstance(var, TensorVariable)]
+         return cls(
+             user_id=user.id,
+             name=problem_instance.name,
+             description=problem_instance.description,
+             is_convex=problem_instance.is_convex_,
+             is_linear=problem_instance.is_linear_,
+             is_twice_differentiable=problem_instance.is_twice_differentiable_,
+             variable_domain=problem_instance.variable_domain,
+             scenario_keys=problem_instance.scenario_keys,
+             constants=[ConstantDB.model_validate(const) for const in scalar_constants],
+             tensor_constants=[TensorConstantDB.model_validate(const) for const in tensor_constants],
+             variables=[VariableDB.model_validate(var) for var in scalar_variables],
+             tensor_variables=[TensorVariableDB.model_validate(var) for var in tensor_variables],
+             objectives=[ObjectiveDB.model_validate(obj) for obj in problem_instance.objectives],
+             constraints=(
+                 [ConstraintDB.model_validate(con) for con in problem_instance.constraints]
+                 if problem_instance.constraints is not None
+                 else []
+             ),
+             scalarization_funcs=(
+                 [ScalarizationFunctionDB.model_validate(scal) for scal in problem_instance.scalarization_funcs]
+                 if problem_instance.scalarization_funcs is not None
+                 else []
+             ),
+             extra_funcs=(
+                 [ExtraFunctionDB.model_validate(extra) for extra in problem_instance.extra_funcs]
+                 if problem_instance.extra_funcs is not None
+                 else []
+             ),
+             discrete_representation=(
+                 DiscreteRepresentationDB.model_validate(problem_instance.discrete_representation)
+                 if problem_instance.discrete_representation is not None
+                 else None
+             ),
+             simulators=(
+                 [SimulatorDB.model_validate(sim) for sim in problem_instance.simulators]
+                 if problem_instance.simulators is not None
+                 else []
+             ),
+         )
+
+
+ ### PROBLEM METADATA ###
+ class ForestProblemMetaData(SQLModel, table=True):
+     """A problem metadata class to hold UTOPIA forest problem specific information."""
+
+     id: int | None = Field(primary_key=True, default=None)
+     metadata_id: int | None = Field(foreign_key="problemmetadatadb.id", default=None)
+
+     metadata_type: str = "forest_problem_metadata"
+
+     map_json: str = Field()
+     schedule_dict: dict = Field(sa_column=Column(JSON))
+     years: list[str] = Field(sa_column=Column(JSON))
+     stand_id_field: str = Field()
+     stand_descriptor: dict | None = Field(sa_column=Column(JSON), default=None)
+     compensation: float | None = Field(default=None)
+
+     metadata_instance: "ProblemMetaDataDB" = Relationship(back_populates="forest_metadata")
+
+
+ class RepresentativeNonDominatedSolutions(SQLModel, table=True):
+     """A problem metadata class to store representative non-dominated solution sets.
+
+     A problem metadata class to store representative solution sets, i.e., non-dominated sets that
+     represent/approximate the Pareto optimal solution set of the problem.
+
+     Note:
+         It is assumed that the solution set is non-dominated.
+     """
+
+     id: int | None = Field(primary_key=True, default=None)
+     metadata_id: int | None = Field(foreign_key="problemmetadatadb.id", default=None)
+
+     metadata_type: str = "representative_non_dominated_solutions"
+
+     name: str = Field(description="The name of the representative set.")
+     description: str | None = Field(description="A description of the representative set. Optional.", default=None)
+
+     solution_data: dict[str, list[float]] = Field(
+         sa_column=Column(JSON),
+         description="The non-dominated solutions. It is assumed that columns "
+         "exist for each variable and objective function. For functions, the "
+         "`_min` variant should be present, and any tensor variables should be "
+         "unrolled.",
+     )
+
+     ideal: dict[str, float] = Field(
+         sa_column=Column(JSON), description="The ideal objective function values of the representative set."
+     )
+     nadir: dict[str, float] = Field(
+         sa_column=Column(JSON), description="The nadir objective function values of the representative set."
+     )
+
+     metadata_instance: "ProblemMetaDataDB" = Relationship(back_populates="representative_nd_metadata")
+
+
+ class SolverSelectionMetadata(SQLModel, table=True):
+     """A problem metadata class to store the preferred solver of a problem.
+
+     See desdeo/tools/utils.py -> available_solvers for available solvers.
+     """
+
+     id: int | None = Field(primary_key=True, default=None)
+     metadata_id: int | None = Field(foreign_key="problemmetadatadb.id", default=None)
+
+     metadata_type: str = "solver_selection_metadata"
+
+     # The solver's string representation is used in endpoints to fetch the proper solver from available solvers.
+     solver_string_representation: str = Field(description="The string representation of the selected solver.")
+
+     metadata_instance: "ProblemMetaDataDB" = Relationship(back_populates="solver_selection_metadata")
+
+
+ class ProblemMetaDataDB(SQLModel, table=True):
+     """Store Problem MetaData to DB with this class."""
+
+     id: int | None = Field(primary_key=True, default=None)
+     problem_id: int | None = Field(foreign_key="problemdb.id", default=None)
+
+     forest_metadata: list[ForestProblemMetaData] = Relationship(back_populates="metadata_instance")
+     representative_nd_metadata: list[RepresentativeNonDominatedSolutions] = Relationship(
+         back_populates="metadata_instance"
+     )
+     solver_selection_metadata: list[SolverSelectionMetadata] = Relationship(back_populates="metadata_instance")
+     problem: ProblemDB = Relationship(back_populates="problem_metadata")
+
+     @property
+     def all_metadata(
+         self,
+     ) -> list[ForestProblemMetaData | RepresentativeNonDominatedSolutions | SolverSelectionMetadata]:
+         """Return all metadata in one list."""
+         return (
+             (self.forest_metadata or [])
+             + (self.representative_nd_metadata or [])
+             + (self.solver_selection_metadata or [])
+         )
+
+
+ class ProblemMetaDataPublic(SQLModel):
+     """Response model for ProblemMetaData."""
+
+     problem_id: int
+
+     forest_metadata: list[ForestProblemMetaData] | None
+     representative_nd_metadata: list[RepresentativeNonDominatedSolutions] | None
+
+
+ class ProblemMetaDataGetRequest(SQLModel):
+     """Request model for getting a specific type of metadata from a specific problem."""
+
+     problem_id: int
+     metadata_type: str
+
+
+ ### PATH TYPES ###
+ class PathOrUrlType(TypeDecorator):
+     """Helper class for dealing with Paths and Urls."""
+
+     impl = JSON
+     cache_ok = True
+
+     def process_bind_param(self, value: Path | Url | None, dialect):
+         """Convert to string or JSON."""
+         if value is None:
+             return None
+         elif isinstance(value, Path): # noqa: RET505
+             return {"_type": "path", "value": str(value)}
+         elif isinstance(value, Url):
+             return {"_type": "url", "value": value.model_dump()}
+         else:
+             raise ValueError(f"Unsupported type: {type(value)}")
+
+     def process_result_value(self, value, dialect):
+         """Convert back to Path or URL."""
+         if value is None:
+             return None
+         elif isinstance(value, dict) and "_type" in value: # noqa: RET505
+             if value["_type"] == "path":
+                 return Path(value["value"])
+             elif value["_type"] == "url": # noqa: RET505
+                 return Url(**value["value"])
+         raise ValueError(f"Invalid format: {value}")
+
+
+ class PathOrUrlListType(TypeDecorator):
+     """SQLAlchemy custom type to convert list[Path | Url] to JSON."""
+
+     impl = String
+     cache_ok = True
+
+     def process_bind_param(self, value: list[Path | Url] | None, dialect):
+         """Serialize list[Path | Url] to JSON."""
+         if value is None:
+             return None
+
+         serialized = []
+         for item in value:
+             if isinstance(item, Path):
+                 serialized.append({"_type": "path", "value": str(item)})
+             elif isinstance(item, Url):
+                 serialized.append({"_type": "url", "value": item.model_dump()})
+             else:
+                 raise TypeError(f"Unsupported item type in list: {type(item)}")
+
+         return json.dumps(serialized)
+
+     def process_result_value(self, value, dialect):
+         """Deserialize JSON to list[Path | Url]."""
+         if value is None:
+             return None
+
+         try:
+             items = json.loads(value)
+             result = []
+             for item in items:
+                 if item["_type"] == "path":
+                     result.append(Path(item["value"]))
+                 elif item["_type"] == "url":
+                     result.append(Url(**item["value"]))
+                 else:
+                     raise ValueError(f"Unknown _type: {item.get('_type')}")
+             return result # noqa: TRY300
+         except (json.JSONDecodeError, KeyError, TypeError) as e:
+             raise ValueError(f"Invalid format for PathListType: {value}") from e
+
+
+ def from_pydantic(
+     model_class: BaseModel,
+     name: str,
+     union_type_conversions: dict[type, type] | None = None,
+     base_model: SQLModel = SQLModel,
+ ) -> SQLModel:
+     """Create an SQLModel class from a pydantic model.
+
+     Args:
+         model_class (BaseModel): the pydantic class to be converted.
+         name (str): the name given to the class.
+         union_type_conversions (dict[type, type], optional): union type conversion table. This is needed because
+             SQLAlchemy expects all table columns to have a specific type. For example, a field with a type like
+             `int | float | bool` cannot be stored in a database table because the field's type
+             is ambiguous. In this case, storing whichever value was originally stored in the field as a
+             `float` will suffice (because `int` and `bool` can be represented by floats).
+             Therefore, a type conversion, such as `{int | float | bool: float}`, is expected.
+             Defaults to `None`.
+         base_model (SQLModel, optional): a base SQLModel to override problematic fields in the `model_class`, such
+             as lists or derived nested types. The base class may have custom validators to help convert
+             these values into something more suitable to be stored in a database. Often storing the JSON
+             representation of the problematic types is enough. If the `model_class` consists of only
+             fields with primitive types, this argument can be left to its default value. Defaults to SQLModel.
+
+     Raises:
+         TypeError: one or more type conversions are missing for union types.
+
+     Returns:
+         SQLModel: the SQLModel corresponding to `model_class`.
+     """
+     # collect fields in the base model; if a field is defined there, do not try to convert its type
+     base_fields = base_model.model_fields
+
+     if union_type_conversions is None:
+         union_type_conversions = {}
+
+     field_definitions = {}
+     for field_name, field_info in model_class.model_fields.items():
+         if field_name in base_fields:
+             annotation = base_fields[field_name].annotation
+             field_definitions[field_name] = (annotation, base_fields[field_name])
+             continue
+
+         if type(field_type := field_info.annotation) is UnionType:
+             if field_type not in union_type_conversions:
+                 raise TypeError("Missing Union type conversion")
+
+             annotation = union_type_conversions[field_type]
+         else:
+             annotation = field_info.annotation
+
+         field_definitions[field_name] = (annotation, field_info)
+
+     return create_model(name, __base__=base_model, **field_definitions)
+
+
+ class _TensorConstant(SQLModel):
+     """Helper class to override the field types of nested and list types."""
+
+     values: Tensor = Field(sa_column=Column(JSON))
+     shape: list[int] = Field(sa_column=Column(JSON))
+
+
+ _BaseTensorConstantDB = from_pydantic(
+     TensorConstant,
+     "_BaseTensorConstantDB",
+     union_type_conversions={VariableType | None: float | None},
+     base_model=_TensorConstant,
+ )
+
+
+ class TensorConstantDB(_BaseTensorConstantDB, table=True):
+     """The SQLModel equivalent to `TensorConstant`."""
+
+     id: int | None = Field(primary_key=True, default=None)
+     problem_id: int | None = Field(default=None, foreign_key="problemdb.id")
+
+     # Back populates
+     problem: ProblemDB | None = Relationship(back_populates="tensor_constants")
+
+
+ _ConstantDB = from_pydantic(Constant, "_ConstantDB", union_type_conversions={VariableType: float})
+
+
+ class ConstantDB(_ConstantDB, table=True):
+     """The SQLModel equivalent to `Constant`."""
+
+     id: int | None = Field(primary_key=True, default=None)
+     problem_id: int | None = Field(foreign_key="problemdb.id", default=None)
+
+     # Back populates
+     problem: ProblemDB | None = Relationship(back_populates="constants")
+
+
+ _VariableDB = from_pydantic(
+     Variable,
+     "_VariableDB",
+     union_type_conversions={VariableType: float, VariableType | None: float | None},
+ )
+
+
+ class VariableDB(_VariableDB, table=True):
+     """The SQLModel equivalent to `Variable`."""
+
+     id: int | None = Field(primary_key=True, default=None)
+     problem_id: int | None = Field(foreign_key="problemdb.id", default=None)
+
+     # Back populates
+     problem: ProblemDB | None = Relationship(back_populates="variables")
+
+
+ class _TensorVariable(SQLModel):
+     """Helper class to override the field types of nested and list types."""
+
+     initial_values: Tensor | None = Field(sa_column=Column(JSON))
+     lowerbounds: Tensor | None = Field(sa_column=Column(JSON))
+     upperbounds: Tensor | None = Field(sa_column=Column(JSON))
+     shape: list[int] = Field(sa_column=Column(JSON))
+
+
+ _TensorVariableDB = from_pydantic(
+     TensorVariable,
+     "_TensorVariableDB",
+     union_type_conversions={VariableType: float, VariableType | None: float | None},
+     base_model=_TensorVariable,
+ )
+
+
+ class TensorVariableDB(_TensorVariableDB, table=True):
+     """The SQLModel equivalent to `TensorVariable`."""
+
+     id: int | None = Field(primary_key=True, default=None)
+     problem_id: int | None = Field(foreign_key="problemdb.id", default=None)
+
+     # Back populates
+     problem: ProblemDB | None = Relationship(back_populates="tensor_variables")
+
+
+ class _Objective(SQLModel):
+     """Helper class to override the fields of nested and list types, and Paths."""
+
+     func: list | None = Field(sa_column=Column(JSON, nullable=True))
+     scenario_keys: list[str] | None = Field(sa_column=Column(JSON), default=None)
+     surrogates: list[Path] | None = Field(sa_column=Column(PathOrUrlListType), default=None)
+     simulator_path: Path | Url | None = Field(sa_column=Column(PathOrUrlType), default=None)
+
+
+ _ObjectiveDB = from_pydantic(
+     Objective,
+     "_ObjectiveDB",
+     union_type_conversions={
+         str | None: str | None,
+         float | None: float | None,
+         Path | Url | None: PathOrUrlType | None,
+     },
+     base_model=_Objective,
+ )
+
+
+ class ObjectiveDB(_ObjectiveDB, table=True):
+     """The SQLModel equivalent to `Objective`."""
+
+     id: int | None = Field(primary_key=True, default=None)
+     problem_id: int | None = Field(foreign_key="problemdb.id", default=None)
+
+     # Back populates
+     problem: ProblemDB | None = Relationship(back_populates="objectives")
+
+
+ class _Constraint(SQLModel):
+     """Helper class to override the fields of nested and list types, and Paths."""
+
+     func: list = Field(sa_column=Column(JSON))
+     scenario_keys: list[str] | None = Field(sa_column=Column(JSON), default=None)
+     surrogates: list[Path] | None = Field(sa_column=Column(PathOrUrlListType), default=None)
+     simulator_path: Path | Url | None = Field(sa_column=Column(PathOrUrlType), default=None)
+
+
+ _ConstraintDB = from_pydantic(
+     Constraint,
+     "_ConstraintDB",
+     union_type_conversions={
+         str | None: str | None,
+         float | None: float | None,
+         Path | Url | None: PathOrUrlType | None,
+     },
+     base_model=_Constraint,
+ )
+
+
+ class ConstraintDB(_ConstraintDB, table=True):
+     """The SQLModel equivalent to `Constraint`."""
+
+     id: int | None = Field(primary_key=True, default=None)
+     problem_id: int | None = Field(foreign_key="problemdb.id", default=None)
+
+     # Back populates
+     problem: ProblemDB | None = Relationship(back_populates="constraints")
+
+
+ class _ScalarizationFunction(SQLModel):
+     """Helper class to override the fields of nested and list types, and Paths."""
+
+     func: list = Field(sa_column=Column(JSON))
+     scenario_keys: list[str] = Field(sa_column=Column(JSON))
+
+
+ _ScalarizationFunctionDB = from_pydantic(
+     ScalarizationFunction,
+     "_ScalarizationFunctionDB",
+     union_type_conversions={str | None: str | None},
+     base_model=_ScalarizationFunction,
+ )
+
+
+ class ScalarizationFunctionDB(_ScalarizationFunctionDB, table=True):
+     """The SQLModel equivalent to `ScalarizationFunction`."""
+
+     id: int | None = Field(primary_key=True, default=None)
+     problem_id: int | None = Field(foreign_key="problemdb.id", default=None)
+
+     # Back populates
+     problem: ProblemDB | None = Relationship(back_populates="scalarization_funcs")
+
+
+ class _ExtraFunction(SQLModel):
+     """Helper class to override the fields of nested and list types, and Paths."""
+
+     func: list = Field(sa_column=Column(JSON))
+     scenario_keys: list[str] | None = Field(sa_column=Column(JSON), default=None)
+     surrogates: list[Path] | None = Field(sa_column=Column(PathOrUrlListType), default=None)
+     simulator_path: Path | Url | None = Field(sa_column=Column(PathOrUrlType), default=None)
+
+
+ _ExtraFunctionDB = from_pydantic(
+     ExtraFunction,
+     "_ExtraFunctionDB",
+     union_type_conversions={
+         str | None: str | None,
+         Path | Url | None: PathOrUrlType | None,
+     },
+     base_model=_ExtraFunction,
+ )
+
+
+ class ExtraFunctionDB(_ExtraFunctionDB, table=True):
+     """The SQLModel equivalent to `ExtraFunction`."""
+
+     id: int | None = Field(primary_key=True, default=None)
+     problem_id: int | None = Field(foreign_key="problemdb.id", default=None)
+
+     # Back populates
+     problem: ProblemDB | None = Relationship(back_populates="extra_funcs")
+
+
+ class _DiscreteRepresentation(SQLModel):
+     """Helper class to override the fields of nested and list types, and Paths."""
+
+     non_dominated: bool = Field(default=False)
+     variable_values: dict[str, list[VariableType]] = Field(sa_column=Column(JSON))
+     objective_values: dict[str, list[float]] = Field(sa_column=Column(JSON))
+
+
+ _DiscreteRepresentationDB = from_pydantic(
+     DiscreteRepresentation,
+     "_DiscreteRepresentation",
+     base_model=_DiscreteRepresentation,
+ )
+
+
+ class DiscreteRepresentationDB(_DiscreteRepresentationDB, table=True):
+     """The SQLModel equivalent to `DiscreteRepresentation`."""
+
+     id: int | None = Field(primary_key=True, default=None)
+     problem_id: int | None = Field(foreign_key="problemdb.id", default=None)
+
+     # Back populates
+     problem: ProblemDB | None = Relationship(back_populates="discrete_representation")
+
+
+ class _Simulator(SQLModel):
+     """Helper class to override the fields of nested and list types, and Paths."""
+
+     file: Path | None = Field(sa_column=Column(PathOrUrlType), default=None)
+     url: Url | None = Field(sa_column=Column(PathOrUrlType), default=None)
+     parameter_options: dict | None = Field(sa_column=Column(JSON), default=None)
+
+
+ _SimulatorDB = from_pydantic(
+     Simulator,
+     "_SimulatorDB",
+     union_type_conversions={
+         str | None: str | None,
+         Path | None: PathOrUrlType | None,
+         Url | None: PathOrUrlType | None,
+     },
+     base_model=_Simulator,
+ )
+
+
+ class SimulatorDB(_SimulatorDB, table=True):
+     """The SQLModel equivalent to `Simulator`."""
+
+     id: int | None = Field(primary_key=True, default=None)
+     problem_id: int | None = Field(foreign_key="problemdb.id", default=None)
+
+     # Back populates
+     problem: ProblemDB | None = Relationship(back_populates="simulators")
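
For orientation, not part of the diff: the new desdeo/api/models/problem.py builds its table models by converting the pydantic problem schema with the from_pydantic helper shown above. Below is a minimal, hypothetical sketch of that pattern, assuming the 2.1.0 wheel is installed; the Settings, _SettingsBase, and SettingsDB names and their fields are invented for illustration and do not appear in the package.

# Hypothetical sketch: derive an SQLModel table class from a pydantic model,
# mirroring how ConstantDB and related tables are built in the diff above.
from pydantic import BaseModel
from sqlmodel import Field, SQLModel

from desdeo.api.models.problem import from_pydantic


class Settings(BaseModel):
    """Example pydantic model with an ambiguous union-typed field."""

    label: str
    weight: int | float  # ambiguous union: needs an entry in union_type_conversions


# Map the union type to a single storable column type (floats can represent ints).
_SettingsBase = from_pydantic(
    Settings,
    "_SettingsBase",
    union_type_conversions={int | float: float},
)


class SettingsDB(_SettingsBase, table=True):
    """Table model: adds database-specific fields on top of the generated ones."""

    id: int | None = Field(primary_key=True, default=None)

Subclassing the generated base with table=True and adding the primary key follows the ConstantDB / VariableDB pattern used throughout the new module.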