desdeo 2.0.0__py3-none-any.whl → 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126)
  1. desdeo/adm/ADMAfsar.py +551 -0
  2. desdeo/adm/ADMChen.py +414 -0
  3. desdeo/adm/BaseADM.py +119 -0
  4. desdeo/adm/__init__.py +11 -0
  5. desdeo/api/__init__.py +6 -6
  6. desdeo/api/app.py +38 -28
  7. desdeo/api/config.py +65 -44
  8. desdeo/api/config.toml +23 -12
  9. desdeo/api/db.py +10 -8
  10. desdeo/api/db_init.py +12 -6
  11. desdeo/api/models/__init__.py +220 -20
  12. desdeo/api/models/archive.py +16 -27
  13. desdeo/api/models/emo.py +128 -0
  14. desdeo/api/models/enautilus.py +69 -0
  15. desdeo/api/models/gdm/gdm_aggregate.py +139 -0
  16. desdeo/api/models/gdm/gdm_base.py +69 -0
  17. desdeo/api/models/gdm/gdm_score_bands.py +114 -0
  18. desdeo/api/models/gdm/gnimbus.py +138 -0
  19. desdeo/api/models/generic.py +104 -0
  20. desdeo/api/models/generic_states.py +401 -0
  21. desdeo/api/models/nimbus.py +158 -0
  22. desdeo/api/models/preference.py +44 -6
  23. desdeo/api/models/problem.py +274 -64
  24. desdeo/api/models/session.py +4 -1
  25. desdeo/api/models/state.py +419 -52
  26. desdeo/api/models/user.py +7 -6
  27. desdeo/api/models/utopia.py +25 -0
  28. desdeo/api/routers/_EMO.backup +309 -0
  29. desdeo/api/routers/_NIMBUS.py +6 -3
  30. desdeo/api/routers/emo.py +497 -0
  31. desdeo/api/routers/enautilus.py +237 -0
  32. desdeo/api/routers/gdm/gdm_aggregate.py +234 -0
  33. desdeo/api/routers/gdm/gdm_base.py +420 -0
  34. desdeo/api/routers/gdm/gdm_score_bands/gdm_score_bands_manager.py +398 -0
  35. desdeo/api/routers/gdm/gdm_score_bands/gdm_score_bands_routers.py +377 -0
  36. desdeo/api/routers/gdm/gnimbus/gnimbus_manager.py +698 -0
  37. desdeo/api/routers/gdm/gnimbus/gnimbus_routers.py +591 -0
  38. desdeo/api/routers/generic.py +233 -0
  39. desdeo/api/routers/nimbus.py +705 -0
  40. desdeo/api/routers/problem.py +201 -4
  41. desdeo/api/routers/reference_point_method.py +20 -44
  42. desdeo/api/routers/session.py +50 -26
  43. desdeo/api/routers/user_authentication.py +180 -26
  44. desdeo/api/routers/utils.py +187 -0
  45. desdeo/api/routers/utopia.py +230 -0
  46. desdeo/api/schema.py +10 -4
  47. desdeo/api/tests/conftest.py +94 -2
  48. desdeo/api/tests/test_enautilus.py +330 -0
  49. desdeo/api/tests/test_models.py +550 -72
  50. desdeo/api/tests/test_routes.py +902 -43
  51. desdeo/api/utils/_database.py +263 -0
  52. desdeo/api/utils/database.py +28 -266
  53. desdeo/api/utils/emo_database.py +40 -0
  54. desdeo/core.py +7 -0
  55. desdeo/emo/__init__.py +154 -24
  56. desdeo/emo/hooks/archivers.py +18 -2
  57. desdeo/emo/methods/EAs.py +128 -5
  58. desdeo/emo/methods/bases.py +9 -56
  59. desdeo/emo/methods/templates.py +111 -0
  60. desdeo/emo/operators/crossover.py +544 -42
  61. desdeo/emo/operators/evaluator.py +10 -14
  62. desdeo/emo/operators/generator.py +127 -24
  63. desdeo/emo/operators/mutation.py +212 -41
  64. desdeo/emo/operators/scalar_selection.py +202 -0
  65. desdeo/emo/operators/selection.py +956 -214
  66. desdeo/emo/operators/termination.py +124 -16
  67. desdeo/emo/options/__init__.py +108 -0
  68. desdeo/emo/options/algorithms.py +435 -0
  69. desdeo/emo/options/crossover.py +164 -0
  70. desdeo/emo/options/generator.py +131 -0
  71. desdeo/emo/options/mutation.py +260 -0
  72. desdeo/emo/options/repair.py +61 -0
  73. desdeo/emo/options/scalar_selection.py +66 -0
  74. desdeo/emo/options/selection.py +127 -0
  75. desdeo/emo/options/templates.py +383 -0
  76. desdeo/emo/options/termination.py +143 -0
  77. desdeo/gdm/__init__.py +22 -0
  78. desdeo/gdm/gdmtools.py +45 -0
  79. desdeo/gdm/score_bands.py +114 -0
  80. desdeo/gdm/voting_rules.py +50 -0
  81. desdeo/mcdm/__init__.py +23 -1
  82. desdeo/mcdm/enautilus.py +338 -0
  83. desdeo/mcdm/gnimbus.py +484 -0
  84. desdeo/mcdm/nautilus_navigator.py +7 -6
  85. desdeo/mcdm/reference_point_method.py +70 -0
  86. desdeo/problem/__init__.py +5 -1
  87. desdeo/problem/external/__init__.py +18 -0
  88. desdeo/problem/external/core.py +356 -0
  89. desdeo/problem/external/pymoo_provider.py +266 -0
  90. desdeo/problem/external/runtime.py +44 -0
  91. desdeo/problem/infix_parser.py +2 -2
  92. desdeo/problem/pyomo_evaluator.py +25 -6
  93. desdeo/problem/schema.py +69 -48
  94. desdeo/problem/simulator_evaluator.py +65 -15
  95. desdeo/problem/testproblems/__init__.py +26 -11
  96. desdeo/problem/testproblems/benchmarks_server.py +120 -0
  97. desdeo/problem/testproblems/cake_problem.py +185 -0
  98. desdeo/problem/testproblems/dmitry_forest_problem_discrete.py +71 -0
  99. desdeo/problem/testproblems/forest_problem.py +77 -69
  100. desdeo/problem/testproblems/multi_valued_constraints.py +119 -0
  101. desdeo/problem/testproblems/{river_pollution_problem.py → river_pollution_problems.py} +28 -22
  102. desdeo/problem/testproblems/single_objective.py +289 -0
  103. desdeo/problem/testproblems/zdt_problem.py +4 -1
  104. desdeo/tools/__init__.py +39 -21
  105. desdeo/tools/desc_gen.py +22 -0
  106. desdeo/tools/generics.py +22 -2
  107. desdeo/tools/group_scalarization.py +3090 -0
  108. desdeo/tools/indicators_binary.py +107 -1
  109. desdeo/tools/indicators_unary.py +3 -16
  110. desdeo/tools/message.py +33 -2
  111. desdeo/tools/non_dominated_sorting.py +4 -3
  112. desdeo/tools/patterns.py +9 -7
  113. desdeo/tools/pyomo_solver_interfaces.py +48 -35
  114. desdeo/tools/reference_vectors.py +118 -351
  115. desdeo/tools/scalarization.py +340 -1413
  116. desdeo/tools/score_bands.py +491 -328
  117. desdeo/tools/utils.py +117 -49
  118. desdeo/tools/visualizations.py +67 -0
  119. desdeo/utopia_stuff/utopia_problem.py +1 -1
  120. desdeo/utopia_stuff/utopia_problem_old.py +1 -1
  121. {desdeo-2.0.0.dist-info → desdeo-2.1.0.dist-info}/METADATA +46 -28
  122. desdeo-2.1.0.dist-info/RECORD +180 -0
  123. {desdeo-2.0.0.dist-info → desdeo-2.1.0.dist-info}/WHEEL +1 -1
  124. desdeo-2.0.0.dist-info/RECORD +0 -120
  125. /desdeo/api/utils/{logger.py → _logger.py} +0 -0
  126. {desdeo-2.0.0.dist-info → desdeo-2.1.0.dist-info/licenses}/LICENSE +0 -0

desdeo/api/models/preference.py
@@ -8,7 +8,6 @@ from sqlmodel import JSON, Column, Field, Relationship, SQLModel
 from .problem import ProblemDB

 if TYPE_CHECKING:
-    from .archive import ArchiveEntryDB
     from .user import User


@@ -23,7 +22,10 @@ class PreferenceType(TypeDecorator):

     def process_bind_param(self, value, dialect):
         """Preference to JSON."""
-        if isinstance(value, Bounds | ReferencePoint):
+        if isinstance(
+            value,
+            Bounds | ReferencePoint | PreferredRanges | PreferredSolutions | NonPreferredSolutions,
+        ):
             return value.model_dump()

         msg = f"No JSON serialization set for preference type '{type(value)}'."
@@ -39,6 +41,12 @@ class PreferenceType(TypeDecorator):
                 return ReferencePoint.model_validate(value)
             case "bounds":
                 return Bounds.model_validate(value)
+            case "preferred_solutions":
+                return PreferredSolutions.model_validate(value)
+            case "non_preferred_solutions":
+                return NonPreferredSolutions.model_validate(value)
+            case "preferred_ranges":  # Add this case
+                return PreferredRanges.model_validate(value)
             case _:
                 msg = f"No preference_type '{value['preference_type']}' found."
                 print(msg)
@@ -49,15 +57,18 @@ class PreferenceType(TypeDecorator):
 class PreferenceBase(SQLModel):
     """The base model for representing preferences."""

-    __mapper_args__: ClassVar[dict[str, str]] = {"polymorphic_on": "type", "polymorphic_identity": "preference_base"}
+    __mapper_args__: ClassVar[dict[str, str]] = {
+        "polymorphic_on": "type",
+        "polymorphic_identity": "preference_base",
+    }

-    preference_type: Literal["unset"] = "unset"
+    preference_type: str = "unset"


 class ReferencePoint(PreferenceBase):
     """Model for representing a reference point type of preference."""

-    __mapper_args__: ClassVar[dict[str, str]] = {"polymorphic_identity": "preference_base"}
+    __mapper_args__: ClassVar[dict[str, str]] = {"polymorphic_identity": "reference_point"}

     preference_type: Literal["reference_point"] = "reference_point"
     aspiration_levels: dict[str, float] = Field(sa_column=Column(JSON, nullable=False))
@@ -75,6 +86,34 @@ class Bounds(PreferenceBase):
     upper_bounds: dict[str, float | None] = Field(sa_column=Column(JSON, nullable=False))


+class PreferredRanges(PreferenceBase):
+    """Model for representing desired upper and lower bounds for objective functions."""
+
+    __mapper_args__: ClassVar[dict[str, str]] = {"polymorphic_identity": "preferred_ranges"}
+
+    preference_type: Literal["preferred_ranges"] = "preferred_ranges"
+
+    preferred_ranges: dict[str, list[float]] = Field(sa_column=Column(JSON, nullable=False))
+
+
+class PreferredSolutions(PreferenceBase):
+    """Model for representing a preferred solution type of preference."""
+
+    __mapper_args__: ClassVar[dict[str, str]] = {"polymorphic_identity": "preferred_solutions"}
+
+    preference_type: Literal["preferred_solutions"] = "preferred_solutions"
+    preferred_solutions: dict[str, list[float]] = Field(sa_column=Column(JSON, nullable=False))
+
+
+class NonPreferredSolutions(PreferenceBase):
+    """Model for representing a non-preferred solution type of preference."""
+
+    __mapper_args__: ClassVar[dict[str, str]] = {"polymorphic_identity": "non_preferred_solutions"}
+
+    preference_type: Literal["non_preferred_solutions"] = "non_preferred_solutions"
+    non_preferred_solutions: dict[str, list[float]] = Field(sa_column=Column(JSON, nullable=False))
+
+
 class PreferenceDB(SQLModel, table=True):
     """Database model for storing preferences."""

@@ -87,4 +126,3 @@ class PreferenceDB(SQLModel, table=True):
     # Back populates
     problem: "ProblemDB" = Relationship(back_populates="preferences")
     user: "User" = Relationship(back_populates="preferences")
-    solutions: list["ArchiveEntryDB"] = Relationship(back_populates="preference")
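
The new preference models above all pass through the same PreferenceType column: process_bind_param dumps any known variant to JSON, and process_result_value dispatches on the stored preference_type tag. Below is a minimal, self-contained sketch of that round trip; Bounds and PreferredRanges here are simplified stand-ins rather than the desdeo models.

# Minimal sketch of the tag-dispatch round trip used by PreferenceType above.
# Bounds and PreferredRanges are simplified stand-ins, not the desdeo models.
from typing import Literal

from pydantic import BaseModel


class Bounds(BaseModel):
    preference_type: Literal["bounds"] = "bounds"
    lower_bounds: dict[str, float]
    upper_bounds: dict[str, float]


class PreferredRanges(BaseModel):
    preference_type: Literal["preferred_ranges"] = "preferred_ranges"
    preferred_ranges: dict[str, list[float]]


def to_json(value: BaseModel) -> dict:
    """Mirror process_bind_param: any known preference dumps to a plain dict."""
    if isinstance(value, Bounds | PreferredRanges):
        return value.model_dump()
    raise TypeError(f"No JSON serialization set for preference type '{type(value)}'.")


def from_json(value: dict) -> BaseModel:
    """Mirror process_result_value: dispatch on the stored preference_type tag."""
    match value["preference_type"]:
        case "bounds":
            return Bounds.model_validate(value)
        case "preferred_ranges":
            return PreferredRanges.model_validate(value)
        case _:
            raise ValueError(f"No preference_type '{value['preference_type']}' found.")


ranges = PreferredRanges(preferred_ranges={"f_1": [0.1, 0.5], "f_2": [2.0, 3.0]})
assert from_json(to_json(ranges)) == ranges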

desdeo/api/models/problem.py
@@ -3,11 +3,12 @@
 import json
 from pathlib import Path
 from types import UnionType
-from typing import TYPE_CHECKING, Optional
+from typing import TYPE_CHECKING

+from fastapi import UploadFile
 from pydantic import BaseModel, ConfigDict, create_model
-from sqlalchemy.types import String, TypeDecorator
-from sqlmodel import JSON, Column, Field, Relationship, SQLModel
+from sqlalchemy.types import JSON, String, TypeDecorator
+from sqlmodel import Column, Field, Relationship, SQLModel

 from desdeo.problem.schema import (
     Constant,
@@ -21,19 +22,22 @@ from desdeo.problem.schema import (
     Tensor,
     TensorConstant,
     TensorVariable,
+    Url,
     Variable,
     VariableDomainTypeEnum,
     VariableType,
 )

+from desdeo.tools.utils import available_solvers
+
 if TYPE_CHECKING:
-    from .archive import ArchiveEntryDB
+    from .archive import UserSavedSolutionDB
     from .preference import PreferenceDB
     from .user import User


 class ProblemBase(SQLModel):
-    """."""
+    """The base model for `ProblemDB` and related requests/responses."""

     model_config = ConfigDict(from_attributes=True)

@@ -53,8 +57,25 @@ class ProblemGetRequest(SQLModel):
     problem_id: int


+class ProblemSelectSolverRequest(SQLModel):
+    """Model to request a specific solver for a problem."""
+
+    problem_id: int = Field(
+        description="ID of the problem that the solver is assigned to."
+    )
+    solver_string_representation: str = Field(
+        description=f"One of the following: {[x for x, _ in available_solvers.items()]}"
+    )
+
+
+class ProblemAddFromJSONRequest(SQLModel):
+    """Model to request addition of a problem based on the contents of a JSON file."""
+
+    json_file: UploadFile
+
+
 class ProblemInfo(ProblemBase):
-    """."""
+    """Problem info request return data."""

     id: int
     user_id: int
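
ProblemAddFromJSONRequest wraps a FastAPI UploadFile, so a problem definition can be submitted as a JSON file. The sketch below shows one way such an upload could be consumed; the route path and response shape are assumptions for illustration, not the actual desdeo/api/routers/problem.py endpoint.

# Illustrative only: the path "/problem/add_from_json" and the response shape are
# assumptions, not the actual desdeo router. Shows how an UploadFile-based request
# like ProblemAddFromJSONRequest can be consumed.
import json

from fastapi import FastAPI, UploadFile

app = FastAPI()


@app.post("/problem/add_from_json")
async def add_problem_from_json(json_file: UploadFile) -> dict:
    # Read the uploaded file and parse the problem definition from its JSON contents.
    contents = await json_file.read()
    problem_dict = json.loads(contents)
    return {"name": problem_dict.get("name"), "status": "parsed"}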
@@ -78,9 +99,11 @@ class ProblemInfo(ProblemBase):
     discrete_representation: "DiscreteRepresentationDB | None"
     simulators: list["SimulatorDB"] | None

+    problem_metadata: "ProblemMetaDataPublic | None"
+

 class ProblemInfoSmall(ProblemBase):
-    """."""
+    """Problem info request return data, but smaller."""

     id: int
     user_id: int
@@ -93,9 +116,11 @@ class ProblemInfoSmall(ProblemBase):
     scenario_keys: list[str] | None
     variable_domain: VariableDomainTypeEnum

+    problem_metadata: "ProblemMetaDataPublic | None"
+

 class ProblemDB(ProblemBase, table=True):
-    """."""
+    """The table model to represent the `Problem` class in the database."""

     model_config = ConfigDict(from_attributes=True)

@@ -114,7 +139,7 @@ class ProblemDB(ProblemBase, table=True):

     # Back populates
     user: "User" = Relationship(back_populates="problems")
-    solutions: list["ArchiveEntryDB"] = Relationship(back_populates="problem")
+    solutions: list["UserSavedSolutionDB"] = Relationship(back_populates="problem")
     preferences: list["PreferenceDB"] = Relationship(back_populates="problem")

     # Populated by other models
@@ -128,6 +153,7 @@ class ProblemDB(ProblemBase, table=True):
     extra_funcs: list["ExtraFunctionDB"] = Relationship(back_populates="problem")
     discrete_representation: "DiscreteRepresentationDB" = Relationship(back_populates="problem")
     simulators: list["SimulatorDB"] = Relationship(back_populates="problem")
+    problem_metadata: "ProblemMetaDataDB" = Relationship(back_populates="problem")

     @classmethod
     def from_problem(cls, problem_instance: Problem, user: "User") -> "ProblemDB":
@@ -167,62 +193,218 @@ class ProblemDB(ProblemBase, table=True):
             variables=[VariableDB.model_validate(var) for var in scalar_variables],
             tensor_variables=[TensorVariableDB.model_validate(var) for var in tensor_variables],
             objectives=[ObjectiveDB.model_validate(obj) for obj in problem_instance.objectives],
-            constraints=[ConstraintDB.model_validate(con) for con in problem_instance.constraints]
-            if problem_instance.constraints is not None
-            else [],
-            scalarization_funcs=[
-                ScalarizationFunctionDB.model_validate(scal) for scal in problem_instance.scalarization_funcs
-            ]
-            if problem_instance.scalarization_funcs is not None
-            else [],
-            extra_funcs=[ExtraFunctionDB.model_validate(extra) for extra in problem_instance.extra_funcs]
-            if problem_instance.extra_funcs is not None
-            else [],
-            discrete_representation=DiscreteRepresentationDB.model_validate(problem_instance.discrete_representation)
-            if problem_instance.discrete_representation is not None
-            else None,
-            simulators=[SimulatorDB.model_validate(sim) for sim in problem_instance.simulators]
-            if problem_instance.simulators is not None
-            else [],
+            constraints=(
+                [ConstraintDB.model_validate(con) for con in problem_instance.constraints]
+                if problem_instance.constraints is not None
+                else []
+            ),
+            scalarization_funcs=(
+                [ScalarizationFunctionDB.model_validate(scal) for scal in problem_instance.scalarization_funcs]
+                if problem_instance.scalarization_funcs is not None
+                else []
+            ),
+            extra_funcs=(
+                [ExtraFunctionDB.model_validate(extra) for extra in problem_instance.extra_funcs]
+                if problem_instance.extra_funcs is not None
+                else []
+            ),
+            discrete_representation=(
+                DiscreteRepresentationDB.model_validate(problem_instance.discrete_representation)
+                if problem_instance.discrete_representation is not None
+                else None
+            ),
+            simulators=(
+                [SimulatorDB.model_validate(sim) for sim in problem_instance.simulators]
+                if problem_instance.simulators is not None
+                else []
+            ),
         )


-class PathType(TypeDecorator):
-    """SQLAlchemy custom type to convert Path to string (credit to @strfx on GitHUb!)."""
+### PROBLEM METADATA ###
+class ForestProblemMetaData(SQLModel, table=True):
+    """A problem metadata class to hold UTOPIA forest problem specific information."""

-    impl = String
+    id: int | None = Field(primary_key=True, default=None)
+    metadata_id: int | None = Field(foreign_key="problemmetadatadb.id", default=None)
+
+    metadata_type: str = "forest_problem_metadata"
+
+    map_json: str = Field()
+    schedule_dict: dict = Field(sa_column=Column(JSON))
+    years: list[str] = Field(sa_column=Column(JSON))
+    stand_id_field: str = Field()
+    stand_descriptor: dict | None = Field(sa_column=Column(JSON), default=None)
+    compensation: float | None = Field(default=None)
+
+    metadata_instance: "ProblemMetaDataDB" = Relationship(back_populates="forest_metadata")
+
+
+class RepresentativeNonDominatedSolutions(SQLModel, table=True):
+    """A problem metadata class to store representative solutions sets, i.e., non-dominated sets...
+
+    A problem metadata class to store representative solutions sets, i.e., non-dominated sets that
+    represent/approximate the Pareto optimal solution set of the problem.
+
+    Note:
+        It is assumed that the solution set is non-dominated.
+    """
+
+    id: int | None = Field(primary_key=True, default=None)
+    metadata_id: int | None = Field(foreign_key="problemmetadatadb.id", default=None)
+
+    metadata_type: str = "representative_non_dominated_solutions"
+
+    name: str = Field(description="The name of the representative set.")
+    description: str | None = Field(description="A description of the representative set. Optional.", default=None)
+
+    solution_data: dict[str, list[float]] = Field(
+        sa_column=Column(JSON),
+        description="The non-dominated solutions. It is assumed that columns "
+        "exist for each variable and objective function. For functions, the "
+        "`_min` variant should be present, and any tensor variables should be "
+        "unrolled.",
+    )
+
+    ideal: dict[str, float] = Field(
+        sa_column=Column(JSON), description="The ideal objective function values of the representative set."
+    )
+    nadir: dict[str, float] = Field(
+        sa_column=Column(JSON), description="The nadir objective function values of the representative set."
+    )
+
+    metadata_instance: "ProblemMetaDataDB" = Relationship(back_populates="representative_nd_metadata")
+
+
+class SolverSelectionMetadata(SQLModel, table=True):
+    """A problem metadata class to store the preferred solver of a problem.
+
+    A problem metadata class to store the preferred solver of a problem.
+    See desdeo/tools/utils.py -> available_solvers for available solvers.
+    """
+
+    id: int | None = Field(primary_key=True, default=None)
+    metadata_id: int | None = Field(foreign_key="problemmetadatadb.id", default=None)

-    def process_bind_param(self, value, dialect):
-        """Path to string."""
-        if isinstance(value, Path):
-            return str(value)
-        return value
+    metadata_type: str = "solver_selection_metadata"
+
+    # The solver's string representation is used in endpoints to fetch the proper solver from available solvers.
+    solver_string_representation: str = Field(description="The string representation of the selected solver.")
+
+    metadata_instance: "ProblemMetaDataDB" = Relationship(back_populates="solver_selection_metadata")
+
+
+class ProblemMetaDataDB(SQLModel, table=True):
+    """Store Problem MetaData to DB with this class."""
+
+    id: int | None = Field(primary_key=True, default=None)
+    problem_id: int | None = Field(foreign_key="problemdb.id", default=None)
+
+    forest_metadata: list[ForestProblemMetaData] = Relationship(back_populates="metadata_instance")
+    representative_nd_metadata: list[RepresentativeNonDominatedSolutions] = Relationship(
+        back_populates="metadata_instance"
+    )
+    solver_selection_metadata: list[SolverSelectionMetadata] = Relationship(back_populates="metadata_instance")
+    problem: ProblemDB = Relationship(back_populates="problem_metadata")
+
+    @property
+    def all_metadata(
+        self,
+    ) -> list[ForestProblemMetaData | RepresentativeNonDominatedSolutions | SolverSelectionMetadata]:
+        """Return all metadata in one list."""
+        return (
+            (self.forest_metadata or [])
+            + (self.representative_nd_metadata or [])
+            + (self.solver_selection_metadata or [])
+        )
+
+
+class ProblemMetaDataPublic(SQLModel):
+    """Response model for ProblemMetaData."""
+
+    problem_id: int
+
+    forest_metadata: list[ForestProblemMetaData] | None
+    representative_nd_metadata: list[RepresentativeNonDominatedSolutions] | None
+
+
+class ProblemMetaDataGetRequest(SQLModel):
+    """Request model for getting specific type of metadata from a specific problem."""
+
+    problem_id: int
+    metadata_type: str
+
+
+### PATH TYPES ###
+class PathOrUrlType(TypeDecorator):
+    """Helper class for dealing with Paths and Urls."""
+
+    impl = JSON
+    cache_ok = True
+
+    def process_bind_param(self, value: Path | Url | None, dialect):
+        """Convert to string or JSON."""
+        if value is None:
+            return None
+        elif isinstance(value, Path):  # noqa: RET505
+            return {"_type": "path", "value": str(value)}
+        elif isinstance(value, Url):
+            return {"_type": "url", "value": value.model_dump()}
+        else:
+            raise ValueError(f"Unsupported type: {type(value)}")

     def process_result_value(self, value, dialect):
-        """String to Path."""
-        if value is not None:
-            return Path(value)
-        return value
+        """Convert back to Path or URL."""
+        if value is None:
+            return None
+        elif isinstance(value, dict) and "_type" in value:  # noqa: RET505
+            if value["_type"] == "path":
+                return Path(value["value"])
+            elif value["_type"] == "url":  # noqa: RET505
+                return Url(**value["value"])
+        raise ValueError(f"Invalid format: {value}")


-class PathListType(TypeDecorator):
-    """SQLAlchemy custom type to convert list[Path] to JSON."""
+class PathOrUrlListType(TypeDecorator):
+    """SQLAlchemy custom type to convert list[Path | Url] to JSON."""

     impl = String
+    cache_ok = True
+
+    def process_bind_param(self, value: list[Path | Url] | None, dialect):
+        """Serialize list[Path | Url] to JSON."""
+        if value is None:
+            return None

-    def process_bind_param(self, value, dialect):
-        """list[Path] to JSON."""
-        if isinstance(value, list) and all(isinstance(item, Path) for item in value):
-            return json.dumps([str(item) for item in value])
-        return value  # Handle as a normal string if not a list of Paths
+        serialized = []
+        for item in value:
+            if isinstance(item, Path):
+                serialized.append({"_type": "path", "value": str(item)})
+            elif isinstance(item, Url):
+                serialized.append({"_type": "url", "value": item.model_dump()})
+            else:
+                raise TypeError(f"Unsupported item type in list: {type(item)}")
+
+        return json.dumps(serialized)

     def process_result_value(self, value, dialect):
-        """JSON to list[Path]."""
-        # Deserialize JSON back to a list of Path objects
-        if value is not None:
-            path_strings = json.loads(value)
-            return [Path(item) for item in path_strings]
-        return None
+        """Deserialize JSON to list[Path | Url]."""
+        if value is None:
+            return None
+
+        try:
+            items = json.loads(value)
+            result = []
+            for item in items:
+                if item["_type"] == "path":
+                    result.append(Path(item["value"]))
+                elif item["_type"] == "url":
+                    result.append(Url(**item["value"]))
+                else:
+                    raise ValueError(f"Unknown _type: {item.get('_type')}")
+            return result  # noqa: TRY300
+        except (json.JSONDecodeError, KeyError, TypeError) as e:
+            raise ValueError(f"Invalid format for PathListType: {value}") from e


 def from_pydantic(
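
PathOrUrlType and PathOrUrlListType replace the old PathType and PathListType by storing each value as a {"_type": ..., "value": ...} object, so filesystem paths and URLs can share a single column. The standalone sketch below reproduces that tagging pattern; Url is a plain string stand-in (the real one is a pydantic model from desdeo.problem.schema) and the Record table is hypothetical.

# Standalone sketch of the "_type"-tagged JSON pattern used by PathOrUrlType above.
# Url is a plain string stand-in and the Record table is hypothetical; only the
# TypeDecorator round trip mirrors the code above.
from pathlib import Path

from sqlalchemy import JSON, Column, Integer, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Session
from sqlalchemy.types import TypeDecorator


class Url(str):
    """Stand-in for a URL value."""


class PathOrUrl(TypeDecorator):
    impl = JSON
    cache_ok = True

    def process_bind_param(self, value, dialect):
        # Tag the value so it can be reconstructed as the right type later.
        if value is None:
            return None
        if isinstance(value, Path):
            return {"_type": "path", "value": str(value)}
        if isinstance(value, Url):
            return {"_type": "url", "value": str(value)}
        raise ValueError(f"Unsupported type: {type(value)}")

    def process_result_value(self, value, dialect):
        # Dispatch on the stored tag when loading from the database.
        if value is None:
            return None
        if value["_type"] == "path":
            return Path(value["value"])
        if value["_type"] == "url":
            return Url(value["value"])
        raise ValueError(f"Invalid format: {value}")


class Base(DeclarativeBase):
    pass


class Record(Base):
    __tablename__ = "record"
    id = Column(Integer, primary_key=True)
    target = Column(PathOrUrl)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Record(target=Path("models/surrogate.pkl")))
    session.add(Record(target=Url("https://example.com/simulator")))
    session.commit()
    loaded = session.scalars(select(Record).order_by(Record.id)).all()
    assert isinstance(loaded[0].target, Path)
    assert isinstance(loaded[1].target, Url)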
@@ -320,7 +502,9 @@ class ConstantDB(_ConstantDB, table=True):


 _VariableDB = from_pydantic(
-    Variable, "_VariableDB", union_type_conversions={VariableType: float, VariableType | None: float | None}
+    Variable,
+    "_VariableDB",
+    union_type_conversions={VariableType: float, VariableType | None: float | None},
 )


@@ -366,14 +550,18 @@ class _Objective(SQLModel):

     func: list | None = Field(sa_column=Column(JSON, nullable=True))
     scenario_keys: list[str] | None = Field(sa_column=Column(JSON), default=None)
-    surrogates: list[Path] | None = Field(sa_column=Column(PathListType), default=None)
-    simulator_path: Path | None = Field(sa_column=Column(PathType), default=None)
+    surrogates: list[Path] | None = Field(sa_column=Column(PathOrUrlListType), default=None)
+    simulator_path: Path | Url | None = Field(sa_column=Column(PathOrUrlType), default=None)


 _ObjectiveDB = from_pydantic(
     Objective,
     "_ObjectiveDB",
-    union_type_conversions={str | None: str | None, float | None: float | None},
+    union_type_conversions={
+        str | None: str | None,
+        float | None: float | None,
+        Path | Url | None: PathOrUrlType | None,
+    },
     base_model=_Objective,
 )

@@ -393,14 +581,18 @@ class _Constraint(SQLModel):

     func: list = Field(sa_column=Column(JSON))
     scenario_keys: list[str] | None = Field(sa_column=Column(JSON), default=None)
-    surrogates: list[Path] | None = Field(sa_column=Column(PathListType), default=None)
-    simulator_path: Path | None = Field(sa_column=Column(PathType), default=None)
+    surrogates: list[Path] | None = Field(sa_column=Column(PathOrUrlListType), default=None)
+    simulator_path: Path | Url | None = Field(sa_column=Column(PathOrUrlType), default=None)


 _ConstraintDB = from_pydantic(
     Constraint,
     "_ConstraintDB",
-    union_type_conversions={str | None: str | None, float | None: float | None},
+    union_type_conversions={
+        str | None: str | None,
+        float | None: float | None,
+        Path | Url | None: PathOrUrlType | None,
+    },
     base_model=_Constraint,
 )

@@ -445,12 +637,18 @@ class _ExtraFunction(SQLModel):

     func: list = Field(sa_column=Column(JSON))
     scenario_keys: list[str] | None = Field(sa_column=Column(JSON), default=None)
-    surrogates: list[Path] | None = Field(sa_column=Column(PathListType), default=None)
-    simulator_path: Path | None = Field(sa_column=Column(PathType), default=None)
+    surrogates: list[Path] | None = Field(sa_column=Column(PathOrUrlListType), default=None)
+    simulator_path: Path | Url | None = Field(sa_column=Column(PathOrUrlType), default=None)


 _ExtraFunctionDB = from_pydantic(
-    ExtraFunction, "_ExtraFunctionDB", union_type_conversions={str | None: str | None}, base_model=_ExtraFunction
+    ExtraFunction,
+    "_ExtraFunctionDB",
+    union_type_conversions={
+        str | None: str | None,
+        Path | Url | None: PathOrUrlType | None,
+    },
+    base_model=_ExtraFunction,
 )


@@ -473,7 +671,9 @@ class _DiscreteRepresentation(SQLModel):


 _DiscreteRepresentationDB = from_pydantic(
-    DiscreteRepresentation, "_DiscreteRepresentation", base_model=_DiscreteRepresentation
+    DiscreteRepresentation,
+    "_DiscreteRepresentation",
+    base_model=_DiscreteRepresentation,
 )


@@ -490,11 +690,21 @@ class DiscreteRepresentationDB(_DiscreteRepresentationDB, table=True):
 class _Simulator(SQLModel):
     """Helper class to override the fields of nested and list types, and Paths."""

-    file: Path = Field(sa_column=Column(PathType))
+    file: Path | None = Field(sa_column=Column(PathOrUrlType), default=None)
+    url: Url | None = Field(sa_column=Column(PathOrUrlType), default=None)
     parameter_options: dict | None = Field(sa_column=Column(JSON), default=None)


-_SimulatorDB = from_pydantic(Simulator, "_SimulatorDB", base_model=_Simulator)
+_SimulatorDB = from_pydantic(
+    Simulator,
+    "_SimulatorDB",
+    union_type_conversions={
+        str | None: str | None,
+        Path | None: PathOrUrlType | None,
+        Url | None: PathOrUrlType | None,
+    },
+    base_model=_Simulator,
+)


 class SimulatorDB(_SimulatorDB, table=True):

desdeo/api/models/session.py
@@ -42,5 +42,8 @@ class InteractiveSessionDB(InteractiveSessionBase, table=True):
     info: str | None = Field(default=None)

     # Back populates
-    states: list["StateDB"] = Relationship(back_populates="session")
+    states: list["StateDB"] = Relationship(
+        back_populates="session",
+        sa_relationship_kwargs={"cascade": "all, delete-orphan"},
+    )
     user: "User" = Relationship(back_populates="sessions")