desdeo 1.1.3__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- desdeo/__init__.py +8 -8
- desdeo/api/README.md +73 -0
- desdeo/api/__init__.py +15 -0
- desdeo/api/app.py +40 -0
- desdeo/api/config.py +69 -0
- desdeo/api/config.toml +53 -0
- desdeo/api/db.py +25 -0
- desdeo/api/db_init.py +79 -0
- desdeo/api/db_models.py +164 -0
- desdeo/api/malaga_db_init.py +27 -0
- desdeo/api/models/__init__.py +66 -0
- desdeo/api/models/archive.py +34 -0
- desdeo/api/models/preference.py +90 -0
- desdeo/api/models/problem.py +507 -0
- desdeo/api/models/reference_point_method.py +18 -0
- desdeo/api/models/session.py +46 -0
- desdeo/api/models/state.py +96 -0
- desdeo/api/models/user.py +51 -0
- desdeo/api/routers/_NAUTILUS.py +245 -0
- desdeo/api/routers/_NAUTILUS_navigator.py +233 -0
- desdeo/api/routers/_NIMBUS.py +762 -0
- desdeo/api/routers/__init__.py +5 -0
- desdeo/api/routers/problem.py +110 -0
- desdeo/api/routers/reference_point_method.py +117 -0
- desdeo/api/routers/session.py +76 -0
- desdeo/api/routers/test.py +16 -0
- desdeo/api/routers/user_authentication.py +366 -0
- desdeo/api/schema.py +94 -0
- desdeo/api/tests/__init__.py +0 -0
- desdeo/api/tests/conftest.py +59 -0
- desdeo/api/tests/test_models.py +701 -0
- desdeo/api/tests/test_routes.py +216 -0
- desdeo/api/utils/database.py +274 -0
- desdeo/api/utils/logger.py +29 -0
- desdeo/core.py +27 -0
- desdeo/emo/__init__.py +29 -0
- desdeo/emo/hooks/archivers.py +172 -0
- desdeo/emo/methods/EAs.py +418 -0
- desdeo/emo/methods/__init__.py +0 -0
- desdeo/emo/methods/bases.py +59 -0
- desdeo/emo/operators/__init__.py +1 -0
- desdeo/emo/operators/crossover.py +780 -0
- desdeo/emo/operators/evaluator.py +118 -0
- desdeo/emo/operators/generator.py +356 -0
- desdeo/emo/operators/mutation.py +1053 -0
- desdeo/emo/operators/selection.py +1036 -0
- desdeo/emo/operators/termination.py +178 -0
- desdeo/explanations/__init__.py +6 -0
- desdeo/explanations/explainer.py +100 -0
- desdeo/explanations/utils.py +90 -0
- desdeo/mcdm/__init__.py +19 -0
- desdeo/mcdm/nautili.py +345 -0
- desdeo/mcdm/nautilus.py +477 -0
- desdeo/mcdm/nautilus_navigator.py +655 -0
- desdeo/mcdm/nimbus.py +417 -0
- desdeo/mcdm/pareto_navigator.py +269 -0
- desdeo/mcdm/reference_point_method.py +116 -0
- desdeo/problem/__init__.py +79 -0
- desdeo/problem/evaluator.py +561 -0
- desdeo/problem/gurobipy_evaluator.py +562 -0
- desdeo/problem/infix_parser.py +341 -0
- desdeo/problem/json_parser.py +944 -0
- desdeo/problem/pyomo_evaluator.py +468 -0
- desdeo/problem/schema.py +1808 -0
- desdeo/problem/simulator_evaluator.py +298 -0
- desdeo/problem/sympy_evaluator.py +244 -0
- desdeo/problem/testproblems/__init__.py +73 -0
- desdeo/problem/testproblems/binh_and_korn_problem.py +88 -0
- desdeo/problem/testproblems/dtlz2_problem.py +102 -0
- desdeo/problem/testproblems/forest_problem.py +275 -0
- desdeo/problem/testproblems/knapsack_problem.py +163 -0
- desdeo/problem/testproblems/mcwb_problem.py +831 -0
- desdeo/problem/testproblems/mixed_variable_dimenrions_problem.py +83 -0
- desdeo/problem/testproblems/momip_problem.py +172 -0
- desdeo/problem/testproblems/nimbus_problem.py +143 -0
- desdeo/problem/testproblems/pareto_navigator_problem.py +89 -0
- desdeo/problem/testproblems/re_problem.py +492 -0
- desdeo/problem/testproblems/river_pollution_problem.py +434 -0
- desdeo/problem/testproblems/rocket_injector_design_problem.py +140 -0
- desdeo/problem/testproblems/simple_problem.py +351 -0
- desdeo/problem/testproblems/simulator_problem.py +92 -0
- desdeo/problem/testproblems/spanish_sustainability_problem.py +945 -0
- desdeo/problem/testproblems/zdt_problem.py +271 -0
- desdeo/problem/utils.py +245 -0
- desdeo/tools/GenerateReferencePoints.py +181 -0
- desdeo/tools/__init__.py +102 -0
- desdeo/tools/generics.py +145 -0
- desdeo/tools/gurobipy_solver_interfaces.py +258 -0
- desdeo/tools/indicators_binary.py +11 -0
- desdeo/tools/indicators_unary.py +375 -0
- desdeo/tools/interaction_schema.py +38 -0
- desdeo/tools/intersection.py +54 -0
- desdeo/tools/iterative_pareto_representer.py +99 -0
- desdeo/tools/message.py +234 -0
- desdeo/tools/ng_solver_interfaces.py +199 -0
- desdeo/tools/non_dominated_sorting.py +133 -0
- desdeo/tools/patterns.py +281 -0
- desdeo/tools/proximal_solver.py +99 -0
- desdeo/tools/pyomo_solver_interfaces.py +464 -0
- desdeo/tools/reference_vectors.py +462 -0
- desdeo/tools/scalarization.py +3138 -0
- desdeo/tools/scipy_solver_interfaces.py +454 -0
- desdeo/tools/score_bands.py +464 -0
- desdeo/tools/utils.py +320 -0
- desdeo/utopia_stuff/__init__.py +0 -0
- desdeo/utopia_stuff/data/1.json +15 -0
- desdeo/utopia_stuff/data/2.json +13 -0
- desdeo/utopia_stuff/data/3.json +15 -0
- desdeo/utopia_stuff/data/4.json +17 -0
- desdeo/utopia_stuff/data/5.json +15 -0
- desdeo/utopia_stuff/from_json.py +40 -0
- desdeo/utopia_stuff/reinit_user.py +38 -0
- desdeo/utopia_stuff/utopia_db_init.py +212 -0
- desdeo/utopia_stuff/utopia_problem.py +403 -0
- desdeo/utopia_stuff/utopia_problem_old.py +415 -0
- desdeo/utopia_stuff/utopia_reference_solutions.py +79 -0
- desdeo-2.0.0.dist-info/LICENSE +21 -0
- desdeo-2.0.0.dist-info/METADATA +168 -0
- desdeo-2.0.0.dist-info/RECORD +120 -0
- {desdeo-1.1.3.dist-info → desdeo-2.0.0.dist-info}/WHEEL +1 -1
- desdeo-1.1.3.dist-info/METADATA +0 -18
- desdeo-1.1.3.dist-info/RECORD +0 -4
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
"""Defines a models for representing preferences."""
|
|
2
|
+
|
|
3
|
+
from typing import TYPE_CHECKING, ClassVar, Literal
|
|
4
|
+
|
|
5
|
+
from sqlalchemy.types import TypeDecorator
|
|
6
|
+
from sqlmodel import JSON, Column, Field, Relationship, SQLModel
|
|
7
|
+
|
|
8
|
+
from .problem import ProblemDB
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from .archive import ArchiveEntryDB
|
|
12
|
+
from .user import User
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class PreferenceType(TypeDecorator):
|
|
16
|
+
"""SQLAlchemy custom type to convert a preferences to JSON and back.
|
|
17
|
+
|
|
18
|
+
The reason for this TypeDecorator is to avoid model_dump when initializing
|
|
19
|
+
`PreferenceDB` with instances and derivatives of `PreferenceBase`.
|
|
20
|
+
"""
|
|
21
|
+
|
|
22
|
+
impl = JSON
|
|
23
|
+
|
|
24
|
+
def process_bind_param(self, value, dialect):
|
|
25
|
+
"""Preference to JSON."""
|
|
26
|
+
if isinstance(value, Bounds | ReferencePoint):
|
|
27
|
+
return value.model_dump()
|
|
28
|
+
|
|
29
|
+
msg = f"No JSON serialization set for preference type '{type(value)}'."
|
|
30
|
+
print(msg)
|
|
31
|
+
|
|
32
|
+
return value
|
|
33
|
+
|
|
34
|
+
def process_result_value(self, value, dialect):
|
|
35
|
+
"""JSON to Preference."""
|
|
36
|
+
if "preference_type" in value:
|
|
37
|
+
match value["preference_type"]:
|
|
38
|
+
case "reference_point":
|
|
39
|
+
return ReferencePoint.model_validate(value)
|
|
40
|
+
case "bounds":
|
|
41
|
+
return Bounds.model_validate(value)
|
|
42
|
+
case _:
|
|
43
|
+
msg = f"No preference_type '{value['preference_type']}' found."
|
|
44
|
+
print(msg)
|
|
45
|
+
|
|
46
|
+
return value
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class PreferenceBase(SQLModel):
    """The base model for representing preferences."""

    # Single-table polymorphism: concrete preference classes are discriminated
    # on the "type" column; this base carries the identity "preference_base".
    __mapper_args__: ClassVar[dict[str, str]] = {"polymorphic_on": "type", "polymorphic_identity": "preference_base"}

    # Serialization discriminator; subclasses narrow this Literal to their own tag.
    preference_type: Literal["unset"] = "unset"
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class ReferencePoint(PreferenceBase):
    """Model for representing a reference point type of preference."""

    # BUG FIX: the polymorphic identity was "preference_base", which collides
    # with `PreferenceBase`'s own identity; it must uniquely identify this
    # subclass (compare `Bounds`, whose identity is "bounds").
    __mapper_args__: ClassVar[dict[str, str]] = {"polymorphic_identity": "reference_point"}

    preference_type: Literal["reference_point"] = "reference_point"
    # Maps an objective's symbol to its aspiration level.
    aspiration_levels: dict[str, float] = Field(sa_column=Column(JSON, nullable=False))
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class Bounds(PreferenceBase):
    """Model for representing desired upper and lower bounds for objective functions."""

    __mapper_args__: ClassVar[dict[str, str]] = {"polymorphic_identity": "bounds"}

    preference_type: Literal["bounds"] = "bounds"

    # Keys are objective symbols (presumably — confirm against callers).
    # Bound can also be None, indicating that it is not bound
    lower_bounds: dict[str, float | None] = Field(sa_column=Column(JSON, nullable=False))
    upper_bounds: dict[str, float | None] = Field(sa_column=Column(JSON, nullable=False))
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
class PreferenceDB(SQLModel, table=True):
    """Database model for storing preferences."""

    id: int | None = Field(primary_key=True, default=None)
    user_id: int | None = Field(foreign_key="user.id", default=None)
    problem_id: int | None = Field(foreign_key="problemdb.id", default=None)

    # Stored via the custom `PreferenceType` decorator, so `PreferenceBase`
    # subclasses round-trip through JSON without explicit model_dump calls.
    preference: PreferenceBase | None = Field(sa_column=Column(PreferenceType), default=None)

    # Back populates
    problem: "ProblemDB" = Relationship(back_populates="preferences")
    user: "User" = Relationship(back_populates="preferences")
    solutions: list["ArchiveEntryDB"] = Relationship(back_populates="preference")
|
|
@@ -0,0 +1,507 @@
|
|
|
1
|
+
"""."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from types import UnionType
|
|
6
|
+
from typing import TYPE_CHECKING, Optional
|
|
7
|
+
|
|
8
|
+
from pydantic import BaseModel, ConfigDict, create_model
|
|
9
|
+
from sqlalchemy.types import String, TypeDecorator
|
|
10
|
+
from sqlmodel import JSON, Column, Field, Relationship, SQLModel
|
|
11
|
+
|
|
12
|
+
from desdeo.problem.schema import (
|
|
13
|
+
Constant,
|
|
14
|
+
Constraint,
|
|
15
|
+
DiscreteRepresentation,
|
|
16
|
+
ExtraFunction,
|
|
17
|
+
Objective,
|
|
18
|
+
Problem,
|
|
19
|
+
ScalarizationFunction,
|
|
20
|
+
Simulator,
|
|
21
|
+
Tensor,
|
|
22
|
+
TensorConstant,
|
|
23
|
+
TensorVariable,
|
|
24
|
+
Variable,
|
|
25
|
+
VariableDomainTypeEnum,
|
|
26
|
+
VariableType,
|
|
27
|
+
)
|
|
28
|
+
|
|
29
|
+
if TYPE_CHECKING:
|
|
30
|
+
from .archive import ArchiveEntryDB
|
|
31
|
+
from .preference import PreferenceDB
|
|
32
|
+
from .user import User
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class ProblemBase(SQLModel):
    """Base model with the scalar fields shared by the problem models below."""

    model_config = ConfigDict(from_attributes=True)

    # Model fields
    name: str | None = Field()
    description: str | None = Field()
    # Problem property flags; None means the property is unknown.
    is_convex: bool | None = Field(nullable=True, default=None)
    is_linear: bool | None = Field(nullable=True, default=None)
    is_twice_differentiable: bool | None = Field(nullable=True, default=None)
    scenario_keys: list[str] | None = Field(sa_column=Column(JSON, nullable=True), default=None)
    variable_domain: VariableDomainTypeEnum | None = Field()
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class ProblemGetRequest(SQLModel):
    """Model to deal with problem fetching requests."""

    # Primary key of the `ProblemDB` row to fetch.
    problem_id: int
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class ProblemInfo(ProblemBase):
    """Full read model of a stored problem, including all related components."""

    id: int
    user_id: int

    name: str
    description: str
    is_convex: bool | None
    is_linear: bool | None
    is_twice_differentiable: bool | None
    scenario_keys: list[str] | None
    variable_domain: VariableDomainTypeEnum

    # Related components; optional collections are None when absent,
    # except objectives, which every problem must have.
    constants: list["ConstantDB"] | None
    tensor_constants: list["TensorConstantDB"] | None
    variables: list["VariableDB"] | None
    tensor_variables: list["TensorVariableDB"] | None
    objectives: list["ObjectiveDB"]
    constraints: list["ConstraintDB"] | None
    scalarization_funcs: list["ScalarizationFunctionDB"] | None
    extra_funcs: list["ExtraFunctionDB"] | None
    discrete_representation: "DiscreteRepresentationDB | None"
    simulators: list["SimulatorDB"] | None
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
class ProblemInfoSmall(ProblemBase):
    """Lightweight read model of a stored problem: metadata only, no components."""

    id: int
    user_id: int

    name: str
    description: str
    is_convex: bool | None
    is_linear: bool | None
    is_twice_differentiable: bool | None
    scenario_keys: list[str] | None
    variable_domain: VariableDomainTypeEnum
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class ProblemDB(ProblemBase, table=True):
    """Database table model for a multiobjective optimization problem."""

    model_config = ConfigDict(from_attributes=True)

    # Database specific
    id: int | None = Field(primary_key=True, default=None)
    user_id: int | None = Field(foreign_key="user.id", default=None)

    # Model fields
    name: str = Field()
    description: str = Field()
    is_convex: bool | None = Field(nullable=True, default=None)
    is_linear: bool | None = Field(nullable=True, default=None)
    is_twice_differentiable: bool | None = Field(nullable=True, default=None)
    scenario_keys: list[str] | None = Field(sa_column=Column(JSON, nullable=True), default=None)
    variable_domain: VariableDomainTypeEnum = Field()

    # Back populates
    user: "User" = Relationship(back_populates="problems")
    solutions: list["ArchiveEntryDB"] = Relationship(back_populates="problem")
    preferences: list["PreferenceDB"] = Relationship(back_populates="problem")

    # Populated by other models
    constants: list["ConstantDB"] = Relationship(back_populates="problem")
    tensor_constants: list["TensorConstantDB"] = Relationship(back_populates="problem")
    variables: list["VariableDB"] = Relationship(back_populates="problem")
    tensor_variables: list["TensorVariableDB"] = Relationship(back_populates="problem")
    objectives: list["ObjectiveDB"] = Relationship(back_populates="problem")
    constraints: list["ConstraintDB"] = Relationship(back_populates="problem")
    scalarization_funcs: list["ScalarizationFunctionDB"] = Relationship(back_populates="problem")
    extra_funcs: list["ExtraFunctionDB"] = Relationship(back_populates="problem")
    discrete_representation: "DiscreteRepresentationDB" = Relationship(back_populates="problem")
    simulators: list["SimulatorDB"] = Relationship(back_populates="problem")

    @classmethod
    def from_problem(cls, problem_instance: Problem, user: "User") -> "ProblemDB":
        """Initialize the model from an instance of `Problem`.

        Splits the mixed `constants` and `variables` lists of the `Problem`
        into their scalar and tensor variants, and converts every component
        into the corresponding `...DB` model. Optional components missing
        from the problem become empty lists (or `None` for the discrete
        representation).

        Args:
            problem_instance (Problem): the `Problem` instance from which to initialize
                a `ProblemDB` model.
            user (User): the user the instance of `ProblemDB` is assigned to.

        Returns:
            ProblemDB: the new instance of `ProblemDB`.
        """
        # `Problem.constants` mixes Constant and TensorConstant; split by type.
        scalar_constants = (
            [const for const in problem_instance.constants if isinstance(const, Constant)]
            if problem_instance.constants is not None
            else []
        )
        tensor_constants = (
            [const for const in problem_instance.constants if isinstance(const, TensorConstant)]
            if problem_instance.constants is not None
            else []
        )
        # Same split for variables (Variable vs. TensorVariable).
        scalar_variables = [var for var in problem_instance.variables if isinstance(var, Variable)]
        tensor_variables = [var for var in problem_instance.variables if isinstance(var, TensorVariable)]
        return cls(
            user_id=user.id,
            name=problem_instance.name,
            description=problem_instance.description,
            is_convex=problem_instance.is_convex_,
            is_linear=problem_instance.is_linear_,
            is_twice_differentiable=problem_instance.is_twice_differentiable_,
            variable_domain=problem_instance.variable_domain,
            scenario_keys=problem_instance.scenario_keys,
            constants=[ConstantDB.model_validate(const) for const in scalar_constants],
            tensor_constants=[TensorConstantDB.model_validate(const) for const in tensor_constants],
            variables=[VariableDB.model_validate(var) for var in scalar_variables],
            tensor_variables=[TensorVariableDB.model_validate(var) for var in tensor_variables],
            objectives=[ObjectiveDB.model_validate(obj) for obj in problem_instance.objectives],
            constraints=[ConstraintDB.model_validate(con) for con in problem_instance.constraints]
            if problem_instance.constraints is not None
            else [],
            scalarization_funcs=[
                ScalarizationFunctionDB.model_validate(scal) for scal in problem_instance.scalarization_funcs
            ]
            if problem_instance.scalarization_funcs is not None
            else [],
            extra_funcs=[ExtraFunctionDB.model_validate(extra) for extra in problem_instance.extra_funcs]
            if problem_instance.extra_funcs is not None
            else [],
            discrete_representation=DiscreteRepresentationDB.model_validate(problem_instance.discrete_representation)
            if problem_instance.discrete_representation is not None
            else None,
            simulators=[SimulatorDB.model_validate(sim) for sim in problem_instance.simulators]
            if problem_instance.simulators is not None
            else [],
        )
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
class PathType(TypeDecorator):
    """SQLAlchemy custom type to convert Path to string (credit to @strfx on GitHUb!)."""

    impl = String

    def process_bind_param(self, value, dialect):
        """Path to string."""
        # Non-Path values (including None) pass through untouched.
        return str(value) if isinstance(value, Path) else value

    def process_result_value(self, value, dialect):
        """String to Path."""
        # NULL columns come back as None and stay None.
        return Path(value) if value is not None else value
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
class PathListType(TypeDecorator):
    """SQLAlchemy custom type to convert list[Path] to JSON."""

    impl = String

    def process_bind_param(self, value, dialect):
        """list[Path] to JSON."""
        is_path_list = isinstance(value, list) and all(isinstance(entry, Path) for entry in value)
        if not is_path_list:
            return value  # Handle as a normal string if not a list of Paths
        return json.dumps([str(entry) for entry in value])

    def process_result_value(self, value, dialect):
        """JSON to list[Path]."""
        # Deserialize JSON back to a list of Path objects
        if value is None:
            return None
        return [Path(entry) for entry in json.loads(value)]
|
|
226
|
+
|
|
227
|
+
|
|
228
|
+
def from_pydantic(
    model_class: BaseModel,
    name: str,
    union_type_conversions: dict[type, type] | None = None,
    base_model: SQLModel = SQLModel,
) -> SQLModel:
    """Creates an SQLModel class from a pydantic model.

    Args:
        model_class (BaseClass): the pydantic class to be converted.
        name (str): the name given to the class.
        union_type_conversions (dict[type, type], optional): union type conversion table. This is needed, because
            SQLAlchemy expects all table columns to have a specific value. For example, a field with a type like
            `int | float | bool` cannot be stored in a database table because the field's type
            is ambiguous. In this case, storing whichever value originally stored in a the field as a
            `float` will suffice (because `int` and `bool` can be represented by floats).
            Therefore, a type conversion, such as `{int | float | bool: float}` is expected.
            Defaults to `None`.
        base_model (SQLModel, optional): a base SQLModel to override problematic fields in the `model_class`, such
            as lists or derived nested types. The base class may have custom validators to help convert
            these values into something more suitable to be stored in a database. Often storing the JSON
            representation of the problematic types is enough. If the `model_class` consists of only
            fields with primitive types, this argument can be left to its default value. Defaults to SQLModel.

    Raises:
        TypeError: one or more type conversions are missing for union types.

    Returns:
        SQLModel: the SQLModel corresponding to `model_class`.
    """
    if union_type_conversions is None:
        union_type_conversions = {}

    # Fields already declared on the base model are kept verbatim and skip
    # union-type conversion entirely.
    overridden = base_model.model_fields

    field_definitions = {}
    for field_name, field_info in model_class.model_fields.items():
        if field_name in overridden:
            override = overridden[field_name]
            field_definitions[field_name] = (override.annotation, override)
            continue

        annotation = field_info.annotation
        if type(annotation) is UnionType:
            # Union-typed columns must be mapped to a single concrete type.
            if annotation not in union_type_conversions:
                raise TypeError("Missing Union type conversion")
            annotation = union_type_conversions[annotation]

        field_definitions[field_name] = (annotation, field_info)

    return create_model(name, __base__=base_model, **field_definitions)
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
class _TensorConstant(SQLModel):
    """Helper class to override the field types of nested and list types."""

    # Tensor payload and its shape are stored as JSON columns.
    values: Tensor = Field(sa_column=Column(JSON))
    shape: list[int] = Field(sa_column=Column(JSON))


# Generated SQLModel base mirroring the pydantic `TensorConstant`.
_BaseTensorConstantDB = from_pydantic(
    TensorConstant,
    "_BaseTensorConstantDB",
    union_type_conversions={VariableType | None: float | None},
    base_model=_TensorConstant,
)


class TensorConstantDB(_BaseTensorConstantDB, table=True):
    """The SQLModel equivalent to `TensorConstant`."""

    id: int | None = Field(primary_key=True, default=None)
    problem_id: int | None = Field(default=None, foreign_key="problemdb.id")

    # Back populates
    problem: ProblemDB | None = Relationship(back_populates="tensor_constants")
|
|
307
|
+
|
|
308
|
+
|
|
309
|
+
# Generated SQLModel base mirroring the pydantic `Constant`; the
# `VariableType` union collapses to float for storage.
_ConstantDB = from_pydantic(Constant, "_ConstantDB", union_type_conversions={VariableType: float})


class ConstantDB(_ConstantDB, table=True):
    """The SQLModel equivalent to `Constant`."""

    id: int | None = Field(primary_key=True, default=None)
    problem_id: int | None = Field(foreign_key="problemdb.id", default=None)

    # Back populates
    problem: ProblemDB | None = Relationship(back_populates="constants")
|
|
320
|
+
|
|
321
|
+
|
|
322
|
+
# Generated SQLModel base mirroring the pydantic `Variable`; union-typed
# value fields collapse to float (or float | None) for storage.
_VariableDB = from_pydantic(
    Variable, "_VariableDB", union_type_conversions={VariableType: float, VariableType | None: float | None}
)


class VariableDB(_VariableDB, table=True):
    """The SQLModel equivalent to `Variable`."""

    id: int | None = Field(primary_key=True, default=None)
    problem_id: int | None = Field(foreign_key="problemdb.id", default=None)

    # Back populates
    problem: ProblemDB | None = Relationship(back_populates="variables")
|
|
335
|
+
|
|
336
|
+
|
|
337
|
+
class _TensorVariable(SQLModel):
    """Helper class to override the field types of nested and list types."""

    # Tensor-valued bounds/initial values and the shape are stored as JSON.
    initial_values: Tensor | None = Field(sa_column=Column(JSON))
    lowerbounds: Tensor | None = Field(sa_column=Column(JSON))
    upperbounds: Tensor | None = Field(sa_column=Column(JSON))
    shape: list[int] = Field(sa_column=Column(JSON))


# Generated SQLModel base mirroring the pydantic `TensorVariable`.
_TensorVariableDB = from_pydantic(
    TensorVariable,
    "_TensorVariableDB",
    union_type_conversions={VariableType: float, VariableType | None: float | None},
    base_model=_TensorVariable,
)


class TensorVariableDB(_TensorVariableDB, table=True):
    """The SQLModel equivalent to `TensorVariable`."""

    id: int | None = Field(primary_key=True, default=None)
    problem_id: int | None = Field(foreign_key="problemdb.id", default=None)

    # Back populates
    problem: ProblemDB | None = Relationship(back_populates="tensor_variables")
|
|
362
|
+
|
|
363
|
+
|
|
364
|
+
class _Objective(SQLModel):
    """Helper class to override the fields of nested and list types, and Paths."""

    # Function expression (parsed JSON format) and scenario keys stored as JSON;
    # filesystem paths go through the custom Path type decorators above.
    func: list | None = Field(sa_column=Column(JSON, nullable=True))
    scenario_keys: list[str] | None = Field(sa_column=Column(JSON), default=None)
    surrogates: list[Path] | None = Field(sa_column=Column(PathListType), default=None)
    simulator_path: Path | None = Field(sa_column=Column(PathType), default=None)


# Generated SQLModel base mirroring the pydantic `Objective`.
_ObjectiveDB = from_pydantic(
    Objective,
    "_ObjectiveDB",
    union_type_conversions={str | None: str | None, float | None: float | None},
    base_model=_Objective,
)


class ObjectiveDB(_ObjectiveDB, table=True):
    """The SQLModel equivalent to `Objective`."""

    id: int | None = Field(primary_key=True, default=None)
    problem_id: int | None = Field(foreign_key="problemdb.id", default=None)

    # Back populates
    problem: ProblemDB | None = Relationship(back_populates="objectives")
|
|
389
|
+
|
|
390
|
+
|
|
391
|
+
class _Constraint(SQLModel):
    """Helper class to override the fields of nested and list types, and Paths."""

    # Unlike `_Objective.func`, a constraint's function expression is required.
    func: list = Field(sa_column=Column(JSON))
    scenario_keys: list[str] | None = Field(sa_column=Column(JSON), default=None)
    surrogates: list[Path] | None = Field(sa_column=Column(PathListType), default=None)
    simulator_path: Path | None = Field(sa_column=Column(PathType), default=None)


# Generated SQLModel base mirroring the pydantic `Constraint`.
_ConstraintDB = from_pydantic(
    Constraint,
    "_ConstraintDB",
    union_type_conversions={str | None: str | None, float | None: float | None},
    base_model=_Constraint,
)


class ConstraintDB(_ConstraintDB, table=True):
    """The SQLModel equivalent to `Constraint`."""

    id: int | None = Field(primary_key=True, default=None)
    problem_id: int | None = Field(foreign_key="problemdb.id", default=None)

    # Back populates
    problem: ProblemDB | None = Relationship(back_populates="constraints")
|
|
416
|
+
|
|
417
|
+
|
|
418
|
+
class _ScalarizationFunction(SQLModel):
    """Helper class to override the fields of nested and list types, and Paths."""

    func: list = Field(sa_column=Column(JSON))
    scenario_keys: list[str] = Field(sa_column=Column(JSON))


# Generated SQLModel base mirroring the pydantic `ScalarizationFunction`.
_ScalarizationFunctionDB = from_pydantic(
    ScalarizationFunction,
    "_ScalarizationFunctionDB",
    union_type_conversions={str | None: str | None},
    base_model=_ScalarizationFunction,
)


class ScalarizationFunctionDB(_ScalarizationFunctionDB, table=True):
    """The SQLModel equivalent to `ScalarizationFunction`."""

    id: int | None = Field(primary_key=True, default=None)
    problem_id: int | None = Field(foreign_key="problemdb.id", default=None)

    # Back populates
    problem: ProblemDB | None = Relationship(back_populates="scalarization_funcs")
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
class _ExtraFunction(SQLModel):
    """Helper class to override the fields of nested and list types, and Paths."""

    func: list = Field(sa_column=Column(JSON))
    scenario_keys: list[str] | None = Field(sa_column=Column(JSON), default=None)
    surrogates: list[Path] | None = Field(sa_column=Column(PathListType), default=None)
    simulator_path: Path | None = Field(sa_column=Column(PathType), default=None)


# Generated SQLModel base mirroring the pydantic `ExtraFunction`.
_ExtraFunctionDB = from_pydantic(
    ExtraFunction, "_ExtraFunctionDB", union_type_conversions={str | None: str | None}, base_model=_ExtraFunction
)


class ExtraFunctionDB(_ExtraFunctionDB, table=True):
    """The SQLModel equivalent to `ExtraFunction`."""

    id: int | None = Field(primary_key=True, default=None)
    problem_id: int | None = Field(foreign_key="problemdb.id", default=None)

    # Back populates
    problem: ProblemDB | None = Relationship(back_populates="extra_funcs")
|
|
465
|
+
|
|
466
|
+
|
|
467
|
+
class _DiscreteRepresentation(SQLModel):
    """Helper class to override the fields of nested and list types, and Paths."""

    non_dominated: bool = Field(default=False)
    variable_values: dict[str, list[VariableType]] = Field(sa_column=Column(JSON))
    objective_values: dict[str, list[float]] = Field(sa_column=Column(JSON))


# BUG FIX: the generated class was previously named "_DiscreteRepresentation",
# shadowing the helper base class above in reprs and diagnostics; name it
# "_DiscreteRepresentationDB" for consistency with the other generated
# `_...DB` bases in this module.
_DiscreteRepresentationDB = from_pydantic(
    DiscreteRepresentation, "_DiscreteRepresentationDB", base_model=_DiscreteRepresentation
)


class DiscreteRepresentationDB(_DiscreteRepresentationDB, table=True):
    """The SQLModel equivalent to `DiscreteRepresentation`."""

    id: int | None = Field(primary_key=True, default=None)
    problem_id: int | None = Field(foreign_key="problemdb.id", default=None)

    # Back populates
    problem: ProblemDB | None = Relationship(back_populates="discrete_representation")
|
|
488
|
+
|
|
489
|
+
|
|
490
|
+
class _Simulator(SQLModel):
    """Helper class to override the fields of nested and list types, and Paths."""

    # Path to the simulator file, stored via the custom Path decorator.
    file: Path = Field(sa_column=Column(PathType))
    parameter_options: dict | None = Field(sa_column=Column(JSON), default=None)


# Generated SQLModel base mirroring the pydantic `Simulator`.
_SimulatorDB = from_pydantic(Simulator, "_SimulatorDB", base_model=_Simulator)


class SimulatorDB(_SimulatorDB, table=True):
    """The SQLModel equivalent to `Simulator`."""

    id: int | None = Field(primary_key=True, default=None)
    problem_id: int | None = Field(foreign_key="problemdb.id", default=None)

    # Back populates
    problem: ProblemDB | None = Relationship(back_populates="simulators")
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"""Models specific to the reference point method."""
|
|
2
|
+
|
|
3
|
+
from sqlmodel import JSON, Column, Field, SQLModel
|
|
4
|
+
|
|
5
|
+
from .preference import ReferencePoint
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class RPMSolveRequest(SQLModel):
    """Model of the request to the reference point method."""

    problem_id: int
    session_id: int | None = Field(default=None)
    parent_state_id: int | None = Field(default=None)

    scalarization_options: dict[str, float | str | bool] | None = Field(sa_column=Column(JSON), default=None)
    solver: str | None = Field(default=None)
    solver_options: dict[str, float | str | bool] | None = Field(sa_column=Column(JSON), default=None)
    # BUG FIX: the Column was previously passed positionally — Field(Column(JSON)) —
    # making it the field's *default value* instead of its column definition;
    # pass it via `sa_column` as done for the other JSON fields above.
    preference: ReferencePoint = Field(sa_column=Column(JSON))
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
"""Defines Session models to manage user sessions."""
|
|
2
|
+
|
|
3
|
+
from typing import TYPE_CHECKING
|
|
4
|
+
|
|
5
|
+
from sqlmodel import Field, Relationship, SQLModel
|
|
6
|
+
|
|
7
|
+
if TYPE_CHECKING:
|
|
8
|
+
from .state import StateDB
|
|
9
|
+
from .user import User
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class CreateSessionRequest(SQLModel):
    """Model of the request to create a new session."""

    # Optional free-form description attached to the new session.
    info: str | None = Field(default=None)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class GetSessionRequest(SQLModel):
    """Model of the request to get a specific session."""

    # Primary key of the `InteractiveSessionDB` row to fetch.
    session_id: int = Field()
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class InteractiveSessionBase(SQLModel):
    """The base model for representing interactive sessions."""

    id: int | None
    user_id: int | None

    # Optional free-form description of the session.
    info: str | None


# Read-model alias: the base already exposes exactly the public fields.
InteractiveSessionInfo = InteractiveSessionBase
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class InteractiveSessionDB(InteractiveSessionBase, table=True):
    """Database model to store sessions."""

    id: int | None = Field(primary_key=True, default=None)
    user_id: int | None = Field(foreign_key="user.id", default=None)

    info: str | None = Field(default=None)

    # Back populates
    states: list["StateDB"] = Relationship(back_populates="session")
    user: "User" = Relationship(back_populates="sessions")
|