fractal-server 2.12.1__py3-none-any.whl → 2.13.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/security.py +9 -12
- fractal_server/app/models/v2/dataset.py +2 -2
- fractal_server/app/models/v2/job.py +11 -9
- fractal_server/app/models/v2/task.py +2 -3
- fractal_server/app/models/v2/task_group.py +6 -2
- fractal_server/app/models/v2/workflowtask.py +15 -8
- fractal_server/app/routes/admin/v2/task.py +1 -1
- fractal_server/app/routes/admin/v2/task_group.py +1 -1
- fractal_server/app/routes/api/v2/dataset.py +4 -4
- fractal_server/app/routes/api/v2/images.py +11 -11
- fractal_server/app/routes/api/v2/project.py +2 -2
- fractal_server/app/routes/api/v2/status.py +1 -1
- fractal_server/app/routes/api/v2/submit.py +8 -6
- fractal_server/app/routes/api/v2/task.py +4 -2
- fractal_server/app/routes/api/v2/task_collection.py +3 -2
- fractal_server/app/routes/api/v2/task_group.py +2 -2
- fractal_server/app/routes/api/v2/workflow.py +3 -3
- fractal_server/app/routes/api/v2/workflow_import.py +3 -3
- fractal_server/app/routes/api/v2/workflowtask.py +3 -1
- fractal_server/app/routes/auth/_aux_auth.py +4 -1
- fractal_server/app/routes/auth/current_user.py +3 -5
- fractal_server/app/routes/auth/group.py +1 -1
- fractal_server/app/routes/auth/users.py +2 -4
- fractal_server/app/routes/aux/_runner.py +1 -1
- fractal_server/app/routes/aux/validate_user_settings.py +1 -2
- fractal_server/app/runner/executors/_job_states.py +13 -0
- fractal_server/app/runner/executors/slurm/_slurm_config.py +26 -18
- fractal_server/app/runner/executors/slurm/ssh/__init__.py +0 -3
- fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py +31 -22
- fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py +2 -5
- fractal_server/app/runner/executors/slurm/ssh/executor.py +21 -27
- fractal_server/app/runner/executors/slurm/sudo/__init__.py +0 -3
- fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py +1 -2
- fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py +37 -47
- fractal_server/app/runner/executors/slurm/sudo/executor.py +25 -24
- fractal_server/app/runner/v2/__init__.py +0 -9
- fractal_server/app/runner/v2/_local/_local_config.py +5 -4
- fractal_server/app/runner/v2/_slurm_common/get_slurm_config.py +4 -4
- fractal_server/app/runner/v2/_slurm_sudo/__init__.py +2 -2
- fractal_server/app/runner/v2/deduplicate_list.py +1 -1
- fractal_server/app/runner/v2/runner.py +9 -4
- fractal_server/app/runner/v2/task_interface.py +15 -7
- fractal_server/app/schemas/_filter_validators.py +6 -3
- fractal_server/app/schemas/_validators.py +7 -5
- fractal_server/app/schemas/user.py +23 -18
- fractal_server/app/schemas/user_group.py +25 -11
- fractal_server/app/schemas/user_settings.py +31 -24
- fractal_server/app/schemas/v2/dataset.py +48 -35
- fractal_server/app/schemas/v2/dumps.py +16 -14
- fractal_server/app/schemas/v2/job.py +49 -29
- fractal_server/app/schemas/v2/manifest.py +32 -28
- fractal_server/app/schemas/v2/project.py +18 -8
- fractal_server/app/schemas/v2/task.py +86 -75
- fractal_server/app/schemas/v2/task_collection.py +41 -30
- fractal_server/app/schemas/v2/task_group.py +39 -20
- fractal_server/app/schemas/v2/workflow.py +24 -12
- fractal_server/app/schemas/v2/workflowtask.py +63 -61
- fractal_server/app/security/__init__.py +1 -1
- fractal_server/config.py +32 -25
- fractal_server/images/models.py +18 -12
- fractal_server/main.py +1 -1
- fractal_server/tasks/v2/utils_background.py +1 -1
- fractal_server/tasks/v2/utils_database.py +1 -1
- {fractal_server-2.12.1.dist-info → fractal_server-2.13.0.dist-info}/METADATA +9 -10
- {fractal_server-2.12.1.dist-info → fractal_server-2.13.0.dist-info}/RECORD +69 -72
- fractal_server/app/runner/v2/_local_experimental/__init__.py +0 -121
- fractal_server/app/runner/v2/_local_experimental/_local_config.py +0 -108
- fractal_server/app/runner/v2/_local_experimental/_submit_setup.py +0 -42
- fractal_server/app/runner/v2/_local_experimental/executor.py +0 -157
- {fractal_server-2.12.1.dist-info → fractal_server-2.13.0.dist-info}/LICENSE +0 -0
- {fractal_server-2.12.1.dist-info → fractal_server-2.13.0.dist-info}/WHEEL +0 -0
- {fractal_server-2.12.1.dist-info → fractal_server-2.13.0.dist-info}/entry_points.txt +0 -0
fractal_server/app/schemas/v2/dumps.py

```diff
@@ -11,13 +11,13 @@ These models are used in at least two situations:
 from typing import Optional
 
 from pydantic import BaseModel
-from pydantic import Extra
+from pydantic import ConfigDict
 
 from fractal_server.images.models import AttributeFiltersType
 
 
-class ProjectDumpV2(BaseModel, extra=Extra.forbid):
-
+class ProjectDumpV2(BaseModel):
+    model_config = ConfigDict(extra="forbid")
     id: int
     name: str
     timestamp_created: str
@@ -28,10 +28,10 @@ class TaskDumpV2(BaseModel):
     name: str
     type: str
 
-    command_non_parallel: Optional[str]
-    command_parallel: Optional[str]
+    command_non_parallel: Optional[str] = None
+    command_parallel: Optional[str] = None
     source: Optional[str] = None
-    version: Optional[str]
+    version: Optional[str] = None
 
     input_types: dict[str, bool]
     output_types: dict[str, bool]
@@ -39,29 +39,31 @@ class TaskDumpV2(BaseModel):
 
 class WorkflowTaskDumpV2(BaseModel):
     """
-    We do not include 'extra=Extra.forbid' because legacy data may include
-    'input_filters' field and we want to avoid response-validation errors
-    for the endpoints that GET datasets.
+    We do not include 'model_config = ConfigDict(extra="forbid")'
+    because legacy data may include 'input_filters' field and we want to avoid
+    response-validation errors for the endpoints that GET datasets.
    """
 
     id: int
     workflow_id: int
-    order: Optional[int]
+    order: Optional[int] = None
 
     type_filters: dict[str, bool]
 
-    task_id: Optional[int]
-    task: Optional[TaskDumpV2]
+    task_id: Optional[int] = None
+    task: Optional[TaskDumpV2] = None
 
 
-class WorkflowDumpV2(BaseModel, extra=Extra.forbid):
+class WorkflowDumpV2(BaseModel):
+    model_config = ConfigDict(extra="forbid")
     id: int
     name: str
     project_id: int
     timestamp_created: str
 
 
-class DatasetDumpV2(BaseModel, extra=Extra.forbid):
+class DatasetDumpV2(BaseModel):
+    model_config = ConfigDict(extra="forbid")
     id: int
     name: str
     project_id: int
```
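Across these `dumps.py` models the change is the same Pydantic-v2 migration applied throughout this release: the `extra=Extra.forbid` class keyword becomes a `model_config = ConfigDict(extra="forbid")` attribute, and `Optional` fields get an explicit `= None` default, since Pydantic v2 no longer treats `Optional[...]` as implicitly optional. A minimal sketch of the pattern (not fractal-server code):

```python
from typing import Optional

from pydantic import BaseModel
from pydantic import ConfigDict
from pydantic import ValidationError


class ExampleDump(BaseModel):
    # Pydantic-v2 replacement for `class ExampleDump(BaseModel, extra=Extra.forbid)`
    model_config = ConfigDict(extra="forbid")

    id: int
    name: str
    version: Optional[str] = None  # v2 requires the explicit default


ExampleDump(id=1, name="task")  # ok: `version` defaults to None
try:
    ExampleDump(id=1, name="task", unknown_field="x")
except ValidationError:
    pass  # extra fields are rejected because of extra="forbid"
```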
fractal_server/app/schemas/v2/job.py

```diff
@@ -3,10 +3,13 @@ from enum import Enum
 from typing import Optional
 
 from pydantic import BaseModel
-from pydantic import Extra
+from pydantic import ConfigDict
 from pydantic import Field
-from pydantic import root_validator
-from pydantic import validator
+from pydantic import field_serializer
+from pydantic import field_validator
+from pydantic import model_validator
+from pydantic import ValidationInfo
+from pydantic.types import AwareDatetime
 from pydantic.types import StrictStr
 
 from .._filter_validators import validate_attribute_filters
@@ -39,28 +39,31 @@ class JobStatusTypeV2(str, Enum):
     FAILED = "failed"
 
 
-class JobCreateV2(BaseModel, extra=Extra.forbid):
+class JobCreateV2(BaseModel):
+
+    model_config = ConfigDict(extra="forbid")
 
     first_task_index: Optional[int] = None
     last_task_index: Optional[int] = None
     slurm_account: Optional[StrictStr] = None
-    worker_init: Optional[str]
+    worker_init: Optional[str] = None
 
     attribute_filters: AttributeFiltersType = Field(default_factory=dict)
 
     # Validators
-    _worker_init =
-        valstr("worker_init")
+    _worker_init = field_validator("worker_init")(
+        classmethod(valstr("worker_init"))
     )
-    _dict_keys =
-        root_validate_dict_keys
+    _dict_keys = model_validator(mode="before")(
+        classmethod(root_validate_dict_keys)
     )
-    _attribute_filters =
-        validate_attribute_filters
+    _attribute_filters = field_validator("attribute_filters")(
+        classmethod(validate_attribute_filters)
     )
 
-    @
-
+    @field_validator("first_task_index")
+    @classmethod
+    def first_task_index_non_negative(cls, v):
         """
         Check that `first_task_index` is non-negative.
         """
@@ -70,8 +76,9 @@ class JobCreateV2(BaseModel, extra=Extra.forbid):
             )
         return v
 
-    @
-
+    @field_validator("last_task_index")
+    @classmethod
+    def first_last_task_indices(cls, v, info: ValidationInfo):
         """
         Check that `last_task_index` is non-negative, and that it is not
         smaller than `first_task_index`.
@@ -81,7 +88,7 @@ class JobCreateV2(BaseModel, extra=Extra.forbid):
                 f"last_task_index cannot be negative (given: {v})"
             )
 
-        first_task_index =
+        first_task_index = info.data.get("first_task_index")
         last_task_index = v
         if first_task_index is not None and last_task_index is not None:
             if first_task_index > last_task_index:
@@ -95,26 +102,39 @@ class JobCreateV2(BaseModel, extra=Extra.forbid):
 class JobReadV2(BaseModel):
 
     id: int
-    project_id: Optional[int]
+    project_id: Optional[int] = None
     project_dump: ProjectDumpV2
     user_email: str
-    slurm_account: Optional[str]
-    workflow_id: Optional[int]
+    slurm_account: Optional[str] = None
+    workflow_id: Optional[int] = None
     workflow_dump: WorkflowDumpV2
-    dataset_id: Optional[int]
+    dataset_id: Optional[int] = None
     dataset_dump: DatasetDumpV2
-    start_timestamp: datetime
-    end_timestamp: Optional[datetime]
+    start_timestamp: AwareDatetime
+    end_timestamp: Optional[AwareDatetime] = None
     status: str
-    log: Optional[str]
-    working_dir: Optional[str]
-    working_dir_user: Optional[str]
-    first_task_index: Optional[int]
-    last_task_index: Optional[int]
-    worker_init: Optional[str]
+    log: Optional[str] = None
+    working_dir: Optional[str] = None
+    working_dir_user: Optional[str] = None
+    first_task_index: Optional[int] = None
+    last_task_index: Optional[int] = None
+    worker_init: Optional[str] = None
     attribute_filters: AttributeFiltersType
 
+    @field_serializer("start_timestamp")
+    def serialize_datetime_start(v: datetime) -> str:
+        return v.isoformat()
+
+    @field_serializer("end_timestamp")
+    def serialize_datetime_end(v: Optional[datetime]) -> Optional[str]:
+        if v is None:
+            return None
+        else:
+            return v.isoformat()
+
+
+class JobUpdateV2(BaseModel):
 
-class JobUpdateV2(BaseModel, extra=Extra.forbid):
+    model_config = ConfigDict(extra="forbid")
 
     status: JobStatusTypeV2
```
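Two Pydantic-v2 features carry most of the `job.py` change: `field_serializer` turns the `AwareDatetime` timestamps into ISO-8601 strings on dump, and `field_validator` receives a `ValidationInfo` object whose `data` dict replaces the old `values` argument for cross-field checks. A self-contained sketch of both (hypothetical model, not `JobCreateV2`/`JobReadV2` themselves):

```python
from datetime import datetime, timezone
from typing import Optional

from pydantic import BaseModel, ValidationInfo, field_serializer, field_validator
from pydantic.types import AwareDatetime


class ExampleJob(BaseModel):
    first_task_index: Optional[int] = None
    last_task_index: Optional[int] = None
    start_timestamp: AwareDatetime  # must be timezone-aware

    @field_serializer("start_timestamp")
    def serialize_start(self, value: datetime) -> str:
        # Applied by model_dump() / model_dump_json()
        return value.isoformat()

    @field_validator("last_task_index")
    @classmethod
    def check_task_index_order(cls, v: Optional[int], info: ValidationInfo):
        # info.data holds the fields validated so far (here: first_task_index)
        first = info.data.get("first_task_index")
        if v is not None and first is not None and first > v:
            raise ValueError("last_task_index cannot be smaller than first_task_index")
        return v


job = ExampleJob(
    first_task_index=0,
    last_task_index=2,
    start_timestamp=datetime.now(timezone.utc),
)
print(job.model_dump()["start_timestamp"])  # ISO-8601 string, e.g. '...+00:00'
```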
fractal_server/app/schemas/v2/manifest.py

```diff
@@ -3,9 +3,9 @@ from typing import Optional
 
 from pydantic import BaseModel
 from pydantic import Field
+from pydantic import field_validator
 from pydantic import HttpUrl
-from pydantic import root_validator
-from pydantic import validator
+from pydantic import model_validator
 
 from .._validators import valstr
 
@@ -50,27 +50,24 @@ class TaskManifestV2(BaseModel):
     args_schema_non_parallel: Optional[dict[str, Any]] = None
     args_schema_parallel: Optional[dict[str, Any]] = None
     docs_info: Optional[str] = None
-    docs_link: Optional[HttpUrl] = None
+    docs_link: Optional[str] = None
 
     category: Optional[str] = None
     modality: Optional[str] = None
     tags: list[str] = Field(default_factory=list)
 
-    @
-    def validate_executable_args_meta(
-
-
-        executable_parallel = values.get("executable_parallel")
+    @model_validator(mode="after")
+    def validate_executable_args_meta(self):
+        executable_non_parallel = self.executable_non_parallel
+        executable_parallel = self.executable_parallel
         if (executable_non_parallel is None) and (executable_parallel is None):
-
             raise ValueError(
                 "`TaskManifestV2.executable_non_parallel` and "
                 "`TaskManifestV2.executable_parallel` cannot be both None."
             )
 
         elif executable_non_parallel is None:
-
-            meta_non_parallel = values.get("meta_non_parallel")
+            meta_non_parallel = self.meta_non_parallel
             if meta_non_parallel != {}:
                 raise ValueError(
                     "`TaskManifestV2.meta_non_parallel` must be an empty dict "
@@ -78,7 +75,7 @@ class TaskManifestV2(BaseModel):
                     f"Given: {meta_non_parallel}."
                 )
 
-            args_schema_non_parallel =
+            args_schema_non_parallel = self.args_schema_non_parallel
             if args_schema_non_parallel is not None:
                 raise ValueError(
                     "`TaskManifestV2.args_schema_non_parallel` must be None "
@@ -87,8 +84,7 @@ class TaskManifestV2(BaseModel):
                 )
 
         elif executable_parallel is None:
-
-            meta_parallel = values.get("meta_parallel")
+            meta_parallel = self.meta_parallel
             if meta_parallel != {}:
                 raise ValueError(
                     "`TaskManifestV2.meta_parallel` must be an empty dict if "
@@ -96,7 +92,7 @@ class TaskManifestV2(BaseModel):
                     f"Given: {meta_parallel}."
                 )
 
-            args_schema_parallel =
+            args_schema_parallel = self.args_schema_parallel
             if args_schema_parallel is not None:
                 raise ValueError(
                     "`TaskManifestV2.args_schema_parallel` must be None if "
@@ -104,7 +100,14 @@ class TaskManifestV2(BaseModel):
                     f"Given: {args_schema_parallel}."
                 )
 
-        return values
+        return self
+
+    @field_validator("docs_link", mode="after")
+    @classmethod
+    def validate_docs_link(cls, value):
+        if value is not None:
+            HttpUrl(value)
+        return value
 
 
 class ManifestV2(BaseModel):
@@ -137,10 +140,10 @@ class ManifestV2(BaseModel):
     args_schema_version: Optional[str] = None
     authors: Optional[str] = None
 
-    @
-    def _check_args_schemas_are_present(
-        has_args_schemas =
-        task_list =
+    @model_validator(mode="after")
+    def _check_args_schemas_are_present(self):
+        has_args_schemas = self.has_args_schemas
+        task_list = self.task_list
         if has_args_schemas is True:
             for task in task_list:
                 if task.executable_parallel is not None:
@@ -157,11 +160,11 @@ class ManifestV2(BaseModel):
                             f"task '{task.name}' has "
                            f"{task.args_schema_non_parallel=}."
                        )
-        return values
+        return self
 
-    @
-    def _unique_task_names(
-        task_list =
+    @model_validator(mode="after")
+    def _unique_task_names(self):
+        task_list = self.task_list
         task_list_names = [t.name for t in task_list]
         if len(set(task_list_names)) != len(task_list_names):
             raise ValueError(
@@ -170,14 +173,15 @@ class ManifestV2(BaseModel):
                     f"Given: {task_list_names}.",
                 )
             )
-        return values
+        return self
 
-    @
+    @field_validator("manifest_version")
+    @classmethod
     def manifest_version_2(cls, value):
         if value != "2":
             raise ValueError(f"Wrong manifest version (given {value})")
         return value
 
-    _authors =
-        valstr("authors", accept_none=True)
+    _authors = field_validator("authors")(
+        classmethod(valstr("authors", accept_none=True))
     )
```
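The `manifest.py` validators illustrate the `root_validator` to `model_validator(mode="after")` migration: the validator now runs on the constructed model, reads plain attributes instead of a `values` dict, and must return `self`. A minimal sketch of that shape (not the actual `TaskManifestV2` model):

```python
from typing import Optional

from pydantic import BaseModel
from pydantic import model_validator


class ExampleManifestEntry(BaseModel):
    executable_non_parallel: Optional[str] = None
    executable_parallel: Optional[str] = None

    @model_validator(mode="after")
    def at_least_one_executable(self):
        # With mode="after" the model is already built, so attributes are
        # accessed directly instead of via values.get(...).
        if self.executable_non_parallel is None and self.executable_parallel is None:
            raise ValueError(
                "executable_non_parallel and executable_parallel cannot both be None."
            )
        return self  # "after" validators must return the model instance
```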
fractal_server/app/schemas/v2/project.py

```diff
@@ -2,28 +2,38 @@ from datetime import datetime
 from typing import Optional
 
 from pydantic import BaseModel
-from pydantic import Extra
-from pydantic import validator
+from pydantic import ConfigDict
+from pydantic import field_serializer
+from pydantic import field_validator
+from pydantic.types import AwareDatetime
 
 from .._validators import valstr
 
 
-class ProjectCreateV2(BaseModel, extra=Extra.forbid):
+class ProjectCreateV2(BaseModel):
+
+    model_config = ConfigDict(extra="forbid")
 
     name: str
     # Validators
-    _name =
+    _name = field_validator("name")(classmethod(valstr("name")))
 
 
 class ProjectReadV2(BaseModel):
 
     id: int
     name: str
-    timestamp_created: datetime
+    timestamp_created: AwareDatetime
+
+    @field_serializer("timestamp_created")
+    def serialize_datetime(v: datetime) -> str:
+        return v.isoformat()
+
 
+class ProjectUpdateV2(BaseModel):
 
-class ProjectUpdateV2(BaseModel, extra=Extra.forbid):
+    model_config = ConfigDict(extra="forbid")
 
-    name: Optional[str]
+    name: Optional[str] = None
     # Validators
-    _name =
+    _name = field_validator("name")(classmethod(valstr("name")))
```
fractal_server/app/schemas/v2/task.py

```diff
@@ -3,11 +3,11 @@ from typing import Literal
 from typing import Optional
 
 from pydantic import BaseModel
-from pydantic import Extra
+from pydantic import ConfigDict
 from pydantic import Field
+from pydantic import field_validator
 from pydantic import HttpUrl
-from pydantic import root_validator
-from pydantic import validator
+from pydantic import model_validator
 
 from fractal_server.app.schemas._validators import val_unique_list
 from fractal_server.app.schemas._validators import valdict_keys
@@ -15,7 +15,8 @@ from fractal_server.app.schemas._validators import valstr
 from fractal_server.string_tools import validate_cmd
 
 
-class TaskCreateV2(BaseModel, extra=Extra.forbid):
+class TaskCreateV2(BaseModel):
+    model_config = ConfigDict(extra="forbid")
 
     name: str
 
@@ -29,7 +30,7 @@ class TaskCreateV2(BaseModel, extra=Extra.forbid):
     args_schema_parallel: Optional[dict[str, Any]] = None
     args_schema_version: Optional[str] = None
     docs_info: Optional[str] = None
-    docs_link: Optional[HttpUrl] = None
+    docs_link: Optional[str] = None
 
     input_types: dict[str, bool] = Field(default={})
     output_types: dict[str, bool] = Field(default={})
@@ -40,10 +41,10 @@ class TaskCreateV2(BaseModel, extra=Extra.forbid):
     authors: Optional[str] = None
 
     # Validators
-    @
-    def validate_commands(
-        command_parallel =
-        command_non_parallel =
+    @model_validator(mode="after")
+    def validate_commands(self):
+        command_parallel = self.command_parallel
+        command_non_parallel = self.command_non_parallel
         if (command_parallel is None) and (command_non_parallel is None):
             raise ValueError(
                 "Task must have at least one valid command "
@@ -54,58 +55,65 @@ class TaskCreateV2(BaseModel, extra=Extra.forbid):
         if command_non_parallel is not None:
             validate_cmd(command_non_parallel)
 
-        return values
+        return self
 
-    _name =
-    _command_non_parallel =
-        "command_non_parallel"
-    )(valstr("command_non_parallel"))
-    _command_parallel = validator("command_parallel", allow_reuse=True)(
-        valstr("command_parallel")
+    _name = field_validator("name")(classmethod(valstr("name")))
+    _command_non_parallel = field_validator("command_non_parallel")(
+        classmethod(valstr("command_non_parallel"))
     )
-
+    _command_parallel = field_validator("command_parallel")(
+        classmethod(valstr("command_parallel"))
+    )
+    _version = field_validator("version")(classmethod(valstr("version")))
 
-    _meta_non_parallel =
-        valdict_keys("meta_non_parallel")
+    _meta_non_parallel = field_validator("meta_non_parallel")(
+        classmethod(valdict_keys("meta_non_parallel"))
+    )
+    _meta_parallel = field_validator("meta_parallel")(
+        classmethod(valdict_keys("meta_parallel"))
     )
-
-        valdict_keys("
+    _args_schema_non_parallel = field_validator("args_schema_non_parallel")(
+        classmethod(valdict_keys("args_schema_non_parallel"))
     )
-
-        "
-    )(valdict_keys("args_schema_non_parallel"))
-    _args_schema_parallel = validator(
-        "args_schema_parallel", allow_reuse=True
-    )(valdict_keys("args_schema_parallel"))
-    _args_schema_version = validator("args_schema_version", allow_reuse=True)(
-        valstr("args_schema_version")
+    _args_schema_parallel = field_validator("args_schema_parallel")(
+        classmethod(valdict_keys("args_schema_parallel"))
     )
-
-
+    _args_schema_version = field_validator("args_schema_version")(
+        classmethod(valstr("args_schema_version"))
     )
-
-        valdict_keys("
+    _input_types = field_validator("input_types")(
+        classmethod(valdict_keys("input_types"))
+    )
+    _output_types = field_validator("output_types")(
+        classmethod(valdict_keys("output_types"))
    )
 
-    _category =
-        valstr("category", accept_none=True)
+    _category = field_validator("category")(
+        classmethod(valstr("category", accept_none=True))
    )
-    _modality =
-        valstr("modality", accept_none=True)
+    _modality = field_validator("modality")(
+        classmethod(valstr("modality", accept_none=True))
    )
-    _authors =
-        valstr("authors", accept_none=True)
+    _authors = field_validator("authors")(
+        classmethod(valstr("authors", accept_none=True))
    )
 
-    @
+    @field_validator("tags")
+    @classmethod
     def validate_list_of_strings(cls, value):
         for i, tag in enumerate(value):
-            value[i] = valstr(f"tags[{i}]")(tag)
-        return val_unique_list("tags")(value)
+            value[i] = valstr(f"tags[{i}]")(cls, tag)
+        return val_unique_list("tags")(cls, value)
 
+    @field_validator("docs_link", mode="after")
+    @classmethod
+    def validate_docs_link(cls, value):
+        if value is not None:
+            HttpUrl(value)
+        return value
 
-class TaskReadV2(BaseModel):
 
+class TaskReadV2(BaseModel):
     id: int
     name: str
     type: Literal["parallel", "non_parallel", "compound"]
@@ -120,7 +128,7 @@ class TaskReadV2(BaseModel):
     args_schema_parallel: Optional[dict[str, Any]] = None
     args_schema_version: Optional[str] = None
     docs_info: Optional[str] = None
-    docs_link: Optional[HttpUrl] = None
+    docs_link: Optional[str] = None
     input_types: dict[str, bool]
     output_types: dict[str, bool]
 
@@ -132,7 +140,8 @@ class TaskReadV2(BaseModel):
     tags: list[str]
 
 
-class TaskUpdateV2(BaseModel, extra=Extra.forbid):
+class TaskUpdateV2(BaseModel):
+    model_config = ConfigDict(extra="forbid")
 
     command_parallel: Optional[str] = None
     command_non_parallel: Optional[str] = None
@@ -145,67 +154,69 @@ class TaskUpdateV2(BaseModel, extra=Extra.forbid):
     tags: Optional[list[str]] = None
 
     # Validators
-    @
+    @field_validator("input_types", "output_types")
+    @classmethod
     def val_is_dict(cls, v):
         if not isinstance(v, dict):
             raise ValueError
         return v
 
-    _command_parallel =
-        valstr("command_parallel")
+    _command_parallel = field_validator("command_parallel")(
+        classmethod(valstr("command_parallel"))
+    )
+    _command_non_parallel = field_validator("command_non_parallel")(
+        classmethod(valstr("command_non_parallel"))
     )
-
-        "
-    )(valstr("command_non_parallel"))
-    _input_types = validator("input_types", allow_reuse=True)(
-        valdict_keys("input_types")
+    _input_types = field_validator("input_types")(
+        classmethod(valdict_keys("input_types"))
    )
-    _output_types =
-        valdict_keys("output_types")
+    _output_types = field_validator("output_types")(
+        classmethod(valdict_keys("output_types"))
    )
 
-    _category =
-        valstr("category", accept_none=True)
+    _category = field_validator("category")(
+        classmethod(valstr("category", accept_none=True))
    )
-    _modality =
-        valstr("modality", accept_none=True)
+    _modality = field_validator("modality")(
+        classmethod(valstr("modality", accept_none=True))
    )
-    _authors =
-        valstr("authors", accept_none=True)
+    _authors = field_validator("authors")(
+        classmethod(valstr("authors", accept_none=True))
    )
 
-    @
+    @field_validator("tags")
+    @classmethod
     def validate_tags(cls, value):
         for i, tag in enumerate(value):
-            value[i] = valstr(f"tags[{i}]")(tag)
-        return val_unique_list("tags")(value)
+            value[i] = valstr(f"tags[{i}]")(cls, tag)
+        return val_unique_list("tags")(cls, value)
 
 
-class TaskImportV2(BaseModel, extra=Extra.forbid):
+class TaskImportV2(BaseModel):
+    model_config = ConfigDict(extra="forbid")
 
     pkg_name: str
     version: Optional[str] = None
     name: str
-    _pkg_name =
-    _version =
-        valstr("version", accept_none=True)
+    _pkg_name = field_validator("pkg_name")(classmethod(valstr("pkg_name")))
+    _version = field_validator("version")(
+        classmethod(valstr("version", accept_none=True))
    )
-    _name =
+    _name = field_validator("name")(classmethod(valstr("name")))
 
 
 class TaskImportV2Legacy(BaseModel):
     source: str
-    _source =
+    _source = field_validator("source")(classmethod(valstr("source")))
 
 
 class TaskExportV2(BaseModel):
-
     pkg_name: str
     version: Optional[str] = None
     name: str
 
-    _pkg_name =
-    _version =
-        valstr("version", accept_none=True)
+    _pkg_name = field_validator("pkg_name")(classmethod(valstr("pkg_name")))
+    _version = field_validator("version")(
+        classmethod(valstr("version", accept_none=True))
    )
-    _name =
+    _name = field_validator("name")(classmethod(valstr("name")))
```