fractal-server 2.12.1__py3-none-any.whl → 2.13.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/security.py +9 -12
- fractal_server/app/models/v2/__init__.py +4 -0
- fractal_server/app/models/v2/accounting.py +35 -0
- fractal_server/app/models/v2/dataset.py +2 -2
- fractal_server/app/models/v2/job.py +11 -9
- fractal_server/app/models/v2/task.py +2 -3
- fractal_server/app/models/v2/task_group.py +6 -2
- fractal_server/app/models/v2/workflowtask.py +15 -8
- fractal_server/app/routes/admin/v2/__init__.py +4 -0
- fractal_server/app/routes/admin/v2/accounting.py +108 -0
- fractal_server/app/routes/admin/v2/impersonate.py +35 -0
- fractal_server/app/routes/admin/v2/job.py +5 -13
- fractal_server/app/routes/admin/v2/task.py +1 -1
- fractal_server/app/routes/admin/v2/task_group.py +5 -13
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +3 -3
- fractal_server/app/routes/api/v2/dataset.py +4 -4
- fractal_server/app/routes/api/v2/images.py +11 -11
- fractal_server/app/routes/api/v2/project.py +2 -2
- fractal_server/app/routes/api/v2/status.py +1 -1
- fractal_server/app/routes/api/v2/submit.py +9 -6
- fractal_server/app/routes/api/v2/task.py +4 -2
- fractal_server/app/routes/api/v2/task_collection.py +3 -2
- fractal_server/app/routes/api/v2/task_group.py +4 -7
- fractal_server/app/routes/api/v2/workflow.py +3 -3
- fractal_server/app/routes/api/v2/workflow_import.py +3 -3
- fractal_server/app/routes/api/v2/workflowtask.py +3 -1
- fractal_server/app/routes/auth/_aux_auth.py +4 -1
- fractal_server/app/routes/auth/current_user.py +3 -5
- fractal_server/app/routes/auth/group.py +1 -1
- fractal_server/app/routes/auth/users.py +2 -4
- fractal_server/app/routes/aux/__init__.py +0 -20
- fractal_server/app/routes/aux/_runner.py +1 -1
- fractal_server/app/routes/aux/validate_user_settings.py +1 -2
- fractal_server/app/runner/executors/_job_states.py +13 -0
- fractal_server/app/runner/executors/slurm/_slurm_config.py +26 -18
- fractal_server/app/runner/executors/slurm/ssh/__init__.py +0 -3
- fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py +31 -22
- fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py +2 -5
- fractal_server/app/runner/executors/slurm/ssh/executor.py +21 -27
- fractal_server/app/runner/executors/slurm/sudo/__init__.py +0 -3
- fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py +1 -2
- fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py +37 -47
- fractal_server/app/runner/executors/slurm/sudo/executor.py +25 -24
- fractal_server/app/runner/v2/__init__.py +4 -9
- fractal_server/app/runner/v2/_local/__init__.py +3 -0
- fractal_server/app/runner/v2/_local/_local_config.py +5 -4
- fractal_server/app/runner/v2/_slurm_common/get_slurm_config.py +4 -4
- fractal_server/app/runner/v2/_slurm_ssh/__init__.py +2 -0
- fractal_server/app/runner/v2/_slurm_sudo/__init__.py +4 -2
- fractal_server/app/runner/v2/deduplicate_list.py +1 -1
- fractal_server/app/runner/v2/runner.py +25 -10
- fractal_server/app/runner/v2/runner_functions.py +12 -11
- fractal_server/app/runner/v2/task_interface.py +15 -7
- fractal_server/app/schemas/_filter_validators.py +6 -3
- fractal_server/app/schemas/_validators.py +7 -5
- fractal_server/app/schemas/user.py +23 -18
- fractal_server/app/schemas/user_group.py +25 -11
- fractal_server/app/schemas/user_settings.py +31 -24
- fractal_server/app/schemas/v2/__init__.py +1 -0
- fractal_server/app/schemas/v2/accounting.py +18 -0
- fractal_server/app/schemas/v2/dataset.py +48 -35
- fractal_server/app/schemas/v2/dumps.py +16 -14
- fractal_server/app/schemas/v2/job.py +49 -29
- fractal_server/app/schemas/v2/manifest.py +32 -28
- fractal_server/app/schemas/v2/project.py +18 -8
- fractal_server/app/schemas/v2/task.py +86 -75
- fractal_server/app/schemas/v2/task_collection.py +41 -30
- fractal_server/app/schemas/v2/task_group.py +39 -20
- fractal_server/app/schemas/v2/workflow.py +24 -12
- fractal_server/app/schemas/v2/workflowtask.py +63 -61
- fractal_server/app/security/__init__.py +1 -1
- fractal_server/config.py +86 -73
- fractal_server/images/models.py +18 -12
- fractal_server/main.py +1 -1
- fractal_server/migrations/versions/af1ef1c83c9b_add_accounting_tables.py +57 -0
- fractal_server/tasks/v2/utils_background.py +2 -2
- fractal_server/tasks/v2/utils_database.py +1 -1
- {fractal_server-2.12.1.dist-info → fractal_server-2.13.1.dist-info}/METADATA +9 -10
- {fractal_server-2.12.1.dist-info → fractal_server-2.13.1.dist-info}/RECORD +83 -81
- fractal_server/app/runner/v2/_local_experimental/__init__.py +0 -121
- fractal_server/app/runner/v2/_local_experimental/_local_config.py +0 -108
- fractal_server/app/runner/v2/_local_experimental/_submit_setup.py +0 -42
- fractal_server/app/runner/v2/_local_experimental/executor.py +0 -157
- {fractal_server-2.12.1.dist-info → fractal_server-2.13.1.dist-info}/LICENSE +0 -0
- {fractal_server-2.12.1.dist-info → fractal_server-2.13.1.dist-info}/WHEEL +0 -0
- {fractal_server-2.12.1.dist-info → fractal_server-2.13.1.dist-info}/entry_points.txt +0 -0
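The hunks below cover a subset of these files, grouped by source file (the paths are matched to hunks via the functions and classes they touch). Two themes recur: the Pydantic v1-to-v2 migration (`Extra` → `ConfigDict`, `validator` → `field_validator`, `.dict()` → `.model_dump()`) and a new accounting subsystem (model, admin routes, read schema, and Alembic migration `af1ef1c83c9b`). As a reading aid, here is a minimal sketch of what the new `AccountingRecord` model plausibly looks like; only the field names `user_id`, `num_tasks`, `num_new_images`, and `timestamp` are confirmed by the hunks below, while the column options and foreign-key target are assumptions, not the actual contents of fractal_server/app/models/v2/accounting.py:

```python
from datetime import datetime
from datetime import timezone
from typing import Optional

from sqlmodel import Field
from sqlmodel import SQLModel


class AccountingRecord(SQLModel, table=True):
    # Hypothetical reconstruction: field names come from the hunks below;
    # the primary key, FK target, and timestamp default are assumptions.
    id: Optional[int] = Field(default=None, primary_key=True)
    user_id: int = Field(foreign_key="user_oauth.id")
    timestamp: datetime = Field(
        default_factory=lambda: datetime.now(tz=timezone.utc)
    )
    num_tasks: int
    num_new_images: int
```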
fractal_server/app/runner/v2/runner.py:

```diff
@@ -18,6 +18,7 @@ from .runner_functions import run_v2_task_non_parallel
 from .runner_functions import run_v2_task_parallel
 from .task_interface import TaskOutput
 from fractal_server.app.db import get_sync_db
+from fractal_server.app.models.v2 import AccountingRecord
 from fractal_server.app.models.v2 import DatasetV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.schemas.v2.dataset import _DatasetHistoryItemV2
@@ -31,6 +32,7 @@ def execute_tasks_v2(
     wf_task_list: list[WorkflowTaskV2],
     dataset: DatasetV2,
     executor: ThreadPoolExecutor,
+    user_id: int,
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
     logger_name: Optional[str] = None,
@@ -75,17 +77,20 @@ def execute_tasks_v2(
         with next(get_sync_db()) as db:
             db_dataset = db.get(DatasetV2, dataset.id)
             new_history_item = _DatasetHistoryItemV2(
-                workflowtask=wftask,
+                workflowtask=dict(
+                    **wftask.model_dump(exclude={"task"}),
+                    task=wftask.task.model_dump(),
+                ),
                 status=WorkflowTaskStatusTypeV2.SUBMITTED,
                 parallelization=dict(),  # FIXME: re-include parallelization
-            ).dict()
+            ).model_dump()
             db_dataset.history.append(new_history_item)
             flag_modified(db_dataset, "history")
             db.merge(db_dataset)
             db.commit()
         # TASK EXECUTION (V2)
         if task.type == "non_parallel":
-            current_task_output = run_v2_task_non_parallel(
+            current_task_output, num_tasks = run_v2_task_non_parallel(
                 images=filtered_images,
                 zarr_dir=zarr_dir,
                 wftask=wftask,
@@ -93,22 +98,20 @@ def execute_tasks_v2(
                 workflow_dir_local=workflow_dir_local,
                 workflow_dir_remote=workflow_dir_remote,
                 executor=executor,
-                logger_name=logger_name,
                 submit_setup_call=submit_setup_call,
             )
         elif task.type == "parallel":
-            current_task_output = run_v2_task_parallel(
+            current_task_output, num_tasks = run_v2_task_parallel(
                 images=filtered_images,
                 wftask=wftask,
                 task=task,
                 workflow_dir_local=workflow_dir_local,
                 workflow_dir_remote=workflow_dir_remote,
                 executor=executor,
-                logger_name=logger_name,
                 submit_setup_call=submit_setup_call,
             )
         elif task.type == "compound":
-            current_task_output = run_v2_task_compound(
+            current_task_output, num_tasks = run_v2_task_compound(
                 images=filtered_images,
                 zarr_dir=zarr_dir,
                 wftask=wftask,
@@ -116,7 +119,6 @@ def execute_tasks_v2(
                 workflow_dir_local=workflow_dir_local,
                 workflow_dir_remote=workflow_dir_remote,
                 executor=executor,
-                logger_name=logger_name,
                 submit_setup_call=submit_setup_call,
             )
         else:
@@ -132,16 +134,19 @@ def execute_tasks_v2(
             and current_task_output.image_list_removals == []
         ):
             current_task_output = TaskOutput(
-                **current_task_output.dict(exclude={"image_list_updates"}),
+                **current_task_output.model_dump(
+                    exclude={"image_list_updates"}
+                ),
                 image_list_updates=[
                     dict(zarr_url=img["zarr_url"]) for img in filtered_images
                 ],
             )
 
         # Update image list
+        num_new_images = 0
         current_task_output.check_zarr_urls_are_unique()
         for image_obj in current_task_output.image_list_updates:
-            image = image_obj.dict()
+            image = image_obj.model_dump()
             # Edit existing image
             tmp_image_paths = [img["zarr_url"] for img in tmp_images]
             if image["zarr_url"] in tmp_image_paths:
@@ -241,6 +246,7 @@ def execute_tasks_v2(
                 SingleImage(**new_image)
                 # Add image into the dataset image list
                 tmp_images.append(new_image)
+                num_new_images += 1
 
         # Remove images from tmp_images
         for img_zarr_url in current_task_output.image_list_removals:
@@ -276,4 +282,13 @@ def execute_tasks_v2(
             db.merge(db_dataset)
             db.commit()
 
+            # Create accounting record
+            record = AccountingRecord(
+                user_id=user_id,
+                num_tasks=num_tasks,
+                num_new_images=num_new_images,
+            )
+            db.add(record)
+            db.commit()
+
     logger.debug(f'END {wftask.order}-th task (name="{task_name}")')
```
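Taken together, the `runner.py` changes thread a `user_id` through `execute_tasks_v2`, count executor submissions (`num_tasks`) and newly added images (`num_new_images`) per workflow task, and persist one `AccountingRecord` per task. The write boils down to the following sketch (a hypothetical standalone helper, not a function in the codebase; it condenses the block added at the end of the hunk above):

```python
from fractal_server.app.db import get_sync_db
from fractal_server.app.models.v2 import AccountingRecord


def write_accounting_record(
    user_id: int,
    num_tasks: int,
    num_new_images: int,
) -> None:
    # One row per executed workflow task, mirroring execute_tasks_v2.
    with next(get_sync_db()) as db:
        record = AccountingRecord(
            user_id=user_id,
            num_tasks=num_tasks,
            num_new_images=num_new_images,
        )
        db.add(record)
        db.commit()
```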
fractal_server/app/runner/v2/runner_functions.py:

```diff
@@ -115,9 +115,8 @@ def run_v2_task_non_parallel(
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
     executor: Executor,
-    logger_name: Optional[str] = None,
     submit_setup_call: Callable = no_op_submit_setup_call,
-) -> TaskOutput:
+) -> tuple[TaskOutput, int]:
     """
     This runs server-side (see `executor` argument)
     """
@@ -154,10 +153,11 @@ def run_v2_task_non_parallel(
         **executor_options,
     )
     output = future.result()
+    num_tasks = 1
    if output is None:
-        return TaskOutput()
+        return (TaskOutput(), num_tasks)
     else:
-        return _cast_and_validate_TaskOutput(output)
+        return (_cast_and_validate_TaskOutput(output), num_tasks)
 
 
 def run_v2_task_parallel(
@@ -168,12 +168,11 @@ def run_v2_task_parallel(
     executor: Executor,
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
-    logger_name: Optional[str] = None,
     submit_setup_call: Callable = no_op_submit_setup_call,
-) -> TaskOutput:
+) -> tuple[TaskOutput, int]:
 
     if len(images) == 0:
-        return TaskOutput()
+        return (TaskOutput(), 0)
 
     _check_parallelization_list_size(images)
 
@@ -216,8 +215,9 @@ def run_v2_task_parallel(
         else:
             outputs[ind] = _cast_and_validate_TaskOutput(output)
 
+    num_tasks = len(images)
     merged_output = merge_outputs(outputs)
-    return merged_output
+    return (merged_output, num_tasks)
 
 
 def run_v2_task_compound(
@@ -229,7 +229,6 @@ def run_v2_task_compound(
     executor: Executor,
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
-    logger_name: Optional[str] = None,
     submit_setup_call: Callable = no_op_submit_setup_call,
 ) -> TaskOutput:
 
@@ -273,11 +272,13 @@ def run_v2_task_compound(
     parallelization_list = init_task_output.parallelization_list
     parallelization_list = deduplicate_list(parallelization_list)
 
+    num_task = 1 + len(parallelization_list)
+
     # 3/B: parallel part of a compound task
     _check_parallelization_list_size(parallelization_list)
 
     if len(parallelization_list) == 0:
-        return TaskOutput()
+        return (TaskOutput(), 0)
 
     list_function_kwargs = []
     for ind, parallelization_item in enumerate(parallelization_list):
@@ -313,4 +314,4 @@ def run_v2_task_compound(
         outputs[ind] = validated_output
 
     merged_output = merge_outputs(outputs)
-    return merged_output
+    return (merged_output, num_task)
```
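All three `run_v2_task_*` helpers now return a `(TaskOutput, num_tasks)` tuple instead of a bare `TaskOutput` (note that `run_v2_task_compound` keeps the stale `-> TaskOutput` annotation and uses the singular name `num_task`). The counting rules implied by the hunks above can be summarized as follows (an illustrative helper, not part of the codebase):

```python
def expected_num_tasks(task_type: str, n_units: int) -> int:
    """Summarize the accounting arithmetic of the run_v2_task_* helpers."""
    if task_type == "non_parallel":
        return 1  # a single executor submission
    if task_type == "parallel":
        return n_units  # one submission per image; zero images -> 0
    if task_type == "compound":
        # Init task plus one submission per parallelization item; the
        # early return for an empty list reports 0 even though the init
        # part did run.
        return 1 + n_units if n_units > 0 else 0
    raise ValueError(f"Unknown task type: {task_type}")
```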
fractal_server/app/runner/v2/task_interface.py:

```diff
@@ -1,15 +1,17 @@
 from typing import Any
 
 from pydantic import BaseModel
-from pydantic import Extra
+from pydantic import ConfigDict
 from pydantic import Field
-from pydantic import validator
+from pydantic import field_validator
 
 from ....images import SingleImageTaskOutput
 from fractal_server.urls import normalize_url
 
 
-class TaskOutput(BaseModel, extra=Extra.forbid):
+class TaskOutput(BaseModel):
+
+    model_config = ConfigDict(extra="forbid")
 
     image_list_updates: list[SingleImageTaskOutput] = Field(
         default_factory=list
@@ -35,21 +37,27 @@ class TaskOutput(BaseModel, extra=Extra.forbid):
                 msg = f"{msg}\n{duplicate}"
             raise ValueError(msg)
 
-    @validator("image_list_removals")
+    @field_validator("image_list_removals")
+    @classmethod
     def normalize_paths(cls, v: list[str]) -> list[str]:
         return [normalize_url(zarr_url) for zarr_url in v]
 
 
-class InitArgsModel(BaseModel, extra=Extra.forbid):
+class InitArgsModel(BaseModel):
+
+    model_config = ConfigDict(extra="forbid")
 
     zarr_url: str
     init_args: dict[str, Any] = Field(default_factory=dict)
 
-    @validator("zarr_url")
+    @field_validator("zarr_url")
+    @classmethod
     def normalize_path(cls, v: str) -> str:
         return normalize_url(v)
 
 
-class InitTaskOutput(BaseModel, extra=Extra.forbid):
+class InitTaskOutput(BaseModel):
+
+    model_config = ConfigDict(extra="forbid")
 
     parallelization_list: list[InitArgsModel] = Field(default_factory=list)
```
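The `extra=Extra.forbid` class keyword from Pydantic v1 becomes a `model_config = ConfigDict(extra="forbid")` attribute in v2, with unchanged behavior: unknown keys are rejected at validation time. A self-contained illustration (`StrictModel` is a stand-in, not a fractal-server class):

```python
from pydantic import BaseModel
from pydantic import ConfigDict
from pydantic import ValidationError


class StrictModel(BaseModel):
    model_config = ConfigDict(extra="forbid")

    zarr_url: str


StrictModel(zarr_url="/data/plate.zarr")  # ok
try:
    StrictModel(zarr_url="/data/plate.zarr", typo=1)
except ValidationError as e:
    print(e.errors()[0]["type"])  # extra_forbidden
```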
fractal_server/app/schemas/_filter_validators.py:

```diff
@@ -5,22 +5,25 @@ from fractal_server.images.models import AttributeFiltersType
 
 
 def validate_type_filters(
-    type_filters: Optional[dict[str, bool]]
+    cls, type_filters: Optional[dict[str, bool]]
 ) -> dict[str, bool]:
     if type_filters is None:
         raise ValueError("'type_filters' cannot be 'None'.")
 
-    type_filters = valdict_keys("type_filters")(type_filters)
+    type_filters = valdict_keys("type_filters")(cls, type_filters)
     return type_filters
 
 
 def validate_attribute_filters(
+    cls,
     attribute_filters: Optional[AttributeFiltersType],
 ) -> AttributeFiltersType:
     if attribute_filters is None:
         raise ValueError("'attribute_filters' cannot be 'None'.")
 
-    attribute_filters = valdict_keys("attribute_filters")(attribute_filters)
+    attribute_filters = valdict_keys("attribute_filters")(
+        cls, attribute_filters
+    )
     for key, values in attribute_filters.items():
         if values == []:
             raise ValueError(
```
fractal_server/app/schemas/_validators.py:

```diff
@@ -11,7 +11,7 @@ def valstr(attribute: str, accept_none: bool = False):
     If `accept_none`, the validator also accepts `None`.
     """
 
-    def val(string: Optional[str]) -> Optional[str]:
+    def val(cls, string: Optional[str]) -> Optional[str]:
         if string is None:
             if accept_none:
                 return string
@@ -28,14 +28,16 @@ def valstr(attribute: str, accept_none: bool = False):
 
 
 def valdict_keys(attribute: str):
-    def val(d: Optional[dict[str, Any]]) -> Optional[dict[str, Any]]:
+    def val(cls, d: Optional[dict[str, Any]]) -> Optional[dict[str, Any]]:
         """
         Apply valstr to every key of the dictionary, and fail if there are
         identical keys.
         """
         if d is not None:
             old_keys = list(d.keys())
-            new_keys = [valstr(f"{attribute}[{key}]")(key) for key in old_keys]
+            new_keys = [
+                valstr(f"{attribute}[{key}]")(cls, key) for key in old_keys
+            ]
             if len(new_keys) != len(set(new_keys)):
                 raise ValueError(
                     f"Dictionary contains multiple identical keys: '{d}'."
@@ -53,7 +55,7 @@ def val_absolute_path(attribute: str, accept_none: bool = False):
     Check that a string attribute is an absolute path
     """
 
-    def val(string: Optional[str]) -> Optional[str]:
+    def val(cls, string: Optional[str]) -> Optional[str]:
         if string is None:
             if accept_none:
                 return string
@@ -75,7 +77,7 @@ def val_absolute_path(attribute: str, accept_none: bool = False):
 
 
 def val_unique_list(attribute: str):
-    def val(must_be_unique: Optional[list]) -> Optional[list]:
+    def val(cls, must_be_unique: Optional[list]) -> Optional[list]:
         if must_be_unique is not None:
             if len(set(must_be_unique)) != len(must_be_unique):
                 raise ValueError(f"`{attribute}` list has repetitions")
```
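The `cls` parameter added to every inner `val` function is what allows these factories to be registered via `field_validator(...)(classmethod(...))` in Pydantic v2, and it is also why direct calls elsewhere in this diff now pass `cls` explicitly. A minimal sketch of the pattern (the `val_upper` factory is invented for illustration; the attachment line mirrors the schema changes below):

```python
from pydantic import BaseModel
from pydantic import field_validator


def val_upper(attribute: str):
    # Factory returning a reusable validator; `cls` is required because
    # the result gets wrapped in classmethod() at attachment time.
    def val(cls, value: str) -> str:
        if not value.isupper():
            raise ValueError(f"'{attribute}' must be upper-case")
        return value

    return val


class Example(BaseModel):
    code: str

    # Pydantic v2 attachment pattern used throughout this release;
    # v1's validator("code", allow_reuse=True)(...) is gone.
    _code = field_validator("code")(classmethod(val_upper("code")))


print(Example(code="ABC").code)  # ABC
```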
fractal_server/app/schemas/user.py:

```diff
@@ -2,9 +2,10 @@ from typing import Optional
 
 from fastapi_users import schemas
 from pydantic import BaseModel
-from pydantic import Extra
+from pydantic import ConfigDict
 from pydantic import Field
-from pydantic import validator
+from pydantic import field_validator
+from pydantic import ValidationInfo
 
 from ._validators import val_unique_list
 from ._validators import valstr
@@ -41,12 +42,12 @@ class UserRead(schemas.BaseUser[int]):
         username:
     """
 
-    username: Optional[str]
+    username: Optional[str] = None
     group_ids_names: Optional[list[tuple[int, str]]] = None
     oauth_accounts: list[OAuthAccountRead]
 
 
-class UserUpdate(schemas.BaseUserUpdate, extra=Extra.forbid):
+class UserUpdate(schemas.BaseUserUpdate):
     """
     Schema for `User` update.
 
@@ -54,33 +55,35 @@ class UserUpdate(schemas.BaseUserUpdate, extra=Extra.forbid):
         username:
     """
 
-    username: Optional[str]
+    model_config = ConfigDict(extra="forbid")
+
+    username: Optional[str] = None
 
     # Validators
-    _username = validator("username", allow_reuse=True)(valstr("username"))
+    _username = field_validator("username")(classmethod(valstr("username")))
 
-    @validator(
+    @field_validator(
         "is_active",
         "is_verified",
         "is_superuser",
         "email",
         "password",
-        always=False,
     )
-    def cant_set_none(cls, v, field):
+    @classmethod
+    def cant_set_none(cls, v, info: ValidationInfo):
         if v is None:
-            raise ValueError(f"Cannot set {field.name}=None")
+            raise ValueError(f"Cannot set {info.field_name}=None")
         return v
 
 
-class UserUpdateStrict(BaseModel, extra=Extra.forbid):
+class UserUpdateStrict(BaseModel):
     """
     Schema for `User` self-editing.
 
     Attributes:
     """
 
-    pass
+    model_config = ConfigDict(extra="forbid")
 
 
 class UserCreate(schemas.BaseUserCreate):
@@ -91,21 +94,23 @@ class UserCreate(schemas.BaseUserCreate):
         username:
     """
 
-    username: Optional[str]
+    username: Optional[str] = None
 
     # Validators
 
-    _username = validator("username", allow_reuse=True)(valstr("username"))
+    _username = field_validator("username")(classmethod(valstr("username")))
 
 
-class UserUpdateGroups(BaseModel, extra=Extra.forbid):
+class UserUpdateGroups(BaseModel):
     """
     Schema for `POST /auth/users/{user_id}/set-groups/`
 
     """
 
-    group_ids: list[int] = Field(min_items=1)
+    model_config = ConfigDict(extra="forbid")
+
+    group_ids: list[int] = Field(min_length=1)
 
-    _group_ids = validator("group_ids", allow_reuse=True)(
-        val_unique_list("group_ids")
+    _group_ids = field_validator("group_ids")(
+        classmethod(val_unique_list("group_ids"))
     )
```
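In Pydantic v1 the shared validator read the field name from the injected `field` argument (with `always=False` spelled out); in v2 that information travels in a `ValidationInfo` object, and validators skip unset fields by default. A self-contained illustration of the `cant_set_none` pattern (`Update` is a stand-in model):

```python
from typing import Optional

from pydantic import BaseModel
from pydantic import ValidationError
from pydantic import ValidationInfo
from pydantic import field_validator


class Update(BaseModel):
    email: Optional[str] = None
    password: Optional[str] = None

    @field_validator("email", "password")
    @classmethod
    def cant_set_none(cls, v, info: ValidationInfo):
        if v is None:
            raise ValueError(f"Cannot set {info.field_name}=None")
        return v


Update()  # ok: omitted fields keep their defaults, the validator is skipped
try:
    Update(email=None)  # an explicit None is rejected
except ValidationError as e:
    print(e.error_count())  # 1
```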
fractal_server/app/schemas/user_group.py:

```diff
@@ -2,9 +2,11 @@ from datetime import datetime
 from typing import Optional
 
 from pydantic import BaseModel
-from pydantic import Extra
+from pydantic import ConfigDict
 from pydantic import Field
-from pydantic import validator
+from pydantic import field_serializer
+from pydantic import field_validator
+from pydantic.types import AwareDatetime
 
 from ._validators import val_absolute_path
 from ._validators import val_unique_list
@@ -32,12 +34,16 @@ class UserGroupRead(BaseModel):
 
     id: int
     name: str
-    timestamp_created: datetime
+    timestamp_created: AwareDatetime
     user_ids: Optional[list[int]] = None
     viewer_paths: list[str]
 
+    @field_serializer("timestamp_created")
+    def serialize_datetime(v: datetime) -> str:
+        return v.isoformat()
 
-class UserGroupCreate(BaseModel, extra=Extra.forbid):
+
+class UserGroupCreate(BaseModel):
     """
     Schema for `UserGroup` creation
 
@@ -45,27 +51,35 @@ class UserGroupCreate(BaseModel, extra=Extra.forbid):
         name: Group name
     """
 
+    model_config = ConfigDict(extra="forbid")
+
     name: str
     viewer_paths: list[str] = Field(default_factory=list)
 
-    @validator("viewer_paths")
+    @field_validator("viewer_paths")
+    @classmethod
     def viewer_paths_validator(cls, value):
         for i, path in enumerate(value):
-            value[i] = val_absolute_path(f"viewer_paths[{i}]")(path)
-        value = val_unique_list("viewer_paths")(value)
+            value[i] = val_absolute_path(f"viewer_paths[{i}]")(cls, path)
+        value = val_unique_list("viewer_paths")(cls, value)
         return value
 
 
-class UserGroupUpdate(BaseModel, extra=Extra.forbid):
+class UserGroupUpdate(BaseModel):
     """
     Schema for `UserGroup` update
     """
 
+    model_config = ConfigDict(extra="forbid")
+
     viewer_paths: Optional[list[str]] = None
 
-    @validator("viewer_paths")
+    @field_validator("viewer_paths")
+    @classmethod
     def viewer_paths_validator(cls, value):
+        if value is None:
+            raise ValueError("Cannot set `viewer_paths=None`.")
         for i, path in enumerate(value):
-            value[i] = val_absolute_path(f"viewer_paths[{i}]")(path)
-        value = val_unique_list("viewer_paths")(value)
+            value[i] = val_absolute_path(f"viewer_paths[{i}]")(cls, path)
+        value = val_unique_list("viewer_paths")(cls, value)
         return value
```
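Timestamps move from plain `datetime` to `AwareDatetime`, which rejects naive values at validation time, and the new `field_serializer` renders them as ISO-8601 strings on dump. A self-contained sketch mirroring `UserGroupRead` (`GroupRead` is a stand-in):

```python
from datetime import datetime
from datetime import timezone

from pydantic import BaseModel
from pydantic import field_serializer
from pydantic.types import AwareDatetime


class GroupRead(BaseModel):
    timestamp_created: AwareDatetime

    @field_serializer("timestamp_created")
    def serialize_datetime(v: datetime) -> str:
        return v.isoformat()


g = GroupRead(timestamp_created=datetime(2025, 1, 1, tzinfo=timezone.utc))
print(g.model_dump())  # {'timestamp_created': '2025-01-01T00:00:00+00:00'}
```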
fractal_server/app/schemas/user_settings.py:

```diff
@@ -1,8 +1,8 @@
 from typing import Optional
 
 from pydantic import BaseModel
-from pydantic import Extra
-from pydantic import validator
+from pydantic import ConfigDict
+from pydantic import field_validator
 from pydantic.types import StrictStr
 
 from ._validators import val_absolute_path
@@ -41,11 +41,13 @@ class UserSettingsReadStrict(BaseModel):
     project_dir: Optional[str] = None
 
 
-class UserSettingsUpdate(BaseModel, extra=Extra.forbid):
+class UserSettingsUpdate(BaseModel):
     """
     Schema reserved for superusers
     """
 
+    model_config = ConfigDict(extra="forbid")
+
     ssh_host: Optional[str] = None
     ssh_username: Optional[str] = None
     ssh_private_key_path: Optional[str] = None
@@ -55,46 +57,51 @@ class UserSettingsUpdate(BaseModel, extra=Extra.forbid):
     slurm_accounts: Optional[list[StrictStr]] = None
     project_dir: Optional[str] = None
 
-    _ssh_host = validator("ssh_host", allow_reuse=True)(
-        valstr("ssh_host", accept_none=True)
+    _ssh_host = field_validator("ssh_host")(
+        classmethod(valstr("ssh_host", accept_none=True))
+    )
+    _ssh_username = field_validator("ssh_username")(
+        classmethod(valstr("ssh_username", accept_none=True))
     )
-    _ssh_username = validator("ssh_username", allow_reuse=True)(
-        valstr("ssh_username", accept_none=True)
+    _ssh_private_key_path = field_validator("ssh_private_key_path")(
+        classmethod(
+            val_absolute_path("ssh_private_key_path", accept_none=True)
+        )
     )
-    _ssh_private_key_path = validator(
-        "ssh_private_key_path", allow_reuse=True
-    )(val_absolute_path("ssh_private_key_path", accept_none=True))
 
-    _ssh_tasks_dir = validator("ssh_tasks_dir", allow_reuse=True)(
-        val_absolute_path("ssh_tasks_dir", accept_none=True)
+    _ssh_tasks_dir = field_validator("ssh_tasks_dir")(
+        classmethod(val_absolute_path("ssh_tasks_dir", accept_none=True))
     )
-    _ssh_jobs_dir = validator("ssh_jobs_dir", allow_reuse=True)(
-        val_absolute_path("ssh_jobs_dir", accept_none=True)
+    _ssh_jobs_dir = field_validator("ssh_jobs_dir")(
+        classmethod(val_absolute_path("ssh_jobs_dir", accept_none=True))
     )
 
-    _slurm_user = validator("slurm_user", allow_reuse=True)(
-        valstr("slurm_user", accept_none=True)
+    _slurm_user = field_validator("slurm_user")(
+        classmethod(valstr("slurm_user", accept_none=True))
     )
 
-    @validator("slurm_accounts")
+    @field_validator("slurm_accounts")
+    @classmethod
     def slurm_accounts_validator(cls, value):
         if value is None:
             return value
         for i, item in enumerate(value):
-            value[i] = valstr(f"slurm_accounts[{i}]")(item)
-        return val_unique_list("slurm_accounts")(value)
+            value[i] = valstr(f"slurm_accounts[{i}]")(cls, item)
+        return val_unique_list("slurm_accounts")(cls, value)
 
-    @validator("project_dir")
+    @field_validator("project_dir")
+    @classmethod
     def project_dir_validator(cls, value):
         if value is None:
             return None
         validate_cmd(value)
-        return val_absolute_path("project_dir")(value)
+        return val_absolute_path("project_dir")(cls, value)
 
 
-class UserSettingsUpdateStrict(BaseModel, extra=Extra.forbid):
+class UserSettingsUpdateStrict(BaseModel):
+    model_config = ConfigDict(extra="forbid")
     slurm_accounts: Optional[list[StrictStr]] = None
 
-    _slurm_accounts = validator("slurm_accounts", allow_reuse=True)(
-        val_unique_list("slurm_accounts")
+    _slurm_accounts = field_validator("slurm_accounts")(
+        classmethod(val_unique_list("slurm_accounts"))
     )
```
fractal_server/app/schemas/v2/accounting.py (new file):

```diff
@@ -0,0 +1,18 @@
+from datetime import datetime
+
+from pydantic import BaseModel
+from pydantic import field_serializer
+from pydantic.types import AwareDatetime
+
+
+class AccountingRecordRead(BaseModel):
+
+    id: int
+    user_id: int
+    timestamp: AwareDatetime
+    num_tasks: int
+    num_new_images: int
+
+    @field_serializer("timestamp")
+    def serialize_datetime(v: datetime) -> str:
+        return v.isoformat()
```