fractal-server 1.4.6__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff compares two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- fractal_server/__init__.py +1 -1
- fractal_server/app/db/__init__.py +0 -1
- fractal_server/app/models/__init__.py +6 -8
- fractal_server/app/models/linkuserproject.py +9 -0
- fractal_server/app/models/security.py +6 -0
- fractal_server/app/models/v1/__init__.py +12 -0
- fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
- fractal_server/app/models/{job.py → v1/job.py} +5 -5
- fractal_server/app/models/{project.py → v1/project.py} +5 -5
- fractal_server/app/models/{state.py → v1/state.py} +2 -2
- fractal_server/app/models/{task.py → v1/task.py} +7 -2
- fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
- fractal_server/app/models/v2/__init__.py +22 -0
- fractal_server/app/models/v2/collection_state.py +21 -0
- fractal_server/app/models/v2/dataset.py +54 -0
- fractal_server/app/models/v2/job.py +51 -0
- fractal_server/app/models/v2/project.py +30 -0
- fractal_server/app/models/v2/task.py +93 -0
- fractal_server/app/models/v2/workflow.py +35 -0
- fractal_server/app/models/v2/workflowtask.py +49 -0
- fractal_server/app/routes/admin/__init__.py +0 -0
- fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
- fractal_server/app/routes/admin/v2.py +309 -0
- fractal_server/app/routes/api/v1/__init__.py +7 -7
- fractal_server/app/routes/api/v1/_aux_functions.py +8 -8
- fractal_server/app/routes/api/v1/dataset.py +48 -41
- fractal_server/app/routes/api/v1/job.py +14 -14
- fractal_server/app/routes/api/v1/project.py +30 -27
- fractal_server/app/routes/api/v1/task.py +26 -16
- fractal_server/app/routes/api/v1/task_collection.py +28 -16
- fractal_server/app/routes/api/v1/workflow.py +28 -28
- fractal_server/app/routes/api/v1/workflowtask.py +11 -11
- fractal_server/app/routes/api/v2/__init__.py +34 -0
- fractal_server/app/routes/api/v2/_aux_functions.py +502 -0
- fractal_server/app/routes/api/v2/dataset.py +293 -0
- fractal_server/app/routes/api/v2/images.py +279 -0
- fractal_server/app/routes/api/v2/job.py +200 -0
- fractal_server/app/routes/api/v2/project.py +186 -0
- fractal_server/app/routes/api/v2/status.py +150 -0
- fractal_server/app/routes/api/v2/submit.py +210 -0
- fractal_server/app/routes/api/v2/task.py +222 -0
- fractal_server/app/routes/api/v2/task_collection.py +239 -0
- fractal_server/app/routes/api/v2/task_legacy.py +59 -0
- fractal_server/app/routes/api/v2/workflow.py +380 -0
- fractal_server/app/routes/api/v2/workflowtask.py +265 -0
- fractal_server/app/routes/aux/_job.py +2 -2
- fractal_server/app/runner/__init__.py +0 -379
- fractal_server/app/runner/async_wrap.py +27 -0
- fractal_server/app/runner/components.py +5 -0
- fractal_server/app/runner/exceptions.py +129 -0
- fractal_server/app/runner/executors/__init__.py +0 -0
- fractal_server/app/runner/executors/slurm/__init__.py +3 -0
- fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
- fractal_server/app/runner/executors/slurm/_check_jobs_status.py +72 -0
- fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +3 -4
- fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
- fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +42 -1
- fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +46 -27
- fractal_server/app/runner/filenames.py +6 -0
- fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
- fractal_server/app/runner/task_files.py +103 -0
- fractal_server/app/runner/v1/__init__.py +366 -0
- fractal_server/app/runner/{_common.py → v1/_common.py} +56 -111
- fractal_server/app/runner/{_local → v1/_local}/__init__.py +5 -4
- fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
- fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
- fractal_server/app/runner/v1/_slurm/__init__.py +312 -0
- fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +5 -11
- fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
- fractal_server/app/runner/v1/common.py +117 -0
- fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
- fractal_server/app/runner/v2/__init__.py +336 -0
- fractal_server/app/runner/v2/_local/__init__.py +162 -0
- fractal_server/app/runner/v2/_local/_local_config.py +118 -0
- fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
- fractal_server/app/runner/v2/_local/executor.py +100 -0
- fractal_server/app/runner/{_slurm → v2/_slurm}/__init__.py +38 -47
- fractal_server/app/runner/v2/_slurm/_submit_setup.py +82 -0
- fractal_server/app/runner/v2/_slurm/get_slurm_config.py +182 -0
- fractal_server/app/runner/v2/deduplicate_list.py +23 -0
- fractal_server/app/runner/v2/handle_failed_job.py +165 -0
- fractal_server/app/runner/v2/merge_outputs.py +38 -0
- fractal_server/app/runner/v2/runner.py +343 -0
- fractal_server/app/runner/v2/runner_functions.py +374 -0
- fractal_server/app/runner/v2/runner_functions_low_level.py +130 -0
- fractal_server/app/runner/v2/task_interface.py +62 -0
- fractal_server/app/runner/v2/v1_compat.py +31 -0
- fractal_server/app/schemas/__init__.py +1 -42
- fractal_server/app/schemas/_validators.py +28 -5
- fractal_server/app/schemas/v1/__init__.py +36 -0
- fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
- fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
- fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
- fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
- fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
- fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
- fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
- fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
- fractal_server/app/schemas/v2/__init__.py +37 -0
- fractal_server/app/schemas/v2/dataset.py +126 -0
- fractal_server/app/schemas/v2/dumps.py +87 -0
- fractal_server/app/schemas/v2/job.py +114 -0
- fractal_server/app/schemas/v2/manifest.py +159 -0
- fractal_server/app/schemas/v2/project.py +34 -0
- fractal_server/app/schemas/v2/status.py +16 -0
- fractal_server/app/schemas/v2/task.py +151 -0
- fractal_server/app/schemas/v2/task_collection.py +109 -0
- fractal_server/app/schemas/v2/workflow.py +79 -0
- fractal_server/app/schemas/v2/workflowtask.py +208 -0
- fractal_server/config.py +13 -10
- fractal_server/images/__init__.py +4 -0
- fractal_server/images/models.py +136 -0
- fractal_server/images/tools.py +84 -0
- fractal_server/main.py +11 -3
- fractal_server/migrations/env.py +0 -2
- fractal_server/migrations/versions/5bf02391cfef_v2.py +245 -0
- fractal_server/tasks/__init__.py +0 -5
- fractal_server/tasks/endpoint_operations.py +13 -19
- fractal_server/tasks/utils.py +35 -0
- fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
- fractal_server/tasks/v1/__init__.py +0 -0
- fractal_server/tasks/{background_operations.py → v1/background_operations.py} +20 -52
- fractal_server/tasks/v1/get_collection_data.py +14 -0
- fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
- fractal_server/tasks/v2/__init__.py +0 -0
- fractal_server/tasks/v2/background_operations.py +381 -0
- fractal_server/tasks/v2/get_collection_data.py +14 -0
- fractal_server/urls.py +13 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/METADATA +11 -12
- fractal_server-2.0.0.dist-info/RECORD +169 -0
- fractal_server/app/runner/_slurm/.gitignore +0 -2
- fractal_server/app/runner/common.py +0 -307
- fractal_server/app/schemas/json_schemas/manifest.json +0 -81
- fractal_server-1.4.6.dist-info/RECORD +0 -97
- /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
- /fractal_server/app/runner/{_local → v1/_local}/executor.py +0 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/LICENSE +0 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/WHEEL +0 -0
- {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/entry_points.txt +0 -0
fractal_server/app/schemas/v2/manifest.py
@@ -0,0 +1,159 @@
+from typing import Any
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import HttpUrl
+from pydantic import root_validator
+from pydantic import validator
+
+
+class TaskManifestV2(BaseModel):
+    """
+    Represents a task within a V2 manifest.
+
+    Attributes:
+        name:
+            The task name
+        executable:
+            Path to the executable relative to the package root
+
+            Note: by package root we mean "as it will be installed". If a
+            package `Pkg` installs in the folder `pkg` the executable
+            `pkg/executable.py`, this attribute must contain only
+            `executable.py`.
+        input_type:
+            The input type accepted by the task
+        output_type:
+            The output type returned by the task
+        meta:
+            Additional information about the package, such as hash of the
+            executable, specific runtime requirements (e.g., need_gpu=True),
+            etc.
+        args_schema:
+            JSON Schema for task arguments
+        docs_info:
+            Additional information about the Task, coming from the docstring.
+        docs_link:
+            Link to Task docs.
+    """
+
+    name: str
+    executable_non_parallel: Optional[str] = None
+    executable_parallel: Optional[str] = None
+    input_types: dict[str, bool] = Field(default_factory=dict)
+    output_types: dict[str, bool] = Field(default_factory=dict)
+    meta_non_parallel: dict[str, Any] = Field(default_factory=dict)
+    meta_parallel: dict[str, Any] = Field(default_factory=dict)
+    args_schema_non_parallel: Optional[dict[str, Any]] = None
+    args_schema_parallel: Optional[dict[str, Any]] = None
+    docs_info: Optional[str] = None
+    docs_link: Optional[HttpUrl] = None
+
+    @root_validator
+    def validate_executable_args_meta(cls, values):
+
+        executable_non_parallel = values.get("executable_non_parallel")
+        executable_parallel = values.get("executable_parallel")
+        if (executable_non_parallel is None) and (executable_parallel is None):
+
+            raise ValueError(
+                "`TaskManifestV2.executable_non_parallel` and "
+                "`TaskManifestV2.executable_parallel` cannot be both None."
+            )
+
+        elif executable_non_parallel is None:
+
+            meta_non_parallel = values.get("meta_non_parallel")
+            if meta_non_parallel != {}:
+                raise ValueError(
+                    "`TaskManifestV2.meta_non_parallel` must be an empty dict "
+                    "if `TaskManifestV2.executable_non_parallel` is None. "
+                    f"Given: {meta_non_parallel}."
+                )
+
+            args_schema_non_parallel = values.get("args_schema_non_parallel")
+            if args_schema_non_parallel is not None:
+                raise ValueError(
+                    "`TaskManifestV2.args_schema_non_parallel` must be None "
+                    "if `TaskManifestV2.executable_non_parallel` is None. "
+                    f"Given: {args_schema_non_parallel}."
+                )
+
+        elif executable_parallel is None:
+
+            meta_parallel = values.get("meta_parallel")
+            if meta_parallel != {}:
+                raise ValueError(
+                    "`TaskManifestV2.meta_parallel` must be an empty dict if "
+                    "`TaskManifestV2.executable_parallel` is None. "
+                    f"Given: {meta_parallel}."
+                )
+
+            args_schema_parallel = values.get("args_schema_parallel")
+            if args_schema_parallel is not None:
+                raise ValueError(
+                    "`TaskManifestV2.args_schema_parallel` must be None if "
+                    "`TaskManifestV2.executable_parallel` is None. "
+                    f"Given: {args_schema_parallel}."
+                )
+
+        return values
+
+
+class ManifestV2(BaseModel):
+    """
+    Packages containing tasks are required to include a special file
+    `__FRACTAL_MANIFEST__.json` in order to be discovered and used by Fractal.
+
+    This model class and the model classes it depends on provide the base
+    schema to read, write and validate manifests.
+
+    Attributes:
+        manifest_version:
+            A version string that provides indication for compatibility between
+            manifests as the schema evolves. This is for instance used by
+            Fractal to determine which subclass of the present base class needs
+            to be used to read and validate the input.
+        task_list : list[TaskManifestType]
+            The list of tasks, represented as specified by subclasses of the
+            _TaskManifestBase (a.k.a. TaskManifestType)
+        has_args_schemas:
+            `True` if the manifest includes JSON Schemas for the arguments of
+            each task.
+        args_schema_version:
+            Label of how `args_schema`s were generated (e.g. `pydantic_v1`).
+    """
+
+    manifest_version: str
+    task_list: list[TaskManifestV2]
+    has_args_schemas: bool = False
+    args_schema_version: Optional[str]
+
+    @root_validator()
+    def _check_args_schemas_are_present(cls, values):
+        has_args_schemas = values["has_args_schemas"]
+        task_list = values["task_list"]
+        if has_args_schemas is True:
+            for task in task_list:
+                if task.executable_parallel is not None:
+                    if task.args_schema_parallel is None:
+                        raise ValueError(
+                            f"Manifest has {has_args_schemas=}, but "
+                            f"task '{task.name}' has "
+                            f"{task.args_schema_parallel=}."
+                        )
+                if task.executable_non_parallel is not None:
+                    if task.args_schema_non_parallel is None:
+                        raise ValueError(
+                            f"Manifest has {has_args_schemas=}, but "
+                            f"task '{task.name}' has "
+                            f"{task.args_schema_non_parallel=}."
+                        )
+        return values
+
+    @validator("manifest_version")
+    def manifest_version_2(cls, value):
+        if value != "2":
+            raise ValueError(f"Wrong manifest version (given {value})")
+        return value
fractal_server/app/schemas/v2/project.py
@@ -0,0 +1,34 @@
+from datetime import datetime
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Extra
+from pydantic import validator
+
+from .._validators import valstr
+from .._validators import valutc
+
+
+class ProjectCreateV2(BaseModel, extra=Extra.forbid):
+
+    name: str
+    # Validators
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+
+
+class ProjectReadV2(BaseModel):
+
+    id: int
+    name: str
+    timestamp_created: datetime
+    # Validators
+    _timestamp_created = validator("timestamp_created", allow_reuse=True)(
+        valutc("timestamp_created")
+    )
+
+
+class ProjectUpdateV2(BaseModel):
+
+    name: Optional[str]
+    # Validators
+    _name = validator("name", allow_reuse=True)(valstr("name"))
fractal_server/app/schemas/v2/status.py
@@ -0,0 +1,16 @@
+from pydantic import BaseModel
+from pydantic import Field
+
+from .workflowtask import WorkflowTaskStatusTypeV2
+
+
+class StatusReadV2(BaseModel):
+    """
+    Response type for the
+    `/project/{project_id}/status/` endpoint
+    """
+
+    status: dict[
+        str,
+        WorkflowTaskStatusTypeV2,
+    ] = Field(default_factory=dict)
fractal_server/app/schemas/v2/task.py
@@ -0,0 +1,151 @@
+from typing import Any
+from typing import Literal
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Extra
+from pydantic import Field
+from pydantic import HttpUrl
+from pydantic import root_validator
+from pydantic import validator
+
+from .._validators import valdictkeys
+from .._validators import valstr
+from ..v1.task import TaskReadV1
+
+
+class TaskCreateV2(BaseModel, extra=Extra.forbid):
+
+    name: str
+
+    command_non_parallel: Optional[str]
+    command_parallel: Optional[str]
+    source: str
+
+    meta_non_parallel: Optional[dict[str, Any]]
+    meta_parallel: Optional[dict[str, Any]]
+    version: Optional[str]
+    args_schema_non_parallel: Optional[dict[str, Any]]
+    args_schema_parallel: Optional[dict[str, Any]]
+    args_schema_version: Optional[str]
+    docs_info: Optional[str]
+    docs_link: Optional[HttpUrl]
+
+    input_types: dict[str, bool] = Field(default={})
+    output_types: dict[str, bool] = Field(default={})
+
+    # Validators
+    @root_validator
+    def validate_commands(cls, values):
+        command_parallel = values.get("command_parallel")
+        command_non_parallel = values.get("command_non_parallel")
+        if (command_parallel is None) and (command_non_parallel is None):
+            raise ValueError(
+                "Task must have at least one valid command "
+                "(parallel and/or non_parallel)"
+            )
+        return values
+
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+    _command_non_parallel = validator(
+        "command_non_parallel", allow_reuse=True
+    )(valstr("command_non_parallel"))
+    _command_parallel = validator("command_parallel", allow_reuse=True)(
+        valstr("command_parallel")
+    )
+    _source = validator("source", allow_reuse=True)(valstr("source"))
+    _version = validator("version", allow_reuse=True)(valstr("version"))
+
+    _meta_non_parallel = validator("meta_non_parallel", allow_reuse=True)(
+        valdictkeys("meta_non_parallel")
+    )
+    _meta_parallel = validator("meta_parallel", allow_reuse=True)(
+        valdictkeys("meta_parallel")
+    )
+    _args_schema_non_parallel = validator(
+        "args_schema_non_parallel", allow_reuse=True
+    )(valdictkeys("args_schema_non_parallel"))
+    _args_schema_parallel = validator(
+        "args_schema_parallel", allow_reuse=True
+    )(valdictkeys("args_schema_parallel"))
+    _args_schema_version = validator("args_schema_version", allow_reuse=True)(
+        valstr("args_schema_version")
+    )
+    _input_types = validator("input_types", allow_reuse=True)(
+        valdictkeys("input_types")
+    )
+    _output_types = validator("output_types", allow_reuse=True)(
+        valdictkeys("output_types")
+    )
+
+
+class TaskReadV2(BaseModel):
+
+    id: int
+    name: str
+    type: Literal["parallel", "non_parallel", "compound"]
+    source: str
+    owner: Optional[str]
+    version: Optional[str]
+
+    command_non_parallel: Optional[str]
+    command_parallel: Optional[str]
+    meta_parallel: dict[str, Any]
+    meta_non_parallel: dict[str, Any]
+    args_schema_non_parallel: Optional[dict[str, Any]] = None
+    args_schema_parallel: Optional[dict[str, Any]] = None
+    args_schema_version: Optional[str]
+    docs_info: Optional[str]
+    docs_link: Optional[HttpUrl]
+    input_types: dict[str, bool]
+    output_types: dict[str, bool]
+
+
+class TaskLegacyReadV2(TaskReadV1):
+    is_v2_compatible: bool
+
+
+class TaskUpdateV2(BaseModel):
+
+    name: Optional[str]
+    version: Optional[str]
+    command_parallel: Optional[str]
+    command_non_parallel: Optional[str]
+    input_types: Optional[dict[str, bool]]
+    output_types: Optional[dict[str, bool]]
+
+    # Validators
+    @validator("input_types", "output_types")
+    def val_is_dict(cls, v):
+        if not isinstance(v, dict):
+            raise ValueError
+        return v
+
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+    _version = validator("version", allow_reuse=True)(
+        valstr("version", accept_none=True)
+    )
+    _command_parallel = validator("command_parallel", allow_reuse=True)(
+        valstr("command_parallel")
+    )
+    _command_non_parallel = validator(
+        "command_non_parallel", allow_reuse=True
+    )(valstr("command_non_parallel"))
+    _input_types = validator("input_types", allow_reuse=True)(
+        valdictkeys("input_types")
+    )
+    _output_types = validator("output_types", allow_reuse=True)(
+        valdictkeys("output_types")
+    )
+
+
+class TaskImportV2(BaseModel):
+
+    source: str
+    _source = validator("source", allow_reuse=True)(valstr("source"))
+
+
+class TaskExportV2(BaseModel):
+
+    source: str
+    _source = validator("source", allow_reuse=True)(valstr("source"))
fractal_server/app/schemas/v2/task_collection.py
@@ -0,0 +1,109 @@
+from pathlib import Path
+from typing import Literal
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Field
+from pydantic import validator
+
+from .._validators import valdictkeys
+from .._validators import valstr
+from .task import TaskReadV2
+
+
+class TaskCollectPipV2(BaseModel):
+    """
+    TaskCollectPipV2 class
+
+    This class only encodes the attributes required to trigger a
+    task-collection operation. Other attributes (that are assigned *during*
+    task collection) are defined as part of fractal-server.
+
+    Two cases are supported:
+
+    1. `package` is the path of a local wheel file;
+    2. `package` is the name of a package that can be installed via `pip`.
+
+
+    Attributes:
+        package:
+            The name of a `pip`-installable package, or the path to a local
+            wheel file.
+        package_version: Version of the package
+        package_extras: Package extras to include in the `pip install` command
+        python_version: Python version to install and run the package tasks
+        pinned_package_versions:
+            dictionary 'package':'version' used to pin versions for specific
+            packages.
+
+    """
+
+    package: str
+    package_version: Optional[str] = None
+    package_extras: Optional[str] = None
+    python_version: Optional[str] = None
+    pinned_package_versions: Optional[dict[str, str]] = None
+
+    _pinned_package_versions = validator(
+        "pinned_package_versions", allow_reuse=True
+    )(valdictkeys("pinned_package_versions"))
+    _package_extras = validator("package_extras", allow_reuse=True)(
+        valstr("package_extras")
+    )
+    _python_version = validator("python_version", allow_reuse=True)(
+        valstr("python_version")
+    )
+
+    @validator("package")
+    def package_validator(cls, value):
+        if "/" in value:
+            if not value.endswith(".whl"):
+                raise ValueError(
+                    "Local-package path must be a wheel file "
+                    f"(given {value})."
+                )
+            if not Path(value).is_absolute():
+                raise ValueError(
+                    f"Local-package path must be absolute: (given {value})."
+                )
+        return value
+
+    @validator("package_version")
+    def package_version_validator(cls, v, values):
+
+        valstr("package_version")(v)
+
+        if values["package"].endswith(".whl"):
+            raise ValueError(
+                "Cannot provide version when package is a Wheel file."
+            )
+        return v
+
+
+class TaskCollectStatusV2(BaseModel):
+    """
+    TaskCollectStatus class
+
+    Attributes:
+        status:
+        package:
+        venv_path:
+        task_list:
+        log:
+        info:
+    """
+
+    status: Literal["pending", "installing", "collecting", "fail", "OK"]
+    package: str
+    venv_path: Path
+    task_list: Optional[list[TaskReadV2]] = Field(default=[])
+    log: Optional[str]
+    info: Optional[str]
+
+    def sanitised_dict(self):
+        """
+        Return `self.dict()` after casting `self.venv_path` to a string
+        """
+        d = self.dict()
+        d["venv_path"] = str(self.venv_path)
+        return d
fractal_server/app/schemas/v2/workflow.py
@@ -0,0 +1,79 @@
+from datetime import datetime
+from typing import Optional
+
+from pydantic import BaseModel
+from pydantic import Extra
+from pydantic import validator
+
+from .._validators import valstr
+from .._validators import valutc
+from .project import ProjectReadV2
+from .workflowtask import WorkflowTaskExportV2
+from .workflowtask import WorkflowTaskImportV2
+from .workflowtask import WorkflowTaskReadV2
+
+
+class WorkflowCreateV2(BaseModel, extra=Extra.forbid):
+
+    name: str
+
+    # Validators
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+
+
+class WorkflowReadV2(BaseModel):
+
+    id: int
+    name: str
+    project_id: int
+    task_list: list[WorkflowTaskReadV2]
+    project: ProjectReadV2
+    timestamp_created: datetime
+
+    _timestamp_created = validator("timestamp_created", allow_reuse=True)(
+        valutc("timestamp_created")
+    )
+
+
+class WorkflowUpdateV2(BaseModel):
+
+    name: Optional[str]
+    reordered_workflowtask_ids: Optional[list[int]]
+
+    # Validators
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+
+    @validator("reordered_workflowtask_ids")
+    def check_positive_and_unique(cls, value):
+        if any(i < 0 for i in value):
+            raise ValueError("Negative `id` in `reordered_workflowtask_ids`")
+        if len(value) != len(set(value)):
+            raise ValueError("`reordered_workflowtask_ids` has repetitions")
+        return value
+
+
+class WorkflowImportV2(BaseModel):
+    """
+    Class for `Workflow` import.
+
+    Attributes:
+        task_list:
+    """
+
+    name: str
+    task_list: list[WorkflowTaskImportV2]
+
+    # Validators
+    _name = validator("name", allow_reuse=True)(valstr("name"))
+
+
+class WorkflowExportV2(BaseModel):
+    """
+    Class for `Workflow` export.
+
+    Attributes:
+        task_list:
+    """
+
+    name: str
+    task_list: list[WorkflowTaskExportV2]