fractal-server 1.4.10__py3-none-any.whl → 2.0.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +4 -7
  3. fractal_server/app/models/linkuserproject.py +9 -0
  4. fractal_server/app/models/security.py +6 -0
  5. fractal_server/app/models/state.py +1 -1
  6. fractal_server/app/models/v1/__init__.py +10 -0
  7. fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
  8. fractal_server/app/models/{job.py → v1/job.py} +5 -5
  9. fractal_server/app/models/{project.py → v1/project.py} +5 -5
  10. fractal_server/app/models/{task.py → v1/task.py} +7 -2
  11. fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
  12. fractal_server/app/models/v2/__init__.py +20 -0
  13. fractal_server/app/models/v2/dataset.py +55 -0
  14. fractal_server/app/models/v2/job.py +51 -0
  15. fractal_server/app/models/v2/project.py +31 -0
  16. fractal_server/app/models/v2/task.py +93 -0
  17. fractal_server/app/models/v2/workflow.py +43 -0
  18. fractal_server/app/models/v2/workflowtask.py +90 -0
  19. fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
  20. fractal_server/app/routes/admin/v2.py +275 -0
  21. fractal_server/app/routes/api/v1/__init__.py +7 -7
  22. fractal_server/app/routes/api/v1/_aux_functions.py +2 -2
  23. fractal_server/app/routes/api/v1/dataset.py +37 -37
  24. fractal_server/app/routes/api/v1/job.py +12 -12
  25. fractal_server/app/routes/api/v1/project.py +23 -21
  26. fractal_server/app/routes/api/v1/task.py +24 -14
  27. fractal_server/app/routes/api/v1/task_collection.py +16 -14
  28. fractal_server/app/routes/api/v1/workflow.py +24 -24
  29. fractal_server/app/routes/api/v1/workflowtask.py +10 -10
  30. fractal_server/app/routes/api/v2/__init__.py +28 -0
  31. fractal_server/app/routes/api/v2/_aux_functions.py +497 -0
  32. fractal_server/app/routes/api/v2/apply.py +220 -0
  33. fractal_server/app/routes/api/v2/dataset.py +310 -0
  34. fractal_server/app/routes/api/v2/images.py +212 -0
  35. fractal_server/app/routes/api/v2/job.py +200 -0
  36. fractal_server/app/routes/api/v2/project.py +205 -0
  37. fractal_server/app/routes/api/v2/task.py +222 -0
  38. fractal_server/app/routes/api/v2/task_collection.py +229 -0
  39. fractal_server/app/routes/api/v2/workflow.py +398 -0
  40. fractal_server/app/routes/api/v2/workflowtask.py +269 -0
  41. fractal_server/app/routes/aux/_job.py +1 -1
  42. fractal_server/app/runner/async_wrap.py +27 -0
  43. fractal_server/app/runner/exceptions.py +129 -0
  44. fractal_server/app/runner/executors/local/__init__.py +3 -0
  45. fractal_server/app/runner/{_local → executors/local}/executor.py +2 -2
  46. fractal_server/app/runner/executors/slurm/__init__.py +3 -0
  47. fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
  48. fractal_server/app/runner/{_slurm → executors/slurm}/_check_jobs_status.py +1 -1
  49. fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +1 -1
  50. fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
  51. fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
  52. fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +9 -9
  53. fractal_server/app/runner/filenames.py +6 -0
  54. fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
  55. fractal_server/app/runner/task_files.py +105 -0
  56. fractal_server/app/runner/{__init__.py → v1/__init__.py} +24 -22
  57. fractal_server/app/runner/{_common.py → v1/_common.py} +13 -120
  58. fractal_server/app/runner/{_local → v1/_local}/__init__.py +6 -6
  59. fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
  60. fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
  61. fractal_server/app/runner/v1/_slurm/__init__.py +310 -0
  62. fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +3 -9
  63. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
  64. fractal_server/app/runner/v1/common.py +117 -0
  65. fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
  66. fractal_server/app/runner/v2/__init__.py +337 -0
  67. fractal_server/app/runner/v2/_local/__init__.py +169 -0
  68. fractal_server/app/runner/v2/_local/_local_config.py +118 -0
  69. fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
  70. fractal_server/app/runner/v2/_slurm/__init__.py +157 -0
  71. fractal_server/app/runner/v2/_slurm/_submit_setup.py +83 -0
  72. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +179 -0
  73. fractal_server/app/runner/v2/components.py +5 -0
  74. fractal_server/app/runner/v2/deduplicate_list.py +24 -0
  75. fractal_server/app/runner/v2/handle_failed_job.py +156 -0
  76. fractal_server/app/runner/v2/merge_outputs.py +41 -0
  77. fractal_server/app/runner/v2/runner.py +264 -0
  78. fractal_server/app/runner/v2/runner_functions.py +339 -0
  79. fractal_server/app/runner/v2/runner_functions_low_level.py +134 -0
  80. fractal_server/app/runner/v2/task_interface.py +43 -0
  81. fractal_server/app/runner/v2/v1_compat.py +21 -0
  82. fractal_server/app/schemas/__init__.py +4 -42
  83. fractal_server/app/schemas/v1/__init__.py +42 -0
  84. fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
  85. fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
  86. fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
  87. fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
  88. fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
  89. fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
  90. fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
  91. fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
  92. fractal_server/app/schemas/v2/__init__.py +34 -0
  93. fractal_server/app/schemas/v2/dataset.py +88 -0
  94. fractal_server/app/schemas/v2/dumps.py +87 -0
  95. fractal_server/app/schemas/v2/job.py +113 -0
  96. fractal_server/app/schemas/v2/manifest.py +109 -0
  97. fractal_server/app/schemas/v2/project.py +36 -0
  98. fractal_server/app/schemas/v2/task.py +121 -0
  99. fractal_server/app/schemas/v2/task_collection.py +105 -0
  100. fractal_server/app/schemas/v2/workflow.py +78 -0
  101. fractal_server/app/schemas/v2/workflowtask.py +118 -0
  102. fractal_server/config.py +5 -4
  103. fractal_server/images/__init__.py +50 -0
  104. fractal_server/images/tools.py +86 -0
  105. fractal_server/main.py +11 -3
  106. fractal_server/migrations/versions/4b35c5cefbe3_tmp_is_v2_compatible.py +39 -0
  107. fractal_server/migrations/versions/56af171b0159_v2.py +217 -0
  108. fractal_server/migrations/versions/876f28db9d4e_tmp_split_task_and_wftask_meta.py +68 -0
  109. fractal_server/migrations/versions/974c802f0dd0_tmp_workflowtaskv2_type_in_db.py +37 -0
  110. fractal_server/migrations/versions/9cd305cd6023_tmp_workflowtaskv2.py +40 -0
  111. fractal_server/migrations/versions/a6231ed6273c_tmp_args_schemas_in_taskv2.py +42 -0
  112. fractal_server/migrations/versions/b9e9eed9d442_tmp_taskv2_type.py +37 -0
  113. fractal_server/migrations/versions/e3e639454d4b_tmp_make_task_meta_non_optional.py +50 -0
  114. fractal_server/tasks/__init__.py +0 -5
  115. fractal_server/tasks/endpoint_operations.py +13 -19
  116. fractal_server/tasks/utils.py +35 -0
  117. fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
  118. fractal_server/tasks/{background_operations.py → v1/background_operations.py} +18 -50
  119. fractal_server/tasks/v1/get_collection_data.py +14 -0
  120. fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
  121. fractal_server/tasks/v2/background_operations.py +382 -0
  122. fractal_server/tasks/v2/get_collection_data.py +14 -0
  123. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/METADATA +1 -1
  124. fractal_server-2.0.0a0.dist-info/RECORD +166 -0
  125. fractal_server/app/runner/_slurm/.gitignore +0 -2
  126. fractal_server/app/runner/_slurm/__init__.py +0 -150
  127. fractal_server/app/runner/common.py +0 -311
  128. fractal_server-1.4.10.dist-info/RECORD +0 -98
  129. /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
  130. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/LICENSE +0 -0
  131. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/WHEEL +0 -0
  132. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,109 @@
1
+ from typing import Any
2
+ from typing import Optional
3
+
4
+ from pydantic import BaseModel
5
+ from pydantic import Field
6
+ from pydantic import HttpUrl
7
+ from pydantic import root_validator
8
+ from pydantic import validator
9
+
10
+
11
class TaskManifestV2(BaseModel):
    """
    Represents a task within a V2 manifest.

    Attributes:
        name:
            The task name
        executable_non_parallel:
            Path to the non-parallel executable, relative to the package
            root.

            Note: by package root we mean "as it will be installed". If a
            package `Pkg` installs in the folder `pkg` the executable
            `pkg/executable.py`, this attribute must contain only
            `executable.py`.
        executable_parallel:
            Path to the parallel executable, relative to the package root
            (same convention as `executable_non_parallel`).
        input_types:
            The input types accepted by the task, as a `name: flag` mapping.
        output_types:
            The output types set by the task, as a `name: flag` mapping.
        meta_non_parallel:
            Additional information about the non-parallel part of the task,
            such as specific runtime requirements (e.g., need_gpu=True).
        meta_parallel:
            Additional information about the parallel part of the task.
        args_schema_non_parallel:
            JSON Schema for the arguments of the non-parallel executable.
        args_schema_parallel:
            JSON Schema for the arguments of the parallel executable.
        docs_info:
            Additional information about the Task, coming from the docstring.
        docs_link:
            Link to Task docs.
    """

    name: str
    executable_non_parallel: Optional[str] = None
    executable_parallel: Optional[str] = None
    input_types: dict[str, bool] = Field(default_factory=dict)
    output_types: dict[str, bool] = Field(default_factory=dict)
    meta_parallel: dict[str, Any] = Field(default_factory=dict)
    meta_non_parallel: dict[str, Any] = Field(default_factory=dict)
    args_schema_non_parallel: Optional[dict[str, Any]] = None
    args_schema_parallel: Optional[dict[str, Any]] = None
    docs_info: Optional[str] = None
    docs_link: Optional[HttpUrl] = None
52
+
53
+
54
class ManifestV2(BaseModel):
    """
    Packages containing tasks are required to include a special file
    `__FRACTAL_MANIFEST__.json` in order to be discovered and used by Fractal.

    This model class and the model classes it depends on provide the base
    schema to read, write and validate manifests.

    Attributes:
        manifest_version:
            A version string that provides indication for compatibility
            between manifests as the schema evolves. This is for instance
            used by Fractal to determine which subclass of the present base
            class needs be used to read and validate the input.
        task_list:
            The list of tasks, represented as specified by `TaskManifestV2`.
        has_args_schemas:
            `True` if the manifest includes JSON Schemas for the arguments of
            each task.
        args_schema_version:
            Label of how `args_schema`s were generated (e.g. `pydantic_v1`).
    """

    manifest_version: str
    task_list: list[TaskManifestV2]
    has_args_schemas: bool = False
    args_schema_version: Optional[str]

    @root_validator()
    def _check_args_schemas_are_present(cls, values):
        """
        If `has_args_schemas` is set, require that each executable defined by
        a task has the corresponding args schema.
        """
        # Use `.get`: a key is absent from `values` when its own field-level
        # validation already failed, and direct indexing would then mask the
        # original error with a KeyError.
        has_args_schemas = values.get("has_args_schemas")
        task_list = values.get("task_list")
        if has_args_schemas is True and task_list is not None:
            for task in task_list:
                if task.executable_parallel is not None:
                    if task.args_schema_parallel is None:
                        raise ValueError(
                            f"Manifest has {has_args_schemas=}, but "
                            f"task '{task.name}' has "
                            f"{task.args_schema_parallel=}."
                        )
                if task.executable_non_parallel is not None:
                    if task.args_schema_non_parallel is None:
                        raise ValueError(
                            f"Manifest has {has_args_schemas=}, but "
                            f"task '{task.name}' has "
                            f"{task.args_schema_non_parallel=}."
                        )
        return values

    @validator("manifest_version")
    def manifest_version_2(cls, value):
        """Only accept manifest version `"2"`."""
        if value != "2":
            raise ValueError(f"Wrong manifest version (given {value})")
        return value
@@ -0,0 +1,36 @@
1
+ from datetime import datetime
2
+ from typing import Optional
3
+
4
+ from pydantic import BaseModel
5
+ from pydantic import validator
6
+
7
+ from .._validators import valstr
8
+ from .._validators import valutc
9
+
10
+
11
class ProjectCreateV2(BaseModel):
    """
    Request body for creating a V2 project.

    `name` is checked by the shared `valstr` validator (see `_validators`).
    """

    name: str
    read_only: bool = False
    # Validators
    _name = validator("name", allow_reuse=True)(valstr("name"))
17
+
18
+
19
class ProjectReadV2(BaseModel):
    """
    Response model for a V2 project.

    `timestamp_created` goes through the shared `valutc` validator
    (presumably enforcing/normalizing UTC — see `_validators`).
    """

    id: int
    name: str
    read_only: bool
    timestamp_created: datetime
    # Validators
    _timestamp_created = validator("timestamp_created", allow_reuse=True)(
        valutc("timestamp_created")
    )
29
+
30
+
31
class ProjectUpdateV2(BaseModel):
    """
    Request body for updating a V2 project; all fields are optional
    (omitted fields are left unchanged by the caller).
    """

    name: Optional[str]
    read_only: Optional[bool]
    # Validators
    _name = validator("name", allow_reuse=True)(valstr("name"))
@@ -0,0 +1,121 @@
1
+ from typing import Any
2
+ from typing import Literal
3
+ from typing import Optional
4
+
5
+ from pydantic import BaseModel
6
+ from pydantic import Field
7
+ from pydantic import HttpUrl
8
+ from pydantic import root_validator
9
+ from pydantic import validator
10
+
11
+ from .._validators import valstr
12
+
13
+
14
class TaskCreateV2(BaseModel):
    """
    Request body for creating a V2 task.

    At least one of `command_non_parallel` and `command_parallel` must be
    provided (enforced by `validate_commands`).
    """

    class Config:
        extra = "forbid"

    name: str

    command_non_parallel: Optional[str]
    command_parallel: Optional[str]
    source: str

    meta_parallel: Optional[dict[str, Any]]
    meta_non_parallel: Optional[dict[str, Any]]
    version: Optional[str]
    args_schema_non_parallel: Optional[dict[str, Any]]
    args_schema_parallel: Optional[dict[str, Any]]
    args_schema_version: Optional[str]
    docs_info: Optional[str]
    docs_link: Optional[HttpUrl]

    # `default_factory` for mutable defaults, consistently with the other V2
    # schemas (pydantic copies `default={}` per instance, so behavior is the
    # same; this is just the idiomatic spelling).
    input_types: dict[str, bool] = Field(default_factory=dict)
    output_types: dict[str, bool] = Field(default_factory=dict)

    # Validators
    @root_validator
    def validate_commands(cls, values):
        """Require at least one of the two task commands to be set."""
        command_parallel = values.get("command_parallel")
        command_non_parallel = values.get("command_non_parallel")
        if (command_parallel is None) and (command_non_parallel is None):
            raise ValueError(
                "Task must have at least one valid command "
                "(parallel and/or non_parallel)"
            )
        return values

    _name = validator("name", allow_reuse=True)(valstr("name"))
    _command_non_parallel = validator(
        "command_non_parallel", allow_reuse=True
    )(valstr("command_non_parallel"))
    _command_parallel = validator("command_parallel", allow_reuse=True)(
        valstr("command_parallel")
    )
    _source = validator("source", allow_reuse=True)(valstr("source"))
    _version = validator("version", allow_reuse=True)(valstr("version"))
    _args_schema_version = validator("args_schema_version", allow_reuse=True)(
        valstr("args_schema_version")
    )
60
+
61
+
62
class TaskReadV2(BaseModel):
    """
    Response model for a V2 task.
    """

    id: int
    name: str
    # NOTE(review): presumably derived from which of the two commands are
    # set ("compound" = both) — confirm against the task DB model.
    type: Literal["parallel", "non_parallel", "compound"]
    source: str
    owner: Optional[str]
    version: Optional[str]

    command_non_parallel: Optional[str]
    command_parallel: Optional[str]
    meta_parallel: dict[str, Any]
    meta_non_parallel: dict[str, Any]
    args_schema_non_parallel: Optional[dict[str, Any]] = None
    args_schema_parallel: Optional[dict[str, Any]] = None
    args_schema_version: Optional[str]
    docs_info: Optional[str]
    docs_link: Optional[HttpUrl]
    input_types: dict[str, bool]
    output_types: dict[str, bool]
82
+
83
+
84
class TaskUpdateV2(BaseModel):
    """
    Request body for updating a V2 task; all fields are optional.
    """

    name: Optional[str]
    version: Optional[str]
    command_parallel: Optional[str]
    command_non_parallel: Optional[str]
    input_types: Optional[dict[str, bool]]
    output_types: Optional[dict[str, bool]]

    # Validators
    @validator("input_types", "output_types")
    def val_is_dict(cls, v):
        """
        Reject non-dict values (including an explicit `null`) with an
        informative message rather than a bare `ValueError`.
        """
        if not isinstance(v, dict):
            raise ValueError(f"Expected a dictionary, but got {v!r}")
        return v

    _name = validator("name", allow_reuse=True)(valstr("name"))
    _version = validator("version", allow_reuse=True)(
        valstr("version", accept_none=True)
    )
    _command_parallel = validator("command_parallel", allow_reuse=True)(
        valstr("command_parallel")
    )
    _command_non_parallel = validator(
        "command_non_parallel", allow_reuse=True
    )(valstr("command_non_parallel"))
110
+
111
+
112
class TaskImportV2(BaseModel):
    """
    Reference to a task by its `source`, used when importing a workflow.
    """

    source: str
    _source = validator("source", allow_reuse=True)(valstr("source"))
116
+
117
+
118
class TaskExportV2(BaseModel):
    """
    Reference to a task by its `source`, used when exporting a workflow.
    """

    source: str
    _source = validator("source", allow_reuse=True)(valstr("source"))
@@ -0,0 +1,105 @@
1
+ from pathlib import Path
2
+ from typing import Literal
3
+ from typing import Optional
4
+
5
+ from pydantic import BaseModel
6
+ from pydantic import Field
7
+ from pydantic import validator
8
+
9
+ from .._validators import valstr
10
+ from .task import TaskReadV2
11
+
12
+
13
class TaskCollectPipV2(BaseModel):
    """
    TaskCollectPipV2 class

    This class only encodes the attributes required to trigger a
    task-collection operation. Other attributes (that are assigned *during*
    task collection) are defined as part of fractal-server.

    Two cases are supported:

    1. `package` is the path of a local wheel file;
    2. `package` is the name of a package that can be installed via `pip`.


    Attributes:
        package:
            The name of a `pip`-installable package, or the path to a local
            wheel file.
        package_version: Version of the package
        package_extras: Package extras to include in the `pip install` command
        python_version: Python version to install and run the package tasks
        pinned_package_versions:
            dictionary 'package':'version' used to pin versions for specific
            packages.

    """

    package: str
    package_version: Optional[str] = None
    package_extras: Optional[str] = None
    python_version: Optional[str] = None
    pinned_package_versions: Optional[dict[str, str]] = None

    _package_extras = validator("package_extras", allow_reuse=True)(
        valstr("package_extras")
    )
    _python_version = validator("python_version", allow_reuse=True)(
        valstr("python_version")
    )

    @validator("package")
    def package_validator(cls, value):
        """A local package path must be an absolute path to a wheel file."""
        if "/" in value:
            if not value.endswith(".whl"):
                raise ValueError(
                    "Local-package path must be a wheel file "
                    f"(given {value})."
                )
            if not Path(value).is_absolute():
                raise ValueError(
                    f"Local-package path must be absolute: (given {value})."
                )
        return value

    @validator("package_version")
    def package_version_validator(cls, v, values):
        """Forbid `package_version` when `package` is a local wheel file."""
        valstr("package_version")(v)

        # `package` is missing from `values` if its own validation already
        # failed; skip the cross-field check in that case instead of raising
        # an unrelated KeyError.
        package = values.get("package")
        if package is not None and package.endswith(".whl"):
            raise ValueError(
                "Cannot provide version when package is a Wheel file."
            )
        return v
77
+
78
+
79
class TaskCollectStatusV2(BaseModel):
    """
    TaskCollectStatus class

    Attributes:
        status: Current phase of the task-collection operation.
        package: The package being collected (name or wheel path).
        venv_path: Path of the environment used for the collection.
        task_list: Tasks collected so far (defaults to an empty list).
        log: Collection log, if available.
        info: Additional information, if available.
    """

    status: Literal["pending", "installing", "collecting", "fail", "OK"]
    package: str
    venv_path: Path
    task_list: Optional[list[TaskReadV2]] = Field(default=[])
    log: Optional[str]
    info: Optional[str]

    def sanitised_dict(self) -> dict:
        """
        Return `self.dict()` after casting `self.venv_path` to a string
        """
        # `Path` is not JSON-serializable, hence the explicit cast.
        d = self.dict()
        d["venv_path"] = str(self.venv_path)
        return d
@@ -0,0 +1,78 @@
1
+ from datetime import datetime
2
+ from typing import Optional
3
+
4
+ from pydantic import BaseModel
5
+ from pydantic import validator
6
+
7
+ from .._validators import valstr
8
+ from .._validators import valutc
9
+ from ..v1.project import ProjectReadV1
10
+ from .workflowtask import WorkflowTaskExportV2
11
+ from .workflowtask import WorkflowTaskImportV2
12
+ from .workflowtask import WorkflowTaskReadV2
13
+
14
+
15
class WorkflowCreateV2(BaseModel):
    """
    Request body for creating a V2 workflow.
    """

    name: str

    # Validators
    _name = validator("name", allow_reuse=True)(valstr("name"))
21
+
22
+
23
class WorkflowReadV2(BaseModel):
    """
    Response model for a V2 workflow, including its task list and project.
    """

    id: int
    name: str
    project_id: int
    task_list: list[WorkflowTaskReadV2]
    # NOTE(review): a V2 schema exposing the project as `ProjectReadV1` —
    # confirm this is intentional and not a leftover from the V1 schema.
    project: ProjectReadV1
    timestamp_created: datetime

    _timestamp_created = validator("timestamp_created", allow_reuse=True)(
        valutc("timestamp_created")
    )
35
+
36
+
37
class WorkflowUpdateV2(BaseModel):
    """
    Request body for updating a V2 workflow; all fields are optional.

    Attributes:
        name: New workflow name.
        reordered_workflowtask_ids:
            Full list of workflow-task IDs in their new order.
    """

    name: Optional[str]
    reordered_workflowtask_ids: Optional[list[int]]

    # Validators
    _name = validator("name", allow_reuse=True)(valstr("name"))

    @validator("reordered_workflowtask_ids")
    def check_positive_and_unique(cls, value):
        """Require IDs to be non-negative and unique."""
        if value is None:
            # The field is optional: an explicit `null` means "no reordering"
            # and must not make the iteration below raise a TypeError.
            return value
        if any(i < 0 for i in value):
            raise ValueError("Negative `id` in `reordered_workflowtask_ids`")
        if len(value) != len(set(value)):
            raise ValueError("`reordered_workflowtask_ids` has repetitions")
        return value
52
+
53
+
54
class WorkflowImportV2(BaseModel):
    """
    Class for `Workflow` import.

    Attributes:
        name: Workflow name.
        task_list: Workflow tasks, referencing tasks by their `source`.
    """

    name: str
    task_list: list[WorkflowTaskImportV2]

    # Validators
    _name = validator("name", allow_reuse=True)(valstr("name"))
67
+
68
+
69
class WorkflowExportV2(BaseModel):
    """
    Class for `Workflow` export.

    Attributes:
        name: Workflow name.
        task_list: Workflow tasks, referencing tasks by their `source`.
    """

    name: str
    task_list: list[WorkflowTaskExportV2]
@@ -0,0 +1,118 @@
1
+ from enum import Enum
2
+ from typing import Any
3
+ from typing import Optional
4
+
5
+ from pydantic import BaseModel
6
+ from pydantic import Field
7
+ from pydantic import validator
8
+
9
+ from .._validators import valint
10
+ from ..v1.task import TaskExportV1
11
+ from ..v1.task import TaskImportV1
12
+ from ..v1.task import TaskReadV1
13
+ from .task import TaskExportV2
14
+ from .task import TaskImportV2
15
+ from .task import TaskReadV2
16
+ from fractal_server.images import Filters
17
+
18
+
19
class WorkflowTaskStatusTypeV2(str, Enum):
    """
    Define the available values for the status of a `WorkflowTask`.

    This model is used within the `Dataset.history` attribute, which is
    constructed in the runner and then used in the API (e.g. in the
    `api/v2/project/{project_id}/dataset/{dataset_id}/status` endpoint).

    Attributes:
        SUBMITTED: The `WorkflowTask` is part of a running job.
        DONE: The most-recent execution of this `WorkflowTask` was successful.
        FAILED: The most-recent execution of this `WorkflowTask` failed.
    """

    SUBMITTED = "submitted"
    DONE = "done"
    FAILED = "failed"
36
+
37
+
38
class WorkflowTaskCreateV2(BaseModel):
    """
    Request body for inserting a task into a V2 workflow.
    """

    meta_parallel: Optional[dict[str, Any]]
    meta_non_parallel: Optional[dict[str, Any]]
    args_non_parallel: Optional[dict[str, Any]]
    args_parallel: Optional[dict[str, Any]]
    # Position in the workflow task list; must be non-negative (see `valint`).
    order: Optional[int]
    input_filters: Filters = Field(default_factory=Filters)

    # Whether this entry wraps a legacy (V1) task.
    is_legacy_task: bool = False

    # Validators

    _order = validator("order", allow_reuse=True)(valint("order", min_val=0))
    # FIXME validate: if `is_legacy_task`, `args_non_parallel` must be None
53
+
54
+
55
class WorkflowTaskReadV2(BaseModel):
    """
    Response model for a V2 workflow task.

    Exactly which of the `task*` / `task_legacy*` attributes are populated
    depends on `is_legacy_task` (V2 task vs legacy V1 task).
    """

    id: int

    workflow_id: int
    order: Optional[int]
    meta_parallel: Optional[dict[str, Any]]
    meta_non_parallel: Optional[dict[str, Any]]

    args_non_parallel: Optional[dict[str, Any]]
    args_parallel: Optional[dict[str, Any]]

    input_filters: Filters

    is_legacy_task: bool
    task_type: str
    task_id: Optional[int]
    task: Optional[TaskReadV2]
    task_legacy_id: Optional[int]
    task_legacy: Optional[TaskReadV1]
75
+
76
+
77
class WorkflowTaskUpdateV2(BaseModel):
    """
    Request body for updating a V2 workflow task; all fields are optional.
    """

    meta_parallel: Optional[dict[str, Any]]
    meta_non_parallel: Optional[dict[str, Any]]
    args_non_parallel: Optional[dict[str, Any]]
    args_parallel: Optional[dict[str, Any]]
    input_filters: Optional[Filters]

    # Validators

    @validator("meta_parallel", "meta_non_parallel")
    def check_no_parallelisation_level(cls, m):
        """Forbid overriding the task parallelization level via `meta`."""
        if m is None:
            # The fields are optional: an explicit `null` must not make the
            # membership test below raise a TypeError.
            return m
        if "parallelization_level" in m:
            raise ValueError(
                "Overriding task parallelization level currently not allowed"
            )
        return m
94
+
95
+
96
class WorkflowTaskImportV2(BaseModel):
    """
    Schema for a workflow task within a workflow import, referencing the
    underlying task (V2 or legacy V1) by its `source`.
    """

    meta_parallel: Optional[dict[str, Any]] = None
    meta_non_parallel: Optional[dict[str, Any]] = None
    args: Optional[dict[str, Any]] = None  # FIXME

    input_filters: Optional[Filters] = None

    is_legacy_task: bool = False
    task: Optional[TaskImportV2] = None
    task_legacy: Optional[TaskImportV1] = None
107
+
108
+
109
class WorkflowTaskExportV2(BaseModel):
    """
    Schema for a workflow task within a workflow export, referencing the
    underlying task (V2 or legacy V1) by its `source`.
    """

    meta_parallel: Optional[dict[str, Any]] = None
    meta_non_parallel: Optional[dict[str, Any]] = None
    args: Optional[dict[str, Any]] = None  # FIXME
    input_filters: Filters = Field(default_factory=Filters)

    is_legacy_task: bool = False
    task: Optional[TaskExportV2]
    task_legacy: Optional[TaskExportV1]
fractal_server/config.py CHANGED
@@ -390,6 +390,11 @@ class Settings(BaseSettings):
390
390
 
391
391
  info = f"FRACTAL_RUNNER_BACKEND={self.FRACTAL_RUNNER_BACKEND}"
392
392
  if self.FRACTAL_RUNNER_BACKEND == "slurm":
393
+
394
+ from fractal_server.app.runner.executors.slurm._slurm_config import ( # noqa: E501
395
+ load_slurm_config_file,
396
+ )
397
+
393
398
  if not self.FRACTAL_SLURM_CONFIG_FILE:
394
399
  raise FractalConfigurationError(
395
400
  f"Must set FRACTAL_SLURM_CONFIG_FILE when {info}"
@@ -401,10 +406,6 @@ class Settings(BaseSettings):
401
406
  f"{self.FRACTAL_SLURM_CONFIG_FILE} not found."
402
407
  )
403
408
 
404
- from fractal_server.app.runner._slurm._slurm_config import (
405
- load_slurm_config_file,
406
- )
407
-
408
409
  load_slurm_config_file(self.FRACTAL_SLURM_CONFIG_FILE)
409
410
  if not shutil.which("sbatch"):
410
411
  raise FractalConfigurationError(
@@ -0,0 +1,50 @@
1
+ from typing import Any
2
+ from typing import Optional
3
+ from typing import Union
4
+
5
+ from pydantic import BaseModel
6
+ from pydantic import Field
7
+ from pydantic import validator
8
+
9
+
10
class SingleImage(BaseModel):
    """
    A single image entry: a path (plus optional origin), with scalar
    attributes and boolean types.
    """

    path: str
    origin: Optional[str] = None

    attributes: dict[str, Any] = Field(default_factory=dict)
    types: dict[str, bool] = Field(default_factory=dict)

    @validator("attributes")
    def validate_attributes(
        cls, v: dict[str, Any]
    ) -> dict[str, Union[int, float, str, bool]]:
        # Every attribute value must be a plain scalar; fail on the first
        # offending entry.
        scalar = (int, float, str, bool)
        for key, value in v.items():
            if isinstance(value, scalar):
                continue
            raise ValueError(
                f"SingleImage.attributes[{key}] must be a scalar "
                f"(int, float, str or bool). Given {value} ({type(value)})"
            )
        return v
29
+
30
+
31
class Filters(BaseModel):
    """
    Attribute and type filters; attribute values must be scalars or None.
    """

    attributes: dict[str, Any] = Field(default_factory=dict)
    types: dict[str, bool] = Field(default_factory=dict)

    class Config:
        extra = "forbid"

    # Validators
    @validator("attributes")
    def validate_attributes(
        cls, v: dict[str, Any]
    ) -> dict[str, Union[int, float, str, bool, None]]:
        # Unlike SingleImage.attributes, `None` is also allowed here.
        allowed = (int, float, str, bool, type(None))
        for key, value in v.items():
            if isinstance(value, allowed):
                continue
            raise ValueError(
                f"Filters.attributes[{key}] must be a scalar "
                "(int, float, str, bool, or None). "
                f"Given {value} ({type(value)})"
            )
        return v