fractal-server 1.4.10__py3-none-any.whl → 2.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +3 -7
  3. fractal_server/app/models/linkuserproject.py +9 -0
  4. fractal_server/app/models/security.py +6 -0
  5. fractal_server/app/models/state.py +1 -1
  6. fractal_server/app/models/v1/__init__.py +11 -0
  7. fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
  8. fractal_server/app/models/{job.py → v1/job.py} +5 -5
  9. fractal_server/app/models/{project.py → v1/project.py} +5 -5
  10. fractal_server/app/models/{task.py → v1/task.py} +7 -2
  11. fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
  12. fractal_server/app/models/v2/__init__.py +20 -0
  13. fractal_server/app/models/v2/dataset.py +55 -0
  14. fractal_server/app/models/v2/job.py +51 -0
  15. fractal_server/app/models/v2/project.py +31 -0
  16. fractal_server/app/models/v2/task.py +93 -0
  17. fractal_server/app/models/v2/workflow.py +43 -0
  18. fractal_server/app/models/v2/workflowtask.py +90 -0
  19. fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
  20. fractal_server/app/routes/admin/v2.py +274 -0
  21. fractal_server/app/routes/api/v1/__init__.py +7 -7
  22. fractal_server/app/routes/api/v1/_aux_functions.py +2 -2
  23. fractal_server/app/routes/api/v1/dataset.py +37 -37
  24. fractal_server/app/routes/api/v1/job.py +14 -14
  25. fractal_server/app/routes/api/v1/project.py +23 -21
  26. fractal_server/app/routes/api/v1/task.py +24 -14
  27. fractal_server/app/routes/api/v1/task_collection.py +16 -14
  28. fractal_server/app/routes/api/v1/workflow.py +24 -24
  29. fractal_server/app/routes/api/v1/workflowtask.py +10 -10
  30. fractal_server/app/routes/api/v2/__init__.py +28 -0
  31. fractal_server/app/routes/api/v2/_aux_functions.py +497 -0
  32. fractal_server/app/routes/api/v2/dataset.py +309 -0
  33. fractal_server/app/routes/api/v2/images.py +207 -0
  34. fractal_server/app/routes/api/v2/job.py +200 -0
  35. fractal_server/app/routes/api/v2/project.py +202 -0
  36. fractal_server/app/routes/api/v2/submit.py +220 -0
  37. fractal_server/app/routes/api/v2/task.py +222 -0
  38. fractal_server/app/routes/api/v2/task_collection.py +229 -0
  39. fractal_server/app/routes/api/v2/workflow.py +397 -0
  40. fractal_server/app/routes/api/v2/workflowtask.py +269 -0
  41. fractal_server/app/routes/aux/_job.py +1 -1
  42. fractal_server/app/runner/async_wrap.py +27 -0
  43. fractal_server/app/runner/components.py +5 -0
  44. fractal_server/app/runner/exceptions.py +129 -0
  45. fractal_server/app/runner/executors/slurm/__init__.py +3 -0
  46. fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
  47. fractal_server/app/runner/{_slurm → executors/slurm}/_check_jobs_status.py +1 -1
  48. fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +1 -1
  49. fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
  50. fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
  51. fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +32 -19
  52. fractal_server/app/runner/filenames.py +6 -0
  53. fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
  54. fractal_server/app/runner/task_files.py +103 -0
  55. fractal_server/app/runner/{__init__.py → v1/__init__.py} +22 -20
  56. fractal_server/app/runner/{_common.py → v1/_common.py} +13 -120
  57. fractal_server/app/runner/{_local → v1/_local}/__init__.py +5 -5
  58. fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
  59. fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
  60. fractal_server/app/runner/v1/_slurm/__init__.py +310 -0
  61. fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +3 -9
  62. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
  63. fractal_server/app/runner/v1/common.py +117 -0
  64. fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
  65. fractal_server/app/runner/v2/__init__.py +336 -0
  66. fractal_server/app/runner/v2/_local/__init__.py +167 -0
  67. fractal_server/app/runner/v2/_local/_local_config.py +118 -0
  68. fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
  69. fractal_server/app/runner/v2/_local/executor.py +100 -0
  70. fractal_server/app/runner/{_slurm → v2/_slurm}/__init__.py +34 -45
  71. fractal_server/app/runner/v2/_slurm/_submit_setup.py +83 -0
  72. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +179 -0
  73. fractal_server/app/runner/v2/deduplicate_list.py +22 -0
  74. fractal_server/app/runner/v2/handle_failed_job.py +156 -0
  75. fractal_server/app/runner/v2/merge_outputs.py +38 -0
  76. fractal_server/app/runner/v2/runner.py +267 -0
  77. fractal_server/app/runner/v2/runner_functions.py +341 -0
  78. fractal_server/app/runner/v2/runner_functions_low_level.py +134 -0
  79. fractal_server/app/runner/v2/task_interface.py +43 -0
  80. fractal_server/app/runner/v2/v1_compat.py +21 -0
  81. fractal_server/app/schemas/__init__.py +4 -42
  82. fractal_server/app/schemas/v1/__init__.py +42 -0
  83. fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
  84. fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
  85. fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
  86. fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
  87. fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
  88. fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
  89. fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
  90. fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
  91. fractal_server/app/schemas/v2/__init__.py +34 -0
  92. fractal_server/app/schemas/v2/dataset.py +89 -0
  93. fractal_server/app/schemas/v2/dumps.py +87 -0
  94. fractal_server/app/schemas/v2/job.py +114 -0
  95. fractal_server/app/schemas/v2/manifest.py +159 -0
  96. fractal_server/app/schemas/v2/project.py +37 -0
  97. fractal_server/app/schemas/v2/task.py +120 -0
  98. fractal_server/app/schemas/v2/task_collection.py +105 -0
  99. fractal_server/app/schemas/v2/workflow.py +79 -0
  100. fractal_server/app/schemas/v2/workflowtask.py +119 -0
  101. fractal_server/config.py +5 -4
  102. fractal_server/images/__init__.py +2 -0
  103. fractal_server/images/models.py +50 -0
  104. fractal_server/images/tools.py +85 -0
  105. fractal_server/main.py +11 -3
  106. fractal_server/migrations/env.py +0 -2
  107. fractal_server/migrations/versions/d71e732236cd_v2.py +239 -0
  108. fractal_server/tasks/__init__.py +0 -5
  109. fractal_server/tasks/endpoint_operations.py +13 -19
  110. fractal_server/tasks/utils.py +35 -0
  111. fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
  112. fractal_server/tasks/{background_operations.py → v1/background_operations.py} +18 -50
  113. fractal_server/tasks/v1/get_collection_data.py +14 -0
  114. fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
  115. fractal_server/tasks/v2/background_operations.py +381 -0
  116. fractal_server/tasks/v2/get_collection_data.py +14 -0
  117. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/METADATA +1 -1
  118. fractal_server-2.0.0a1.dist-info/RECORD +160 -0
  119. fractal_server/app/runner/_slurm/.gitignore +0 -2
  120. fractal_server/app/runner/common.py +0 -311
  121. fractal_server-1.4.10.dist-info/RECORD +0 -98
  122. /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
  123. /fractal_server/app/runner/{_local → v1/_local}/executor.py +0 -0
  124. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/LICENSE +0 -0
  125. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/WHEEL +0 -0
  126. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/entry_points.txt +0 -0
fractal_server/app/schemas/v2/manifest.py ADDED
@@ -0,0 +1,159 @@
+ from typing import Any
+ from typing import Optional
+
+ from pydantic import BaseModel
+ from pydantic import Field
+ from pydantic import HttpUrl
+ from pydantic import root_validator
+ from pydantic import validator
+
+
+ class TaskManifestV2(BaseModel):
+     """
+     Represents a task within a V2 manifest.
+
+     Attributes:
+         name:
+             The task name
+         executable:
+             Path to the executable relative to the package root
+
+             Note: by package root we mean "as it will be installed". If a
+             package `Pkg` installs in the folder `pkg` the executable
+             `pkg/executable.py`, this attribute must contain only
+             `executable.py`.
+         input_type:
+             The input type accepted by the task
+         output_type:
+             The output type returned by the task
+         meta:
+             Additional information about the package, such as hash of the
+             executable, specific runtime requirements (e.g., need_gpu=True),
+             etc.
+         args_schema:
+             JSON Schema for task arguments
+         docs_info:
+             Additional information about the Task, coming from the docstring.
+         docs_link:
+             Link to Task docs.
+     """
+
+     name: str
+     executable_non_parallel: Optional[str] = None
+     executable_parallel: Optional[str] = None
+     input_types: dict[str, bool] = Field(default_factory=dict)
+     output_types: dict[str, bool] = Field(default_factory=dict)
+     meta_non_parallel: dict[str, Any] = Field(default_factory=dict)
+     meta_parallel: dict[str, Any] = Field(default_factory=dict)
+     args_schema_non_parallel: Optional[dict[str, Any]] = None
+     args_schema_parallel: Optional[dict[str, Any]] = None
+     docs_info: Optional[str] = None
+     docs_link: Optional[HttpUrl] = None
+
+     @root_validator
+     def validate_executable_args_meta(cls, values):
+
+         executable_non_parallel = values.get("executable_non_parallel")
+         executable_parallel = values.get("executable_parallel")
+         if (executable_non_parallel is None) and (executable_parallel is None):
+
+             raise ValueError(
+                 "`TaskManifestV2.executable_non_parallel` and "
+                 "`TaskManifestV2.executable_parallel` cannot be both None."
+             )
+
+         elif executable_non_parallel is None:
+
+             meta_non_parallel = values.get("meta_non_parallel")
+             if meta_non_parallel != {}:
+                 raise ValueError(
+                     "`TaskManifestV2.meta_non_parallel` must be an empty dict "
+                     "if `TaskManifestV2.executable_non_parallel` is None. "
+                     f"Given: {meta_non_parallel}."
+                 )
+
+             args_schema_non_parallel = values.get("args_schema_non_parallel")
+             if args_schema_non_parallel is not None:
+                 raise ValueError(
+                     "`TaskManifestV2.args_schema_non_parallel` must be None "
+                     "if `TaskManifestV2.executable_non_parallel` is None. "
+                     f"Given: {args_schema_non_parallel}."
+                 )
+
+         elif executable_parallel is None:
+
+             meta_parallel = values.get("meta_parallel")
+             if meta_parallel != {}:
+                 raise ValueError(
+                     "`TaskManifestV2.meta_parallel` must be an empty dict if "
+                     "`TaskManifestV2.executable_parallel` is None. "
+                     f"Given: {meta_parallel}."
+                 )
+
+             args_schema_parallel = values.get("args_schema_parallel")
+             if args_schema_parallel is not None:
+                 raise ValueError(
+                     "`TaskManifestV2.args_schema_parallel` must be None if "
+                     "`TaskManifestV2.executable_parallel` is None. "
+                     f"Given: {args_schema_parallel}."
+                 )
+
+         return values
+
+
+ class ManifestV2(BaseModel):
+     """
+     Packages containing tasks are required to include a special file
+     `__FRACTAL_MANIFEST__.json` in order to be discovered and used by Fractal.
+
+     This model class and the model classes it depends on provide the base
+     schema to read, write and validate manifests.
+
+     Attributes:
+         manifest_version:
+             A version string that provides indication for compatibility between
+             manifests as the schema evolves. This is for instance used by
+             Fractal to determine which subclass of the present base class needs
+             be used to read and validate the input.
+         task_list : list[TaskManifestType]
+             The list of tasks, represented as specified by subclasses of the
+             _TaskManifestBase (a.k.a. TaskManifestType)
+         has_args_schemas:
+             `True` if the manifest includes JSON Schemas for the arguments of
+             each task.
+         args_schema_version:
+             Label of how `args_schema`s were generated (e.g. `pydantic_v1`).
+     """
+
+     manifest_version: str
+     task_list: list[TaskManifestV2]
+     has_args_schemas: bool = False
+     args_schema_version: Optional[str]
+
+     @root_validator()
+     def _check_args_schemas_are_present(cls, values):
+         has_args_schemas = values["has_args_schemas"]
+         task_list = values["task_list"]
+         if has_args_schemas is True:
+             for task in task_list:
+                 if task.executable_parallel is not None:
+                     if task.args_schema_parallel is None:
+                         raise ValueError(
+                             f"Manifest has {has_args_schemas=}, but "
+                             f"task '{task.name}' has "
+                             f"{task.args_schema_parallel=}."
+                         )
+                 if task.executable_non_parallel is not None:
+                     if task.args_schema_non_parallel is None:
+                         raise ValueError(
+                             f"Manifest has {has_args_schemas=}, but "
+                             f"task '{task.name}' has "
+                             f"{task.args_schema_non_parallel=}."
+                         )
+         return values
+
+     @validator("manifest_version")
+     def manifest_version_2(cls, value):
+         if value != "2":
+             raise ValueError(f"Wrong manifest version (given {value})")
+         return value
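
For reference, a minimal usage sketch of the manifest models above, assuming the module path fractal_server/app/schemas/v2/manifest.py from the file list; the task names and script names are made up:

from fractal_server.app.schemas.v2.manifest import ManifestV2

manifest = ManifestV2(
    manifest_version="2",    # any other value is rejected by manifest_version_2
    has_args_schemas=False,  # so the args_schema_* attributes may be omitted
    task_list=[
        # hypothetical tasks; each one needs at least one executable
        dict(name="create_zarr", executable_non_parallel="create_zarr.py"),
        dict(name="illumination_correction", executable_parallel="illum_corr.py"),
    ],
)
print(manifest.task_list[1].executable_non_parallel)  # None

With has_args_schemas=True, the same input would be rejected, since neither task provides the matching args_schema_* field.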
fractal_server/app/schemas/v2/project.py ADDED
@@ -0,0 +1,37 @@
+ from datetime import datetime
+ from typing import Optional
+
+ from pydantic import BaseModel
+ from pydantic import Extra
+ from pydantic import validator
+
+ from .._validators import valstr
+ from .._validators import valutc
+
+
+ class ProjectCreateV2(BaseModel, extra=Extra.forbid):
+
+     name: str
+     read_only: bool = False
+     # Validators
+     _name = validator("name", allow_reuse=True)(valstr("name"))
+
+
+ class ProjectReadV2(BaseModel):
+
+     id: int
+     name: str
+     read_only: bool
+     timestamp_created: datetime
+     # Validators
+     _timestamp_created = validator("timestamp_created", allow_reuse=True)(
+         valutc("timestamp_created")
+     )
+
+
+ class ProjectUpdateV2(BaseModel):
+
+     name: Optional[str]
+     read_only: Optional[bool]
+     # Validators
+     _name = validator("name", allow_reuse=True)(valstr("name"))
fractal_server/app/schemas/v2/task.py ADDED
@@ -0,0 +1,120 @@
+ from typing import Any
+ from typing import Literal
+ from typing import Optional
+
+ from pydantic import BaseModel
+ from pydantic import Extra
+ from pydantic import Field
+ from pydantic import HttpUrl
+ from pydantic import root_validator
+ from pydantic import validator
+
+ from .._validators import valstr
+
+
+ class TaskCreateV2(BaseModel, extra=Extra.forbid):
+
+     name: str
+
+     command_non_parallel: Optional[str]
+     command_parallel: Optional[str]
+     source: str
+
+     meta_parallel: Optional[dict[str, Any]]
+     meta_non_parallel: Optional[dict[str, Any]]
+     version: Optional[str]
+     args_schema_non_parallel: Optional[dict[str, Any]]
+     args_schema_parallel: Optional[dict[str, Any]]
+     args_schema_version: Optional[str]
+     docs_info: Optional[str]
+     docs_link: Optional[HttpUrl]
+
+     input_types: dict[str, bool] = Field(default={})
+     output_types: dict[str, bool] = Field(default={})
+
+     # Validators
+     @root_validator
+     def validate_commands(cls, values):
+         command_parallel = values.get("command_parallel")
+         command_non_parallel = values.get("command_non_parallel")
+         if (command_parallel is None) and (command_non_parallel is None):
+             raise ValueError(
+                 "Task must have at least one valid command "
+                 "(parallel and/or non_parallel)"
+             )
+         return values
+
+     _name = validator("name", allow_reuse=True)(valstr("name"))
+     _command_non_parallel = validator(
+         "command_non_parallel", allow_reuse=True
+     )(valstr("command_non_parallel"))
+     _command_parallel = validator("command_parallel", allow_reuse=True)(
+         valstr("command_parallel")
+     )
+     _source = validator("source", allow_reuse=True)(valstr("source"))
+     _version = validator("version", allow_reuse=True)(valstr("version"))
+     _args_schema_version = validator("args_schema_version", allow_reuse=True)(
+         valstr("args_schema_version")
+     )
+
+
+ class TaskReadV2(BaseModel):
+
+     id: int
+     name: str
+     type: Literal["parallel", "non_parallel", "compound"]
+     source: str
+     owner: Optional[str]
+     version: Optional[str]
+
+     command_non_parallel: Optional[str]
+     command_parallel: Optional[str]
+     meta_parallel: dict[str, Any]
+     meta_non_parallel: dict[str, Any]
+     args_schema_non_parallel: Optional[dict[str, Any]] = None
+     args_schema_parallel: Optional[dict[str, Any]] = None
+     args_schema_version: Optional[str]
+     docs_info: Optional[str]
+     docs_link: Optional[HttpUrl]
+     input_types: dict[str, bool]
+     output_types: dict[str, bool]
+
+
+ class TaskUpdateV2(BaseModel):
+
+     name: Optional[str]
+     version: Optional[str]
+     command_parallel: Optional[str]
+     command_non_parallel: Optional[str]
+     input_types: Optional[dict[str, bool]]
+     output_types: Optional[dict[str, bool]]
+
+     # Validators
+     @validator("input_types", "output_types")
+     def val_is_dict(cls, v):
+         if not isinstance(v, dict):
+             raise ValueError
+         return v
+
+     _name = validator("name", allow_reuse=True)(valstr("name"))
+     _version = validator("version", allow_reuse=True)(
+         valstr("version", accept_none=True)
+     )
+     _command_parallel = validator("command_parallel", allow_reuse=True)(
+         valstr("command_parallel")
+     )
+     _command_non_parallel = validator(
+         "command_non_parallel", allow_reuse=True
+     )(valstr("command_non_parallel"))
+
+
+ class TaskImportV2(BaseModel):
+
+     source: str
+     _source = validator("source", allow_reuse=True)(valstr("source"))
+
+
+ class TaskExportV2(BaseModel):
+
+     source: str
+     _source = validator("source", allow_reuse=True)(valstr("source"))
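
A short sketch of the command constraint enforced by TaskCreateV2.validate_commands, assuming the module path from the file list; the name and source values are made up:

from pydantic import ValidationError
from fractal_server.app.schemas.v2.task import TaskCreateV2

# valid: a parallel command only
TaskCreateV2(
    name="my_task",
    source="pip_remote:my_pkg:0.1.0:my_task",  # hypothetical source string
    command_parallel="python3 my_task.py",
)

# invalid: neither command_parallel nor command_non_parallel is given
try:
    TaskCreateV2(name="my_task", source="some_source")
except ValidationError as e:
    print(e)  # "Task must have at least one valid command (parallel and/or non_parallel)"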
fractal_server/app/schemas/v2/task_collection.py ADDED
@@ -0,0 +1,105 @@
+ from pathlib import Path
+ from typing import Literal
+ from typing import Optional
+
+ from pydantic import BaseModel
+ from pydantic import Field
+ from pydantic import validator
+
+ from .._validators import valstr
+ from .task import TaskReadV2
+
+
+ class TaskCollectPipV2(BaseModel):
+     """
+     TaskCollectPipV2 class
+
+     This class only encodes the attributes required to trigger a
+     task-collection operation. Other attributes (that are assigned *during*
+     task collection) are defined as part of fractal-server.
+
+     Two cases are supported:
+
+         1. `package` is the path of a local wheel file;
+         2. `package` is the name of a package that can be installed via `pip`.
+
+
+     Attributes:
+         package:
+             The name of a `pip`-installable package, or the path to a local
+             wheel file.
+         package_version: Version of the package
+         package_extras: Package extras to include in the `pip install` command
+         python_version: Python version to install and run the package tasks
+         pinned_package_versions:
+             dictionary 'package':'version' used to pin versions for specific
+             packages.
+
+     """
+
+     package: str
+     package_version: Optional[str] = None
+     package_extras: Optional[str] = None
+     python_version: Optional[str] = None
+     pinned_package_versions: Optional[dict[str, str]] = None
+
+     _package_extras = validator("package_extras", allow_reuse=True)(
+         valstr("package_extras")
+     )
+     _python_version = validator("python_version", allow_reuse=True)(
+         valstr("python_version")
+     )
+
+     @validator("package")
+     def package_validator(cls, value):
+         if "/" in value:
+             if not value.endswith(".whl"):
+                 raise ValueError(
+                     "Local-package path must be a wheel file "
+                     f"(given {value})."
+                 )
+             if not Path(value).is_absolute():
+                 raise ValueError(
+                     f"Local-package path must be absolute: (given {value})."
+                 )
+         return value
+
+     @validator("package_version")
+     def package_version_validator(cls, v, values):
+
+         valstr("package_version")(v)
+
+         if values["package"].endswith(".whl"):
+             raise ValueError(
+                 "Cannot provide version when package is a Wheel file."
+             )
+         return v
+
+
+ class TaskCollectStatusV2(BaseModel):
+     """
+     TaskCollectStatus class
+
+     Attributes:
+         status:
+         package:
+         venv_path:
+         task_list:
+         log:
+         info:
+     """
+
+     status: Literal["pending", "installing", "collecting", "fail", "OK"]
+     package: str
+     venv_path: Path
+     task_list: Optional[list[TaskReadV2]] = Field(default=[])
+     log: Optional[str]
+     info: Optional[str]
+
+     def sanitised_dict(self):
+         """
+         Return `self.dict()` after casting `self.venv_path` to a string
+         """
+         d = self.dict()
+         d["venv_path"] = str(self.venv_path)
+         return d
1
+ from datetime import datetime
2
+ from typing import Optional
3
+
4
+ from pydantic import BaseModel
5
+ from pydantic import Extra
6
+ from pydantic import validator
7
+
8
+ from .._validators import valstr
9
+ from .._validators import valutc
10
+ from ..v1.project import ProjectReadV1
11
+ from .workflowtask import WorkflowTaskExportV2
12
+ from .workflowtask import WorkflowTaskImportV2
13
+ from .workflowtask import WorkflowTaskReadV2
14
+
15
+
16
+ class WorkflowCreateV2(BaseModel, extra=Extra.forbid):
17
+
18
+ name: str
19
+
20
+ # Validators
21
+ _name = validator("name", allow_reuse=True)(valstr("name"))
22
+
23
+
24
+ class WorkflowReadV2(BaseModel):
25
+
26
+ id: int
27
+ name: str
28
+ project_id: int
29
+ task_list: list[WorkflowTaskReadV2]
30
+ project: ProjectReadV1
31
+ timestamp_created: datetime
32
+
33
+ _timestamp_created = validator("timestamp_created", allow_reuse=True)(
34
+ valutc("timestamp_created")
35
+ )
36
+
37
+
38
+ class WorkflowUpdateV2(BaseModel):
39
+
40
+ name: Optional[str]
41
+ reordered_workflowtask_ids: Optional[list[int]]
42
+
43
+ # Validators
44
+ _name = validator("name", allow_reuse=True)(valstr("name"))
45
+
46
+ @validator("reordered_workflowtask_ids")
47
+ def check_positive_and_unique(cls, value):
48
+ if any(i < 0 for i in value):
49
+ raise ValueError("Negative `id` in `reordered_workflowtask_ids`")
50
+ if len(value) != len(set(value)):
51
+ raise ValueError("`reordered_workflowtask_ids` has repetitions")
52
+ return value
53
+
54
+
55
+ class WorkflowImportV2(BaseModel):
56
+ """
57
+ Class for `Workflow` import.
58
+
59
+ Attributes:
60
+ task_list:
61
+ """
62
+
63
+ name: str
64
+ task_list: list[WorkflowTaskImportV2]
65
+
66
+ # Validators
67
+ _name = validator("name", allow_reuse=True)(valstr("name"))
68
+
69
+
70
+ class WorkflowExportV2(BaseModel):
71
+ """
72
+ Class for `Workflow` export.
73
+
74
+ Attributes:
75
+ task_list:
76
+ """
77
+
78
+ name: str
79
+ task_list: list[WorkflowTaskExportV2]
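
A sketch of the reordering check in WorkflowUpdateV2 above (import path assumed from the file list):

from pydantic import ValidationError
from fractal_server.app.schemas.v2.workflow import WorkflowUpdateV2

WorkflowUpdateV2(reordered_workflowtask_ids=[3, 1, 2])  # accepted

try:
    WorkflowUpdateV2(reordered_workflowtask_ids=[1, 1, 2])
except ValidationError as e:
    print(e)  # "`reordered_workflowtask_ids` has repetitions"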
fractal_server/app/schemas/v2/workflowtask.py ADDED
@@ -0,0 +1,119 @@
+ from enum import Enum
+ from typing import Any
+ from typing import Optional
+
+ from pydantic import BaseModel
+ from pydantic import Extra
+ from pydantic import Field
+ from pydantic import validator
+
+ from .._validators import valint
+ from ..v1.task import TaskExportV1
+ from ..v1.task import TaskImportV1
+ from ..v1.task import TaskReadV1
+ from .task import TaskExportV2
+ from .task import TaskImportV2
+ from .task import TaskReadV2
+ from fractal_server.images import Filters
+
+
+ class WorkflowTaskStatusTypeV2(str, Enum):
+     """
+     Define the available values for the status of a `WorkflowTask`.
+
+     This model is used within the `Dataset.history` attribute, which is
+     constructed in the runner and then used in the API (e.g. in the
+     `api/v2/project/{project_id}/dataset/{dataset_id}/status` endpoint).
+
+     Attributes:
+         SUBMITTED: The `WorkflowTask` is part of a running job.
+         DONE: The most-recent execution of this `WorkflowTask` was successful.
+         FAILED: The most-recent execution of this `WorkflowTask` failed.
+     """
+
+     SUBMITTED = "submitted"
+     DONE = "done"
+     FAILED = "failed"
+
+
+ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
+
+     meta_parallel: Optional[dict[str, Any]]
+     meta_non_parallel: Optional[dict[str, Any]]
+     args_non_parallel: Optional[dict[str, Any]]
+     args_parallel: Optional[dict[str, Any]]
+     order: Optional[int]
+     input_filters: Filters = Field(default_factory=Filters)
+
+     is_legacy_task: bool = False
+
+     # Validators
+
+     _order = validator("order", allow_reuse=True)(valint("order", min_val=0))
+     # FIXME validate: if `is_legacy_task`, `args_non_parallel` must be None
+
+
+ class WorkflowTaskReadV2(BaseModel):
+
+     id: int
+
+     workflow_id: int
+     order: Optional[int]
+     meta_parallel: Optional[dict[str, Any]]
+     meta_non_parallel: Optional[dict[str, Any]]
+
+     args_non_parallel: Optional[dict[str, Any]]
+     args_parallel: Optional[dict[str, Any]]
+
+     input_filters: Filters
+
+     is_legacy_task: bool
+     task_type: str
+     task_id: Optional[int]
+     task: Optional[TaskReadV2]
+     task_legacy_id: Optional[int]
+     task_legacy: Optional[TaskReadV1]
+
+
+ class WorkflowTaskUpdateV2(BaseModel):
+
+     meta_parallel: Optional[dict[str, Any]]
+     meta_non_parallel: Optional[dict[str, Any]]
+     args_non_parallel: Optional[dict[str, Any]]
+     args_parallel: Optional[dict[str, Any]]
+     input_filters: Optional[Filters]
+
+     # Validators
+
+     @validator("meta_parallel", "meta_non_parallel")
+     def check_no_parallelisation_level(cls, m):
+         if "parallelization_level" in m:
+             raise ValueError(
+                 "Overriding task parallelization level currently not allowed"
+             )
+         return m
+
+
+ class WorkflowTaskImportV2(BaseModel):
+
+     meta_parallel: Optional[dict[str, Any]] = None
+     meta_non_parallel: Optional[dict[str, Any]] = None
+     args: Optional[dict[str, Any]] = None  # FIXME
+
+     input_filters: Optional[Filters] = None
+
+     is_legacy_task: bool = False
+     task: Optional[TaskImportV2] = None
+     task_legacy: Optional[TaskImportV1] = None
+
+
+ class WorkflowTaskExportV2(BaseModel):
+
+     meta_parallel: Optional[dict[str, Any]] = None
+     meta_non_parallel: Optional[dict[str, Any]] = None
+     args: Optional[dict[str, Any]] = None  # FIXME
+     input_filters: Filters = Field(default_factory=Filters)
+
+     is_legacy_task: bool = False
+     task: Optional[TaskExportV2]
+     task_legacy: Optional[TaskExportV1]
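
A sketch of creating a workflow task with input filters, using WorkflowTaskCreateV2 and Filters from above (import paths assumed from the file list; the argument and type names are hypothetical):

from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskCreateV2
from fractal_server.images import Filters

WorkflowTaskCreateV2(
    order=0,                                    # must be >= 0
    args_parallel={"overwrite": True},          # hypothetical task arguments
    input_filters=Filters(types={"3D": True}),  # only act on images typed as 3D
)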
fractal_server/config.py CHANGED
@@ -390,6 +390,11 @@ class Settings(BaseSettings):
 
         info = f"FRACTAL_RUNNER_BACKEND={self.FRACTAL_RUNNER_BACKEND}"
         if self.FRACTAL_RUNNER_BACKEND == "slurm":
+
+            from fractal_server.app.runner.executors.slurm._slurm_config import (  # noqa: E501
+                load_slurm_config_file,
+            )
+
             if not self.FRACTAL_SLURM_CONFIG_FILE:
                 raise FractalConfigurationError(
                     f"Must set FRACTAL_SLURM_CONFIG_FILE when {info}"
@@ -401,10 +406,6 @@ class Settings(BaseSettings):
                         f"{self.FRACTAL_SLURM_CONFIG_FILE} not found."
                     )
 
-                from fractal_server.app.runner._slurm._slurm_config import (
-                    load_slurm_config_file,
-                )
-
                 load_slurm_config_file(self.FRACTAL_SLURM_CONFIG_FILE)
                 if not shutil.which("sbatch"):
                     raise FractalConfigurationError(
fractal_server/images/__init__.py ADDED
@@ -0,0 +1,2 @@
+ from .models import Filters  # noqa: F401
+ from .models import SingleImage  # noqa: F401
fractal_server/images/models.py ADDED
@@ -0,0 +1,50 @@
+ from typing import Any
+ from typing import Optional
+ from typing import Union
+
+ from pydantic import BaseModel
+ from pydantic import Field
+ from pydantic import validator
+
+
+ class SingleImage(BaseModel):
+
+     path: str
+     origin: Optional[str] = None
+
+     attributes: dict[str, Any] = Field(default_factory=dict)
+     types: dict[str, bool] = Field(default_factory=dict)
+
+     @validator("attributes")
+     def validate_attributes(
+         cls, v: dict[str, Any]
+     ) -> dict[str, Union[int, float, str, bool]]:
+         for key, value in v.items():
+             if not isinstance(value, (int, float, str, bool)):
+                 raise ValueError(
+                     f"SingleImage.attributes[{key}] must be a scalar "
+                     f"(int, float, str or bool). Given {value} ({type(value)})"
+                 )
+         return v
+
+
+ class Filters(BaseModel):
+     attributes: dict[str, Any] = Field(default_factory=dict)
+     types: dict[str, bool] = Field(default_factory=dict)
+
+     class Config:
+         extra = "forbid"
+
+     # Validators
+     @validator("attributes")
+     def validate_attributes(
+         cls, v: dict[str, Any]
+     ) -> dict[str, Union[int, float, str, bool, None]]:
+         for key, value in v.items():
+             if not isinstance(value, (int, float, str, bool, type(None))):
+                 raise ValueError(
+                     f"Filters.attributes[{key}] must be a scalar "
+                     "(int, float, str, bool, or None). "
+                     f"Given {value} ({type(value)})"
+                 )
+         return v
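
A sketch of the scalar-only constraint enforced above; the imports rely on the new fractal_server/images/__init__.py, while the Zarr path and attribute names are hypothetical:

from pydantic import ValidationError
from fractal_server.images import Filters, SingleImage

SingleImage(
    path="/data/plate.zarr/B/03/0",          # hypothetical image path
    attributes={"well": "B03", "time": 0},   # scalar values only
    types={"3D": True},
)

Filters(attributes={"well": "B03"}, types={"illumination_corrected": False})

try:
    SingleImage(path="/data/plate.zarr/B/03/0", attributes={"roi": [0, 0, 10, 10]})
except ValidationError as e:
    print(e)  # attribute values must be int, float, str or bool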