fractal-server 2.11.1__py3-none-any.whl → 2.12.0a0__py3-none-any.whl

This diff compares the contents of two publicly released package versions, as published to one of the supported registries. It is provided for informational purposes only.
Files changed (61)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +0 -2
  3. fractal_server/app/models/linkuserproject.py +0 -9
  4. fractal_server/app/routes/aux/_job.py +1 -3
  5. fractal_server/app/runner/filenames.py +0 -2
  6. fractal_server/app/runner/shutdown.py +3 -27
  7. fractal_server/config.py +1 -15
  8. fractal_server/main.py +1 -12
  9. fractal_server/migrations/versions/1eac13a26c83_drop_v1_tables.py +67 -0
  10. fractal_server/string_tools.py +0 -21
  11. fractal_server/tasks/utils.py +0 -24
  12. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a0.dist-info}/METADATA +1 -1
  13. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a0.dist-info}/RECORD +16 -60
  14. fractal_server/app/models/v1/__init__.py +0 -13
  15. fractal_server/app/models/v1/dataset.py +0 -71
  16. fractal_server/app/models/v1/job.py +0 -101
  17. fractal_server/app/models/v1/project.py +0 -29
  18. fractal_server/app/models/v1/state.py +0 -34
  19. fractal_server/app/models/v1/task.py +0 -85
  20. fractal_server/app/models/v1/workflow.py +0 -133
  21. fractal_server/app/routes/admin/v1.py +0 -377
  22. fractal_server/app/routes/api/v1/__init__.py +0 -26
  23. fractal_server/app/routes/api/v1/_aux_functions.py +0 -478
  24. fractal_server/app/routes/api/v1/dataset.py +0 -554
  25. fractal_server/app/routes/api/v1/job.py +0 -195
  26. fractal_server/app/routes/api/v1/project.py +0 -475
  27. fractal_server/app/routes/api/v1/task.py +0 -203
  28. fractal_server/app/routes/api/v1/task_collection.py +0 -239
  29. fractal_server/app/routes/api/v1/workflow.py +0 -355
  30. fractal_server/app/routes/api/v1/workflowtask.py +0 -187
  31. fractal_server/app/runner/async_wrap_v1.py +0 -27
  32. fractal_server/app/runner/v1/__init__.py +0 -415
  33. fractal_server/app/runner/v1/_common.py +0 -620
  34. fractal_server/app/runner/v1/_local/__init__.py +0 -186
  35. fractal_server/app/runner/v1/_local/_local_config.py +0 -105
  36. fractal_server/app/runner/v1/_local/_submit_setup.py +0 -48
  37. fractal_server/app/runner/v1/_local/executor.py +0 -100
  38. fractal_server/app/runner/v1/_slurm/__init__.py +0 -312
  39. fractal_server/app/runner/v1/_slurm/_submit_setup.py +0 -81
  40. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +0 -163
  41. fractal_server/app/runner/v1/common.py +0 -117
  42. fractal_server/app/runner/v1/handle_failed_job.py +0 -141
  43. fractal_server/app/schemas/v1/__init__.py +0 -37
  44. fractal_server/app/schemas/v1/applyworkflow.py +0 -161
  45. fractal_server/app/schemas/v1/dataset.py +0 -165
  46. fractal_server/app/schemas/v1/dumps.py +0 -64
  47. fractal_server/app/schemas/v1/manifest.py +0 -126
  48. fractal_server/app/schemas/v1/project.py +0 -66
  49. fractal_server/app/schemas/v1/state.py +0 -18
  50. fractal_server/app/schemas/v1/task.py +0 -167
  51. fractal_server/app/schemas/v1/task_collection.py +0 -110
  52. fractal_server/app/schemas/v1/workflow.py +0 -212
  53. fractal_server/tasks/v1/_TaskCollectPip.py +0 -103
  54. fractal_server/tasks/v1/__init__.py +0 -0
  55. fractal_server/tasks/v1/background_operations.py +0 -352
  56. fractal_server/tasks/v1/endpoint_operations.py +0 -156
  57. fractal_server/tasks/v1/get_collection_data.py +0 -14
  58. fractal_server/tasks/v1/utils.py +0 -67
  59. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a0.dist-info}/LICENSE +0 -0
  60. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a0.dist-info}/WHEEL +0 -0
  61. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a0.dist-info}/entry_points.txt +0 -0
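This release removes every V1 module (models, schemas, API routes, runner backends, and task-collection code) and adds a single new file, the Alembic migration 1eac13a26c83_drop_v1_tables.py, which drops the now-unused V1 database tables. The migration itself is not shown in this diff; the following is only a rough, hypothetical sketch of what a drop-tables migration of this kind looks like (revision pointers and table names are assumptions, not taken from the actual file):

# Hypothetical sketch of a "drop V1 tables" Alembic migration.
# Revision identifiers and table names are assumptions for illustration only.
from alembic import op

revision = "1eac13a26c83"
down_revision = None  # assumption: the real migration points at its predecessor
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Drop dependent/link tables before the tables they reference,
    # so foreign-key constraints do not block the drops.
    for table_name in (
        "linkuserproject",  # assumed V1 user<->project link table
        "applyworkflow",    # assumed V1 job table
        "workflowtask",
        "workflow",
        "dataset",
        "task",
        "state",
        "project",
    ):
        op.drop_table(table_name)


def downgrade() -> None:
    # Recreating the V1 schema is intentionally unsupported.
    raise RuntimeError("Downgrade is not supported for this migration.")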
fractal_server/app/runner/v1/_slurm/__init__.py
@@ -1,312 +0,0 @@
- # Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
- # University of Zurich
- #
- # Original authors:
- # Jacopo Nespolo <jacopo.nespolo@exact-lab.it>
- # Tommaso Comparin <tommaso.comparin@exact-lab.it>
- # Marco Franzon <marco.franzon@exact-lab.it>
- #
- # This file is part of Fractal and was originally developed by eXact lab S.r.l.
- # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
- # Institute for Biomedical Research and Pelkmans Lab from the University of
- # Zurich.
- """
- SLURM Backend
-
- This backend runs fractal workflows in a SLURM cluster using Clusterfutures
- Executor objects.
- """
- from pathlib import Path
- from typing import Any
- from typing import Optional
- from typing import Union
-
- from ...async_wrap_v1 import async_wrap_v1
- from ...executors.slurm.sudo.executor import FractalSlurmExecutor
- from ...set_start_and_last_task_index import set_start_and_last_task_index
- from .._common import execute_tasks
- from ..common import TaskParameters
- from ._submit_setup import _slurm_submit_setup
- from fractal_server.app.models.v1 import Workflow
- from fractal_server.app.models.v1 import WorkflowTask
- from fractal_server.app.runner.executors.slurm._slurm_config import (
-     _parse_mem_value,
- )
- from fractal_server.app.runner.executors.slurm._slurm_config import (
-     load_slurm_config_file,
- )
- from fractal_server.app.runner.executors.slurm._slurm_config import logger
- from fractal_server.app.runner.executors.slurm._slurm_config import SlurmConfig
- from fractal_server.app.runner.executors.slurm._slurm_config import (
-     SlurmConfigError,
- )
-
-
- def _process_workflow(
-     *,
-     workflow: Workflow,
-     input_paths: list[Path],
-     output_path: Path,
-     input_metadata: dict[str, Any],
-     input_history: list[dict[str, Any]],
-     logger_name: str,
-     workflow_dir_local: Path,
-     workflow_dir_remote: Path,
-     first_task_index: int,
-     last_task_index: int,
-     slurm_user: Optional[str] = None,
-     slurm_account: Optional[str] = None,
-     user_cache_dir: str,
-     worker_init: Optional[Union[str, list[str]]] = None,
- ) -> dict[str, Any]:
-     """
-     Internal processing routine for the SLURM backend
-
-     This function initialises a FractalSlurmExecutor, setting logging,
-     workflow working dir and user to impersonate. It then schedules the
-     workflow tasks and returns the output dataset metadata.
-
-     Cf.
-     [process_workflow][fractal_server.app.runner.v1._local.process_workflow]
-
-     Returns:
-         output_dataset_metadata: Metadata of the output dataset
-     """
-
-     if not slurm_user:
-         raise RuntimeError(
-             "slurm_user argument is required for the SLURM backend"
-         )
-
-     if isinstance(worker_init, str):
-         worker_init = worker_init.split("\n")
-
-     with FractalSlurmExecutor(
-         debug=True,
-         keep_logs=True,
-         slurm_user=slurm_user,
-         user_cache_dir=user_cache_dir,
-         workflow_dir_local=workflow_dir_local,
-         workflow_dir_remote=workflow_dir_remote,
-         common_script_lines=worker_init,
-         slurm_account=slurm_account,
-     ) as executor:
-         output_task_pars = execute_tasks(
-             executor=executor,
-             task_list=workflow.task_list[
-                 first_task_index : (last_task_index + 1)  # noqa
-             ],  # noqa
-             task_pars=TaskParameters(
-                 input_paths=input_paths,
-                 output_path=output_path,
-                 metadata=input_metadata,
-                 history=input_history,
-             ),
-             workflow_dir_local=workflow_dir_local,
-             workflow_dir_remote=workflow_dir_remote,
-             submit_setup_call=_slurm_submit_setup,
-             logger_name=logger_name,
-         )
-     output_dataset_metadata_history = dict(
-         metadata=output_task_pars.metadata, history=output_task_pars.history
-     )
-     return output_dataset_metadata_history
-
-
- async def process_workflow(
-     *,
-     workflow: Workflow,
-     input_paths: list[Path],
-     output_path: Path,
-     input_metadata: dict[str, Any],
-     input_history: list[dict[str, Any]],
-     logger_name: str,
-     workflow_dir_local: Path,
-     workflow_dir_remote: Optional[Path] = None,
-     user_cache_dir: Optional[str] = None,
-     slurm_user: Optional[str] = None,
-     slurm_account: Optional[str] = None,
-     worker_init: Optional[str] = None,
-     first_task_index: Optional[int] = None,
-     last_task_index: Optional[int] = None,
- ) -> dict[str, Any]:
-     """
-     Process workflow (SLURM backend public interface)
-
-     Cf.
-     [process_workflow][fractal_server.app.runner.v1._local.process_workflow]
-     """
-
-     # Set values of first_task_index and last_task_index
-     num_tasks = len(workflow.task_list)
-     first_task_index, last_task_index = set_start_and_last_task_index(
-         num_tasks,
-         first_task_index=first_task_index,
-         last_task_index=last_task_index,
-     )
-
-     output_dataset_metadata_history = await async_wrap_v1(_process_workflow)(
-         workflow=workflow,
-         input_paths=input_paths,
-         output_path=output_path,
-         input_metadata=input_metadata,
-         input_history=input_history,
-         logger_name=logger_name,
-         workflow_dir_local=workflow_dir_local,
-         workflow_dir_remote=workflow_dir_remote,
-         slurm_user=slurm_user,
-         slurm_account=slurm_account,
-         user_cache_dir=user_cache_dir,
-         worker_init=worker_init,
-         first_task_index=first_task_index,
-         last_task_index=last_task_index,
-     )
-     return output_dataset_metadata_history
-
-
- def get_slurm_config(
-     wftask: WorkflowTask,
-     workflow_dir_local: Path,
-     workflow_dir_remote: Path,
-     config_path: Optional[Path] = None,
- ) -> SlurmConfig:
-     """
-     Prepare a `SlurmConfig` configuration object
-
-     The sources for `SlurmConfig` attributes, in increasing priority order, are
-
-     1. The general content of the Fractal SLURM configuration file.
-     2. The GPU-specific content of the Fractal SLURM configuration file, if
-        appropriate.
-     3. Properties in `wftask.meta` (which, for `WorkflowTask`s added through
-        `Workflow.insert_task`, also includes `wftask.task.meta`);
-
-     Note: `wftask.meta` may be `None`.
-
-     Arguments:
-         wftask:
-             WorkflowTask for which the SLURM configuration is to be
-             prepared.
-         workflow_dir_local:
-             Server-owned directory to store all task-execution-related relevant
-             files (inputs, outputs, errors, and all meta files related to the
-             job execution). Note: users cannot write directly to this folder.
-         workflow_dir_remote:
-             User-side directory with the same scope as `workflow_dir_local`,
-             and where a user can write.
-         config_path:
-             Path of a Fractal SLURM configuration file; if `None`, use
-             `FRACTAL_SLURM_CONFIG_FILE` variable from settings.
-
-     Returns:
-         slurm_config:
-             The SlurmConfig object
-     """
-
-     logger.debug(
-         f"[get_slurm_config] WorkflowTask meta attribute: {wftask.meta=}"
-     )
-
-     # Incorporate slurm_env.default_slurm_config
-     slurm_env = load_slurm_config_file(config_path=config_path)
-     slurm_dict = slurm_env.default_slurm_config.dict(
-         exclude_unset=True, exclude={"mem"}
-     )
-     if slurm_env.default_slurm_config.mem:
-         slurm_dict["mem_per_task_MB"] = slurm_env.default_slurm_config.mem
-
-     # Incorporate slurm_env.batching_config
-     for key, value in slurm_env.batching_config.dict().items():
-         slurm_dict[key] = value
-
-     # Incorporate slurm_env.user_local_exports
-     slurm_dict["user_local_exports"] = slurm_env.user_local_exports
-
-     logger.debug(
-         "[get_slurm_config] Fractal SLURM configuration file: "
-         f"{slurm_env.dict()=}"
-     )
-
-     # GPU-related options
-     # Notes about priority:
-     # 1. This block of definitions takes priority over other definitions from
-     #    slurm_env which are not under the `needs_gpu` subgroup
-     # 2. This block of definitions has lower priority than whatever comes next
-     #    (i.e. from WorkflowTask.meta).
-     if wftask.meta is not None:
-         needs_gpu = wftask.meta.get("needs_gpu", False)
-     else:
-         needs_gpu = False
-     logger.debug(f"[get_slurm_config] {needs_gpu=}")
-     if needs_gpu:
-         for key, value in slurm_env.gpu_slurm_config.dict(
-             exclude_unset=True, exclude={"mem"}
-         ).items():
-             slurm_dict[key] = value
-         if slurm_env.gpu_slurm_config.mem:
-             slurm_dict["mem_per_task_MB"] = slurm_env.gpu_slurm_config.mem
-
-     # Number of CPUs per task, for multithreading
-     if wftask.meta is not None and "cpus_per_task" in wftask.meta:
-         cpus_per_task = int(wftask.meta["cpus_per_task"])
-         slurm_dict["cpus_per_task"] = cpus_per_task
-
-     # Required memory per task, in MB
-     if wftask.meta is not None and "mem" in wftask.meta:
-         raw_mem = wftask.meta["mem"]
-         mem_per_task_MB = _parse_mem_value(raw_mem)
-         slurm_dict["mem_per_task_MB"] = mem_per_task_MB
-
-     # Job name
-     job_name = wftask.task.name.replace(" ", "_")
-     slurm_dict["job_name"] = job_name
-
-     # Optional SLURM arguments and extra lines
-     if wftask.meta is not None:
-         account = wftask.meta.get("account", None)
-         if account is not None:
-             error_msg = (
-                 f"Invalid {account=} property in WorkflowTask `meta` "
-                 "attribute.\n"
-                 "SLURM account must be set in the request body of the "
-                 "apply-workflow endpoint, or by modifying the user properties."
-             )
-             logger.error(error_msg)
-             raise SlurmConfigError(error_msg)
-         for key in ["time", "gres", "constraint"]:
-             value = wftask.meta.get(key, None)
-             if value:
-                 slurm_dict[key] = value
-     if wftask.meta is not None:
-         extra_lines = wftask.meta.get("extra_lines", [])
-     else:
-         extra_lines = []
-     extra_lines = slurm_dict.get("extra_lines", []) + extra_lines
-     if len(set(extra_lines)) != len(extra_lines):
-         logger.debug(
-             "[get_slurm_config] Removing repeated elements "
-             f"from {extra_lines=}."
-         )
-         extra_lines = list(set(extra_lines))
-     slurm_dict["extra_lines"] = extra_lines
-
-     # Job-batching parameters (if None, they will be determined heuristically)
-     if wftask.meta is not None:
-         tasks_per_job = wftask.meta.get("tasks_per_job", None)
-         parallel_tasks_per_job = wftask.meta.get(
-             "parallel_tasks_per_job", None
-         )
-     else:
-         tasks_per_job = None
-         parallel_tasks_per_job = None
-     slurm_dict["tasks_per_job"] = tasks_per_job
-     slurm_dict["parallel_tasks_per_job"] = parallel_tasks_per_job
-
-     # Put everything together
-     logger.debug(
-         "[get_slurm_config] Now create a SlurmConfig object based "
-         f"on {slurm_dict=}"
-     )
-     slurm_config = SlurmConfig(**slurm_dict)
-
-     return slurm_config
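In the file above, the async public entry point process_workflow delegates to the synchronous _process_workflow through async_wrap_v1, so the blocking SLURM-submission work does not stall the server's event loop. The removed async_wrap_v1.py module is not shown in this diff; the following is only a minimal sketch of such a sync-to-async bridge, assuming it simply offloads the wrapped call to a thread-pool executor:

# Sketch of a sync-to-async wrapper; details of the removed async_wrap_v1 may differ.
import asyncio
from functools import partial, wraps
from typing import Any, Callable


def async_wrap(func: Callable[..., Any]) -> Callable[..., Any]:
    """Turn a blocking callable into a coroutine that runs it in a thread."""

    @wraps(func)
    async def run(*args: Any, **kwargs: Any) -> Any:
        loop = asyncio.get_running_loop()
        # Run the original function in the loop's default thread-pool executor,
        # so `await async_wrap(f)(...)` never blocks the event loop itself.
        return await loop.run_in_executor(None, partial(func, *args, **kwargs))

    return run

With a wrapper of this shape, `await async_wrap(_process_workflow)(workflow=..., ...)` behaves like the call in the removed code.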
fractal_server/app/runner/v1/_slurm/_submit_setup.py
@@ -1,81 +0,0 @@
- # Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
- # University of Zurich
- #
- # Original authors:
- # Jacopo Nespolo <jacopo.nespolo@exact-lab.it>
- # Tommaso Comparin <tommaso.comparin@exact-lab.it>
- #
- # This file is part of Fractal and was originally developed by eXact lab S.r.l.
- # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
- # Institute for Biomedical Research and Pelkmans Lab from the University of
- # Zurich.
- """
- Submodule to define _slurm_submit_setup, which is also the reference
- implementation of `submit_setup_call` in
- [fractal_server.app.runner.v1._common][].
- """
- from pathlib import Path
-
- from ...task_files import get_task_file_paths
- from .get_slurm_config import get_slurm_config
- from fractal_server.app.models.v1 import WorkflowTask
-
-
- def _slurm_submit_setup(
-     *,
-     wftask: WorkflowTask,
-     workflow_dir_local: Path,
-     workflow_dir_remote: Path,
- ) -> dict[str, object]:
-     """
-     Collect WorkflowTask-specific configuration parameters from different
-     sources, and inject them for execution.
-
-     Here goes all the logic for reading attributes from the appropriate sources
-     and transforming them into an appropriate `SlurmConfig` object (encoding
-     SLURM configuration) and `TaskFiles` object (with details e.g. about file
-     paths or filename prefixes).
-
-     For now, this is the reference implementation for the argument
-     `submit_setup_call` of
-     [fractal_server.app.runner.v1._common.execute_tasks][].
-
-     Arguments:
-         wftask:
-             WorkflowTask for which the configuration is to be assembled
-         workflow_dir_local:
-             Server-owned directory to store all task-execution-related relevant
-             files (inputs, outputs, errors, and all meta files related to the
-             job execution). Note: users cannot write directly to this folder.
-         workflow_dir_remote:
-             User-side directory with the same scope as `workflow_dir_local`,
-             and where a user can write.
-
-     Returns:
-         submit_setup_dict:
-             A dictionary that will be passed on to
-             `FractalSlurmExecutor.submit` and `FractalSlurmExecutor.map`, so
-             as to set extra options.
-     """
-
-     # Get SlurmConfig object
-     slurm_config = get_slurm_config(
-         wftask=wftask,
-         workflow_dir_local=workflow_dir_local,
-         workflow_dir_remote=workflow_dir_remote,
-     )
-
-     # Get TaskFiles object
-     task_files = get_task_file_paths(
-         workflow_dir_local=workflow_dir_local,
-         workflow_dir_remote=workflow_dir_remote,
-         task_order=wftask.order,
-         task_name=wftask.task.name,
-     )
-
-     # Prepare and return output dictionary
-     submit_setup_dict = dict(
-         slurm_config=slurm_config,
-         task_files=task_files,
-     )
-     return submit_setup_dict
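_slurm_submit_setup is the SLURM implementation of the submit_setup_call hook: the runner calls it once per task and forwards the returned dictionary (slurm_config and task_files) as extra keyword arguments to FractalSlurmExecutor.submit/.map. The following is a simplified, hypothetical sketch of how a runner loop consumes such a hook; it is not the removed execute_tasks, and all names outside the hook's signature are illustrative:

# Hypothetical runner loop consuming a per-task setup hook.
from pathlib import Path
from typing import Any, Callable, Optional


def run_task_list(
    executor: Any,  # object with a concurrent.futures-style submit()
    task_list: list[Any],
    run_single_task: Callable[..., Any],
    workflow_dir_local: Path,
    workflow_dir_remote: Path,
    submit_setup_call: Optional[Callable[..., dict]] = None,
) -> None:
    for wftask in task_list:
        # Ask the backend-specific hook for extra submission options.
        extra_options: dict = {}
        if submit_setup_call is not None:
            extra_options = submit_setup_call(
                wftask=wftask,
                workflow_dir_local=workflow_dir_local,
                workflow_dir_remote=workflow_dir_remote,
            )
        # For the SLURM backend, extra_options carries `slurm_config` and
        # `task_files`, which the executor interprets when building the job.
        future = executor.submit(run_single_task, wftask, **extra_options)
        future.result()  # wait for the task before moving to the next one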
fractal_server/app/runner/v1/_slurm/get_slurm_config.py
@@ -1,163 +0,0 @@
- from pathlib import Path
- from typing import Optional
-
- from fractal_server.app.models.v1 import WorkflowTask
- from fractal_server.app.runner.executors.slurm._slurm_config import (
-     _parse_mem_value,
- )
- from fractal_server.app.runner.executors.slurm._slurm_config import (
-     load_slurm_config_file,
- )
- from fractal_server.app.runner.executors.slurm._slurm_config import logger
- from fractal_server.app.runner.executors.slurm._slurm_config import SlurmConfig
- from fractal_server.app.runner.executors.slurm._slurm_config import (
-     SlurmConfigError,
- )
-
-
- def get_slurm_config(
-     wftask: WorkflowTask,
-     workflow_dir_local: Path,
-     workflow_dir_remote: Path,
-     config_path: Optional[Path] = None,
- ) -> SlurmConfig:
-     """
-     Prepare a `SlurmConfig` configuration object
-
-     The sources for `SlurmConfig` attributes, in increasing priority order, are
-
-     1. The general content of the Fractal SLURM configuration file.
-     2. The GPU-specific content of the Fractal SLURM configuration file, if
-        appropriate.
-     3. Properties in `wftask.meta` (which, for `WorkflowTask`s added through
-        `Workflow.insert_task`, also includes `wftask.task.meta`);
-
-     Note: `wftask.meta` may be `None`.
-
-     Arguments:
-         wftask:
-             WorkflowTask for which the SLURM configuration is to be
-             prepared.
-         workflow_dir_local:
-             Server-owned directory to store all task-execution-related relevant
-             files (inputs, outputs, errors, and all meta files related to the
-             job execution). Note: users cannot write directly to this folder.
-         workflow_dir_remote:
-             User-side directory with the same scope as `workflow_dir_local`,
-             and where a user can write.
-         config_path:
-             Path of a Fractal SLURM configuration file; if `None`, use
-             `FRACTAL_SLURM_CONFIG_FILE` variable from settings.
-
-     Returns:
-         slurm_config:
-             The SlurmConfig object
-     """
-
-     logger.debug(
-         f"[get_slurm_config] WorkflowTask meta attribute: {wftask.meta=}"
-     )
-
-     # Incorporate slurm_env.default_slurm_config
-     slurm_env = load_slurm_config_file(config_path=config_path)
-     slurm_dict = slurm_env.default_slurm_config.dict(
-         exclude_unset=True, exclude={"mem"}
-     )
-     if slurm_env.default_slurm_config.mem:
-         slurm_dict["mem_per_task_MB"] = slurm_env.default_slurm_config.mem
-
-     # Incorporate slurm_env.batching_config
-     for key, value in slurm_env.batching_config.dict().items():
-         slurm_dict[key] = value
-
-     # Incorporate slurm_env.user_local_exports
-     slurm_dict["user_local_exports"] = slurm_env.user_local_exports
-
-     logger.debug(
-         "[get_slurm_config] Fractal SLURM configuration file: "
-         f"{slurm_env.dict()=}"
-     )
-
-     # GPU-related options
-     # Notes about priority:
-     # 1. This block of definitions takes priority over other definitions from
-     #    slurm_env which are not under the `needs_gpu` subgroup
-     # 2. This block of definitions has lower priority than whatever comes next
-     #    (i.e. from WorkflowTask.meta).
-     if wftask.meta is not None:
-         needs_gpu = wftask.meta.get("needs_gpu", False)
-     else:
-         needs_gpu = False
-     logger.debug(f"[get_slurm_config] {needs_gpu=}")
-     if needs_gpu:
-         for key, value in slurm_env.gpu_slurm_config.dict(
-             exclude_unset=True, exclude={"mem"}
-         ).items():
-             slurm_dict[key] = value
-         if slurm_env.gpu_slurm_config.mem:
-             slurm_dict["mem_per_task_MB"] = slurm_env.gpu_slurm_config.mem
-
-     # Number of CPUs per task, for multithreading
-     if wftask.meta is not None and "cpus_per_task" in wftask.meta:
-         cpus_per_task = int(wftask.meta["cpus_per_task"])
-         slurm_dict["cpus_per_task"] = cpus_per_task
-
-     # Required memory per task, in MB
-     if wftask.meta is not None and "mem" in wftask.meta:
-         raw_mem = wftask.meta["mem"]
-         mem_per_task_MB = _parse_mem_value(raw_mem)
-         slurm_dict["mem_per_task_MB"] = mem_per_task_MB
-
-     # Job name
-     job_name = wftask.task.name.replace(" ", "_")
-     slurm_dict["job_name"] = job_name
-
-     # Optional SLURM arguments and extra lines
-     if wftask.meta is not None:
-         account = wftask.meta.get("account", None)
-         if account is not None:
-             error_msg = (
-                 f"Invalid {account=} property in WorkflowTask `meta` "
-                 "attribute.\n"
-                 "SLURM account must be set in the request body of the "
-                 "apply-workflow endpoint, or by modifying the user properties."
-             )
-             logger.error(error_msg)
-             raise SlurmConfigError(error_msg)
-         for key in ["time", "gres", "constraint"]:
-             value = wftask.meta.get(key, None)
-             if value:
-                 slurm_dict[key] = value
-     if wftask.meta is not None:
-         extra_lines = wftask.meta.get("extra_lines", [])
-     else:
-         extra_lines = []
-     extra_lines = slurm_dict.get("extra_lines", []) + extra_lines
-     if len(set(extra_lines)) != len(extra_lines):
-         logger.debug(
-             "[get_slurm_config] Removing repeated elements "
-             f"from {extra_lines=}."
-         )
-         extra_lines = list(set(extra_lines))
-     slurm_dict["extra_lines"] = extra_lines
-
-     # Job-batching parameters (if None, they will be determined heuristically)
-     if wftask.meta is not None:
-         tasks_per_job = wftask.meta.get("tasks_per_job", None)
-         parallel_tasks_per_job = wftask.meta.get(
-             "parallel_tasks_per_job", None
-         )
-     else:
-         tasks_per_job = None
-         parallel_tasks_per_job = None
-     slurm_dict["tasks_per_job"] = tasks_per_job
-     slurm_dict["parallel_tasks_per_job"] = parallel_tasks_per_job
-
-     # Put everything together
-     logger.debug(
-         "[get_slurm_config] Now create a SlurmConfig object based "
-         f"on {slurm_dict=}"
-     )
-     slurm_config = SlurmConfig(**slurm_dict)
-
-     return slurm_config
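The priority order documented in get_slurm_config amounts to a layered dictionary merge: start from the general section of the SLURM configuration file, overlay the GPU section when wftask.meta requests needs_gpu, then apply per-task overrides from wftask.meta itself. A toy illustration of that ordering, with invented values:

# Toy illustration of the three-layer override performed by get_slurm_config;
# every value here is invented for the example.
default_section = {"partition": "main", "cpus_per_task": 1, "mem_per_task_MB": 4000}
gpu_section = {"partition": "gpu", "gres": "gpu:1"}
wftask_meta = {"needs_gpu": True, "cpus_per_task": 8}

slurm_dict = dict(default_section)                 # 1. general config-file content
if wftask_meta.get("needs_gpu", False):
    slurm_dict.update(gpu_section)                 # 2. GPU-specific content
if "cpus_per_task" in wftask_meta:
    slurm_dict["cpus_per_task"] = int(wftask_meta["cpus_per_task"])  # 3. wftask.meta

print(slurm_dict)
# {'partition': 'gpu', 'cpus_per_task': 8, 'mem_per_task_MB': 4000, 'gres': 'gpu:1'}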
fractal_server/app/runner/v1/common.py
@@ -1,117 +0,0 @@
- """
- Common utilities and routines for runner backends (public API)
-
- This module includes utilities and routines that are of use to implement
- runner backends but that should also be exposed to the other components of
- `Fractal Server`.
- """
- import json
- from json import JSONEncoder
- from pathlib import Path
- from typing import Any
-
- from pydantic import BaseModel
-
- from ....logger import close_logger as close_job_logger  # noqa F401
- from ...models.v1 import Dataset
- from ...models.v1 import Workflow
-
-
- class TaskParameterEncoder(JSONEncoder):
-     """
-     Convenience JSONEncoder that serialises `Path`s as strings
-     """
-
-     def default(self, value):
-         if isinstance(value, Path):
-             return value.as_posix()
-         return JSONEncoder.default(self, value)
-
-
- class TaskParameters(BaseModel):
-     """
-     Wrapper for task input parameters
-
-     Instances of this class are used to pass parameters from the output of a
-     task to the input of the next one.
-
-     Attributes:
-         input_paths:
-             Input paths as derived from the input dataset.
-         output_path:
-             Output path as derived from the output dataset.
-         metadata:
-             Dataset metadata, as found in the input dataset or as updated by
-             the previous task.
-         history:
-             Dataset history, as found in the input dataset or as updated by
-             the previous task.
-     """
-
-     input_paths: list[Path]
-     output_path: Path
-     metadata: dict[str, Any]
-     history: list[dict[str, Any]]
-
-     class Config:
-         arbitrary_types_allowed = True
-         extra = "forbid"
-
-
- def validate_workflow_compatibility(
-     *,
-     input_dataset: Dataset,
-     workflow: Workflow,
-     output_dataset: Dataset,
-     first_task_index: int,
-     last_task_index: int,
- ) -> None:
-     """
-     Check compatibility of workflow and input / output dataset
-     """
-     # Check input_dataset type
-     workflow_input_type = workflow.task_list[first_task_index].task.input_type
-     if (
-         workflow_input_type != "Any"
-         and workflow_input_type != input_dataset.type
-     ):
-         raise TypeError(
-             f"Incompatible types `{workflow_input_type}` of workflow "
-             f"`{workflow.name}` and `{input_dataset.type}` of dataset "
-             f"`{input_dataset.name}`"
-         )
-
-     # Check output_dataset type
-     workflow_output_type = workflow.task_list[last_task_index].task.output_type
-     if (
-         workflow_output_type != "Any"
-         and workflow_output_type != output_dataset.type
-     ):
-         raise TypeError(
-             f"Incompatible types `{workflow_output_type}` of workflow "
-             f"`{workflow.name}` and `{output_dataset.type}` of dataset "
-             f"`{output_dataset.name}`"
-         )
-
-
- def write_args_file(
-     *args: dict[str, Any],
-     path: Path,
- ):
-     """
-     Merge arbitrary dictionaries and write to file
-
-     Args:
-         *args:
-             One or more dictionaries that will be merged into one respecting
-             the order with which they are passed in, i.e., last in overrides
-             previous ones.
-         path:
-             Destination for serialised file.
-     """
-     out = {}
-     for d in args:
-         out.update(d)
-
-     with open(path, "w") as f:
-         json.dump(out, f, cls=TaskParameterEncoder, indent=4)
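For reference, write_args_file above relies on TaskParameterEncoder so that Path values survive JSON serialisation, and later dictionaries win over earlier ones in the merge. A short usage sketch (the file path and argument values are invented):

# Usage sketch for the removed helpers; paths and values are invented.
from pathlib import Path

task_args = {"input_paths": [Path("/data/in")], "output_path": Path("/data/out")}
overrides = {"output_path": Path("/data/out_v2")}  # last dict wins in the merge

write_args_file(task_args, overrides, path=Path("/tmp/args.json"))
# /tmp/args.json now holds the merged dict, with every Path serialised as a
# POSIX string, e.g. "output_path": "/data/out_v2".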