fractal-server 2.11.1__py3-none-any.whl → 2.12.0a0__py3-none-any.whl

This diff shows the changes between two publicly released package versions, as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (61)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +0 -2
  3. fractal_server/app/models/linkuserproject.py +0 -9
  4. fractal_server/app/routes/aux/_job.py +1 -3
  5. fractal_server/app/runner/filenames.py +0 -2
  6. fractal_server/app/runner/shutdown.py +3 -27
  7. fractal_server/config.py +1 -15
  8. fractal_server/main.py +1 -12
  9. fractal_server/migrations/versions/1eac13a26c83_drop_v1_tables.py +67 -0
  10. fractal_server/string_tools.py +0 -21
  11. fractal_server/tasks/utils.py +0 -24
  12. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a0.dist-info}/METADATA +1 -1
  13. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a0.dist-info}/RECORD +16 -60
  14. fractal_server/app/models/v1/__init__.py +0 -13
  15. fractal_server/app/models/v1/dataset.py +0 -71
  16. fractal_server/app/models/v1/job.py +0 -101
  17. fractal_server/app/models/v1/project.py +0 -29
  18. fractal_server/app/models/v1/state.py +0 -34
  19. fractal_server/app/models/v1/task.py +0 -85
  20. fractal_server/app/models/v1/workflow.py +0 -133
  21. fractal_server/app/routes/admin/v1.py +0 -377
  22. fractal_server/app/routes/api/v1/__init__.py +0 -26
  23. fractal_server/app/routes/api/v1/_aux_functions.py +0 -478
  24. fractal_server/app/routes/api/v1/dataset.py +0 -554
  25. fractal_server/app/routes/api/v1/job.py +0 -195
  26. fractal_server/app/routes/api/v1/project.py +0 -475
  27. fractal_server/app/routes/api/v1/task.py +0 -203
  28. fractal_server/app/routes/api/v1/task_collection.py +0 -239
  29. fractal_server/app/routes/api/v1/workflow.py +0 -355
  30. fractal_server/app/routes/api/v1/workflowtask.py +0 -187
  31. fractal_server/app/runner/async_wrap_v1.py +0 -27
  32. fractal_server/app/runner/v1/__init__.py +0 -415
  33. fractal_server/app/runner/v1/_common.py +0 -620
  34. fractal_server/app/runner/v1/_local/__init__.py +0 -186
  35. fractal_server/app/runner/v1/_local/_local_config.py +0 -105
  36. fractal_server/app/runner/v1/_local/_submit_setup.py +0 -48
  37. fractal_server/app/runner/v1/_local/executor.py +0 -100
  38. fractal_server/app/runner/v1/_slurm/__init__.py +0 -312
  39. fractal_server/app/runner/v1/_slurm/_submit_setup.py +0 -81
  40. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +0 -163
  41. fractal_server/app/runner/v1/common.py +0 -117
  42. fractal_server/app/runner/v1/handle_failed_job.py +0 -141
  43. fractal_server/app/schemas/v1/__init__.py +0 -37
  44. fractal_server/app/schemas/v1/applyworkflow.py +0 -161
  45. fractal_server/app/schemas/v1/dataset.py +0 -165
  46. fractal_server/app/schemas/v1/dumps.py +0 -64
  47. fractal_server/app/schemas/v1/manifest.py +0 -126
  48. fractal_server/app/schemas/v1/project.py +0 -66
  49. fractal_server/app/schemas/v1/state.py +0 -18
  50. fractal_server/app/schemas/v1/task.py +0 -167
  51. fractal_server/app/schemas/v1/task_collection.py +0 -110
  52. fractal_server/app/schemas/v1/workflow.py +0 -212
  53. fractal_server/tasks/v1/_TaskCollectPip.py +0 -103
  54. fractal_server/tasks/v1/__init__.py +0 -0
  55. fractal_server/tasks/v1/background_operations.py +0 -352
  56. fractal_server/tasks/v1/endpoint_operations.py +0 -156
  57. fractal_server/tasks/v1/get_collection_data.py +0 -14
  58. fractal_server/tasks/v1/utils.py +0 -67
  59. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a0.dist-info}/LICENSE +0 -0
  60. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a0.dist-info}/WHEEL +0 -0
  61. {fractal_server-2.11.1.dist-info → fractal_server-2.12.0a0.dist-info}/entry_points.txt +0 -0
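The only file added in this release is the Alembic migration `1eac13a26c83_drop_v1_tables.py` (item 9), which accompanies the wholesale removal of the V1 models, routes, runners, schemas, and tasks listed above. Its body is not included in this diff; the sketch below shows what a drop-tables migration of this kind typically looks like. The parent revision and the table names are assumptions inferred from the removed v1 model modules, not copied from the real migration.

```python
"""Hypothetical sketch of a drop-V1-tables migration (not the shipped file)."""
from alembic import op

# Revision identifiers; the parent revision is a placeholder, since the
# real one is not shown in this diff.
revision = "1eac13a26c83"
down_revision = "..."


def upgrade() -> None:
    # Drop link/child tables before their parents to satisfy foreign keys.
    # Table names are guesses based on the removed v1 model files.
    for table in (
        "linkuserproject",
        "applyworkflow",
        "workflowtask",
        "workflow",
        "dataset",
        "state",
        "task",
        "project",
    ):
        op.drop_table(table)


def downgrade() -> None:
    raise RuntimeError("Downgrade is not supported: the V1 tables are gone.")
```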
fractal_server/app/runner/v1/_local/__init__.py
@@ -1,186 +0,0 @@
- # Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
- # University of Zurich
- #
- # Original authors:
- # Jacopo Nespolo <jacopo.nespolo@exact-lab.it>
- # Tommaso Comparin <tommaso.comparin@exact-lab.it>
- # Marco Franzon <marco.franzon@exact-lab.it>
- #
- # This file is part of Fractal and was originally developed by eXact lab S.r.l.
- # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
- # Institute for Biomedical Research and Pelkmans Lab from the University of
- # Zurich.
- """
- Local Backend
-
- This backend runs Fractal workflows using `FractalThreadPoolExecutor` (a custom
- version of Python
- [ThreadPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.ThreadPoolExecutor))
- to run tasks in several threads.
- Incidentally, it also represents the reference implementation for a backend.
- """
- from pathlib import Path
- from typing import Any
- from typing import Optional
-
- from ....models.v1 import Workflow
- from ...async_wrap_v1 import async_wrap_v1
- from ...set_start_and_last_task_index import set_start_and_last_task_index
- from .._common import execute_tasks
- from ..common import TaskParameters
- from ._submit_setup import _local_submit_setup
- from .executor import FractalThreadPoolExecutor
-
-
- def _process_workflow(
-     *,
-     workflow: Workflow,
-     input_paths: list[Path],
-     output_path: Path,
-     input_metadata: dict[str, Any],
-     input_history: list[dict[str, Any]],
-     logger_name: str,
-     workflow_dir_local: Path,
-     first_task_index: int,
-     last_task_index: int,
- ) -> dict[str, Any]:
-     """
-     Internal processing routine
-
-     Schedules the workflow using a `FractalThreadPoolExecutor`.
-
-     Cf.
-     [process_workflow][fractal_server.app.runner.v1._local.process_workflow]
-     for the call signature.
-     """
-
-     with FractalThreadPoolExecutor() as executor:
-         output_task_pars = execute_tasks(
-             executor=executor,
-             task_list=workflow.task_list[
-                 first_task_index : (last_task_index + 1)  # noqa
-             ],  # noqa
-             task_pars=TaskParameters(
-                 input_paths=input_paths,
-                 output_path=output_path,
-                 metadata=input_metadata,
-                 history=input_history,
-             ),
-             workflow_dir_local=workflow_dir_local,
-             workflow_dir_remote=workflow_dir_local,
-             logger_name=logger_name,
-             submit_setup_call=_local_submit_setup,
-         )
-     output_dataset_metadata_history = dict(
-         metadata=output_task_pars.metadata, history=output_task_pars.history
-     )
-     return output_dataset_metadata_history
-
-
- async def process_workflow(
-     *,
-     workflow: Workflow,
-     input_paths: list[Path],
-     output_path: Path,
-     input_metadata: dict[str, Any],
-     input_history: list[dict[str, Any]],
-     logger_name: str,
-     workflow_dir_local: Path,
-     workflow_dir_remote: Optional[Path] = None,
-     slurm_user: Optional[str] = None,
-     slurm_account: Optional[str] = None,
-     user_cache_dir: Optional[str] = None,
-     worker_init: Optional[str] = None,
-     first_task_index: Optional[int] = None,
-     last_task_index: Optional[int] = None,
- ) -> dict[str, Any]:
-     """
-     Run a workflow
-
-     This function is responsible for running a workflow on some input data,
-     saving the output and taking care of any exception raised during the run.
-
-     NOTE: This is the `local` backend's public interface, which also works as
-     a reference implementation for other backends.
-
-     Args:
-         workflow:
-             The workflow to be run
-         input_paths:
-             The paths to the input files to pass to the first task of the
-             workflow
-         output_path:
-             The destination path for the last task of the workflow
-         input_metadata:
-             Initial metadata, passed to the first task
-         logger_name:
-             Name of the logger to log information on the run to
-         workflow_dir_local:
-             Working directory for this run.
-         workflow_dir_remote:
-             Working directory for this run, on the user side. This argument is
-             present for compatibility with the standard backend interface, but
-             for the `local` backend it cannot be different from
-             `workflow_dir_local`.
-         slurm_user:
-             Username to impersonate to run the workflow. This argument is
-             present for compatibility with the standard backend interface, but
-             is ignored in the `local` backend.
-         slurm_account:
-             SLURM account to use when running the workflow. This argument is
-             present for compatibility with the standard backend interface, but
-             is ignored in the `local` backend.
-         user_cache_dir:
-             Cache directory of the user who will run the workflow. This
-             argument is present for compatibility with the standard backend
-             interface, but is ignored in the `local` backend.
-         worker_init:
-             Any additional, usually backend-specific, information to be passed
-             to the backend executor. This argument is present for compatibility
-             with the standard backend interface, but is ignored in the `local`
-             backend.
-         first_task_index:
-             Positional index of the first task to execute; if `None`, start
-             from `0`.
-         last_task_index:
-             Positional index of the last task to execute; if `None`, proceed
-             until the last task.
-
-     Raises:
-         TaskExecutionError: wrapper for errors raised during tasks' execution
-             (positive exit codes).
-         JobExecutionError: wrapper for errors raised by the tasks' executors
-             (negative exit codes).
-
-     Returns:
-         output_dataset_metadata:
-             The updated metadata for the dataset, as returned by the last task
-             of the workflow
-     """
-
-     if workflow_dir_remote and (workflow_dir_remote != workflow_dir_local):
-         raise NotImplementedError(
-             "Local backend does not support different directories "
-             f"{workflow_dir_local=} and {workflow_dir_remote=}"
-         )
-
-     # Set values of first_task_index and last_task_index
-     num_tasks = len(workflow.task_list)
-     first_task_index, last_task_index = set_start_and_last_task_index(
-         num_tasks,
-         first_task_index=first_task_index,
-         last_task_index=last_task_index,
-     )
-
-     output_dataset_metadata_history = await async_wrap_v1(_process_workflow)(
-         workflow=workflow,
-         input_paths=input_paths,
-         output_path=output_path,
-         input_metadata=input_metadata,
-         input_history=input_history,
-         logger_name=logger_name,
-         workflow_dir_local=workflow_dir_local,
-         first_task_index=first_task_index,
-         last_task_index=last_task_index,
-     )
-     return output_dataset_metadata_history
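The public `process_workflow` above offloads the synchronous `_process_workflow` through `async_wrap_v1`, which is itself removed in this release (item 31). As a minimal sketch of the pattern such a wrapper typically implements, using only the standard library (an illustration, not the removed code):

```python
import asyncio
import functools
from typing import Any, Callable


def async_wrap(func: Callable[..., Any]) -> Callable[..., Any]:
    """Wrap a blocking callable so it runs in the default thread pool."""

    @functools.wraps(func)
    async def inner(*args: Any, **kwargs: Any) -> Any:
        loop = asyncio.get_running_loop()
        # run_in_executor accepts no keyword arguments, hence the partial.
        return await loop.run_in_executor(
            None, functools.partial(func, *args, **kwargs)
        )

    return inner


def blocking_job(x: int) -> int:
    return x * 2


print(asyncio.run(async_wrap(blocking_job)(21)))  # 42
```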
fractal_server/app/runner/v1/_local/_local_config.py
@@ -1,105 +0,0 @@
- # Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
- # University of Zurich
- #
- # Original authors:
- # Tommaso Comparin <tommaso.comparin@exact-lab.it>
- #
- # This file is part of Fractal and was originally developed by eXact lab S.r.l.
- # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
- # Institute for Biomedical Research and Pelkmans Lab from the University of
- # Zurich.
- """
- Submodule to handle the local-backend configuration for a WorkflowTask
- """
- import json
- from pathlib import Path
- from typing import Optional
-
- from pydantic import BaseModel
- from pydantic import Extra
- from pydantic.error_wrappers import ValidationError
-
- from .....config import get_settings
- from .....syringe import Inject
- from ....models.v1 import WorkflowTask
-
-
- class LocalBackendConfigError(ValueError):
-     """
-     Local-backend configuration error
-     """
-
-     pass
-
-
- class LocalBackendConfig(BaseModel, extra=Extra.forbid):
-     """
-     Specifications of the local-backend configuration
-
-     Attributes:
-         parallel_tasks_per_job:
-             Maximum number of tasks to be run in parallel as part of a call to
-             `FractalThreadPoolExecutor.map`; if `None`, then all tasks will
-             start at the same time.
-     """
-
-     parallel_tasks_per_job: Optional[int]
-
-
- def get_default_local_backend_config():
-     """
-     Return a default `LocalBackendConfig` configuration object
-     """
-     return LocalBackendConfig(parallel_tasks_per_job=None)
-
-
- def get_local_backend_config(
-     wftask: WorkflowTask,
-     config_path: Optional[Path] = None,
- ) -> LocalBackendConfig:
-     """
-     Prepare a `LocalBackendConfig` configuration object
-
-     The sources for the `parallel_tasks_per_job` attribute, starting from the
-     highest-priority one, are
-
-     1. Properties in `wftask.meta`;
-     2. The general content of the local-backend configuration file;
-     3. The default value (`None`).
-
-     Arguments:
-         wftask:
-             WorkflowTask (V1) for which the backend configuration should
-             be prepared.
-         config_path:
-             Path of local-backend configuration file; if `None`, use
-             `FRACTAL_LOCAL_CONFIG_FILE` variable from settings.
-
-     Returns:
-         A local-backend configuration object
-     """
-
-     key = "parallel_tasks_per_job"
-     default = None
-
-     if wftask.meta and key in wftask.meta:
-         parallel_tasks_per_job = wftask.meta[key]
-     else:
-         if not config_path:
-             settings = Inject(get_settings)
-             config_path = settings.FRACTAL_LOCAL_CONFIG_FILE
-         if config_path is None:
-             parallel_tasks_per_job = default
-         else:
-             with config_path.open("r") as f:
-                 env = json.load(f)
-             try:
-                 _ = LocalBackendConfig(**env)
-             except ValidationError as e:
-                 raise LocalBackendConfigError(
-                     f"Error while loading {config_path=}. "
-                     f"Original error:\n{str(e)}"
-                 )
-
-             parallel_tasks_per_job = env.get(key, default)
-     return LocalBackendConfig(parallel_tasks_per_job=parallel_tasks_per_job)
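For reference, the file-based fallback in `get_local_backend_config` reads a JSON object whose keys must match the `LocalBackendConfig` model, i.e. a single optional `parallel_tasks_per_job` field. A self-contained sketch of writing and reading such a file; the temporary path below stands in for whatever `FRACTAL_LOCAL_CONFIG_FILE` points to:

```python
import json
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    # Example content for the local-backend configuration file.
    config_path = Path(tmp) / "local_backend_config.json"
    config_path.write_text(json.dumps({"parallel_tasks_per_job": 4}))

    # Mirror the fallback logic of get_local_backend_config.
    env = json.loads(config_path.read_text())
    print(env.get("parallel_tasks_per_job", None))  # -> 4
```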
fractal_server/app/runner/v1/_local/_submit_setup.py
@@ -1,48 +0,0 @@
- # Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
- # University of Zurich
- #
- # Original authors:
- # Tommaso Comparin <tommaso.comparin@exact-lab.it>
- #
- # This file is part of Fractal and was originally developed by eXact lab S.r.l.
- # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
- # Institute for Biomedical Research and Pelkmans Lab from the University of
- # Zurich.
- """
- Submodule to define _local_submit_setup
- """
- from pathlib import Path
- from typing import Optional
-
- from ....models.v1 import WorkflowTask
- from ._local_config import get_local_backend_config
-
-
- def _local_submit_setup(
-     *,
-     wftask: WorkflowTask,
-     workflow_dir_local: Optional[Path] = None,
-     workflow_dir_remote: Optional[Path] = None,
- ) -> dict[str, object]:
-     """
-     Collect WorkflowTask-specific configuration parameters from different
-     sources, and inject them for execution.
-
-     Arguments:
-         wftask:
-             WorkflowTask for which the configuration is to be assembled
-         workflow_dir_local:
-             Not used in this function.
-         workflow_dir_remote:
-             Not used in this function.
-
-     Returns:
-         submit_setup_dict:
-             A dictionary that will be passed on to
-             `FractalThreadPoolExecutor.submit` and
-             `FractalThreadPoolExecutor.map`, so as to set extra options.
-     """
-
-     local_backend_config = get_local_backend_config(wftask=wftask)
-
-     return dict(local_backend_config=local_backend_config)
fractal_server/app/runner/v1/_local/executor.py
@@ -1,100 +0,0 @@
- # Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
- # University of Zurich
- #
- # Original authors:
- # Tommaso Comparin <tommaso.comparin@exact-lab.it>
- #
- # This file is part of Fractal and was originally developed by eXact lab S.r.l.
- # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
- # Institute for Biomedical Research and Pelkmans Lab from the University of
- # Zurich.
- """
- Custom version of Python
- [ThreadPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.ThreadPoolExecutor).
- """
- from concurrent.futures import ThreadPoolExecutor
- from typing import Callable
- from typing import Iterable
- from typing import Optional
- from typing import Sequence
-
- from ._local_config import get_default_local_backend_config
- from ._local_config import LocalBackendConfig
-
-
- class FractalThreadPoolExecutor(ThreadPoolExecutor):
-     """
-     Custom version of
-     [ThreadPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.ThreadPoolExecutor)
-     that overrides the `submit` and `map` methods
-     """
-
-     def submit(
-         self,
-         *args,
-         local_backend_config: Optional[LocalBackendConfig] = None,
-         **kwargs,
-     ):
-         """
-         Compared to the `ThreadPoolExecutor` method, here we accept an
-         additional keyword argument (`local_backend_config`), which is then
-         simply ignored.
-         """
-         return super().submit(*args, **kwargs)
-
-     def map(
-         self,
-         fn: Callable,
-         *iterables: Sequence[Iterable],
-         local_backend_config: Optional[LocalBackendConfig] = None,
-     ):
-         """
-         Custom version of the `Executor.map` method
-
-         The main change with respect to the original `map` method is that
-         the list of tasks to be executed is split into chunks, and then
-         `super().map` is called (sequentially) on each chunk. The goal of this
-         change is to limit parallelism, e.g. due to limited computational
-         resources.
-
-         Other changes from the `concurrent.futures` `map` method:
-
-         1. Removed `timeout` argument;
-         2. Removed `chunksize`;
-         3. All iterators (both input and output ones) are transformed into
-            lists.
-
-         Args:
-             fn: A callable function.
-             iterables: The argument iterables (one iterable per argument of
-                 `fn`).
-             local_backend_config: The backend configuration, needed to extract
-                 `parallel_tasks_per_job`.
-         """
-
-         # Preliminary check
-         iterable_lengths = [len(it) for it in iterables]
-         if not len(set(iterable_lengths)) == 1:
-             raise ValueError("Iterables have different lengths.")
-
-         # Set total number of arguments
-         n_elements = len(iterables[0])
-
-         # Set parallel_tasks_per_job
-         if local_backend_config is None:
-             local_backend_config = get_default_local_backend_config()
-         parallel_tasks_per_job = local_backend_config.parallel_tasks_per_job
-         if parallel_tasks_per_job is None:
-             parallel_tasks_per_job = n_elements
-
-         # Execute tasks, in chunks of size parallel_tasks_per_job
-         results = []
-         for ind_chunk in range(0, n_elements, parallel_tasks_per_job):
-             chunk_iterables = [
-                 it[ind_chunk : ind_chunk + parallel_tasks_per_job]  # noqa
-                 for it in iterables
-             ]
-             map_iter = super().map(fn, *chunk_iterables)
-             results.extend(list(map_iter))
-
-         return iter(results)
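The chunking logic in the `map` override above can be reproduced with a plain `ThreadPoolExecutor`: split the inputs into chunks of `parallel_tasks_per_job` and map over one chunk at a time, so at most that many tasks run concurrently. A standalone sketch of the same idea (ours, not the deleted class):

```python
from concurrent.futures import ThreadPoolExecutor
from typing import Callable, Iterator, Sequence


def chunked_map(
    fn: Callable, inputs: Sequence, parallel_tasks_per_job: int = 2
) -> Iterator:
    """Run fn over inputs, at most parallel_tasks_per_job at a time."""
    results = []
    with ThreadPoolExecutor() as pool:
        for start in range(0, len(inputs), parallel_tasks_per_job):
            chunk = inputs[start : start + parallel_tasks_per_job]
            # Tasks within a chunk run concurrently; chunks run sequentially.
            results.extend(pool.map(fn, chunk))
    return iter(results)


print(list(chunked_map(lambda x: x**2, [1, 2, 3, 4, 5])))  # [1, 4, 9, 16, 25]
```

As in the removed implementation, results are materialized eagerly, so the returned iterator does not stream.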