fractal-server 1.4.9__py3-none-any.whl → 2.0.0a0__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (132)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +4 -7
  3. fractal_server/app/models/linkuserproject.py +9 -0
  4. fractal_server/app/models/security.py +6 -0
  5. fractal_server/app/models/state.py +1 -1
  6. fractal_server/app/models/v1/__init__.py +10 -0
  7. fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
  8. fractal_server/app/models/{job.py → v1/job.py} +5 -5
  9. fractal_server/app/models/{project.py → v1/project.py} +5 -5
  10. fractal_server/app/models/{task.py → v1/task.py} +7 -2
  11. fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
  12. fractal_server/app/models/v2/__init__.py +20 -0
  13. fractal_server/app/models/v2/dataset.py +55 -0
  14. fractal_server/app/models/v2/job.py +51 -0
  15. fractal_server/app/models/v2/project.py +31 -0
  16. fractal_server/app/models/v2/task.py +93 -0
  17. fractal_server/app/models/v2/workflow.py +43 -0
  18. fractal_server/app/models/v2/workflowtask.py +90 -0
  19. fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
  20. fractal_server/app/routes/admin/v2.py +275 -0
  21. fractal_server/app/routes/api/v1/__init__.py +7 -7
  22. fractal_server/app/routes/api/v1/_aux_functions.py +2 -2
  23. fractal_server/app/routes/api/v1/dataset.py +44 -37
  24. fractal_server/app/routes/api/v1/job.py +12 -12
  25. fractal_server/app/routes/api/v1/project.py +23 -21
  26. fractal_server/app/routes/api/v1/task.py +24 -14
  27. fractal_server/app/routes/api/v1/task_collection.py +16 -14
  28. fractal_server/app/routes/api/v1/workflow.py +24 -24
  29. fractal_server/app/routes/api/v1/workflowtask.py +10 -10
  30. fractal_server/app/routes/api/v2/__init__.py +28 -0
  31. fractal_server/app/routes/api/v2/_aux_functions.py +497 -0
  32. fractal_server/app/routes/api/v2/apply.py +220 -0
  33. fractal_server/app/routes/api/v2/dataset.py +310 -0
  34. fractal_server/app/routes/api/v2/images.py +212 -0
  35. fractal_server/app/routes/api/v2/job.py +200 -0
  36. fractal_server/app/routes/api/v2/project.py +205 -0
  37. fractal_server/app/routes/api/v2/task.py +222 -0
  38. fractal_server/app/routes/api/v2/task_collection.py +229 -0
  39. fractal_server/app/routes/api/v2/workflow.py +398 -0
  40. fractal_server/app/routes/api/v2/workflowtask.py +269 -0
  41. fractal_server/app/routes/aux/_job.py +1 -1
  42. fractal_server/app/runner/async_wrap.py +27 -0
  43. fractal_server/app/runner/exceptions.py +129 -0
  44. fractal_server/app/runner/executors/local/__init__.py +3 -0
  45. fractal_server/app/runner/{_local → executors/local}/executor.py +2 -2
  46. fractal_server/app/runner/executors/slurm/__init__.py +3 -0
  47. fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
  48. fractal_server/app/runner/executors/slurm/_check_jobs_status.py +72 -0
  49. fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +3 -4
  50. fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
  51. fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
  52. fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +9 -9
  53. fractal_server/app/runner/filenames.py +6 -0
  54. fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
  55. fractal_server/app/runner/task_files.py +105 -0
  56. fractal_server/app/runner/{__init__.py → v1/__init__.py} +36 -49
  57. fractal_server/app/runner/{_common.py → v1/_common.py} +13 -120
  58. fractal_server/app/runner/{_local → v1/_local}/__init__.py +6 -6
  59. fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
  60. fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
  61. fractal_server/app/runner/v1/_slurm/__init__.py +310 -0
  62. fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +3 -9
  63. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
  64. fractal_server/app/runner/v1/common.py +117 -0
  65. fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
  66. fractal_server/app/runner/v2/__init__.py +337 -0
  67. fractal_server/app/runner/v2/_local/__init__.py +169 -0
  68. fractal_server/app/runner/v2/_local/_local_config.py +118 -0
  69. fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
  70. fractal_server/app/runner/v2/_slurm/__init__.py +157 -0
  71. fractal_server/app/runner/v2/_slurm/_submit_setup.py +83 -0
  72. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +179 -0
  73. fractal_server/app/runner/v2/components.py +5 -0
  74. fractal_server/app/runner/v2/deduplicate_list.py +24 -0
  75. fractal_server/app/runner/v2/handle_failed_job.py +156 -0
  76. fractal_server/app/runner/v2/merge_outputs.py +41 -0
  77. fractal_server/app/runner/v2/runner.py +264 -0
  78. fractal_server/app/runner/v2/runner_functions.py +339 -0
  79. fractal_server/app/runner/v2/runner_functions_low_level.py +134 -0
  80. fractal_server/app/runner/v2/task_interface.py +43 -0
  81. fractal_server/app/runner/v2/v1_compat.py +21 -0
  82. fractal_server/app/schemas/__init__.py +4 -42
  83. fractal_server/app/schemas/v1/__init__.py +42 -0
  84. fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
  85. fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
  86. fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
  87. fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
  88. fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
  89. fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
  90. fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
  91. fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
  92. fractal_server/app/schemas/v2/__init__.py +34 -0
  93. fractal_server/app/schemas/v2/dataset.py +88 -0
  94. fractal_server/app/schemas/v2/dumps.py +87 -0
  95. fractal_server/app/schemas/v2/job.py +113 -0
  96. fractal_server/app/schemas/v2/manifest.py +109 -0
  97. fractal_server/app/schemas/v2/project.py +36 -0
  98. fractal_server/app/schemas/v2/task.py +121 -0
  99. fractal_server/app/schemas/v2/task_collection.py +105 -0
  100. fractal_server/app/schemas/v2/workflow.py +78 -0
  101. fractal_server/app/schemas/v2/workflowtask.py +118 -0
  102. fractal_server/config.py +5 -10
  103. fractal_server/images/__init__.py +50 -0
  104. fractal_server/images/tools.py +86 -0
  105. fractal_server/main.py +11 -3
  106. fractal_server/migrations/versions/4b35c5cefbe3_tmp_is_v2_compatible.py +39 -0
  107. fractal_server/migrations/versions/56af171b0159_v2.py +217 -0
  108. fractal_server/migrations/versions/876f28db9d4e_tmp_split_task_and_wftask_meta.py +68 -0
  109. fractal_server/migrations/versions/974c802f0dd0_tmp_workflowtaskv2_type_in_db.py +37 -0
  110. fractal_server/migrations/versions/9cd305cd6023_tmp_workflowtaskv2.py +40 -0
  111. fractal_server/migrations/versions/a6231ed6273c_tmp_args_schemas_in_taskv2.py +42 -0
  112. fractal_server/migrations/versions/b9e9eed9d442_tmp_taskv2_type.py +37 -0
  113. fractal_server/migrations/versions/e3e639454d4b_tmp_make_task_meta_non_optional.py +50 -0
  114. fractal_server/tasks/__init__.py +0 -5
  115. fractal_server/tasks/endpoint_operations.py +13 -19
  116. fractal_server/tasks/utils.py +35 -0
  117. fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
  118. fractal_server/tasks/{background_operations.py → v1/background_operations.py} +18 -50
  119. fractal_server/tasks/v1/get_collection_data.py +14 -0
  120. fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
  121. fractal_server/tasks/v2/background_operations.py +382 -0
  122. fractal_server/tasks/v2/get_collection_data.py +14 -0
  123. {fractal_server-1.4.9.dist-info → fractal_server-2.0.0a0.dist-info}/METADATA +3 -4
  124. fractal_server-2.0.0a0.dist-info/RECORD +166 -0
  125. fractal_server/app/runner/_slurm/.gitignore +0 -2
  126. fractal_server/app/runner/_slurm/__init__.py +0 -150
  127. fractal_server/app/runner/common.py +0 -311
  128. fractal_server-1.4.9.dist-info/RECORD +0 -97
  129. /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
  130. {fractal_server-1.4.9.dist-info → fractal_server-2.0.0a0.dist-info}/LICENSE +0 -0
  131. {fractal_server-1.4.9.dist-info → fractal_server-2.0.0a0.dist-info}/WHEEL +0 -0
  132. {fractal_server-1.4.9.dist-info → fractal_server-2.0.0a0.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/v2/runner_functions.py
@@ -0,0 +1,339 @@
+import functools
+import traceback
+from concurrent.futures import Executor
+from pathlib import Path
+from typing import Callable
+from typing import Literal
+from typing import Optional
+
+from ....images import SingleImage
+from .deduplicate_list import deduplicate_list
+from .merge_outputs import merge_outputs
+from .runner_functions_low_level import run_single_task
+from .task_interface import InitArgsModel
+from .task_interface import InitTaskOutput
+from .task_interface import TaskOutput
+from .v1_compat import convert_v2_args_into_v1
+from fractal_server.app.models.v1 import Task as TaskV1
+from fractal_server.app.models.v2 import TaskV2
+from fractal_server.app.models.v2 import WorkflowTaskV2
+from fractal_server.app.runner.v2.components import _COMPONENT_KEY_
+from fractal_server.app.runner.v2.components import _index_to_component
+
+
+__all__ = [
+    "run_v2_task_non_parallel",
+    "run_v2_task_parallel",
+    "run_v2_task_compound",
+    "run_v1_task_parallel",
+]
+
+MAX_PARALLELIZATION_LIST_SIZE = 20_000
+
+
+def no_op_submit_setup_call(
+    *,
+    wftask: WorkflowTaskV2,
+    workflow_dir: Path,
+    workflow_dir_user: Path,
+    which_type: Literal["non_parallel", "parallel"],
+) -> dict:
+    """
+    Default (no-operation) interface of submit_setup_call in V2.
+    """
+    return {}
+
+
+# Backend-specific configuration
+def _get_executor_options(
+    *,
+    wftask: WorkflowTaskV2,
+    workflow_dir: Path,
+    workflow_dir_user: Path,
+    submit_setup_call: Callable,
+    which_type: Literal["non_parallel", "parallel"],
+) -> dict:
+    try:
+        options = submit_setup_call(
+            wftask=wftask,
+            workflow_dir=workflow_dir,
+            workflow_dir_user=workflow_dir_user,
+            which_type=which_type,
+        )
+    except Exception as e:
+        tb = "".join(traceback.format_tb(e.__traceback__))
+        raise RuntimeError(
+            f"{type(e)} error in {submit_setup_call=}\n"
+            f"Original traceback:\n{tb}"
+        )
+    return options
+
+
+def _check_parallelization_list_size(my_list):
+    if len(my_list) > MAX_PARALLELIZATION_LIST_SIZE:
+        raise ValueError(
+            "Too many parallelization items.\n"
+            f" {len(my_list)}\n"
+            f" {MAX_PARALLELIZATION_LIST_SIZE=}\n"
+        )
+
+
+def run_v2_task_non_parallel(
+    *,
+    images: list[SingleImage],
+    zarr_dir: str,
+    task: TaskV2,
+    wftask: WorkflowTaskV2,
+    workflow_dir: Path,
+    workflow_dir_user: Optional[Path] = None,
+    executor: Executor,
+    logger_name: Optional[str] = None,
+    submit_setup_call: Callable = no_op_submit_setup_call,
+) -> TaskOutput:
+    """
+    This runs server-side (see `executor` argument)
+    """
+
+    if not workflow_dir_user:
+        workflow_dir_user = workflow_dir
+
+    executor_options = _get_executor_options(
+        wftask=wftask,
+        workflow_dir=workflow_dir,
+        workflow_dir_user=workflow_dir_user,
+        submit_setup_call=submit_setup_call,
+        which_type="non_parallel",
+    )
+
+    function_kwargs = dict(
+        paths=[image["path"] for image in images],
+        zarr_dir=zarr_dir,
+        **(wftask.args_non_parallel or {}),
+    )
+    future = executor.submit(
+        functools.partial(
+            run_single_task,
+            wftask=wftask,
+            command=task.command_non_parallel,
+            workflow_dir=workflow_dir,
+            workflow_dir_user=workflow_dir_user,
+        ),
+        function_kwargs,
+        **executor_options,
+    )
+    output = future.result()
+    # FIXME V2: handle validation errors
+    if output is None:
+        return TaskOutput()
+    else:
+        validated_output = TaskOutput(**output)
+        return validated_output
+
+
+def run_v2_task_parallel(
+    *,
+    images: list[SingleImage],
+    task: TaskV2,
+    wftask: WorkflowTaskV2,
+    executor: Executor,
+    workflow_dir: Path,
+    workflow_dir_user: Optional[Path] = None,
+    logger_name: Optional[str] = None,
+    submit_setup_call: Callable = no_op_submit_setup_call,
+) -> TaskOutput:
+
+    _check_parallelization_list_size(images)
+
+    executor_options = _get_executor_options(
+        wftask=wftask,
+        workflow_dir=workflow_dir,
+        workflow_dir_user=workflow_dir_user,
+        submit_setup_call=submit_setup_call,
+        which_type="parallel",
+    )
+
+    list_function_kwargs = []
+    for ind, image in enumerate(images):
+        list_function_kwargs.append(
+            dict(
+                path=image["path"],
+                **(wftask.args_parallel or {}),
+            ),
+        )
+        list_function_kwargs[-1][_COMPONENT_KEY_] = _index_to_component(ind)
+
+    results_iterator = executor.map(
+        functools.partial(
+            run_single_task,
+            wftask=wftask,
+            command=task.command_parallel,
+            workflow_dir=workflow_dir,
+            workflow_dir_user=workflow_dir_user,
+        ),
+        list_function_kwargs,
+        **executor_options,
+    )
+    # Explicitly iterate over the whole list, so that all futures are waited
+    outputs = list(results_iterator)
+
+    # Validate all non-None outputs
+    for ind, output in enumerate(outputs):
+        if output is None:
+            outputs[ind] = TaskOutput()
+        else:
+            # FIXME: improve handling of validation errors
+            validated_output = TaskOutput(**output)
+            outputs[ind] = validated_output
+
+    merged_output = merge_outputs(outputs)
+    return merged_output
+
+
+def run_v2_task_compound(
+    *,
+    images: list[SingleImage],
+    zarr_dir: str,
+    task: TaskV2,
+    wftask: WorkflowTaskV2,
+    executor: Executor,
+    workflow_dir: Path,
+    workflow_dir_user: Optional[Path] = None,
+    logger_name: Optional[str] = None,
+    submit_setup_call: Callable = no_op_submit_setup_call,
+) -> TaskOutput:
+
+    executor_options_init = _get_executor_options(
+        wftask=wftask,
+        workflow_dir=workflow_dir,
+        workflow_dir_user=workflow_dir_user,
+        submit_setup_call=submit_setup_call,
+        which_type="non_parallel",
+    )
+    executor_options_compute = _get_executor_options(
+        wftask=wftask,
+        workflow_dir=workflow_dir,
+        workflow_dir_user=workflow_dir_user,
+        submit_setup_call=submit_setup_call,
+        which_type="parallel",
+    )
+
+    # 3/A: non-parallel init task
+    function_kwargs = dict(
+        paths=[image["path"] for image in images],
+        zarr_dir=zarr_dir,
+        **(wftask.args_non_parallel or {}),
+    )
+    future = executor.submit(
+        functools.partial(
+            run_single_task,
+            wftask=wftask,
+            command=task.command_non_parallel,
+            workflow_dir=workflow_dir,
+            workflow_dir_user=workflow_dir_user,
+        ),
+        function_kwargs,
+        **executor_options_init,
+    )
+    output = future.result()
+    if output is None:
+        init_task_output = InitTaskOutput()
+    else:
+        init_task_output = InitTaskOutput(**output)
+    parallelization_list = init_task_output.parallelization_list
+    parallelization_list = deduplicate_list(
+        parallelization_list, PydanticModel=InitArgsModel
+    )
+
+    # 3/B: parallel part of a compound task
+    _check_parallelization_list_size(parallelization_list)
+
+    list_function_kwargs = []
+    for ind, parallelization_item in enumerate(parallelization_list):
+        list_function_kwargs.append(
+            dict(
+                path=parallelization_item.path,
+                init_args=parallelization_item.init_args,
+                **(wftask.args_parallel or {}),
+            ),
+        )
+        list_function_kwargs[-1][_COMPONENT_KEY_] = _index_to_component(ind)
+
+    results_iterator = executor.map(
+        functools.partial(
+            run_single_task,
+            wftask=wftask,
+            command=task.command_parallel,
+            workflow_dir=workflow_dir,
+            workflow_dir_user=workflow_dir_user,
+        ),
+        list_function_kwargs,
+        **executor_options_compute,
+    )
+    # Explicitly iterate over the whole list, so that all futures are waited
+    outputs = list(results_iterator)
+
+    # Validate all non-None outputs
+    for ind, output in enumerate(outputs):
+        if output is None:
+            outputs[ind] = TaskOutput()
+        else:
+            # FIXME: improve handling of validation errors
+            validated_output = TaskOutput(**output)
+            outputs[ind] = validated_output
+
+    merged_output = merge_outputs(outputs)
+    return merged_output
+
+
+def run_v1_task_parallel(
+    *,
+    images: list[SingleImage],
+    task_legacy: TaskV1,
+    wftask: WorkflowTaskV2,
+    executor: Executor,
+    workflow_dir: Path,
+    workflow_dir_user: Optional[Path] = None,
+    logger_name: Optional[str] = None,
+    submit_setup_call: Callable = no_op_submit_setup_call,
+) -> TaskOutput:
+
+    _check_parallelization_list_size(images)
+
+    executor_options = _get_executor_options(
+        wftask=wftask,
+        workflow_dir=workflow_dir,
+        workflow_dir_user=workflow_dir_user,
+        submit_setup_call=submit_setup_call,
+        which_type="parallel",
+    )
+
+    list_function_kwargs = []
+    for ind, image in enumerate(images):
+        list_function_kwargs.append(
+            convert_v2_args_into_v1(
+                dict(
+                    path=image["path"],
+                    **(wftask.args_parallel or {}),
+                )
+            ),
+        )
+        list_function_kwargs[-1][_COMPONENT_KEY_] = _index_to_component(ind)
+
+    results_iterator = executor.map(
+        functools.partial(
+            run_single_task,
+            wftask=wftask,
+            command=task_legacy.command,
+            workflow_dir=workflow_dir,
+            workflow_dir_user=workflow_dir_user,
+            is_task_v1=True,
+        ),
+        list_function_kwargs,
+        **executor_options,
+    )
+    # Explicitly iterate over the whole list, so that all futures are waited
+    list(results_iterator)
+
+    # Ignore any output metadata for V1 tasks, and return an empty object
+    out = TaskOutput()
+    return out
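
For orientation, the three V2 task flavors in this new module map directly onto `concurrent.futures` primitives: a non-parallel task is one `executor.submit(...)` over the whole image list, a parallel task is one `executor.map(...)` over per-image kwargs, and a compound task chains the two. The following is a minimal, self-contained sketch of that submit/map pattern only; the `init_task` and `per_image_task` callables and the image dicts are hypothetical stand-ins, not fractal-server objects or its actual API.

# Sketch of the submit/map pattern used by the V2 runner functions.
import functools
from concurrent.futures import ThreadPoolExecutor


def init_task(paths: list[str], zarr_dir: str) -> dict:
    # Hypothetical non-parallel "init" step: build a parallelization list
    return {"parallelization_list": [{"path": p, "init_args": {}} for p in paths]}


def per_image_task(kwargs: dict) -> dict:
    # Hypothetical parallel step, called once per parallelization item
    return {"image_list_updates": [{"path": kwargs["path"]}]}


images = [{"path": f"/tmp/zarr/plate.zarr/A/01/{i}"} for i in range(3)]

with ThreadPoolExecutor() as executor:
    # "non_parallel" part: a single submit for the whole image list
    init_output = executor.submit(
        functools.partial(init_task, zarr_dir="/tmp/zarr"),
        [img["path"] for img in images],
    ).result()

    # "parallel" part: one map over the parallelization list
    outputs = list(
        executor.map(per_image_task, init_output["parallelization_list"])
    )

print(outputs)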

fractal_server/app/runner/v2/runner_functions_low_level.py
@@ -0,0 +1,134 @@
+import json
+import logging
+import shutil
+import subprocess  # nosec
+from pathlib import Path
+from shlex import split as shlex_split
+from typing import Any
+from typing import Optional
+
+from ..exceptions import JobExecutionError
+from ..exceptions import TaskExecutionError
+from .components import _COMPONENT_KEY_
+from fractal_server.app.models.v2 import WorkflowTaskV2
+from fractal_server.app.runner.task_files import get_task_file_paths
+
+
+def _call_command_wrapper(cmd: str, log_path: Path) -> None:
+    """
+    Call a command and write its stdout and stderr to files
+
+    Raises:
+        TaskExecutionError: If the `subprocess.run` call returns a positive
+            exit code
+        JobExecutionError: If the `subprocess.run` call returns a negative
+            exit code (e.g. due to the subprocess receiving a
+            TERM or KILL signal)
+    """
+
+    # Verify that task command is executable
+    if shutil.which(shlex_split(cmd)[0]) is None:
+        msg = (
+            f'Command "{shlex_split(cmd)[0]}" is not valid. '
+            "Hint: make sure that it is executable."
+        )
+        raise TaskExecutionError(msg)
+
+    fp_log = open(log_path, "w")
+    try:
+        result = subprocess.run(  # nosec
+            shlex_split(cmd),
+            stderr=fp_log,
+            stdout=fp_log,
+        )
+    except Exception as e:
+        raise e
+    finally:
+        fp_log.close()
+
+    if result.returncode > 0:
+        with log_path.open("r") as fp_stderr:
+            err = fp_stderr.read()
+        raise TaskExecutionError(err)
+    elif result.returncode < 0:
+        raise JobExecutionError(
+            info=f"Task failed with returncode={result.returncode}"
+        )
+
+
+def run_single_task(
+    args: dict[str, Any],
+    command: str,
+    wftask: WorkflowTaskV2,
+    workflow_dir: Path,
+    workflow_dir_user: Optional[Path] = None,
+    logger_name: Optional[str] = None,
+    is_task_v1: bool = False,
+) -> dict[str, Any]:
+    """
+    Runs within an executor.
+    """
+
+    logger = logging.getLogger(logger_name)
+    logger.debug(f"Now start running {command=}")
+
+    if not workflow_dir_user:
+        workflow_dir_user = workflow_dir
+
+    component = args.pop(_COMPONENT_KEY_, None)
+    if component is None:
+        task_files = get_task_file_paths(
+            workflow_dir=workflow_dir,
+            workflow_dir_user=workflow_dir_user,
+            task_order=wftask.order,
+        )
+    else:
+        task_files = get_task_file_paths(
+            workflow_dir=workflow_dir,
+            workflow_dir_user=workflow_dir_user,
+            task_order=wftask.order,
+            component=component,
+        )
+
+    # Write arguments to args.json file
+    with task_files.args.open("w") as f:
+        json.dump(args, f, indent=2)
+
+    # Assemble full command
+    if is_task_v1:
+        full_command = (
+            f"{command} "
+            f"--json {task_files.args.as_posix()} "
+            f"--metadata-out {task_files.metadiff.as_posix()}"
+        )
+    else:
+        full_command = (
+            f"{command} "
+            f"--args-json {task_files.args.as_posix()} "
+            f"--out-json {task_files.metadiff.as_posix()}"
+        )
+
+    try:
+        _call_command_wrapper(
+            full_command,
+            log_path=task_files.log,
+        )
+    except TaskExecutionError as e:
+        e.workflow_task_order = wftask.order
+        e.workflow_task_id = wftask.id
+        e.task_name = wftask.task.name
+        raise e
+
+    try:
+        with task_files.metadiff.open("r") as f:
+            out_meta = json.load(f)
+    except FileNotFoundError as e:
+        logger.debug(
+            "Task did not produce output metadata. "
+            f"Original FileNotFoundError: {str(e)}"
+        )
+        out_meta = None
+
+    if out_meta == {}:
+        return None
+    return out_meta
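
A note on the returncode branching in `_call_command_wrapper`: `subprocess.run` reports a negative `returncode` when the child process was killed by a signal (e.g. `-9` for SIGKILL) and a positive one for an ordinary non-zero exit, which is what separates `JobExecutionError` from `TaskExecutionError` above. A standalone illustration, assuming a POSIX shell (`sh` and `true` are not part of fractal-server):

# Illustration of the subprocess.run returncode semantics relied on above.
import shlex
import subprocess

ok = subprocess.run(shlex.split("true"))
fail = subprocess.run(shlex.split("sh -c 'exit 3'"))
killed = subprocess.run(shlex.split("sh -c 'kill -9 $$'"))

print(ok.returncode)      # 0
print(fail.returncode)    # 3  -> would raise TaskExecutionError
print(killed.returncode)  # -9 -> would raise JobExecutionError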

fractal_server/app/runner/v2/task_interface.py
@@ -0,0 +1,43 @@
+from typing import Any
+
+from pydantic import BaseModel
+from pydantic import Field
+
+from ....images import SingleImage
+from fractal_server.images import Filters
+
+
+class TaskOutput(BaseModel):
+    class Config:
+        extra = "forbid"
+
+    image_list_updates: list[SingleImage] = Field(default_factory=list)
+    image_list_removals: list[str] = Field(default_factory=list)
+    filters: Filters = Field(default_factory=Filters)
+
+    def check_paths_are_unique(self) -> None:
+        paths = [img.path for img in self.image_list_updates]
+        paths.extend(self.image_list_removals)
+        if len(paths) != len(set(paths)):
+            duplicates = [path for path in set(paths) if paths.count(path) > 1]
+            msg = (
+                "TaskOutput image-list updates/removals has non-unique paths:"
+            )
+            for duplicate in duplicates:
+                msg = f"{msg}\n{duplicate}"
+            raise ValueError(msg)
+
+
+class InitArgsModel(BaseModel):
+    class Config:
+        extra = "forbid"
+
+    path: str
+    init_args: dict[str, Any] = Field(default_factory=dict)
+
+
+class InitTaskOutput(BaseModel):
+    class Config:
+        extra = "forbid"
+
+    parallelization_list: list[InitArgsModel] = Field(default_factory=list)
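
Because these models set `extra = "forbid"`, any unexpected key in a task's output JSON becomes a validation error (the "FIXME: improve handling of validation errors" branches in runner_functions.py). A minimal, self-contained illustration of that contract, using a stand-in model rather than the real `TaskOutput` (which depends on `fractal_server.images`), and assuming pydantic v1 as used here:

# Stand-in model showing the extra="forbid" behavior of TaskOutput.
from pydantic import BaseModel, Field, ValidationError


class MiniTaskOutput(BaseModel):
    class Config:
        extra = "forbid"

    image_list_updates: list[dict] = Field(default_factory=list)
    image_list_removals: list[str] = Field(default_factory=list)


print(MiniTaskOutput(**{"image_list_removals": ["/zarr/plate.zarr/A/01/0"]}))

try:
    MiniTaskOutput(**{"unexpected_key": 1})
except ValidationError as e:
    print(e)  # extra field -> validation error, i.e. the FIXME branch above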

fractal_server/app/runner/v2/v1_compat.py
@@ -0,0 +1,21 @@
+from copy import deepcopy
+from pathlib import Path
+from typing import Any
+
+
+def convert_v2_args_into_v1(kwargs_v2: dict[str, Any]) -> dict[str, Any]:
+
+    kwargs_v1 = deepcopy(kwargs_v2)
+
+    path = kwargs_v2.pop("path")
+    input_path = Path(path).parents[3].as_posix()
+    component = path.replace(input_path, "").lstrip("/")
+
+    kwargs_v1 = dict(
+        input_paths=[input_path],
+        output_path=input_path,
+        metadata={},
+        component=component,
+    )
+
+    return kwargs_v1
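
To make the path arithmetic concrete: for an image path with the usual `<plate>.zarr/<row>/<column>/<image>` layout, `parents[3]` strips the last four components, recovering the V1-style `input_paths`/`component` split. A quick check of that mapping (the example path is hypothetical):

from pathlib import Path

# Hypothetical V2-style image path: <base>/<plate>.zarr/<row>/<col>/<image>
path = "/data/project/output.zarr/B/03/0"

input_path = Path(path).parents[3].as_posix()
component = path.replace(input_path, "").lstrip("/")

print(input_path)  # /data/project
print(component)   # output.zarr/B/03/0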

fractal_server/app/schemas/__init__.py
@@ -1,42 +1,4 @@
-"""
-Schemas for API request/response bodies
-"""
-from .applyworkflow import ApplyWorkflowCreate  # noqa: F401
-from .applyworkflow import ApplyWorkflowRead  # noqa: F401
-from .applyworkflow import ApplyWorkflowUpdate  # noqa: F401
-from .applyworkflow import JobStatusType  # noqa: F401
-from .dataset import DatasetCreate  # noqa: F401
-from .dataset import DatasetRead  # noqa: F401
-from .dataset import DatasetStatusRead  # noqa: F401
-from .dataset import DatasetUpdate  # noqa: F401
-from .dataset import ResourceCreate  # noqa: F401
-from .dataset import ResourceRead  # noqa: F401
-from .dataset import ResourceUpdate  # noqa: F401
-from .manifest import ManifestV1  # noqa: F401
-from .manifest import TaskManifestV1  # noqa: F401
-from .project import ProjectCreate  # noqa: F401
-from .project import ProjectRead  # noqa: F401
-from .project import ProjectUpdate  # noqa: F401
-from .state import _StateBase  # noqa: F401
-from .state import StateRead  # noqa: F401
-from .task import TaskCreate  # noqa: F401
-from .task import TaskImport  # noqa: F401
-from .task import TaskRead  # noqa: F401
-from .task import TaskUpdate  # noqa: F401
-from .task_collection import TaskCollectPip  # noqa: F401
-from .task_collection import TaskCollectStatus  # noqa: F401
-from .user import UserCreate  # noqa: F401
-from .user import UserRead  # noqa: F401
-from .user import UserUpdate  # noqa: F401
-from .user import UserUpdateStrict  # noqa: F401
-from .workflow import WorkflowCreate  # noqa: F401
-from .workflow import WorkflowExport  # noqa: F401
-from .workflow import WorkflowImport  # noqa: F401
-from .workflow import WorkflowRead  # noqa: F401
-from .workflow import WorkflowTaskCreate  # noqa: F401
-from .workflow import WorkflowTaskExport  # noqa: F401
-from .workflow import WorkflowTaskImport  # noqa: F401
-from .workflow import WorkflowTaskRead  # noqa: F401
-from .workflow import WorkflowTaskStatusType  # noqa: F401
-from .workflow import WorkflowTaskUpdate  # noqa: F401
-from .workflow import WorkflowUpdate  # noqa: F401
+from .state import *  # noqa: F401, F403
+from .user import *  # noqa: F401, F403
+from .v1 import *  # noqa: F401, F403
+from .v2 import *  # noqa: F401, F403

fractal_server/app/schemas/v1/__init__.py
@@ -0,0 +1,42 @@
+"""
+Schemas for API request/response bodies
+"""
+from ..state import _StateBase  # noqa: F401
+from ..state import StateRead  # noqa: F401
+from ..user import UserCreate  # noqa: F401
+from ..user import UserRead  # noqa: F401
+from ..user import UserUpdate  # noqa: F401
+from ..user import UserUpdateStrict  # noqa: F401
+from .applyworkflow import ApplyWorkflowCreateV1  # noqa: F401
+from .applyworkflow import ApplyWorkflowReadV1  # noqa: F401
+from .applyworkflow import ApplyWorkflowUpdateV1  # noqa: F401
+from .applyworkflow import JobStatusTypeV1  # noqa: F401
+from .dataset import DatasetCreateV1  # noqa: F401
+from .dataset import DatasetReadV1  # noqa: F401
+from .dataset import DatasetStatusReadV1  # noqa: F401
+from .dataset import DatasetUpdateV1  # noqa: F401
+from .dataset import ResourceCreateV1  # noqa: F401
+from .dataset import ResourceReadV1  # noqa: F401
+from .dataset import ResourceUpdateV1  # noqa: F401
+from .manifest import ManifestV1  # noqa: F401
+from .manifest import TaskManifestV1  # noqa: F401
+from .project import ProjectCreateV1  # noqa: F401
+from .project import ProjectReadV1  # noqa: F401
+from .project import ProjectUpdateV1  # noqa: F401
+from .task import TaskCreateV1  # noqa: F401
+from .task import TaskImportV1  # noqa: F401
+from .task import TaskReadV1  # noqa: F401
+from .task import TaskUpdateV1  # noqa: F401
+from .task_collection import TaskCollectPipV1  # noqa: F401
+from .task_collection import TaskCollectStatusV1  # noqa: F401
+from .workflow import WorkflowCreateV1  # noqa: F401
+from .workflow import WorkflowExportV1  # noqa: F401
+from .workflow import WorkflowImportV1  # noqa: F401
+from .workflow import WorkflowReadV1  # noqa: F401
+from .workflow import WorkflowTaskCreateV1  # noqa: F401
+from .workflow import WorkflowTaskExportV1  # noqa: F401
+from .workflow import WorkflowTaskImportV1  # noqa: F401
+from .workflow import WorkflowTaskReadV1  # noqa: F401
+from .workflow import WorkflowTaskStatusTypeV1  # noqa: F401
+from .workflow import WorkflowTaskUpdateV1  # noqa: F401
+from .workflow import WorkflowUpdateV1  # noqa: F401