fractal-server 2.14.0a10__py3-none-any.whl → 2.14.0a11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/routes/api/v2/submit.py +1 -1
  3. fractal_server/app/runner/components.py +0 -3
  4. fractal_server/app/runner/exceptions.py +4 -0
  5. fractal_server/app/runner/executors/base_runner.py +16 -17
  6. fractal_server/app/runner/executors/local/{_local_config.py → get_local_config.py} +0 -7
  7. fractal_server/app/runner/executors/local/runner.py +117 -58
  8. fractal_server/app/runner/executors/slurm_common/_check_jobs_status.py +4 -0
  9. fractal_server/app/runner/executors/slurm_ssh/executor.py +7 -5
  10. fractal_server/app/runner/executors/slurm_ssh/runner.py +6 -10
  11. fractal_server/app/runner/executors/slurm_sudo/runner.py +201 -96
  12. fractal_server/app/runner/task_files.py +8 -0
  13. fractal_server/app/runner/v2/__init__.py +0 -366
  14. fractal_server/app/runner/v2/_local.py +2 -2
  15. fractal_server/app/runner/v2/_slurm_ssh.py +2 -2
  16. fractal_server/app/runner/v2/_slurm_sudo.py +2 -2
  17. fractal_server/app/runner/v2/db_tools.py +87 -0
  18. fractal_server/app/runner/v2/runner.py +77 -81
  19. fractal_server/app/runner/v2/runner_functions.py +274 -436
  20. fractal_server/app/runner/v2/runner_functions_low_level.py +37 -39
  21. fractal_server/app/runner/v2/submit_workflow.py +366 -0
  22. fractal_server/app/runner/v2/task_interface.py +31 -0
  23. {fractal_server-2.14.0a10.dist-info → fractal_server-2.14.0a11.dist-info}/METADATA +1 -1
  24. {fractal_server-2.14.0a10.dist-info → fractal_server-2.14.0a11.dist-info}/RECORD +27 -28
  25. fractal_server/app/runner/executors/local/_submit_setup.py +0 -46
  26. fractal_server/app/runner/executors/slurm_common/_submit_setup.py +0 -84
  27. fractal_server/app/runner/v2/_db_tools.py +0 -48
  28. {fractal_server-2.14.0a10.dist-info → fractal_server-2.14.0a11.dist-info}/LICENSE +0 -0
  29. {fractal_server-2.14.0a10.dist-info → fractal_server-2.14.0a11.dist-info}/WHEEL +0 -0
  30. {fractal_server-2.14.0a10.dist-info → fractal_server-2.14.0a11.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/v2/runner.py
@@ -2,19 +2,19 @@ import logging
 from copy import copy
 from copy import deepcopy
 from pathlib import Path
+from typing import Any
+from typing import Callable
+from typing import Literal
 from typing import Optional
 
 from sqlalchemy.orm.attributes import flag_modified
-from sqlmodel import update
 
 from ....images import SingleImage
 from ....images.tools import filter_image_list
 from ....images.tools import find_image_by_zarr_url
 from ..exceptions import JobExecutionError
-from .runner_functions import no_op_submit_setup_call
+from .merge_outputs import merge_outputs
 from .runner_functions import run_v2_task_compound
-from .runner_functions import run_v2_task_converter_compound
-from .runner_functions import run_v2_task_converter_non_parallel
 from .runner_functions import run_v2_task_non_parallel
 from .runner_functions import run_v2_task_parallel
 from .task_interface import TaskOutput
@@ -25,6 +25,7 @@ from fractal_server.app.models.v2 import HistoryRun
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.runner.executors.base_runner import BaseRunner
+from fractal_server.app.runner.v2.db_tools import update_status_of_history_run
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
 from fractal_server.app.schemas.v2 import TaskDumpV2
 from fractal_server.app.schemas.v2 import TaskGroupDumpV2
@@ -41,7 +42,14 @@ def execute_tasks_v2(
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
     logger_name: Optional[str] = None,
-    submit_setup_call: callable = no_op_submit_setup_call,
+    get_runner_config: Callable[
+        [
+            WorkflowTaskV2,
+            Literal["non_parallel", "parallel"],
+            Optional[Path],
+        ],
+        Any,
+    ],
     job_type_filters: dict[str, bool],
     job_attribute_filters: AttributeFiltersType,
 ) -> None:
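The untyped `submit_setup_call` hook is replaced here by a `get_runner_config` callable, which each backend provides (see `local/get_local_config.py` and the `_submit_setup.py` removals in the file list above). A minimal sketch of a conforming callable follows; the names `LocalBackendConfig`, `config_path`, and the `parallel_tasks_per_job` key are illustrative assumptions, not the actual shipped helpers:

```python
from pathlib import Path
from typing import Literal, Optional


class LocalBackendConfig:
    # Hypothetical config object; the annotation above uses `Any`
    # because each backend returns its own config type.
    def __init__(self, parallel_tasks_per_job: Optional[int] = None):
        self.parallel_tasks_per_job = parallel_tasks_per_job


def get_runner_config(
    wftask,  # WorkflowTaskV2
    which_type: Literal["non_parallel", "parallel"],
    config_path: Optional[Path] = None,
) -> LocalBackendConfig:
    # Pick the meta dict that matches this phase of the task and read
    # per-task overrides from it, falling back to defaults.
    meta = (
        wftask.meta_parallel
        if which_type == "parallel"
        else wftask.meta_non_parallel
    ) or {}
    return LocalBackendConfig(
        parallel_tasks_per_job=meta.get("parallel_tasks_per_job")
    )
```

Passing a typed callable keeps backend-specific configuration out of `execute_tasks_v2`, which only forwards it to the `run_v2_task_*` helpers.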
@@ -54,6 +62,10 @@ def execute_tasks_v2(
     )
     workflow_dir_local.mkdir()
 
+    # For local backend, remote and local folders are the same
+    if workflow_dir_remote is None:
+        workflow_dir_remote = workflow_dir_local
+
     # Initialize local dataset attributes
     zarr_dir = dataset.zarr_dir
     tmp_images = deepcopy(dataset.images)
@@ -68,6 +80,7 @@
 
         # Filter images by types and attributes (in two steps)
         if wftask.task_type in ["compound", "parallel", "non_parallel"]:
+            # Non-converter task
             type_filters = copy(current_dataset_type_filters)
             type_filters_patch = merge_type_filters(
                 task_input_types=task.input_types,
@@ -86,6 +99,8 @@
                 attribute_filters=job_attribute_filters,
             )
         else:
+            # Converter task
+            filtered_images = []
             num_available_images = 0
 
         with next(get_sync_db()) as db:
@@ -113,12 +128,8 @@
             history_run_id = history_run.id
 
         # TASK EXECUTION (V2)
-        if task.type == "non_parallel":
-            (
-                current_task_output,
-                num_tasks,
-                exceptions,
-            ) = run_v2_task_non_parallel(
+        if task.type in ["non_parallel", "converter_non_parallel"]:
+            outcomes_dict, num_tasks = run_v2_task_non_parallel(
                 images=filtered_images,
                 zarr_dir=zarr_dir,
                 wftask=wftask,
@@ -126,87 +137,64 @@
                 workflow_dir_local=workflow_dir_local,
                 workflow_dir_remote=workflow_dir_remote,
                 runner=runner,
-                submit_setup_call=submit_setup_call,
-                history_run_id=history_run_id,
-                dataset_id=dataset.id,
-            )
-        elif task.type == "converter_non_parallel":
-            (
-                current_task_output,
-                num_tasks,
-                exceptions,
-            ) = run_v2_task_converter_non_parallel(
-                zarr_dir=zarr_dir,
-                wftask=wftask,
-                task=task,
-                workflow_dir_local=workflow_dir_local,
-                workflow_dir_remote=workflow_dir_remote,
-                executor=runner,
-                submit_setup_call=submit_setup_call,
+                get_runner_config=get_runner_config,
                 history_run_id=history_run_id,
                 dataset_id=dataset.id,
+                task_type=task.type,
             )
         elif task.type == "parallel":
-            current_task_output, num_tasks, exceptions = run_v2_task_parallel(
+            outcomes_dict, num_tasks = run_v2_task_parallel(
                 images=filtered_images,
                 wftask=wftask,
                 task=task,
                 workflow_dir_local=workflow_dir_local,
                 workflow_dir_remote=workflow_dir_remote,
                 runner=runner,
-                submit_setup_call=submit_setup_call,
+                get_runner_config=get_runner_config,
                 history_run_id=history_run_id,
                 dataset_id=dataset.id,
             )
-        elif task.type == "compound":
-            current_task_output, num_tasks, exceptions = run_v2_task_compound(
+        elif task.type in ["compound", "converter_compound"]:
+            outcomes_dict, num_tasks = run_v2_task_compound(
                 images=filtered_images,
                 zarr_dir=zarr_dir,
                 wftask=wftask,
                 task=task,
                 workflow_dir_local=workflow_dir_local,
                 workflow_dir_remote=workflow_dir_remote,
-                executor=runner,
-                submit_setup_call=submit_setup_call,
-                history_run_id=history_run_id,
-                dataset_id=dataset.id,
-            )
-        elif task.type == "converter_compound":
-            (
-                current_task_output,
-                num_tasks,
-                exceptions,
-            ) = run_v2_task_converter_compound(
-                zarr_dir=zarr_dir,
-                wftask=wftask,
-                task=task,
-                workflow_dir_local=workflow_dir_local,
-                workflow_dir_remote=workflow_dir_remote,
-                executor=runner,
-                submit_setup_call=submit_setup_call,
+                runner=runner,
+                get_runner_config=get_runner_config,
                 history_run_id=history_run_id,
                 dataset_id=dataset.id,
+                task_type=task.type,
             )
         else:
             raise ValueError(f"Unexpected error: Invalid {task.type=}.")
 
         # POST TASK EXECUTION
 
-        # If `current_task_output` includes no images (to be created, edited or
-        # removed), then flag all the input images as modified. See
-        # fractal-server issue #1374.
-        if (
-            current_task_output.image_list_updates == []
-            and current_task_output.image_list_removals == []
-        ):
-            current_task_output = TaskOutput(
-                **current_task_output.model_dump(
-                    exclude={"image_list_updates"}
-                ),
-                image_list_updates=[
-                    dict(zarr_url=img["zarr_url"]) for img in filtered_images
-                ],
-            )
+        non_failed_task_outputs = [
+            value.task_output
+            for value in outcomes_dict.values()
+            if value.task_output is not None
+        ]
+        if len(non_failed_task_outputs) > 0:
+            current_task_output = merge_outputs(non_failed_task_outputs)
+            # If `current_task_output` includes no images (to be created or
+            # removed), then flag all the input images as modified.
+            # See fractal-server issues #1374 and #2409.
+            if (
+                current_task_output.image_list_updates == []
+                and current_task_output.image_list_removals == []
+            ):
+                current_task_output = TaskOutput(
+                    image_list_updates=[
+                        dict(zarr_url=img["zarr_url"])
+                        for img in filtered_images
+                    ],
+                )
+        else:
+            current_task_output = TaskOutput()
 
         # Update image list
         num_new_images = 0
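Each `run_v2_task_*` helper now returns `(outcomes_dict, num_tasks)` rather than `(current_task_output, num_tasks, exceptions)`: one outcome per submitted unit, carrying a `task_output` on success or an `exception` on failure. A rough sketch of the pattern, with illustrative dataclasses standing in for the actual models in `task_interface.py` and the actual `merge_outputs.py`:

```python
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class TaskOutput:
    # Stand-in for the Pydantic model in task_interface.py.
    image_list_updates: list[dict] = field(default_factory=list)
    image_list_removals: list[str] = field(default_factory=list)


@dataclass
class SubmissionOutcome:
    # A unit sets `task_output` on success or `exception` on failure.
    task_output: Optional[TaskOutput] = None
    exception: Optional[BaseException] = None


def merge_outputs(outputs: list[TaskOutput]) -> TaskOutput:
    # Concatenate per-unit image updates/removals into a single output.
    merged = TaskOutput()
    for out in outputs:
        merged.image_list_updates.extend(out.image_list_updates)
        merged.image_list_removals.extend(out.image_list_removals)
    return merged
```

With this shape, a partially failed parallel task still contributes the outputs of its successful units to the image-list update, while the failure itself is handled in the history-update block below.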
@@ -351,30 +339,38 @@
             db.add(record)
             db.commit()
 
-            # Update History tables, and raise an error if task failed
-            if exceptions == {}:
-                db.execute(
-                    update(HistoryRun)
-                    .where(HistoryRun.id == history_run_id)
-                    .values(status=HistoryUnitStatus.DONE)
+            # Update `HistoryRun` entry, and raise an error if task failed
+            try:
+                first_exception = next(
+                    value.exception
+                    for value in outcomes_dict.values()
+                    if value.exception is not None
                 )
-                db.commit()
-            else:
-                db.execute(
-                    update(HistoryRun)
-                    .where(HistoryRun.id == history_run_id)
-                    .values(status=HistoryUnitStatus.FAILED)
+                # An exception was found
+                update_status_of_history_run(
+                    history_run_id=history_run_id,
+                    status=HistoryUnitStatus.FAILED,
+                    db_sync=db,
                 )
-                db.commit()
                 logger.error(
                     f'END {wftask.order}-th task (name="{task_name}") - '
                     "ERROR."
                 )
                 # Raise first error
-                for key, value in exceptions.items():
-                    raise JobExecutionError(
-                        info=(f"An error occurred.\nOriginal error:\n{value}")
+                raise JobExecutionError(
+                    info=(
+                        f"An error occurred.\n"
+                        f"Original error:\n{first_exception}"
                     )
+                )
+            except StopIteration:
+                # No exception was found
+                update_status_of_history_run(
+                    history_run_id=history_run_id,
+                    status=HistoryUnitStatus.DONE,
+                    db_sync=db,
+                )
+                db.commit()
         logger.debug(
             f'END {wftask.order}-th task (name="{task_name}")'
         )
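The `HistoryRun` status update now goes through `db_tools.update_status_of_history_run` (added in this release, file 17 in the list above), and the first failure is located with a generator expression plus `next()`: when no outcome carries an exception, `next()` raises `StopIteration` and control lands in the success branch. A self-contained sketch of that control flow, with a print stub standing in for the real helper (which presumably updates the `HistoryRun` row and also takes a `db_sync` session):

```python
from types import SimpleNamespace


class JobExecutionError(RuntimeError):
    def __init__(self, info: str):
        super().__init__(info)


def update_status_of_history_run(*, history_run_id: int, status: str) -> None:
    # Stub for the helper in fractal_server/app/runner/v2/db_tools.py.
    print(f"HistoryRun {history_run_id} -> {status}")


def finalize_history_run(outcomes_dict: dict, history_run_id: int) -> None:
    try:
        # Stops at the first outcome whose `exception` is set; raises
        # StopIteration when every unit succeeded.
        first_exception = next(
            value.exception
            for value in outcomes_dict.values()
            if value.exception is not None
        )
        update_status_of_history_run(
            history_run_id=history_run_id, status="failed"
        )
        raise JobExecutionError(
            info=f"An error occurred.\nOriginal error:\n{first_exception}"
        )
    except StopIteration:
        update_status_of_history_run(
            history_run_id=history_run_id, status="done"
        )


# Example: one failed unit out of two.
outcomes = {
    0: SimpleNamespace(exception=None),
    1: SimpleNamespace(exception=ValueError("boom")),
}
try:
    finalize_history_run(outcomes, history_run_id=42)
except JobExecutionError as e:
    print(f"Propagated as expected: {e}")
```

Note that `JobExecutionError` is raised inside the `try` block but is not caught by `except StopIteration`, so it propagates to the caller as intended.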