fractal-server 2.14.0a9__py3-none-any.whl → 2.14.0a11__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
Files changed (43)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/v2/dataset.py +0 -10
  3. fractal_server/app/models/v2/job.py +3 -0
  4. fractal_server/app/routes/api/v2/__init__.py +2 -0
  5. fractal_server/app/routes/api/v2/history.py +14 -9
  6. fractal_server/app/routes/api/v2/images.py +5 -2
  7. fractal_server/app/routes/api/v2/submit.py +16 -14
  8. fractal_server/app/routes/api/v2/verify_image_types.py +64 -0
  9. fractal_server/app/routes/api/v2/workflow.py +11 -7
  10. fractal_server/app/runner/components.py +0 -3
  11. fractal_server/app/runner/exceptions.py +4 -0
  12. fractal_server/app/runner/executors/base_runner.py +16 -17
  13. fractal_server/app/runner/executors/local/{_local_config.py → get_local_config.py} +0 -7
  14. fractal_server/app/runner/executors/local/runner.py +117 -58
  15. fractal_server/app/runner/executors/{slurm_sudo → slurm_common}/_check_jobs_status.py +4 -0
  16. fractal_server/app/runner/executors/slurm_ssh/_check_job_status_ssh.py +67 -0
  17. fractal_server/app/runner/executors/slurm_ssh/executor.py +7 -5
  18. fractal_server/app/runner/executors/slurm_ssh/runner.py +707 -0
  19. fractal_server/app/runner/executors/slurm_sudo/runner.py +265 -114
  20. fractal_server/app/runner/task_files.py +8 -0
  21. fractal_server/app/runner/v2/__init__.py +0 -365
  22. fractal_server/app/runner/v2/_local.py +4 -2
  23. fractal_server/app/runner/v2/_slurm_ssh.py +4 -2
  24. fractal_server/app/runner/v2/_slurm_sudo.py +4 -2
  25. fractal_server/app/runner/v2/db_tools.py +87 -0
  26. fractal_server/app/runner/v2/runner.py +83 -89
  27. fractal_server/app/runner/v2/runner_functions.py +279 -436
  28. fractal_server/app/runner/v2/runner_functions_low_level.py +37 -39
  29. fractal_server/app/runner/v2/submit_workflow.py +366 -0
  30. fractal_server/app/runner/v2/task_interface.py +31 -0
  31. fractal_server/app/schemas/v2/dataset.py +4 -71
  32. fractal_server/app/schemas/v2/dumps.py +6 -5
  33. fractal_server/app/schemas/v2/job.py +6 -3
  34. fractal_server/migrations/versions/47351f8c7ebc_drop_dataset_filters.py +50 -0
  35. fractal_server/migrations/versions/e81103413827_add_job_type_filters.py +36 -0
  36. {fractal_server-2.14.0a9.dist-info → fractal_server-2.14.0a11.dist-info}/METADATA +1 -1
  37. {fractal_server-2.14.0a9.dist-info → fractal_server-2.14.0a11.dist-info}/RECORD +40 -36
  38. fractal_server/app/runner/executors/local/_submit_setup.py +0 -46
  39. fractal_server/app/runner/executors/slurm_common/_submit_setup.py +0 -84
  40. fractal_server/app/runner/v2/_db_tools.py +0 -48
  41. {fractal_server-2.14.0a9.dist-info → fractal_server-2.14.0a11.dist-info}/LICENSE +0 -0
  42. {fractal_server-2.14.0a9.dist-info → fractal_server-2.14.0a11.dist-info}/WHEEL +0 -0
  43. {fractal_server-2.14.0a9.dist-info → fractal_server-2.14.0a11.dist-info}/entry_points.txt +0 -0
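
The hunks below all come from `fractal_server/app/runner/v2/runner.py` (item 26 above): summing the added and removed lines across them gives exactly the +83/−89 reported for that file.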
@@ -2,19 +2,19 @@ import logging
 from copy import copy
 from copy import deepcopy
 from pathlib import Path
+from typing import Any
+from typing import Callable
+from typing import Literal
 from typing import Optional
 
 from sqlalchemy.orm.attributes import flag_modified
-from sqlmodel import update
 
 from ....images import SingleImage
 from ....images.tools import filter_image_list
 from ....images.tools import find_image_by_zarr_url
 from ..exceptions import JobExecutionError
-from .runner_functions import no_op_submit_setup_call
+from .merge_outputs import merge_outputs
 from .runner_functions import run_v2_task_compound
-from .runner_functions import run_v2_task_converter_compound
-from .runner_functions import run_v2_task_converter_non_parallel
 from .runner_functions import run_v2_task_non_parallel
 from .runner_functions import run_v2_task_parallel
 from .task_interface import TaskOutput
@@ -25,6 +25,7 @@ from fractal_server.app.models.v2 import HistoryRun
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.runner.executors.base_runner import BaseRunner
+from fractal_server.app.runner.v2.db_tools import update_status_of_history_run
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
 from fractal_server.app.schemas.v2 import TaskDumpV2
 from fractal_server.app.schemas.v2 import TaskGroupDumpV2
@@ -41,7 +42,15 @@ def execute_tasks_v2(
     workflow_dir_local: Path,
     workflow_dir_remote: Optional[Path] = None,
     logger_name: Optional[str] = None,
-    submit_setup_call: callable = no_op_submit_setup_call,
+    get_runner_config: Callable[
+        [
+            WorkflowTaskV2,
+            Literal["non_parallel", "parallel"],
+            Optional[Path],
+        ],
+        Any,
+    ],
+    job_type_filters: dict[str, bool],
     job_attribute_filters: AttributeFiltersType,
 ) -> None:
     logger = logging.getLogger(logger_name)
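
This hunk replaces the untyped `submit_setup_call` hook with a typed `get_runner_config` callable: it receives the workflow task, a `"non_parallel"`/`"parallel"` discriminator, and an optional path, and returns a backend-specific config object (the return type is `Any`). A minimal sketch of a conforming callable, under the assumption of a dict-based config; the name, the third parameter's meaning, and the body are illustrative, not the package's actual implementation (each backend provides its own, e.g. via `get_local_config.py` in the file list above):

```python
from pathlib import Path
from typing import Any, Literal, Optional


def example_get_runner_config(
    wftask: Any,  # a WorkflowTaskV2 row; typed loosely to keep the sketch standalone
    which_type: Literal["non_parallel", "parallel"],
    config_path: Optional[Path] = None,  # hypothetical use of the Optional[Path] slot
) -> Any:
    # Hypothetical behavior: merge per-task overrides over backend defaults.
    defaults = {"parallel_tasks_per_job": 1}
    overrides = getattr(wftask, f"meta_{which_type}", None) or {}
    return {**defaults, **overrides}
```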
@@ -53,10 +62,14 @@ def execute_tasks_v2(
     )
     workflow_dir_local.mkdir()
 
+    # For local backend, remote and local folders are the same
+    if workflow_dir_remote is None:
+        workflow_dir_remote = workflow_dir_local
+
     # Initialize local dataset attributes
     zarr_dir = dataset.zarr_dir
     tmp_images = deepcopy(dataset.images)
-    current_dataset_type_filters = deepcopy(dataset.type_filters)
+    current_dataset_type_filters = copy(job_type_filters)
 
     for wftask in wf_task_list:
         task = wftask.task
@@ -67,6 +80,7 @@ def execute_tasks_v2(
 
         # Filter images by types and attributes (in two steps)
         if wftask.task_type in ["compound", "parallel", "non_parallel"]:
+            # Non-converter task
             type_filters = copy(current_dataset_type_filters)
             type_filters_patch = merge_type_filters(
                 task_input_types=task.input_types,
@@ -85,6 +99,8 @@ def execute_tasks_v2(
                 attribute_filters=job_attribute_filters,
             )
         else:
+            # Converter task
+            filtered_images = []
             num_available_images = 0
 
         with next(get_sync_db()) as db:
@@ -112,100 +128,73 @@ def execute_tasks_v2(
             history_run_id = history_run.id
 
         # TASK EXECUTION (V2)
-        if task.type == "non_parallel":
-            (
-                current_task_output,
-                num_tasks,
-                exceptions,
-            ) = run_v2_task_non_parallel(
+        if task.type in ["non_parallel", "converter_non_parallel"]:
+            outcomes_dict, num_tasks = run_v2_task_non_parallel(
                 images=filtered_images,
                 zarr_dir=zarr_dir,
                 wftask=wftask,
                 task=task,
                 workflow_dir_local=workflow_dir_local,
                 workflow_dir_remote=workflow_dir_remote,
-                executor=runner,
-                submit_setup_call=submit_setup_call,
-                history_run_id=history_run_id,
-                dataset_id=dataset.id,
-            )
-        elif task.type == "converter_non_parallel":
-            (
-                current_task_output,
-                num_tasks,
-                exceptions,
-            ) = run_v2_task_converter_non_parallel(
-                zarr_dir=zarr_dir,
-                wftask=wftask,
-                task=task,
-                workflow_dir_local=workflow_dir_local,
-                workflow_dir_remote=workflow_dir_remote,
-                executor=runner,
-                submit_setup_call=submit_setup_call,
+                runner=runner,
+                get_runner_config=get_runner_config,
                 history_run_id=history_run_id,
                 dataset_id=dataset.id,
+                task_type=task.type,
             )
         elif task.type == "parallel":
-            current_task_output, num_tasks, exceptions = run_v2_task_parallel(
+            outcomes_dict, num_tasks = run_v2_task_parallel(
                 images=filtered_images,
                 wftask=wftask,
                 task=task,
                 workflow_dir_local=workflow_dir_local,
                 workflow_dir_remote=workflow_dir_remote,
-                executor=runner,
-                submit_setup_call=submit_setup_call,
+                runner=runner,
+                get_runner_config=get_runner_config,
                 history_run_id=history_run_id,
                 dataset_id=dataset.id,
             )
-        elif task.type == "compound":
-            current_task_output, num_tasks, exceptions = run_v2_task_compound(
+        elif task.type in ["compound", "converter_compound"]:
+            outcomes_dict, num_tasks = run_v2_task_compound(
                 images=filtered_images,
                 zarr_dir=zarr_dir,
                 wftask=wftask,
                 task=task,
                 workflow_dir_local=workflow_dir_local,
                 workflow_dir_remote=workflow_dir_remote,
-                executor=runner,
-                submit_setup_call=submit_setup_call,
-                history_run_id=history_run_id,
-                dataset_id=dataset.id,
-            )
-        elif task.type == "converter_compound":
-            (
-                current_task_output,
-                num_tasks,
-                exceptions,
-            ) = run_v2_task_converter_compound(
-                zarr_dir=zarr_dir,
-                wftask=wftask,
-                task=task,
-                workflow_dir_local=workflow_dir_local,
-                workflow_dir_remote=workflow_dir_remote,
-                executor=runner,
-                submit_setup_call=submit_setup_call,
+                runner=runner,
+                get_runner_config=get_runner_config,
                 history_run_id=history_run_id,
                 dataset_id=dataset.id,
+                task_type=task.type,
             )
         else:
             raise ValueError(f"Unexpected error: Invalid {task.type=}.")
 
         # POST TASK EXECUTION
 
-        # If `current_task_output` includes no images (to be created, edited or
-        # removed), then flag all the input images as modified. See
-        # fractal-server issue #1374.
-        if (
-            current_task_output.image_list_updates == []
-            and current_task_output.image_list_removals == []
-        ):
-            current_task_output = TaskOutput(
-                **current_task_output.model_dump(
-                    exclude={"image_list_updates"}
-                ),
-                image_list_updates=[
-                    dict(zarr_url=img["zarr_url"]) for img in filtered_images
-                ],
-            )
+        non_failed_task_outputs = [
+            value.task_output
+            for value in outcomes_dict.values()
+            if value.task_output is not None
+        ]
+        if len(non_failed_task_outputs) > 0:
+            current_task_output = merge_outputs(non_failed_task_outputs)
+            # If `current_task_output` includes no images (to be created or
+            # removed), then flag all the input images as modified.
+            # See fractal-server issues #1374 and #2409.
+            if (
+                current_task_output.image_list_updates == []
+                and current_task_output.image_list_removals == []
+            ):
+                current_task_output = TaskOutput(
+                    image_list_updates=[
+                        dict(zarr_url=img["zarr_url"])
+                        for img in filtered_images
+                    ],
+                )
+        else:
+            current_task_output = TaskOutput()
 
         # Update image list
         num_new_images = 0
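
Each `run_v2_task_*` helper now returns a dict of per-unit outcome objects instead of a single `(output, num_tasks, exceptions)` tuple; the hunk above reads `.task_output` and `.exception` off each value and merges the successful outputs. A rough sketch of the shape this implies, with field names taken from the usage above but an illustrative class and a simplified, dict-based `merge_outputs_sketch` (the real `merge_outputs` in `merge_outputs.py` works on `TaskOutput` models):

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class Outcome:
    # Per-unit result: at most one of these is expected to be set.
    task_output: Optional[dict] = None
    exception: Optional[BaseException] = None


def merge_outputs_sketch(task_outputs: list[dict]) -> dict:
    # Concatenate the image-list edits produced by each successful unit.
    merged: dict = {"image_list_updates": [], "image_list_removals": []}
    for out in task_outputs:
        merged["image_list_updates"] += out.get("image_list_updates", [])
        merged["image_list_removals"] += out.get("image_list_removals", [])
    return merged


# Usage mirroring the loop above: failed units are skipped, the rest merged.
outcomes_dict = {
    0: Outcome(task_output={"image_list_updates": [{"zarr_url": "/x.zarr/A/01/0"}]}),
    1: Outcome(exception=RuntimeError("unit failed")),
}
non_failed = [o.task_output for o in outcomes_dict.values() if o.task_output is not None]
assert merge_outputs_sketch(non_failed)["image_list_updates"]
```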
@@ -333,13 +322,10 @@ def execute_tasks_v2(
         current_dataset_type_filters.update(type_filters_from_task_manifest)
 
         with next(get_sync_db()) as db:
-            # Write current dataset attributes (history + filters) into the
-            # database.
+            # Write current dataset images into the database.
             db_dataset = db.get(DatasetV2, dataset.id)
-            db_dataset.type_filters = current_dataset_type_filters
             db_dataset.images = tmp_images
-            for attribute_name in ["type_filters", "images"]:
-                flag_modified(db_dataset, attribute_name)
+            flag_modified(db_dataset, "images")
             db.merge(db_dataset)
             db.commit()
             db.close()  # FIXME: why is this needed?
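
The dataset `type_filters` column is no longer written back here (it is dropped by migration `47351f8c7ebc_drop_dataset_filters.py` in the file list), so only `images` needs `flag_modified`. As background, `flag_modified` is required because SQLAlchemy does not track in-place mutations of plain JSON columns; a self-contained sketch with an illustrative model (not fractal-server's actual `DatasetV2`):

```python
from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base
from sqlalchemy.orm.attributes import flag_modified

Base = declarative_base()


class Dataset(Base):
    __tablename__ = "dataset"
    id = Column(Integer, primary_key=True)
    images = Column(JSON, default=list)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    ds = Dataset(images=[])
    session.add(ds)
    session.commit()

    # In-place mutation of a plain JSON column is invisible to the unit of work:
    ds.images.append({"zarr_url": "/tmp/plate.zarr/A/01/0"})
    flag_modified(ds, "images")  # without this, the UPDATE would be skipped
    session.commit()

    session.refresh(ds)
    assert ds.images  # the appended image was persisted
```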
@@ -353,30 +339,38 @@ def execute_tasks_v2(
                 db.add(record)
             db.commit()
 
-            # Update History tables, and raise an error if task failed
-            if exceptions == {}:
-                db.execute(
-                    update(HistoryRun)
-                    .where(HistoryRun.id == history_run_id)
-                    .values(status=HistoryUnitStatus.DONE)
+            # Update `HistoryRun` entry, and raise an error if task failed
+            try:
+                first_exception = next(
+                    value.exception
+                    for value in outcomes_dict.values()
+                    if value.exception is not None
                 )
-                db.commit()
-            else:
-                db.execute(
-                    update(HistoryRun)
-                    .where(HistoryRun.id == history_run_id)
-                    .values(status=HistoryUnitStatus.FAILED)
+                # An exception was found
+                update_status_of_history_run(
+                    history_run_id=history_run_id,
+                    status=HistoryUnitStatus.FAILED,
+                    db_sync=db,
                 )
-                db.commit()
                 logger.error(
                     f'END {wftask.order}-th task (name="{task_name}") - '
                     "ERROR."
                 )
                 # Raise first error
-                for key, value in exceptions.items():
-                    raise JobExecutionError(
-                        info=(f"An error occurred.\nOriginal error:\n{value}")
+                raise JobExecutionError(
+                    info=(
+                        f"An error occurred.\n"
+                        f"Original error:\n{first_exception}"
                     )
+                )
+            except StopIteration:
+                # No exception was found
+                update_status_of_history_run(
+                    history_run_id=history_run_id,
+                    status=HistoryUnitStatus.DONE,
+                    db_sync=db,
+                )
+                db.commit()
             logger.debug(
                 f'END {wftask.order}-th task (name="{task_name}")'
            )
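
The new status update hinges on a small Python idiom: `next()` over a generator expression raises `StopIteration` when no element matches, so the `except StopIteration` branch is the success path. A standalone illustration with toy data (not fractal-server objects):

```python
outcomes = {
    0: {"exception": None},
    1: {"exception": RuntimeError("task unit failed")},
    2: {"exception": None},
}

try:
    first_exception = next(
        value["exception"]
        for value in outcomes.values()
        if value["exception"] is not None
    )
    print(f"FAILED: {first_exception}")  # the first matching exception wins
except StopIteration:
    print("DONE: no exception found")
```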