fractal-server 2.4.2__py3-none-any.whl → 2.5.0a0__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
Files changed (30)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/db/__init__.py +4 -1
  3. fractal_server/app/models/v1/task.py +0 -5
  4. fractal_server/app/models/v2/workflowtask.py +2 -10
  5. fractal_server/app/routes/admin/v2.py +0 -30
  6. fractal_server/app/routes/api/v2/__init__.py +0 -4
  7. fractal_server/app/routes/api/v2/_aux_functions.py +11 -46
  8. fractal_server/app/routes/api/v2/workflow.py +23 -54
  9. fractal_server/app/routes/api/v2/workflowtask.py +9 -33
  10. fractal_server/app/runner/v2/__init__.py +1 -4
  11. fractal_server/app/runner/v2/_slurm_common/get_slurm_config.py +1 -4
  12. fractal_server/app/runner/v2/handle_failed_job.py +2 -9
  13. fractal_server/app/runner/v2/runner.py +42 -70
  14. fractal_server/app/runner/v2/runner_functions.py +0 -58
  15. fractal_server/app/runner/v2/runner_functions_low_level.py +7 -21
  16. fractal_server/app/schemas/v2/__init__.py +0 -1
  17. fractal_server/app/schemas/v2/dumps.py +2 -23
  18. fractal_server/app/schemas/v2/task.py +0 -5
  19. fractal_server/app/schemas/v2/workflowtask.py +4 -29
  20. fractal_server/migrations/env.py +4 -7
  21. fractal_server/migrations/naming_convention.py +7 -0
  22. fractal_server/migrations/versions/091b01f51f88_add_usergroup_and_linkusergroup_table.py +1 -1
  23. fractal_server/migrations/versions/501961cfcd85_remove_link_between_v1_and_v2_tasks_.py +97 -0
  24. {fractal_server-2.4.2.dist-info → fractal_server-2.5.0a0.dist-info}/METADATA +1 -1
  25. {fractal_server-2.4.2.dist-info → fractal_server-2.5.0a0.dist-info}/RECORD +28 -28
  26. fractal_server/app/routes/api/v2/task_legacy.py +0 -59
  27. fractal_server/app/runner/v2/v1_compat.py +0 -31
  28. {fractal_server-2.4.2.dist-info → fractal_server-2.5.0a0.dist-info}/LICENSE +0 -0
  29. {fractal_server-2.4.2.dist-info → fractal_server-2.5.0a0.dist-info}/WHEEL +0 -0
  30. {fractal_server-2.4.2.dist-info → fractal_server-2.5.0a0.dist-info}/entry_points.txt +0 -0

fractal_server/__init__.py
@@ -1 +1 @@
-__VERSION__ = "2.4.2"
+__VERSION__ = "2.5.0a0"

fractal_server/app/db/__init__.py
@@ -2,6 +2,7 @@
 `db` module, loosely adapted from
 https://testdriven.io/blog/fastapi-sqlmodel/#async-sqlmodel
 """
+import sqlite3
 from typing import AsyncGenerator
 from typing import Generator
 
@@ -21,7 +22,9 @@ from ...syringe import Inject
 logger = set_logger(__name__)
 
 SQLITE_WARNING_MESSAGE = (
-    "SQLite is supported (for version >=3.37) but discouraged in production. "
+    "SQLite is supported (supported version >=3.37, "
+    f"current {sqlite3.sqlite_version=}) "
+    "but discouraged in production. "
     "Given its partial support for ForeignKey constraints, "
     "database consistency cannot be guaranteed."
 )
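
Note: the new message uses a self-documenting f-string (f"{expr=}", available since Python 3.8), so the warning now reports the SQLite version actually linked into the running interpreter. A minimal standalone illustration (the printed value depends on the local build):

import sqlite3

# The "=" specifier echoes the expression together with its repr,
# e.g.: sqlite3.sqlite_version='3.37.2'
print(f"{sqlite3.sqlite_version=}")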

fractal_server/app/models/v1/task.py
@@ -5,7 +5,6 @@ from typing import Optional
 
 from pydantic import HttpUrl
 from sqlalchemy import Column
-from sqlalchemy import sql
 from sqlalchemy.types import JSON
 from sqlmodel import Field
 from sqlmodel import SQLModel
@@ -49,10 +48,6 @@ class Task(_TaskBaseV1, SQLModel, table=True):
     docs_info: Optional[str] = None
     docs_link: Optional[HttpUrl] = None
 
-    is_v2_compatible: bool = Field(
-        default=False, sa_column_kwargs={"server_default": sql.false()}
-    )
-
     @property
     def parallelization_level(self) -> Optional[str]:
         try:

fractal_server/app/models/v2/workflowtask.py
@@ -8,7 +8,6 @@ from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
 
-from ..v1.task import Task
 from .task import TaskV2
 
 
@@ -37,13 +36,6 @@ class WorkflowTaskV2(SQLModel, table=True):
     )
 
     # Task
-    is_legacy_task: bool
     task_type: str
-    task_id: Optional[int] = Field(foreign_key="taskv2.id")
-    task: Optional[TaskV2] = Relationship(
-        sa_relationship_kwargs=dict(lazy="selectin")
-    )
-    task_legacy_id: Optional[int] = Field(foreign_key="task.id")
-    task_legacy: Optional[Task] = Relationship(
-        sa_relationship_kwargs=dict(lazy="selectin")
-    )
+    task_id: int = Field(foreign_key="taskv2.id")
+    task: TaskV2 = Relationship(sa_relationship_kwargs=dict(lazy="selectin"))
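
Dropping Optional[...] here is more than a type-hint change: SQLModel derives column nullability from the annotation, so task_id becomes a NOT NULL foreign key and every WorkflowTaskV2 row must now reference a TaskV2. A minimal sketch with hypothetical tables (not the actual models):

from typing import Optional
from sqlmodel import Field, SQLModel

class TaskSketch(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)

class WfTaskSketch(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    # Optional annotation -> nullable FK column (the old behavior)
    loose_task_id: Optional[int] = Field(default=None, foreign_key="tasksketch.id")
    # Plain int annotation -> NOT NULL FK column (the new behavior)
    strict_task_id: int = Field(foreign_key="tasksketch.id")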

fractal_server/app/routes/admin/v2.py
@@ -24,7 +24,6 @@ from ....utils import get_timestamp
 from ....zip_tools import _zip_folder_to_byte_stream_iterator
 from ...db import AsyncSession
 from ...db import get_async_db
-from ...models.v1 import Task
 from ...models.v2 import JobV2
 from ...models.v2 import ProjectV2
 from ...models.v2 import TaskV2
@@ -281,35 +280,6 @@ async def download_job_logs(
     )
 
 
-class TaskCompatibility(BaseModel):
-    is_v2_compatible: bool
-
-
-@router_admin_v2.patch(
-    "/task-v1/{task_id}/",
-    status_code=status.HTTP_200_OK,
-)
-async def flag_task_v1_as_v2_compatible(
-    task_id: int,
-    compatibility: TaskCompatibility,
-    user: UserOAuth = Depends(current_active_superuser),
-    db: AsyncSession = Depends(get_async_db),
-) -> Response:
-
-    task = await db.get(Task, task_id)
-    if task is None:
-        raise HTTPException(
-            status_code=status.HTTP_404_NOT_FOUND,
-            detail=f"Task {task_id} not found",
-        )
-
-    task.is_v2_compatible = compatibility.is_v2_compatible
-    await db.commit()
-    await db.close()
-
-    return Response(status_code=status.HTTP_200_OK)
-
-
 class TaskV2Minimal(BaseModel):
 
     id: int

fractal_server/app/routes/api/v2/__init__.py
@@ -12,7 +12,6 @@ from .submit import router as submit_job_router_v2
 from .task import router as task_router_v2
 from .task_collection import router as task_collection_router_v2
 from .task_collection_custom import router as task_collection_router_v2_custom
-from .task_legacy import router as task_legacy_router_v2
 from .workflow import router as workflow_router_v2
 from .workflowtask import router as workflowtask_router_v2
 from fractal_server.config import get_settings
@@ -38,9 +37,6 @@ router_api_v2.include_router(
     tags=["V2 Task Collection"],
 )
 router_api_v2.include_router(task_router_v2, prefix="/task", tags=["V2 Task"])
-router_api_v2.include_router(
-    task_legacy_router_v2, prefix="/task-legacy", tags=["V2 Task Legacy"]
-)
 router_api_v2.include_router(workflow_router_v2, tags=["V2 Workflow"])
 router_api_v2.include_router(workflowtask_router_v2, tags=["V2 WorkflowTask"])
 router_api_v2.include_router(status_router_v2, tags=["V2 Status"])

fractal_server/app/routes/api/v2/_aux_functions.py
@@ -13,7 +13,6 @@ from sqlmodel import select
 from sqlmodel.sql.expression import SelectOfScalar
 
 from ....db import AsyncSession
-from ....models.v1 import Task
 from ....models.v2 import DatasetV2
 from ....models.v2 import JobV2
 from ....models.v2 import LinkUserProjectV2
@@ -389,7 +388,6 @@ async def _workflow_insert_task(
     *,
     workflow_id: int,
     task_id: int,
-    is_legacy_task: bool = False,
     order: Optional[int] = None,
     meta_parallel: Optional[dict[str, Any]] = None,
     meta_non_parallel: Optional[dict[str, Any]] = None,
@@ -404,7 +402,7 @@ async def _workflow_insert_task(
     Args:
         workflow_id:
         task_id:
-        is_legacy_task:
+
         order:
         meta_parallel:
         meta_non_parallel:
@@ -420,52 +418,21 @@ async def _workflow_insert_task(
     if order is None:
         order = len(db_workflow.task_list)
 
-    # Get task from db, and extract default arguments via a Task property
-    # method
-    # NOTE: this logic remains there for V1 tasks only. When we deprecate V1
-    # tasks, we can simplify this block
-    if is_legacy_task is True:
-        db_task = await db.get(Task, task_id)
-        if db_task is None:
-            raise ValueError(f"Task {task_id} not found.")
-        task_type = "parallel"
-
-        final_args_parallel = db_task.default_args_from_args_schema.copy()
-        final_args_non_parallel = {}
-        final_meta_parallel = (db_task.meta or {}).copy()
-        final_meta_non_parallel = {}
-
-    else:
-        db_task = await db.get(TaskV2, task_id)
-        if db_task is None:
-            raise ValueError(f"TaskV2 {task_id} not found.")
-        task_type = db_task.type
-
-        final_args_non_parallel = {}
-        final_args_parallel = {}
-        final_meta_parallel = (db_task.meta_parallel or {}).copy()
-        final_meta_non_parallel = (db_task.meta_non_parallel or {}).copy()
-
-    # Combine arg_parallel
-    if args_parallel is not None:
-        for k, v in args_parallel.items():
-            final_args_parallel[k] = v
-    if final_args_parallel == {}:
-        final_args_parallel = None
-    # Combine arg_non_parallel
-    if args_non_parallel is not None:
-        for k, v in args_non_parallel.items():
-            final_args_non_parallel[k] = v
-    if final_args_non_parallel == {}:
-        final_args_non_parallel = None
+    # Get task from db
+    db_task = await db.get(TaskV2, task_id)
+    if db_task is None:
+        raise ValueError(f"TaskV2 {task_id} not found.")
+    task_type = db_task.type
 
     # Combine meta_parallel (higher priority)
     # and db_task.meta_parallel (lower priority)
+    final_meta_parallel = (db_task.meta_parallel or {}).copy()
     final_meta_parallel.update(meta_parallel or {})
     if final_meta_parallel == {}:
         final_meta_parallel = None
     # Combine meta_non_parallel (higher priority)
     # and db_task.meta_non_parallel (lower priority)
+    final_meta_non_parallel = (db_task.meta_non_parallel or {}).copy()
     final_meta_non_parallel.update(meta_non_parallel or {})
     if final_meta_non_parallel == {}:
         final_meta_non_parallel = None
@@ -479,11 +446,9 @@ async def _workflow_insert_task(
     # Create DB entry
     wf_task = WorkflowTaskV2(
         task_type=task_type,
-        is_legacy_task=is_legacy_task,
-        task_id=(task_id if not is_legacy_task else None),
-        task_legacy_id=(task_id if is_legacy_task else None),
-        args_non_parallel=final_args_non_parallel,
-        args_parallel=final_args_parallel,
+        task_id=task_id,
+        args_non_parallel=args_non_parallel,
+        args_parallel=args_parallel,
         meta_parallel=final_meta_parallel,
         meta_non_parallel=final_meta_non_parallel,
         **input_filters_kwarg,
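
After the simplification, args_parallel and args_non_parallel are stored exactly as passed, while the meta dicts still follow a copy-then-update merge in which caller-supplied values override task defaults and an empty result collapses to None. A standalone sketch of that merge rule (merge_meta is a hypothetical helper, not part of the module):

from typing import Optional

def merge_meta(task_defaults: Optional[dict], overrides: Optional[dict]) -> Optional[dict]:
    # Task-level defaults (lower priority) merged with caller
    # overrides (higher priority); an empty dict collapses to None.
    merged = (task_defaults or {}).copy()
    merged.update(overrides or {})
    return merged or None

assert merge_meta({"cpus": 2, "mem": "8G"}, {"cpus": 16}) == {"cpus": 16, "mem": "8G"}
assert merge_meta(None, None) is None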

fractal_server/app/routes/api/v2/workflow.py
@@ -11,7 +11,6 @@ from .....logger import reset_logger_handlers
 from .....logger import set_logger
 from ....db import AsyncSession
 from ....db import get_async_db
-from ....models.v1 import Task as TaskV1
 from ....models.v2 import JobV2
 from ....models.v2 import ProjectV2
 from ....models.v2 import TaskV2
@@ -242,23 +241,13 @@ async def export_worfklow(
     # Emit a warning when exporting a workflow with custom tasks
     logger = set_logger(None)
     for wftask in workflow.task_list:
-        if wftask.is_legacy_task:
-            if wftask.task_legacy.owner is not None:
-                logger.warning(
-                    f"Custom tasks (like the one with "
-                    f"id={wftask.task_legacy_id} and "
-                    f"source='{wftask.task_legacy.source}') are not meant to "
-                    "be portable; re-importing this workflow may not work as "
-                    "expected."
-                )
-        else:
-            if wftask.task.owner is not None:
-                logger.warning(
-                    f"Custom tasks (like the one with id={wftask.task_id} and "
-                    f'source="{wftask.task.source}") are not meant to be '
-                    "portable; re-importing this workflow may not work as "
-                    "expected."
-                )
+        if wftask.task.owner is not None:
+            logger.warning(
+                f"Custom tasks (like the one with id={wftask.task_id} and "
+                f'source="{wftask.task.source}") are not meant to be '
+                "portable; re-importing this workflow may not work as "
+                "expected."
+            )
     reset_logger_handlers(logger)
 
     await db.close()
@@ -296,38 +285,22 @@ async def import_workflow(
 
     # Check that all required tasks are available
     source_to_id = {}
-    source_to_id_legacy = {}
 
     for wf_task in workflow.task_list:
 
-        if wf_task.is_legacy_task is True:
-            source = wf_task.task_legacy.source
-            if source not in source_to_id_legacy.keys():
-                stm = select(TaskV1).where(TaskV1.source == source)
-                tasks_by_source = (await db.execute(stm)).scalars().all()
-                if len(tasks_by_source) != 1:
-                    raise HTTPException(
-                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                        detail=(
-                            f"Found {len(tasks_by_source)} tasks legacy "
-                            f"with {source=}."
-                        ),
-                    )
-                source_to_id_legacy[source] = tasks_by_source[0].id
-        else:
-            source = wf_task.task.source
-            if source not in source_to_id.keys():
-                stm = select(TaskV2).where(TaskV2.source == source)
-                tasks_by_source = (await db.execute(stm)).scalars().all()
-                if len(tasks_by_source) != 1:
-                    raise HTTPException(
-                        status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                        detail=(
-                            f"Found {len(tasks_by_source)} tasks "
-                            f"with {source=}."
-                        ),
-                    )
-                source_to_id[source] = tasks_by_source[0].id
+        source = wf_task.task.source
+        if source not in source_to_id.keys():
+            stm = select(TaskV2).where(TaskV2.source == source)
+            tasks_by_source = (await db.execute(stm)).scalars().all()
+            if len(tasks_by_source) != 1:
+                raise HTTPException(
+                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+                    detail=(
+                        f"Found {len(tasks_by_source)} tasks "
+                        f"with {source=}."
+                    ),
+                )
+            source_to_id[source] = tasks_by_source[0].id
 
     # Create new Workflow (with empty task_list)
     db_workflow = WorkflowV2(
@@ -341,15 +314,11 @@ async def import_workflow(
     # Insert tasks
 
     for wf_task in workflow.task_list:
-        if wf_task.is_legacy_task is True:
-            source = wf_task.task_legacy.source
-            task_id = source_to_id_legacy[source]
-        else:
-            source = wf_task.task.source
-            task_id = source_to_id[source]
+        source = wf_task.task.source
+        task_id = source_to_id[source]
 
         new_wf_task = WorkflowTaskCreateV2(
-            **wf_task.dict(exclude_none=True, exclude={"task", "task_legacy"})
+            **wf_task.dict(exclude_none=True, exclude={"task"})
         )
         # Insert task
         await _workflow_insert_task(
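
Import now resolves every workflow task through a single source-to-id lookup that requires exactly one TaskV2 per source. The rule in isolation (a hedged sketch with an in-memory stand-in; the real code issues a SQLModel SELECT against the TaskV2 table):

def resolve_task_id(source: str, tasks: list[dict]) -> int:
    # `tasks` stands in for the TaskV2 table; uniqueness of `source`
    # is enforced by rejecting zero or multiple matches.
    matches = [t for t in tasks if t["source"] == source]
    if len(matches) != 1:
        raise ValueError(f"Found {len(matches)} tasks with {source=}.")
    return matches[0]["id"]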

fractal_server/app/routes/api/v2/workflowtask.py
@@ -9,7 +9,6 @@ from fastapi import status
 
 from ....db import AsyncSession
 from ....db import get_async_db
-from ....models.v1 import Task
 from ....models.v2 import TaskV2
 from ....schemas.v2 import WorkflowTaskCreateV2
 from ....schemas.v2 import WorkflowTaskReadV2
@@ -44,27 +43,14 @@ async def create_workflowtask(
         project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
     )
 
-    if new_task.is_legacy_task is True:
-        task = await db.get(Task, task_id)
-        if not task:
-            raise HTTPException(
-                status_code=status.HTTP_404_NOT_FOUND,
-                detail=f"Task {task_id} not found.",
-            )
-        if not task.is_v2_compatible:
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail=f"Task {task_id} is not V2-compatible.",
-            )
-    else:
-        task = await db.get(TaskV2, task_id)
-        if not task:
-            raise HTTPException(
-                status_code=status.HTTP_404_NOT_FOUND,
-                detail=f"TaskV2 {task_id} not found.",
-            )
+    task = await db.get(TaskV2, task_id)
+    if not task:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"TaskV2 {task_id} not found.",
+        )
 
-    if new_task.is_legacy_task is True or task.type == "parallel":
+    if task.type == "parallel":
         if (
             new_task.meta_non_parallel is not None
             or new_task.args_non_parallel is not None
@@ -74,7 +60,7 @@ async def create_workflowtask(
             detail=(
                 "Cannot set `WorkflowTaskV2.meta_non_parallel` or "
                 "`WorkflowTask.args_non_parallel` if the associated Task "
-                "is `parallel` (or legacy)."
+                "is `parallel`."
             ),
         )
     elif task.type == "non_parallel":
@@ -93,7 +79,6 @@ async def create_workflowtask(
 
     workflow_task = await _workflow_insert_task(
         workflow_id=workflow.id,
-        is_legacy_task=new_task.is_legacy_task,
        task_id=task_id,
         order=new_task.order,
         meta_non_parallel=new_task.meta_non_parallel,
@@ -182,16 +167,7 @@ async def update_workflowtask(
     for key, value in workflow_task_update.dict(exclude_unset=True).items():
         if key == "args_parallel":
             # Get default arguments via a Task property method
-            if db_wf_task.is_legacy_task:
-                default_args = (
-                    db_wf_task.task_legacy.default_args_from_args_schema
-                )
-                actual_args = deepcopy(default_args)
-                if value is not None:
-                    for k, v in value.items():
-                        actual_args[k] = v
-            else:
-                actual_args = deepcopy(value)
+            actual_args = deepcopy(value)
             if not actual_args:
                 actual_args = None
             setattr(db_wf_task, key, actual_args)
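
The update endpoint keeps the standard partial-update idiom: dict(exclude_unset=True) yields only the fields the client actually sent, so attributes omitted from the PATCH body are never overwritten. A standalone sketch (hypothetical schema; pydantic v1-style .dict() as used here):

from typing import Optional
from pydantic import BaseModel

class WorkflowTaskUpdateSketch(BaseModel):
    args_parallel: Optional[dict] = None
    meta_parallel: Optional[dict] = None

patch = WorkflowTaskUpdateSketch(args_parallel={"level": 2})
# meta_parallel was never set, so it is absent from the payload
assert patch.dict(exclude_unset=True) == {"args_parallel": {"level": 2}}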

fractal_server/app/runner/v2/__init__.py
@@ -208,10 +208,7 @@ async def submit_workflow(
     # Create all tasks subfolders
     for order in range(job.first_task_index, job.last_task_index + 1):
         this_wftask = workflow.task_list[order]
-        if this_wftask.is_legacy_task:
-            task_name = this_wftask.task_legacy.name
-        else:
-            task_name = this_wftask.task.name
+        task_name = this_wftask.task.name
         subfolder_name = task_subfolder_name(
             order=order,
             task_name=task_name,

fractal_server/app/runner/v2/_slurm_common/get_slurm_config.py
@@ -116,10 +116,7 @@ def get_slurm_config(
     slurm_dict["mem_per_task_MB"] = mem_per_task_MB
 
     # Job name
-    if wftask.is_legacy_task:
-        job_name = wftask.task_legacy.name.replace(" ", "_")
-    else:
-        job_name = wftask.task.name.replace(" ", "_")
+    job_name = wftask.task.name.replace(" ", "_")
     slurm_dict["job_name"] = job_name
 
     # Optional SLURM arguments and extra lines

fractal_server/app/runner/v2/handle_failed_job.py
@@ -96,15 +96,8 @@ def assemble_history_failed_job(
 
     # Part 3/B: Append failed task to history
     if failed_wftask is not None:
-        failed_wftask_dump = failed_wftask.model_dump(
-            exclude={"task", "task_legacy"}
-        )
-        if failed_wftask.is_legacy_task:
-            failed_wftask_dump[
-                "task_legacy"
-            ] = failed_wftask.task_legacy.model_dump()
-        else:
-            failed_wftask_dump["task"] = failed_wftask.task.model_dump()
+        failed_wftask_dump = failed_wftask.model_dump(exclude={"task"})
+        failed_wftask_dump["task"] = failed_wftask.task.model_dump()
         new_history_item = dict(
             workflowtask=failed_wftask_dump,
             status=WorkflowTaskStatusTypeV2.FAILED,
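
The dump-and-reattach pattern survives the cleanup: the relationship field is excluded from the automatic dump and then serialized explicitly. A standalone sketch with hypothetical pydantic models (model_dump is the pydantic-v2 API used above):

from pydantic import BaseModel

class HistoryTaskSketch(BaseModel):
    name: str

class HistoryWfTaskSketch(BaseModel):
    order: int
    task: HistoryTaskSketch

wftask = HistoryWfTaskSketch(order=0, task=HistoryTaskSketch(name="my-task"))
dump = wftask.model_dump(exclude={"task"})  # scalar fields only
dump["task"] = wftask.task.model_dump()     # re-attach the nested dump
assert dump == {"order": 0, "task": {"name": "my-task"}}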

fractal_server/app/runner/v2/runner.py
@@ -17,7 +17,6 @@ from ..filenames import FILTERS_FILENAME
 from ..filenames import HISTORY_FILENAME
 from ..filenames import IMAGES_FILENAME
 from .runner_functions import no_op_submit_setup_call
-from .runner_functions import run_v1_task_parallel
 from .runner_functions import run_v2_task_compound
 from .runner_functions import run_v2_task_non_parallel
 from .runner_functions import run_v2_task_parallel
@@ -53,16 +52,8 @@ def execute_tasks_v2(
 
     for wftask in wf_task_list:
         task = wftask.task
-        task_legacy = wftask.task_legacy
-        if wftask.is_legacy_task:
-            task_name = task_legacy.name
-            logger.debug(
-                f"SUBMIT {wftask.order}-th task "
-                f'(legacy, name="{task_name}")'
-            )
-        else:
-            task_name = task.name
-            logger.debug(f'SUBMIT {wftask.order}-th task (name="{task_name}")')
+        task_name = task.name
+        logger.debug(f'SUBMIT {wftask.order}-th task (name="{task_name}")')
 
         # PRE TASK EXECUTION
 
@@ -78,67 +69,53 @@ def execute_tasks_v2(
             filters=Filters(**pre_filters),
         )
         # Verify that filtered images comply with task input_types
-        if not wftask.is_legacy_task:
-            for image in filtered_images:
-                if not match_filter(image, Filters(types=task.input_types)):
-                    raise JobExecutionError(
-                        "Invalid filtered image list\n"
-                        f"Task input types: {task.input_types=}\n"
-                        f'Image zarr_url: {image["zarr_url"]}\n'
-                        f'Image types: {image["types"]}\n'
-                    )
+        for image in filtered_images:
+            if not match_filter(image, Filters(types=task.input_types)):
+                raise JobExecutionError(
+                    "Invalid filtered image list\n"
+                    f"Task input types: {task.input_types=}\n"
+                    f'Image zarr_url: {image["zarr_url"]}\n'
+                    f'Image types: {image["types"]}\n'
+                )
 
         # TASK EXECUTION (V2)
-        if not wftask.is_legacy_task:
-            if task.type == "non_parallel":
-                current_task_output = run_v2_task_non_parallel(
-                    images=filtered_images,
-                    zarr_dir=zarr_dir,
-                    wftask=wftask,
-                    task=task,
-                    workflow_dir_local=workflow_dir_local,
-                    workflow_dir_remote=workflow_dir_remote,
-                    executor=executor,
-                    logger_name=logger_name,
-                    submit_setup_call=submit_setup_call,
-                )
-            elif task.type == "parallel":
-                current_task_output = run_v2_task_parallel(
-                    images=filtered_images,
-                    wftask=wftask,
-                    task=task,
-                    workflow_dir_local=workflow_dir_local,
-                    workflow_dir_remote=workflow_dir_remote,
-                    executor=executor,
-                    logger_name=logger_name,
-                    submit_setup_call=submit_setup_call,
-                )
-            elif task.type == "compound":
-                current_task_output = run_v2_task_compound(
-                    images=filtered_images,
-                    zarr_dir=zarr_dir,
-                    wftask=wftask,
-                    task=task,
-                    workflow_dir_local=workflow_dir_local,
-                    workflow_dir_remote=workflow_dir_remote,
-                    executor=executor,
-                    logger_name=logger_name,
-                    submit_setup_call=submit_setup_call,
-                )
-            else:
-                raise ValueError(f"Unexpected error: Invalid {task.type=}.")
-        # TASK EXECUTION (V1)
-        else:
-            current_task_output = run_v1_task_parallel(
-                images=filtered_images,
-                wftask=wftask,
-                task_legacy=task_legacy,
-                executor=executor,
-                logger_name=logger_name,
-                workflow_dir_local=workflow_dir_local,
-                workflow_dir_remote=workflow_dir_remote,
-                submit_setup_call=submit_setup_call,
-            )
+        if task.type == "non_parallel":
+            current_task_output = run_v2_task_non_parallel(
+                images=filtered_images,
+                zarr_dir=zarr_dir,
+                wftask=wftask,
+                task=task,
+                workflow_dir_local=workflow_dir_local,
+                workflow_dir_remote=workflow_dir_remote,
+                executor=executor,
+                logger_name=logger_name,
+                submit_setup_call=submit_setup_call,
+            )
+        elif task.type == "parallel":
+            current_task_output = run_v2_task_parallel(
+                images=filtered_images,
+                wftask=wftask,
+                task=task,
+                workflow_dir_local=workflow_dir_local,
+                workflow_dir_remote=workflow_dir_remote,
+                executor=executor,
+                logger_name=logger_name,
+                submit_setup_call=submit_setup_call,
+            )
+        elif task.type == "compound":
+            current_task_output = run_v2_task_compound(
+                images=filtered_images,
+                zarr_dir=zarr_dir,
+                wftask=wftask,
+                task=task,
+                workflow_dir_local=workflow_dir_local,
+                workflow_dir_remote=workflow_dir_remote,
+                executor=executor,
+                logger_name=logger_name,
+                submit_setup_call=submit_setup_call,
+            )
+        else:
+            raise ValueError(f"Unexpected error: Invalid {task.type=}.")
 
         # POST TASK EXECUTION
 
@@ -191,8 +168,7 @@ def execute_tasks_v2(
             # Update image attributes/types with task output and manifest
             updated_attributes.update(image["attributes"])
             updated_types.update(image["types"])
-            if not wftask.is_legacy_task:
-                updated_types.update(task.output_types)
+            updated_types.update(task.output_types)
 
             # Unset attributes with None value
             updated_attributes = {
@@ -249,8 +225,7 @@ def execute_tasks_v2(
                 if value is not None
             }
            updated_types.update(image["types"])
-            if not wftask.is_legacy_task:
-                updated_types.update(task.output_types)
+            updated_types.update(task.output_types)
             new_image = dict(
                 zarr_url=image["zarr_url"],
                 origin=image["origin"],
@@ -282,10 +257,7 @@ def execute_tasks_v2(
         )
 
         # Find manifest ouptut types
-        if wftask.is_legacy_task:
-            types_from_manifest = {}
-        else:
-            types_from_manifest = task.output_types
+        types_from_manifest = task.output_types
 
         # Find task-output types
         if current_task_output.filters is not None:
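
With the legacy branch gone, execute_tasks_v2 dispatches purely on task.type through the if/elif chain shown above. The same shape can be expressed as a lookup table, which keeps the unknown-type failure explicit; a hedged sketch, not the actual implementation:

from typing import Any, Callable

from fractal_server.app.runner.v2.runner_functions import run_v2_task_compound
from fractal_server.app.runner.v2.runner_functions import run_v2_task_non_parallel
from fractal_server.app.runner.v2.runner_functions import run_v2_task_parallel

# Hypothetical registry mirroring the if/elif chain; in the real module
# each runner also receives images, wftask, executor, and so on.
RUNNERS: dict[str, Callable[..., Any]] = {
    "non_parallel": run_v2_task_non_parallel,
    "parallel": run_v2_task_parallel,
    "compound": run_v2_task_compound,
}

def dispatch(task_type: str, **kwargs: Any) -> Any:
    try:
        runner = RUNNERS[task_type]
    except KeyError:
        raise ValueError(f"Unexpected error: Invalid {task_type=}.")
    return runner(**kwargs)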