fractal-server 1.4.10__py3-none-any.whl → 2.0.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (126)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +3 -7
  3. fractal_server/app/models/linkuserproject.py +9 -0
  4. fractal_server/app/models/security.py +6 -0
  5. fractal_server/app/models/state.py +1 -1
  6. fractal_server/app/models/v1/__init__.py +11 -0
  7. fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
  8. fractal_server/app/models/{job.py → v1/job.py} +5 -5
  9. fractal_server/app/models/{project.py → v1/project.py} +5 -5
  10. fractal_server/app/models/{task.py → v1/task.py} +7 -2
  11. fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
  12. fractal_server/app/models/v2/__init__.py +20 -0
  13. fractal_server/app/models/v2/dataset.py +55 -0
  14. fractal_server/app/models/v2/job.py +51 -0
  15. fractal_server/app/models/v2/project.py +31 -0
  16. fractal_server/app/models/v2/task.py +93 -0
  17. fractal_server/app/models/v2/workflow.py +43 -0
  18. fractal_server/app/models/v2/workflowtask.py +90 -0
  19. fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
  20. fractal_server/app/routes/admin/v2.py +274 -0
  21. fractal_server/app/routes/api/v1/__init__.py +7 -7
  22. fractal_server/app/routes/api/v1/_aux_functions.py +2 -2
  23. fractal_server/app/routes/api/v1/dataset.py +37 -37
  24. fractal_server/app/routes/api/v1/job.py +14 -14
  25. fractal_server/app/routes/api/v1/project.py +23 -21
  26. fractal_server/app/routes/api/v1/task.py +24 -14
  27. fractal_server/app/routes/api/v1/task_collection.py +16 -14
  28. fractal_server/app/routes/api/v1/workflow.py +24 -24
  29. fractal_server/app/routes/api/v1/workflowtask.py +10 -10
  30. fractal_server/app/routes/api/v2/__init__.py +28 -0
  31. fractal_server/app/routes/api/v2/_aux_functions.py +497 -0
  32. fractal_server/app/routes/api/v2/dataset.py +309 -0
  33. fractal_server/app/routes/api/v2/images.py +207 -0
  34. fractal_server/app/routes/api/v2/job.py +200 -0
  35. fractal_server/app/routes/api/v2/project.py +202 -0
  36. fractal_server/app/routes/api/v2/submit.py +220 -0
  37. fractal_server/app/routes/api/v2/task.py +222 -0
  38. fractal_server/app/routes/api/v2/task_collection.py +229 -0
  39. fractal_server/app/routes/api/v2/workflow.py +397 -0
  40. fractal_server/app/routes/api/v2/workflowtask.py +269 -0
  41. fractal_server/app/routes/aux/_job.py +1 -1
  42. fractal_server/app/runner/async_wrap.py +27 -0
  43. fractal_server/app/runner/components.py +5 -0
  44. fractal_server/app/runner/exceptions.py +129 -0
  45. fractal_server/app/runner/executors/slurm/__init__.py +3 -0
  46. fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
  47. fractal_server/app/runner/{_slurm → executors/slurm}/_check_jobs_status.py +1 -1
  48. fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +1 -1
  49. fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
  50. fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
  51. fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +32 -19
  52. fractal_server/app/runner/filenames.py +6 -0
  53. fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
  54. fractal_server/app/runner/task_files.py +103 -0
  55. fractal_server/app/runner/{__init__.py → v1/__init__.py} +22 -20
  56. fractal_server/app/runner/{_common.py → v1/_common.py} +13 -120
  57. fractal_server/app/runner/{_local → v1/_local}/__init__.py +5 -5
  58. fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
  59. fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
  60. fractal_server/app/runner/v1/_slurm/__init__.py +310 -0
  61. fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +3 -9
  62. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
  63. fractal_server/app/runner/v1/common.py +117 -0
  64. fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
  65. fractal_server/app/runner/v2/__init__.py +336 -0
  66. fractal_server/app/runner/v2/_local/__init__.py +167 -0
  67. fractal_server/app/runner/v2/_local/_local_config.py +118 -0
  68. fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
  69. fractal_server/app/runner/v2/_local/executor.py +100 -0
  70. fractal_server/app/runner/{_slurm → v2/_slurm}/__init__.py +34 -45
  71. fractal_server/app/runner/v2/_slurm/_submit_setup.py +83 -0
  72. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +179 -0
  73. fractal_server/app/runner/v2/deduplicate_list.py +22 -0
  74. fractal_server/app/runner/v2/handle_failed_job.py +156 -0
  75. fractal_server/app/runner/v2/merge_outputs.py +38 -0
  76. fractal_server/app/runner/v2/runner.py +267 -0
  77. fractal_server/app/runner/v2/runner_functions.py +341 -0
  78. fractal_server/app/runner/v2/runner_functions_low_level.py +134 -0
  79. fractal_server/app/runner/v2/task_interface.py +43 -0
  80. fractal_server/app/runner/v2/v1_compat.py +21 -0
  81. fractal_server/app/schemas/__init__.py +4 -42
  82. fractal_server/app/schemas/v1/__init__.py +42 -0
  83. fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
  84. fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
  85. fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
  86. fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
  87. fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
  88. fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
  89. fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
  90. fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
  91. fractal_server/app/schemas/v2/__init__.py +34 -0
  92. fractal_server/app/schemas/v2/dataset.py +89 -0
  93. fractal_server/app/schemas/v2/dumps.py +87 -0
  94. fractal_server/app/schemas/v2/job.py +114 -0
  95. fractal_server/app/schemas/v2/manifest.py +159 -0
  96. fractal_server/app/schemas/v2/project.py +37 -0
  97. fractal_server/app/schemas/v2/task.py +120 -0
  98. fractal_server/app/schemas/v2/task_collection.py +105 -0
  99. fractal_server/app/schemas/v2/workflow.py +79 -0
  100. fractal_server/app/schemas/v2/workflowtask.py +119 -0
  101. fractal_server/config.py +5 -4
  102. fractal_server/images/__init__.py +2 -0
  103. fractal_server/images/models.py +50 -0
  104. fractal_server/images/tools.py +85 -0
  105. fractal_server/main.py +11 -3
  106. fractal_server/migrations/env.py +0 -2
  107. fractal_server/migrations/versions/d71e732236cd_v2.py +239 -0
  108. fractal_server/tasks/__init__.py +0 -5
  109. fractal_server/tasks/endpoint_operations.py +13 -19
  110. fractal_server/tasks/utils.py +35 -0
  111. fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
  112. fractal_server/tasks/{background_operations.py → v1/background_operations.py} +18 -50
  113. fractal_server/tasks/v1/get_collection_data.py +14 -0
  114. fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
  115. fractal_server/tasks/v2/background_operations.py +381 -0
  116. fractal_server/tasks/v2/get_collection_data.py +14 -0
  117. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/METADATA +1 -1
  118. fractal_server-2.0.0a1.dist-info/RECORD +160 -0
  119. fractal_server/app/runner/_slurm/.gitignore +0 -2
  120. fractal_server/app/runner/common.py +0 -311
  121. fractal_server-1.4.10.dist-info/RECORD +0 -98
  122. /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
  123. /fractal_server/app/runner/{_local → v1/_local}/executor.py +0 -0
  124. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/LICENSE +0 -0
  125. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/WHEEL +0 -0
  126. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a1.dist-info}/entry_points.txt +0 -0
@@ -29,14 +29,14 @@ import cloudpickle
29
29
  from cfut import SlurmExecutor
30
30
  from cfut.util import random_string
31
31
 
32
- from ....config import get_settings
33
- from ....logger import set_logger
34
- from ....syringe import Inject
35
- from .._common import get_task_file_paths
36
- from .._common import SHUTDOWN_FILENAME
37
- from .._common import TaskFiles
38
- from ..common import JobExecutionError
39
- from ..common import TaskExecutionError
32
+ from .....config import get_settings
33
+ from .....logger import set_logger
34
+ from .....syringe import Inject
35
+ from ...exceptions import JobExecutionError
36
+ from ...exceptions import TaskExecutionError
37
+ from ...filenames import SHUTDOWN_FILENAME
38
+ from ...task_files import get_task_file_paths
39
+ from ...task_files import TaskFiles
40
40
  from ._batching import heuristics
41
41
  from ._executor_wait_thread import FractalSlurmWaitThread
42
42
  from ._slurm_config import get_default_slurm_config
@@ -46,6 +46,7 @@ from ._subprocess_run_as_user import _glob_as_user_strict
46
46
  from ._subprocess_run_as_user import _path_exists_as_user
47
47
  from ._subprocess_run_as_user import _run_command_as_user
48
48
  from fractal_server import __VERSION__
49
+ from fractal_server.app.runner.components import _COMPONENT_KEY_
49
50
 
50
51
 
51
52
  logger = set_logger(__name__)
@@ -544,7 +545,7 @@ class FractalSlurmExecutor(SlurmExecutor):
544
545
  single_task_submission: bool = False,
545
546
  args: Optional[Sequence[Any]] = None,
546
547
  kwargs: Optional[dict] = None,
547
- components: list[Any] = None,
548
+ components: Optional[list[Any]] = None,
548
549
  ) -> Future:
549
550
  """
550
551
  Submit a multi-task job to the pool, where each task is handled via the
@@ -580,6 +581,10 @@ class FractalSlurmExecutor(SlurmExecutor):
580
581
 
581
582
  # Define slurm-job-related files
582
583
  if single_task_submission:
584
+ if components is not None:
585
+ raise ValueError(
586
+ f"{single_task_submission=} but components is not None"
587
+ )
583
588
  job = SlurmJob(
584
589
  slurm_file_prefix=slurm_file_prefix,
585
590
  num_tasks_tot=1,
@@ -603,15 +608,23 @@ class FractalSlurmExecutor(SlurmExecutor):
603
608
  num_tasks_tot=num_tasks_tot,
604
609
  slurm_config=slurm_config,
605
610
  )
606
- job.wftask_file_prefixes = tuple(
607
- get_task_file_paths(
608
- workflow_dir=task_files.workflow_dir,
609
- workflow_dir_user=task_files.workflow_dir_user,
610
- task_order=task_files.task_order,
611
- component=component,
612
- ).file_prefix
613
- for component in components
614
- )
611
+
612
+ _prefixes = []
613
+ for component in components:
614
+ if isinstance(component, dict):
615
+ # This is needed for V2
616
+ actual_component = component.get(_COMPONENT_KEY_, None)
617
+ else:
618
+ actual_component = component
619
+ _prefixes.append(
620
+ get_task_file_paths(
621
+ workflow_dir=task_files.workflow_dir,
622
+ workflow_dir_user=task_files.workflow_dir_user,
623
+ task_order=task_files.task_order,
624
+ component=actual_component,
625
+ ).file_prefix
626
+ )
627
+ job.wftask_file_prefixes = tuple(_prefixes)
615
628
 
616
629
  # Define I/O pickle file names/paths
617
630
  job.input_pickle_files = tuple(
@@ -1001,7 +1014,7 @@ class FractalSlurmExecutor(SlurmExecutor):
1001
1014
  cmdlines.append(
1002
1015
  (
1003
1016
  f"{python_worker_interpreter}"
1004
- " -m fractal_server.app.runner._slurm.remote "
1017
+ " -m fractal_server.app.runner.executors.slurm.remote "
1005
1018
  f"--input-file {input_pickle_file} "
1006
1019
  f"--output-file {output_pickle_file}"
1007
1020
  )
@@ -0,0 +1,6 @@
1
# Canonical filenames for artifacts written inside a job's workflow
# directory, collected in one module so all runner submodules share a
# single definition.
HISTORY_FILENAME = "history.json"  # task-execution history (imported by runner v1 `_common`)
FILTERS_FILENAME = "filters.json"  # presumably V2 dataset-filters dump — confirm against v2 runner
IMAGES_FILENAME = "images.json"  # presumably V2 image-list dump — confirm against v2 runner
METADATA_FILENAME = "metadata.json"  # dataset-metadata dump (imported by runner v1 `_common`)
SHUTDOWN_FILENAME = "shutdown"  # sentinel file checked by the slurm executor to trigger shutdown
WORKFLOW_LOG_FILENAME = "workflow.log"  # main per-job log file (imported by runner v1)
@@ -0,0 +1,39 @@
1
+ from typing import Optional
2
+
3
+
4
def set_start_and_last_task_index(
    num_tasks: int,
    first_task_index: Optional[int] = None,
    last_task_index: Optional[int] = None,
) -> tuple[int, int]:
    """
    Resolve and validate the (first, last) task-index pair.

    Missing values fall back to the full range: `first_task_index`
    defaults to 0 and `last_task_index` to `num_tasks - 1`.

    Arguments:
        num_tasks:
            Total number of tasks in a workflow task list.
        first_task_index:
            Positional index of the first task to execute.
        last_task_index:
            Positional index of the last task to execute.

    Returns:
        The validated `(first_task_index, last_task_index)` pair.

    Raises:
        ValueError: If either index is negative, if the last index
            exceeds `num_tasks - 1`, or if the first index is larger
            than the last one.
    """
    # Apply defaults (run the whole task list when unspecified)
    first_task_index = 0 if first_task_index is None else first_task_index
    last_task_index = (
        num_tasks - 1 if last_task_index is None else last_task_index
    )

    # Reject negative indices (also catches num_tasks == 0 defaults)
    for name, index in (
        ("first_task_index", first_task_index),
        ("last_task_index", last_task_index),
    ):
        if index < 0:
            raise ValueError(f"{name}={index} cannot be negative")

    # Reject out-of-range and inverted ranges
    if last_task_index > num_tasks - 1:
        raise ValueError(
            f"last_task_index={last_task_index} cannot be larger than "
            f"(num_tasks-1)={num_tasks - 1}"
        )
    if first_task_index > last_task_index:
        raise ValueError(
            f"first_task_index={first_task_index} cannot be larger than "
            f"last_task_index={last_task_index}"
        )

    return (first_task_index, last_task_index)
@@ -0,0 +1,103 @@
1
+ from pathlib import Path
2
+ from typing import Optional
3
+
4
+
5
def sanitize_component(value: str) -> str:
    """
    Replace the characters {" ", "/", "."} with underscores.

    Example: 'plate.zarr/B/03/0' becomes 'plate_zarr_B_03_0'.
    """
    # Single C-level pass instead of chained str.replace calls
    return value.translate(str.maketrans(" /.", "___"))


class TaskFiles:
    """
    Group all file paths pertaining to a task.

    Attributes:
        workflow_dir:
            Server-owned directory storing all task-execution-related
            files (inputs, outputs, errors and job meta files); users
            cannot write directly to this folder.
        workflow_dir_user:
            User-side directory with the same scope as `workflow_dir`,
            where the user can write.
        task_order:
            Positional order of the task within a workflow.
        component:
            Specific component to run the task for (relevant for tasks
            executed in parallel over many components).
        file_prefix:
            Prefix shared by all task-related files.
        args:
            Path of the input-arguments JSON file.
        metadiff:
            Path of the output JSON file with the metadata update.
        out:
            Path of the task-execution stdout.
        err:
            Path of the task-execution stderr.
        log:
            Path of the task-execution log.
    """

    workflow_dir: Path
    workflow_dir_user: Path
    task_order: Optional[int] = None
    component: Optional[str] = None  # FIXME: this is actually for V1 only

    file_prefix: str
    args: Path
    out: Path
    err: Path
    log: Path
    metadiff: Path

    def __init__(
        self,
        workflow_dir: Path,
        workflow_dir_user: Path,
        task_order: Optional[int] = None,
        component: Optional[str] = None,
    ):
        self.workflow_dir = workflow_dir
        self.workflow_dir_user = workflow_dir_user
        self.task_order = task_order
        self.component = component

        # The prefix is the task order (or the literal "task" when no
        # order is given), optionally extended with a sanitized
        # component tag for parallel tasks.
        order_part = "task" if task_order is None else str(task_order)
        if component is None:
            component_part = ""
        else:
            component_part = f"_par_{sanitize_component(str(component))}"
        self.file_prefix = f"{order_part}{component_part}"

        # All per-task artifacts live in the user-writable directory
        user_dir = self.workflow_dir_user
        self.args = user_dir / f"{self.file_prefix}.args.json"
        self.out = user_dir / f"{self.file_prefix}.out"
        self.err = user_dir / f"{self.file_prefix}.err"
        self.log = user_dir / f"{self.file_prefix}.log"
        self.metadiff = user_dir / f"{self.file_prefix}.metadiff.json"


def get_task_file_paths(
    workflow_dir: Path,
    workflow_dir_user: Path,
    task_order: Optional[int] = None,
    component: Optional[str] = None,
) -> TaskFiles:
    """
    Return the corresponding `TaskFiles` object.

    Thin constructor wrapper; NOTE(review): unlike the V1 version this
    is not `lru_cache`-decorated, so every call builds a new object.
    """
    return TaskFiles(
        workflow_dir=workflow_dir,
        workflow_dir_user=workflow_dir_user,
        task_order=task_order,
        component=component,
    )
@@ -22,26 +22,26 @@ import traceback
22
22
  from pathlib import Path
23
23
  from typing import Optional
24
24
 
25
- from ... import __VERSION__
26
- from ...config import get_settings
27
- from ...logger import set_logger
28
- from ...syringe import Inject
29
- from ...utils import get_timestamp
30
- from ..db import DB
31
- from ..models import ApplyWorkflow
32
- from ..models import Dataset
33
- from ..models import Workflow
34
- from ..models import WorkflowTask
35
- from ..schemas import JobStatusType
36
- from ._common import WORKFLOW_LOG_FILENAME
25
+ from ....logger import set_logger
26
+ from ....syringe import Inject
27
+ from ....utils import get_timestamp
28
+ from ...db import DB
29
+ from ...models import ApplyWorkflow
30
+ from ...models import Dataset
31
+ from ...models import Workflow
32
+ from ...models import WorkflowTask
33
+ from ...schemas.v1 import JobStatusTypeV1
34
+ from ..exceptions import JobExecutionError
35
+ from ..exceptions import TaskExecutionError
36
+ from ..filenames import WORKFLOW_LOG_FILENAME
37
37
  from ._local import process_workflow as local_process_workflow
38
38
  from ._slurm import process_workflow as slurm_process_workflow
39
39
  from .common import close_job_logger
40
- from .common import JobExecutionError
41
- from .common import TaskExecutionError
42
40
  from .common import validate_workflow_compatibility # noqa: F401
43
41
  from .handle_failed_job import assemble_history_failed_job
44
42
  from .handle_failed_job import assemble_meta_failed_job
43
+ from fractal_server import __VERSION__
44
+ from fractal_server.config import get_settings
45
45
 
46
46
 
47
47
  _backends = {}
@@ -122,7 +122,7 @@ async def submit_workflow(
122
122
  log_msg += (
123
123
  f"Cannot fetch workflow {workflow_id} from database\n"
124
124
  )
125
- job.status = JobStatusType.FAILED
125
+ job.status = JobStatusTypeV1.FAILED
126
126
  job.end_timestamp = get_timestamp()
127
127
  job.log = log_msg
128
128
  db_sync.merge(job)
@@ -158,7 +158,9 @@ async def submit_workflow(
158
158
  WORKFLOW_DIR_USER = WORKFLOW_DIR
159
159
  elif FRACTAL_RUNNER_BACKEND == "slurm":
160
160
 
161
- from ._slurm._subprocess_run_as_user import _mkdir_as_user
161
+ from ..executors.slurm._subprocess_run_as_user import (
162
+ _mkdir_as_user,
163
+ )
162
164
 
163
165
  WORKFLOW_DIR_USER = (
164
166
  Path(user_cache_dir) / f"{WORKFLOW_DIR.name}"
@@ -259,7 +261,7 @@ async def submit_workflow(
259
261
  db_sync.merge(output_dataset)
260
262
 
261
263
  # Update job DB entry
262
- job.status = JobStatusType.DONE
264
+ job.status = JobStatusTypeV1.DONE
263
265
  job.end_timestamp = get_timestamp()
264
266
  with log_file_path.open("r") as f:
265
267
  logs = f.read()
@@ -289,7 +291,7 @@ async def submit_workflow(
289
291
 
290
292
  db_sync.merge(output_dataset)
291
293
 
292
- job.status = JobStatusType.FAILED
294
+ job.status = JobStatusTypeV1.FAILED
293
295
  job.end_timestamp = get_timestamp()
294
296
 
295
297
  exception_args_string = "\n".join(e.args)
@@ -322,7 +324,7 @@ async def submit_workflow(
322
324
 
323
325
  db_sync.merge(output_dataset)
324
326
 
325
- job.status = JobStatusType.FAILED
327
+ job.status = JobStatusTypeV1.FAILED
326
328
  job.end_timestamp = get_timestamp()
327
329
  error = e.assemble_error()
328
330
  job.log = f"JOB ERROR in Fractal job {job.id}:\nTRACEBACK:\n{error}"
@@ -351,7 +353,7 @@ async def submit_workflow(
351
353
 
352
354
  db_sync.merge(output_dataset)
353
355
 
354
- job.status = JobStatusType.FAILED
356
+ job.status = JobStatusTypeV1.FAILED
355
357
  job.end_timestamp = get_timestamp()
356
358
  job.log = (
357
359
  f"UNKNOWN ERROR in Fractal job {job.id}\n"
@@ -11,7 +11,6 @@ import subprocess # nosec
11
11
  import traceback
12
12
  from concurrent.futures import Executor
13
13
  from copy import deepcopy
14
- from functools import lru_cache
15
14
  from functools import partial
16
15
  from pathlib import Path
17
16
  from shlex import split as shlex_split
@@ -19,22 +18,19 @@ from typing import Any
19
18
  from typing import Callable
20
19
  from typing import Optional
21
20
 
22
- from ...config import get_settings
23
- from ...logger import get_logger
24
- from ...syringe import Inject
25
- from ..models import Task
26
- from ..models import WorkflowTask
27
- from ..schemas import WorkflowTaskStatusType
28
- from .common import JobExecutionError
29
- from .common import TaskExecutionError
21
+ from ....config import get_settings
22
+ from ....logger import get_logger
23
+ from ....syringe import Inject
24
+ from ...models import Task
25
+ from ...models import WorkflowTask
26
+ from ...schemas.v1 import WorkflowTaskStatusTypeV1
27
+ from ..exceptions import JobExecutionError
28
+ from ..exceptions import TaskExecutionError
30
29
  from .common import TaskParameters
31
30
  from .common import write_args_file
32
-
33
-
34
- HISTORY_FILENAME = "history.json"
35
- METADATA_FILENAME = "metadata.json"
36
- SHUTDOWN_FILENAME = "shutdown"
37
- WORKFLOW_LOG_FILENAME = "workflow.log"
31
+ from fractal_server.app.runner.filenames import HISTORY_FILENAME
32
+ from fractal_server.app.runner.filenames import METADATA_FILENAME
33
+ from fractal_server.app.runner.task_files import get_task_file_paths
38
34
 
39
35
 
40
36
  def no_op_submit_setup_call(
@@ -42,7 +38,6 @@ def no_op_submit_setup_call(
42
38
  wftask: WorkflowTask,
43
39
  workflow_dir: Path,
44
40
  workflow_dir_user: Path,
45
- task_pars: TaskParameters,
46
41
  ) -> dict:
47
42
  """
48
43
  Default (no-operation) interface of submit_setup_call.
@@ -50,14 +45,6 @@ def no_op_submit_setup_call(
50
45
  return {}
51
46
 
52
47
 
53
- def sanitize_component(value: str) -> str:
54
- """
55
- Remove {" ", "/", "."} form a string, e.g. going from
56
- 'plate.zarr/B/03/0' to 'plate_zarr_B_03_0'.
57
- """
58
- return value.replace(" ", "_").replace("/", "_").replace(".", "_")
59
-
60
-
61
48
  def _task_needs_image_list(_task: Task) -> bool:
62
49
  """
63
50
  Whether a task requires `metadata["image"]` in its `args.json` file.
@@ -78,98 +65,6 @@ def _task_needs_image_list(_task: Task) -> bool:
78
65
  return False
79
66
 
80
67
 
81
- class TaskFiles:
82
- """
83
- Group all file paths pertaining to a task
84
-
85
- Attributes:
86
- workflow_dir:
87
- Server-owned directory to store all task-execution-related relevant
88
- files (inputs, outputs, errors, and all meta files related to the
89
- job execution). Note: users cannot write directly to this folder.
90
- workflow_dir_user:
91
- User-side directory with the same scope as `workflow_dir`, and
92
- where a user can write.
93
- task_order:
94
- Positional order of the task within a workflow.
95
- component:
96
- Specific component to run the task for (relevant for tasks that
97
- will be executed in parallel over many components).
98
- file_prefix:
99
- Prefix for all task-related files.
100
- args:
101
- Path for input json file.
102
- metadiff:
103
- Path for output json file with metadata update.
104
- out:
105
- Path for task-execution stdout.
106
- err:
107
- Path for task-execution stderr.
108
- """
109
-
110
- workflow_dir: Path
111
- workflow_dir_user: Path
112
- task_order: Optional[int] = None
113
- component: Optional[str] = None
114
-
115
- file_prefix: str
116
- args: Path
117
- out: Path
118
- err: Path
119
- metadiff: Path
120
-
121
- def __init__(
122
- self,
123
- workflow_dir: Path,
124
- workflow_dir_user: Path,
125
- task_order: Optional[int] = None,
126
- component: Optional[str] = None,
127
- ):
128
- self.workflow_dir = workflow_dir
129
- self.workflow_dir_user = workflow_dir_user
130
- self.task_order = task_order
131
- self.component = component
132
-
133
- if self.component is not None:
134
- component_safe = sanitize_component(str(self.component))
135
- component_safe = f"_par_{component_safe}"
136
- else:
137
- component_safe = ""
138
-
139
- if self.task_order is not None:
140
- order = str(self.task_order)
141
- else:
142
- order = "task"
143
- self.file_prefix = f"{order}{component_safe}"
144
- self.args = self.workflow_dir_user / f"{self.file_prefix}.args.json"
145
- self.out = self.workflow_dir_user / f"{self.file_prefix}.out"
146
- self.err = self.workflow_dir_user / f"{self.file_prefix}.err"
147
- self.metadiff = (
148
- self.workflow_dir_user / f"{self.file_prefix}.metadiff.json"
149
- )
150
-
151
-
152
- @lru_cache()
153
- def get_task_file_paths(
154
- workflow_dir: Path,
155
- workflow_dir_user: Path,
156
- task_order: Optional[int] = None,
157
- component: Optional[str] = None,
158
- ) -> TaskFiles:
159
- """
160
- Return the corrisponding TaskFiles object
161
-
162
- This function is mainly used as a cache to avoid instantiating needless
163
- objects.
164
- """
165
- return TaskFiles(
166
- workflow_dir=workflow_dir,
167
- workflow_dir_user=workflow_dir_user,
168
- task_order=task_order,
169
- component=component,
170
- )
171
-
172
-
173
68
  def _call_command_wrapper(cmd: str, stdout: Path, stderr: Path) -> None:
174
69
  """
175
70
  Call a command and write its stdout and stderr to files
@@ -331,7 +226,7 @@ def call_single_task(
331
226
  wftask_dump["task"] = wftask.task.model_dump()
332
227
  new_history_item = dict(
333
228
  workflowtask=wftask_dump,
334
- status=WorkflowTaskStatusType.DONE,
229
+ status=WorkflowTaskStatusTypeV1.DONE,
335
230
  parallelization=None,
336
231
  )
337
232
  updated_history = task_pars.history.copy()
@@ -529,7 +424,6 @@ def call_parallel_task(
529
424
  try:
530
425
  extra_setup = submit_setup_call(
531
426
  wftask=wftask,
532
- task_pars=task_pars_depend,
533
427
  workflow_dir=workflow_dir,
534
428
  workflow_dir_user=workflow_dir_user,
535
429
  )
@@ -592,7 +486,7 @@ def call_parallel_task(
592
486
  wftask_dump["task"] = wftask.task.model_dump()
593
487
  new_history_item = dict(
594
488
  workflowtask=wftask_dump,
595
- status=WorkflowTaskStatusType.DONE,
489
+ status=WorkflowTaskStatusTypeV1.DONE,
596
490
  parallelization=dict(
597
491
  parallelization_level=wftask.parallelization_level,
598
492
  component_list=component_list,
@@ -681,7 +575,6 @@ def execute_tasks(
681
575
  try:
682
576
  extra_setup = submit_setup_call(
683
577
  wftask=this_wftask,
684
- task_pars=current_task_pars,
685
578
  workflow_dir=workflow_dir,
686
579
  workflow_dir_user=workflow_dir_user,
687
580
  )
@@ -23,11 +23,11 @@ from pathlib import Path
23
23
  from typing import Any
24
24
  from typing import Optional
25
25
 
26
- from ...models import Workflow
27
- from .._common import execute_tasks
28
- from ..common import async_wrap
29
- from ..common import set_start_and_last_task_index
30
- from ..common import TaskParameters
26
+ from ....models import Workflow # FIXME: this is v1 specific
27
+ from ...async_wrap import async_wrap
28
+ from ...set_start_and_last_task_index import set_start_and_last_task_index
29
+ from .._common import execute_tasks # FIXME: this is v1 specific
30
+ from ..common import TaskParameters # FIXME: this is v1 specific
31
31
  from ._submit_setup import _local_submit_setup
32
32
  from .executor import FractalThreadPoolExecutor
33
33
 
@@ -19,9 +19,9 @@ from pydantic import BaseModel
19
19
  from pydantic import Extra
20
20
  from pydantic.error_wrappers import ValidationError
21
21
 
22
- from ....config import get_settings
23
- from ....syringe import Inject
24
- from ...models import WorkflowTask
22
+ from .....config import get_settings
23
+ from .....syringe import Inject
24
+ from ....models.v1 import WorkflowTask
25
25
 
26
26
 
27
27
  class LocalBackendConfigError(ValueError):
@@ -63,15 +63,14 @@ def get_local_backend_config(
63
63
  The sources for `parallel_tasks_per_job` attributes, starting from the
64
64
  highest-priority one, are
65
65
 
66
- 1. Properties in `wftask.meta` (which, for `WorkflowTask`s added through
67
- `Workflow.insert_task`, also includes `wftask.task.meta`);
66
+ 1. Properties in `wftask.meta`;
68
67
  2. The general content of the local-backend configuration file;
69
68
  3. The default value (`None`).
70
69
 
71
70
  Arguments:
72
71
  wftask:
73
- WorkflowTask for which the backend configuration is is to be
74
- prepared.
72
+ WorkflowTask (V1) for which the backend configuration should
73
+ be prepared.
75
74
  config_path:
76
75
  Path of local-backend configuration file; if `None`, use
77
76
  `FRACTAL_LOCAL_CONFIG_FILE` variable from settings.
@@ -14,8 +14,7 @@ Submodule to define _local_submit_setup
14
14
  from pathlib import Path
15
15
  from typing import Optional
16
16
 
17
- from ...models import WorkflowTask
18
- from ..common import TaskParameters
17
+ from ....models.v1 import WorkflowTask
19
18
  from ._local_config import get_local_backend_config
20
19
 
21
20
 
@@ -24,7 +23,6 @@ def _local_submit_setup(
24
23
  wftask: WorkflowTask,
25
24
  workflow_dir: Optional[Path] = None,
26
25
  workflow_dir_user: Optional[Path] = None,
27
- task_pars: Optional[TaskParameters] = None,
28
26
  ) -> dict[str, object]:
29
27
  """
30
28
  Collect WorfklowTask-specific configuration parameters from different
@@ -33,8 +31,6 @@ def _local_submit_setup(
33
31
  Arguments:
34
32
  wftask:
35
33
  WorkflowTask for which the configuration is to be assembled
36
- task_pars:
37
- Not used in this function.
38
34
  workflow_dir:
39
35
  Not used in this function.
40
36
  workflow_dir_user: