fractal-server 2.14.4a0__py3-none-any.whl → 2.14.6__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
Files changed (110)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/db/__init__.py +2 -2
  3. fractal_server/app/models/security.py +8 -8
  4. fractal_server/app/models/user_settings.py +8 -10
  5. fractal_server/app/models/v2/accounting.py +2 -3
  6. fractal_server/app/models/v2/dataset.py +1 -2
  7. fractal_server/app/models/v2/history.py +3 -4
  8. fractal_server/app/models/v2/job.py +10 -11
  9. fractal_server/app/models/v2/project.py +1 -2
  10. fractal_server/app/models/v2/task.py +13 -14
  11. fractal_server/app/models/v2/task_group.py +15 -16
  12. fractal_server/app/models/v2/workflow.py +1 -2
  13. fractal_server/app/models/v2/workflowtask.py +6 -7
  14. fractal_server/app/routes/admin/v2/accounting.py +3 -4
  15. fractal_server/app/routes/admin/v2/job.py +13 -14
  16. fractal_server/app/routes/admin/v2/project.py +2 -4
  17. fractal_server/app/routes/admin/v2/task.py +11 -13
  18. fractal_server/app/routes/admin/v2/task_group.py +15 -17
  19. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +5 -8
  20. fractal_server/app/routes/api/v2/__init__.py +2 -0
  21. fractal_server/app/routes/api/v2/_aux_functions.py +7 -9
  22. fractal_server/app/routes/api/v2/_aux_functions_history.py +1 -1
  23. fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +1 -3
  24. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +5 -6
  25. fractal_server/app/routes/api/v2/dataset.py +6 -8
  26. fractal_server/app/routes/api/v2/history.py +5 -8
  27. fractal_server/app/routes/api/v2/images.py +2 -3
  28. fractal_server/app/routes/api/v2/job.py +5 -6
  29. fractal_server/app/routes/api/v2/pre_submission_checks.py +1 -3
  30. fractal_server/app/routes/api/v2/project.py +2 -4
  31. fractal_server/app/routes/api/v2/status_legacy.py +2 -4
  32. fractal_server/app/routes/api/v2/submit.py +3 -4
  33. fractal_server/app/routes/api/v2/task.py +6 -7
  34. fractal_server/app/routes/api/v2/task_collection.py +11 -13
  35. fractal_server/app/routes/api/v2/task_collection_custom.py +4 -4
  36. fractal_server/app/routes/api/v2/task_group.py +6 -8
  37. fractal_server/app/routes/api/v2/task_group_lifecycle.py +6 -9
  38. fractal_server/app/routes/api/v2/task_version_update.py +270 -0
  39. fractal_server/app/routes/api/v2/workflow.py +5 -6
  40. fractal_server/app/routes/api/v2/workflow_import.py +3 -5
  41. fractal_server/app/routes/api/v2/workflowtask.py +2 -114
  42. fractal_server/app/routes/auth/current_user.py +2 -2
  43. fractal_server/app/routes/pagination.py +2 -3
  44. fractal_server/app/runner/exceptions.py +16 -22
  45. fractal_server/app/runner/executors/base_runner.py +19 -7
  46. fractal_server/app/runner/executors/call_command_wrapper.py +52 -0
  47. fractal_server/app/runner/executors/local/get_local_config.py +2 -3
  48. fractal_server/app/runner/executors/local/runner.py +52 -13
  49. fractal_server/app/runner/executors/slurm_common/_batching.py +2 -3
  50. fractal_server/app/runner/executors/slurm_common/_slurm_config.py +27 -29
  51. fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +95 -63
  52. fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +2 -3
  53. fractal_server/app/runner/executors/slurm_common/remote.py +47 -92
  54. fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py +22 -22
  55. fractal_server/app/runner/executors/slurm_ssh/run_subprocess.py +2 -3
  56. fractal_server/app/runner/executors/slurm_ssh/runner.py +4 -6
  57. fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py +2 -6
  58. fractal_server/app/runner/executors/slurm_sudo/runner.py +9 -18
  59. fractal_server/app/runner/set_start_and_last_task_index.py +2 -5
  60. fractal_server/app/runner/shutdown.py +5 -11
  61. fractal_server/app/runner/task_files.py +3 -13
  62. fractal_server/app/runner/v2/_local.py +3 -4
  63. fractal_server/app/runner/v2/_slurm_ssh.py +5 -7
  64. fractal_server/app/runner/v2/_slurm_sudo.py +8 -10
  65. fractal_server/app/runner/v2/runner.py +4 -5
  66. fractal_server/app/runner/v2/runner_functions.py +20 -35
  67. fractal_server/app/runner/v2/submit_workflow.py +7 -10
  68. fractal_server/app/runner/v2/task_interface.py +2 -3
  69. fractal_server/app/runner/versions.py +3 -13
  70. fractal_server/app/schemas/user.py +2 -4
  71. fractal_server/app/schemas/user_group.py +1 -2
  72. fractal_server/app/schemas/user_settings.py +19 -21
  73. fractal_server/app/schemas/v2/dataset.py +2 -3
  74. fractal_server/app/schemas/v2/dumps.py +13 -15
  75. fractal_server/app/schemas/v2/history.py +6 -7
  76. fractal_server/app/schemas/v2/job.py +17 -18
  77. fractal_server/app/schemas/v2/manifest.py +12 -13
  78. fractal_server/app/schemas/v2/status_legacy.py +2 -2
  79. fractal_server/app/schemas/v2/task.py +29 -30
  80. fractal_server/app/schemas/v2/task_collection.py +8 -9
  81. fractal_server/app/schemas/v2/task_group.py +22 -23
  82. fractal_server/app/schemas/v2/workflow.py +1 -2
  83. fractal_server/app/schemas/v2/workflowtask.py +27 -29
  84. fractal_server/app/security/__init__.py +10 -12
  85. fractal_server/config.py +32 -42
  86. fractal_server/images/models.py +2 -4
  87. fractal_server/images/tools.py +4 -7
  88. fractal_server/logger.py +3 -5
  89. fractal_server/ssh/_fabric.py +41 -13
  90. fractal_server/string_tools.py +2 -2
  91. fractal_server/syringe.py +1 -1
  92. fractal_server/tasks/v2/local/collect.py +2 -3
  93. fractal_server/tasks/v2/local/deactivate.py +1 -1
  94. fractal_server/tasks/v2/local/reactivate.py +1 -1
  95. fractal_server/tasks/v2/ssh/collect.py +256 -245
  96. fractal_server/tasks/v2/ssh/deactivate.py +210 -187
  97. fractal_server/tasks/v2/ssh/reactivate.py +154 -146
  98. fractal_server/tasks/v2/utils_background.py +2 -3
  99. fractal_server/types/__init__.py +1 -2
  100. fractal_server/types/validators/_filter_validators.py +1 -2
  101. fractal_server/utils.py +4 -5
  102. fractal_server/zip_tools.py +1 -1
  103. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.6.dist-info}/METADATA +2 -9
  104. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.6.dist-info}/RECORD +107 -108
  105. fractal_server/app/history/__init__.py +0 -0
  106. fractal_server/app/runner/executors/slurm_common/utils_executors.py +0 -58
  107. fractal_server/app/runner/v2/runner_functions_low_level.py +0 -122
  108. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.6.dist-info}/LICENSE +0 -0
  109. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.6.dist-info}/WHEEL +0 -0
  110. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.6.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/executors/base_runner.py

@@ -19,14 +19,17 @@ TASK_TYPES_MULTISUBMIT: list[TaskTypeType] = [
 logger = set_logger(__name__)
 
 
-class BaseRunner(object):
+class BaseRunner:
     """
     Base class for Fractal runners.
     """
 
     def submit(
         self,
-        func: callable,
+        base_command: str,
+        workflow_task_order: int,
+        workflow_task_id: int,
+        task_name: str,
         parameters: dict[str, Any],
         history_unit_id: int,
         task_type: TaskTypeType,
@@ -38,7 +41,10 @@ class BaseRunner(object):
         Run a single fractal task.
 
         Args:
-            func: Function to be executed.
+            base_command:
+            workflow_task_order:
+            workflow_task_id:
+            task_name:
             parameters: Dictionary of parameters.
             history_unit_id:
                 Database ID of the corresponding `HistoryUnit` entry.
@@ -51,7 +57,10 @@ class BaseRunner(object):
 
     def multisubmit(
         self,
-        func: callable,
+        base_command: str,
+        workflow_task_order: int,
+        workflow_task_id: int,
+        task_name: str,
         list_parameters: list[dict[str, Any]],
         history_unit_ids: list[int],
         list_task_files: list[TaskFiles],
@@ -63,7 +72,10 @@ class BaseRunner(object):
         Run a parallel fractal task.
 
         Args:
-            func: Function to be executed.
+            base_command:
+            workflow_task_order:
+            workflow_task_id:
+            task_name:
             parameters:
                 Dictionary of parameters. Must include `zarr_urls` key.
             history_unit_ids:
@@ -138,9 +150,9 @@ class BaseRunner(object):
             f"{len(list_parameters)=}."
         )
 
-        subfolders = set(
+        subfolders = {
             task_file.wftask_subfolder_local for task_file in list_task_files
-        )
+        }
         if len(subfolders) != 1:
             raise ValueError(f"More than one subfolders: {subfolders}.")
 
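The key change in this interface is that runners no longer receive a ready-made Python callable: they now get the task's command line plus workflow-task metadata, and execute the command themselves (via the new `call_command_wrapper` module below). A condensed, hedged restatement of the signature change, not actual fractal-server code:

```python
from typing import Any


class RunnerBefore:
    # fractal-server 2.14.4a0 (sketch): the runner was handed a Python callable
    def submit(self, func: callable, parameters: dict[str, Any], **kwargs):
        ...


class RunnerAfter:
    # fractal-server 2.14.6 (sketch): the runner gets the task command and
    # workflow-task metadata, and runs the command itself
    def submit(
        self,
        base_command: str,
        workflow_task_order: int,
        workflow_task_id: int,
        task_name: str,
        parameters: dict[str, Any],
        **kwargs,  # history_unit_id, task_type, ... are unchanged by this diff
    ):
        ...
```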
fractal_server/app/runner/executors/call_command_wrapper.py (new file)

@@ -0,0 +1,52 @@
+import os
+import shlex
+import shutil
+import subprocess # nosec
+
+from fractal_server.app.runner.exceptions import TaskExecutionError
+from fractal_server.string_tools import validate_cmd
+
+
+def call_command_wrapper(*, cmd: str, log_path: str) -> None:
+    """
+    Call a command and write its stdout and stderr to files
+
+    Args:
+        cmd:
+        log_path:
+    """
+    try:
+        validate_cmd(cmd)
+    except ValueError as e:
+        raise TaskExecutionError(f"Invalid command. Original error: {str(e)}")
+
+    split_cmd = shlex.split(cmd)
+
+    # Verify that task command is executable
+    if shutil.which(split_cmd[0]) is None:
+        msg = (
+            f'Command "{split_cmd[0]}" is not valid. '
+            "Hint: make sure that it is executable."
+        )
+        raise TaskExecutionError(msg)
+
+    with open(log_path, "w") as fp_log:
+        try:
+            result = subprocess.run( # nosec
+                split_cmd,
+                stderr=fp_log,
+                stdout=fp_log,
+            )
+        except Exception as e:
+            # This is likely unreachable
+            raise e
+
+    if result.returncode != 0:
+        stderr = ""
+        if os.path.isfile(log_path):
+            with open(log_path) as fp_stderr:
+                stderr = fp_stderr.read()
+        raise TaskExecutionError(
+            f"Task failed with returncode={result.returncode}.\n"
+            f"STDERR: {stderr}"
+        )
fractal_server/app/runner/executors/local/get_local_config.py

@@ -14,7 +14,6 @@ Submodule to handle the local-backend configuration for a WorkflowTask
 import json
 from pathlib import Path
 from typing import Literal
-from typing import Optional
 
 from pydantic import BaseModel
 from pydantic import ConfigDict
@@ -45,7 +44,7 @@ class LocalBackendConfig(BaseModel):
     """
 
     model_config = ConfigDict(extra="forbid")
-    parallel_tasks_per_job: Optional[int] = None
+    parallel_tasks_per_job: int | None = None
 
     @property
     def batch_size(self) -> int:
@@ -55,7 +54,7 @@ class LocalBackendConfig(BaseModel):
 def get_local_backend_config(
     wftask: WorkflowTaskV2,
     which_type: Literal["non_parallel", "parallel"],
-    config_path: Optional[Path] = None,
+    config_path: Path | None = None,
     tot_tasks: int = 1,
 ) -> LocalBackendConfig:
     """
fractal_server/app/runner/executors/local/runner.py

@@ -1,9 +1,11 @@
+import json
 from concurrent.futures import Future
 from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path
 from typing import Any
 from typing import Literal
 
+from ..call_command_wrapper import call_command_wrapper
 from .get_local_config import LocalBackendConfig
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.runner.exceptions import TaskExecutionError
@@ -16,10 +18,40 @@ from fractal_server.app.runner.v2.db_tools import update_status_of_history_unit
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
 from fractal_server.logger import set_logger
 
-
 logger = set_logger(__name__)
 
 
+def run_single_task(
+    base_command: str,
+    parameters: dict[str, Any],
+    task_files: TaskFiles,
+):
+
+    # Write args.json file
+    with open(task_files.args_file_local, "w") as f:
+        json.dump(parameters, f)
+
+    # Run command
+    full_command = (
+        f"{base_command} "
+        f"--args-json {task_files.args_file_local} "
+        f"--out-json {task_files.metadiff_file_local}"
+    )
+
+    call_command_wrapper(
+        cmd=full_command,
+        log_path=task_files.log_file_local,
+    )
+
+    try:
+        with open(task_files.metadiff_file_local) as f:
+            out_meta = json.load(f)
+        return out_meta
+    except FileNotFoundError:
+        # Command completed, but it produced no metadiff file
+        return None
+
+
 class LocalRunner(BaseRunner):
     executor: ThreadPoolExecutor
     root_dir_local: Path
@@ -47,17 +79,20 @@ class LocalRunner(BaseRunner):
 
     def submit(
         self,
-        func: callable,
+        base_command: str,
+        workflow_task_order: int,
+        workflow_task_id: int,
+        task_name: str,
         parameters: dict[str, Any],
         history_unit_id: int,
         task_files: TaskFiles,
+        config: LocalBackendConfig,
         task_type: Literal[
             "non_parallel",
             "converter_non_parallel",
             "compound",
             "converter_compound",
         ],
-        config: LocalBackendConfig,
         user_id: int,
     ) -> tuple[Any, Exception]:
         logger.debug("[submit] START")
@@ -69,9 +104,10 @@
 
             # SUBMISSION PHASE
             future = self.executor.submit(
-                func,
+                run_single_task,
+                base_command=base_command,
                 parameters=parameters,
-                remote_files=task_files.remote_files_dict,
+                task_files=task_files,
             )
         except Exception as e:
             logger.error(
@@ -111,7 +147,10 @@
 
     def multisubmit(
         self,
-        func: callable,
+        base_command: str,
+        workflow_task_order: int,
+        workflow_task_id: int,
+        task_name: str,
         list_parameters: list[dict],
         history_unit_ids: list[int],
         list_task_files: list[TaskFiles],
@@ -139,8 +178,9 @@
         )
 
         workdir_local = list_task_files[0].wftask_subfolder_local
-        if task_type == "parallel":
-            workdir_local.mkdir()
+        # Note: the `mkdir` is not needed for compound tasks, but it is
+        # needed for parallel tasks
+        workdir_local.mkdir(exist_ok=True)
 
         # Set `n_elements` and `parallel_tasks_per_job`
         n_elements = len(list_parameters)
@@ -178,11 +218,10 @@
                 positional_index = ind_chunk + ind_within_chunk
                 try:
                     future = self.executor.submit(
-                        func,
-                        parameters=kwargs,
-                        remote_files=list_task_files[
-                            positional_index
-                        ].remote_files_dict,
+                        run_single_task,
+                        base_command=base_command,
+                        parameters=list_parameters[positional_index],
+                        task_files=list_task_files[positional_index],
                     )
                     active_futures[positional_index] = future
                 except Exception as e:
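With hypothetical paths, the command that `run_single_task` hands to `call_command_wrapper` looks like this (a sketch of the f-string assembled above, not additional fractal-server code):

```python
base_command = "python /opt/tasks/my_task.py"           # hypothetical
args_file = "/data/job_1/0_my_task/args.json"           # task_files.args_file_local
metadiff_file = "/data/job_1/0_my_task/metadiff.json"   # task_files.metadiff_file_local

full_command = (
    f"{base_command} "
    f"--args-json {args_file} "
    f"--out-json {metadiff_file}"
)
# -> "python /opt/tasks/my_task.py --args-json /data/job_1/0_my_task/args.json
#     --out-json /data/job_1/0_my_task/metadiff.json"
```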
fractal_server/app/runner/executors/slurm_common/_batching.py

@@ -12,7 +12,6 @@
 Submodule to determine the number of total/parallel tasks per SLURM job.
 """
 import math
-from typing import Optional
 
 from fractal_server.logger import set_logger
 
@@ -58,8 +57,8 @@ def heuristics(
     # Number of parallel components (always known)
     tot_tasks: int,
     # Optional WorkflowTask attributes:
-    tasks_per_job: Optional[int] = None,
-    parallel_tasks_per_job: Optional[int] = None,
+    tasks_per_job: int | None = None,
+    parallel_tasks_per_job: int | None = None,
     # Task requirements (multiple possible sources):
     cpus_per_task: int,
     mem_per_task: int,
fractal_server/app/runner/executors/slurm_common/_slurm_config.py

@@ -14,8 +14,6 @@ Submodule to handle the SLURM configuration for a WorkflowTask
 """
 import json
 from pathlib import Path
-from typing import Optional
-from typing import Union
 
 from pydantic import BaseModel
 from pydantic import ConfigDict
@@ -56,16 +54,16 @@ class _SlurmConfigSet(BaseModel):
 
     model_config = ConfigDict(extra="forbid")
 
-    partition: Optional[str] = None
-    cpus_per_task: Optional[int] = None
-    mem: Optional[Union[int, str]] = None
-    constraint: Optional[str] = None
-    gres: Optional[str] = None
-    time: Optional[str] = None
-    account: Optional[str] = None
-    extra_lines: Optional[list[str]] = None
-    pre_submission_commands: Optional[list[str]] = None
-    gpus: Optional[str] = None
+    partition: str | None = None
+    cpus_per_task: int | None = None
+    mem: int | str | None = None
+    constraint: str | None = None
+    gres: str | None = None
+    time: str | None = None
+    account: str | None = None
+    extra_lines: list[str] | None = None
+    pre_submission_commands: list[str] | None = None
+    gpus: str | None = None
 
 
 class _BatchingConfigSet(BaseModel):
@@ -89,8 +87,8 @@ class _BatchingConfigSet(BaseModel):
 
     target_cpus_per_job: int
     max_cpus_per_job: int
-    target_mem_per_job: Union[int, str]
-    max_mem_per_job: Union[int, str]
+    target_mem_per_job: int | str
+    max_mem_per_job: int | str
     target_num_jobs: int
     max_num_jobs: int
 
@@ -143,13 +141,13 @@ class SlurmConfigFile(BaseModel):
     model_config = ConfigDict(extra="forbid")
 
     default_slurm_config: _SlurmConfigSet
-    gpu_slurm_config: Optional[_SlurmConfigSet] = None
+    gpu_slurm_config: _SlurmConfigSet | None = None
     batching_config: _BatchingConfigSet
-    user_local_exports: Optional[dict[str, str]] = None
+    user_local_exports: dict[str, str] | None = None
 
 
 def load_slurm_config_file(
-    config_path: Optional[Path] = None,
+    config_path: Path | None = None,
 ) -> SlurmConfigFile:
     """
     Load a SLURM configuration file and validate its content with
@@ -264,23 +262,23 @@ class SlurmConfig(BaseModel):
     shebang_line: str = "#!/bin/sh"
 
     # Optional SLURM parameters
-    job_name: Optional[str] = None
-    constraint: Optional[str] = None
-    gres: Optional[str] = None
-    gpus: Optional[str] = None
-    time: Optional[str] = None
-    account: Optional[str] = None
+    job_name: str | None = None
+    constraint: str | None = None
+    gres: str | None = None
+    gpus: str | None = None
+    time: str | None = None
+    account: str | None = None
 
     # Free-field attribute for extra lines to be added to the SLURM job
     # preamble
-    extra_lines: Optional[list[str]] = Field(default_factory=list)
+    extra_lines: list[str] | None = Field(default_factory=list)
 
     # Variables that will be `export`ed in the SLURM submission script
-    user_local_exports: Optional[dict[str, str]] = None
+    user_local_exports: dict[str, str] | None = None
 
     # Metaparameters needed to combine multiple tasks in each SLURM job
-    tasks_per_job: Optional[int] = None
-    parallel_tasks_per_job: Optional[int] = None
+    tasks_per_job: int | None = None
+    parallel_tasks_per_job: int | None = None
     target_cpus_per_job: int
     max_cpus_per_job: int
     target_mem_per_job: int
@@ -328,7 +326,7 @@ class SlurmConfig(BaseModel):
 
     def to_sbatch_preamble(
         self,
-        remote_export_dir: Optional[str] = None,
+        remote_export_dir: str | None = None,
    ) -> list[str]:
        """
        Compile `SlurmConfig` object into the preamble of a SLURM submission
@@ -410,7 +408,7 @@ class SlurmConfig(BaseModel):
        return self.tasks_per_job
 
 
-def _parse_mem_value(raw_mem: Union[str, int]) -> int:
+def _parse_mem_value(raw_mem: str | int) -> int:
     """
     Convert a memory-specification string into an integer (in MB units), or
     simply return the input if it is already an integer.
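As a hedged illustration of the Pydantic models touched above, a minimal `SlurmConfigFile` can be constructed directly (field values are hypothetical; only fields visible in this diff are used):

```python
from fractal_server.app.runner.executors.slurm_common._slurm_config import (
    SlurmConfigFile,
    _BatchingConfigSet,
    _SlurmConfigSet,
)

config = SlurmConfigFile(
    default_slurm_config=_SlurmConfigSet(
        partition="main",
        cpus_per_task=1,
        mem="4G",  # `mem` accepts int | str | None
    ),
    batching_config=_BatchingConfigSet(
        target_cpus_per_job=8,
        max_cpus_per_job=16,
        target_mem_per_job="32G",
        max_mem_per_job="64G",
        target_num_jobs=100,
        max_num_jobs=200,
    ),
    # gpu_slurm_config and user_local_exports are optional and default to None
)
```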