fractal-server 2.14.4a0__py3-none-any.whl → 2.14.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/db/__init__.py +2 -2
  3. fractal_server/app/models/security.py +8 -8
  4. fractal_server/app/models/user_settings.py +8 -10
  5. fractal_server/app/models/v2/accounting.py +2 -3
  6. fractal_server/app/models/v2/dataset.py +1 -2
  7. fractal_server/app/models/v2/history.py +3 -4
  8. fractal_server/app/models/v2/job.py +10 -11
  9. fractal_server/app/models/v2/project.py +1 -2
  10. fractal_server/app/models/v2/task.py +13 -14
  11. fractal_server/app/models/v2/task_group.py +15 -16
  12. fractal_server/app/models/v2/workflow.py +1 -2
  13. fractal_server/app/models/v2/workflowtask.py +6 -7
  14. fractal_server/app/routes/admin/v2/accounting.py +3 -4
  15. fractal_server/app/routes/admin/v2/job.py +13 -14
  16. fractal_server/app/routes/admin/v2/project.py +2 -4
  17. fractal_server/app/routes/admin/v2/task.py +11 -13
  18. fractal_server/app/routes/admin/v2/task_group.py +15 -17
  19. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +5 -8
  20. fractal_server/app/routes/api/v2/__init__.py +2 -0
  21. fractal_server/app/routes/api/v2/_aux_functions.py +7 -9
  22. fractal_server/app/routes/api/v2/_aux_functions_history.py +1 -1
  23. fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +1 -3
  24. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +5 -6
  25. fractal_server/app/routes/api/v2/dataset.py +6 -8
  26. fractal_server/app/routes/api/v2/history.py +5 -8
  27. fractal_server/app/routes/api/v2/images.py +2 -3
  28. fractal_server/app/routes/api/v2/job.py +5 -6
  29. fractal_server/app/routes/api/v2/pre_submission_checks.py +1 -3
  30. fractal_server/app/routes/api/v2/project.py +2 -4
  31. fractal_server/app/routes/api/v2/status_legacy.py +2 -4
  32. fractal_server/app/routes/api/v2/submit.py +3 -4
  33. fractal_server/app/routes/api/v2/task.py +6 -7
  34. fractal_server/app/routes/api/v2/task_collection.py +11 -13
  35. fractal_server/app/routes/api/v2/task_collection_custom.py +4 -4
  36. fractal_server/app/routes/api/v2/task_group.py +6 -8
  37. fractal_server/app/routes/api/v2/task_group_lifecycle.py +6 -9
  38. fractal_server/app/routes/api/v2/task_version_update.py +270 -0
  39. fractal_server/app/routes/api/v2/workflow.py +5 -6
  40. fractal_server/app/routes/api/v2/workflow_import.py +3 -5
  41. fractal_server/app/routes/api/v2/workflowtask.py +2 -114
  42. fractal_server/app/routes/auth/current_user.py +2 -2
  43. fractal_server/app/routes/pagination.py +2 -3
  44. fractal_server/app/runner/exceptions.py +16 -22
  45. fractal_server/app/runner/executors/base_runner.py +19 -7
  46. fractal_server/app/runner/executors/call_command_wrapper.py +52 -0
  47. fractal_server/app/runner/executors/local/get_local_config.py +2 -3
  48. fractal_server/app/runner/executors/local/runner.py +52 -13
  49. fractal_server/app/runner/executors/slurm_common/_batching.py +2 -3
  50. fractal_server/app/runner/executors/slurm_common/_slurm_config.py +27 -29
  51. fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +95 -63
  52. fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +2 -3
  53. fractal_server/app/runner/executors/slurm_common/remote.py +47 -92
  54. fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py +22 -22
  55. fractal_server/app/runner/executors/slurm_ssh/run_subprocess.py +2 -3
  56. fractal_server/app/runner/executors/slurm_ssh/runner.py +4 -6
  57. fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py +2 -6
  58. fractal_server/app/runner/executors/slurm_sudo/runner.py +9 -18
  59. fractal_server/app/runner/set_start_and_last_task_index.py +2 -5
  60. fractal_server/app/runner/shutdown.py +5 -11
  61. fractal_server/app/runner/task_files.py +3 -13
  62. fractal_server/app/runner/v2/_local.py +3 -4
  63. fractal_server/app/runner/v2/_slurm_ssh.py +5 -7
  64. fractal_server/app/runner/v2/_slurm_sudo.py +8 -10
  65. fractal_server/app/runner/v2/runner.py +4 -5
  66. fractal_server/app/runner/v2/runner_functions.py +20 -35
  67. fractal_server/app/runner/v2/submit_workflow.py +7 -10
  68. fractal_server/app/runner/v2/task_interface.py +2 -3
  69. fractal_server/app/runner/versions.py +3 -13
  70. fractal_server/app/schemas/user.py +2 -4
  71. fractal_server/app/schemas/user_group.py +1 -2
  72. fractal_server/app/schemas/user_settings.py +19 -21
  73. fractal_server/app/schemas/v2/dataset.py +2 -3
  74. fractal_server/app/schemas/v2/dumps.py +13 -15
  75. fractal_server/app/schemas/v2/history.py +6 -7
  76. fractal_server/app/schemas/v2/job.py +17 -18
  77. fractal_server/app/schemas/v2/manifest.py +12 -13
  78. fractal_server/app/schemas/v2/status_legacy.py +2 -2
  79. fractal_server/app/schemas/v2/task.py +29 -30
  80. fractal_server/app/schemas/v2/task_collection.py +8 -9
  81. fractal_server/app/schemas/v2/task_group.py +22 -23
  82. fractal_server/app/schemas/v2/workflow.py +1 -2
  83. fractal_server/app/schemas/v2/workflowtask.py +27 -29
  84. fractal_server/app/security/__init__.py +10 -12
  85. fractal_server/config.py +32 -42
  86. fractal_server/images/models.py +2 -4
  87. fractal_server/images/tools.py +4 -7
  88. fractal_server/logger.py +3 -5
  89. fractal_server/ssh/_fabric.py +41 -13
  90. fractal_server/string_tools.py +2 -2
  91. fractal_server/syringe.py +1 -1
  92. fractal_server/tasks/v2/local/collect.py +2 -3
  93. fractal_server/tasks/v2/local/deactivate.py +1 -1
  94. fractal_server/tasks/v2/local/reactivate.py +1 -1
  95. fractal_server/tasks/v2/ssh/collect.py +256 -245
  96. fractal_server/tasks/v2/ssh/deactivate.py +210 -187
  97. fractal_server/tasks/v2/ssh/reactivate.py +154 -146
  98. fractal_server/tasks/v2/utils_background.py +2 -3
  99. fractal_server/types/__init__.py +1 -2
  100. fractal_server/types/validators/_filter_validators.py +1 -2
  101. fractal_server/utils.py +4 -5
  102. fractal_server/zip_tools.py +1 -1
  103. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.6.dist-info}/METADATA +2 -9
  104. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.6.dist-info}/RECORD +107 -108
  105. fractal_server/app/history/__init__.py +0 -0
  106. fractal_server/app/runner/executors/slurm_common/utils_executors.py +0 -58
  107. fractal_server/app/runner/v2/runner_functions_low_level.py +0 -122
  108. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.6.dist-info}/LICENSE +0 -0
  109. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.6.dist-info}/WHEEL +0 -0
  110. {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.6.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/executors/slurm_sudo/runner.py

@@ -5,7 +5,6 @@ import subprocess  # nosec
 import sys
 from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path
-from typing import Optional
 
 from ..slurm_common.base_slurm_runner import BaseSlurmRunner
 from ..slurm_common.slurm_job_task_models import SlurmJob
@@ -21,7 +20,7 @@ logger = set_logger(__name__)
 
 def _subprocess_run_or_raise(
     full_command: str,
-) -> Optional[subprocess.CompletedProcess]:
+) -> subprocess.CompletedProcess | None:
     try:
         output = subprocess.run(  # nosec
             shlex.split(full_command),
@@ -43,7 +42,7 @@ def _subprocess_run_or_raise(
 
 class SudoSlurmRunner(BaseSlurmRunner):
     slurm_user: str
-    slurm_account: Optional[str] = None
+    slurm_account: str | None = None
 
     def __init__(
         self,
@@ -51,11 +50,11 @@ class SudoSlurmRunner(BaseSlurmRunner):
         # Common
         root_dir_local: Path,
         root_dir_remote: Path,
-        common_script_lines: Optional[list[str]] = None,
-        user_cache_dir: Optional[str] = None,
-        poll_interval: Optional[int] = None,
+        common_script_lines: list[str] | None = None,
+        user_cache_dir: str | None = None,
+        poll_interval: int | None = None,
         # Specific
-        slurm_account: Optional[str] = None,
+        slurm_account: str | None = None,
         slurm_user: str,
     ) -> None:
         """
@@ -102,17 +101,13 @@
         source_target_list.extend(
             [
                 (
-                    task.output_pickle_file_remote,
-                    task.output_pickle_file_local,
+                    task.output_file_remote,
+                    task.output_file_local,
                 ),
                 (
                     task.task_files.log_file_remote,
                     task.task_files.log_file_local,
                 ),
-                (
-                    task.task_files.args_file_remote,
-                    task.task_files.args_file_local,
-                ),
                 (
                     task.task_files.metadiff_file_remote,
                     task.task_files.metadiff_file_local,
@@ -121,17 +116,14 @@
         )
 
         for source, target in source_target_list:
-            # NOTE: By setting encoding=None, we read/write bytes instead
-            # of strings; this is needed to also handle pickle files.
             try:
                 res = _run_command_as_user(
                     cmd=f"cat {source}",
                     user=self.slurm_user,
-                    encoding=None,
                     check=True,
                 )
                 # Write local file
-                with open(target, "wb") as f:
+                with open(target, "w") as f:
                     f.write(res.stdout)
                 logger.debug(
                     f"[_fetch_artifacts_single_job] Copied {source} into "
@@ -171,7 +163,6 @@
         res = _run_command_as_user(
             cmd=cmd,
             user=self.slurm_user,
-            encoding="utf-8",
             check=True,
         )
         return res.stdout
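With the pickle-based artifact exchange gone (note the removed `args_file` pair above), everything this runner copies back after a job — output, log, and metadiff files — is plain text, so the bytes-mode special case (`encoding=None` plus `open(target, "wb")`) could be dropped. A minimal sketch of the resulting text-mode copy, with a hypothetical `run_as_user` helper standing in for this module's `_run_command_as_user`:

```python
import shlex
import subprocess


def run_as_user(cmd: str, user: str) -> subprocess.CompletedProcess:
    # Hypothetical stand-in for `_run_command_as_user`: run `cmd` as
    # `user` via sudo, capturing stdout as text (str, not bytes).
    return subprocess.run(  # nosec
        shlex.split(f"sudo --non-interactive -u {user} {cmd}"),
        capture_output=True,
        encoding="utf-8",
        check=True,
    )


def fetch_text_artifact(source: str, target: str, slurm_user: str) -> None:
    # `cat` the file as the SLURM user, then write the captured text
    # locally; "w" (not "wb") suffices now that no artifact is a pickle.
    res = run_as_user(cmd=f"cat {source}", user=slurm_user)
    with open(target, "w") as f:
        f.write(res.stdout)
```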
fractal_server/app/runner/set_start_and_last_task_index.py

@@ -1,10 +1,7 @@
-from typing import Optional
-
-
 def set_start_and_last_task_index(
     num_tasks: int,
-    first_task_index: Optional[int] = None,
-    last_task_index: Optional[int] = None,
+    first_task_index: int | None = None,
+    last_task_index: int | None = None,
 ) -> tuple[int, int]:
     """
     Handle `first_task_index` and `last_task_index`, by setting defaults and
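This hunk is representative of the bulk of the release: `typing.Optional[X]` and `typing.Union[X, Y]` are rewritten as PEP 604 unions (`X | None`, `X | Y`), which are valid in annotations on Python ≥ 3.10. The rewrite is purely cosmetic, as this quick check shows:

```python
from typing import Optional, Union

# PEP 604 unions compare equal to their typing-module spellings,
# so annotations like `int | None` mean exactly `Optional[int]`.
assert int | None == Optional[int]
assert int | str == Union[int, str]

# `|` unions are also usable directly in isinstance checks (3.10+).
assert isinstance(3, int | str)
```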
fractal_server/app/runner/shutdown.py

@@ -38,21 +38,15 @@ async def cleanup_after_shutdown(*, jobsV2: list[int], logger_name: str):
 
     if len(jobsV2_db) == 0:
         logger.info(
-            (
-                "All jobs associated to this app are "
-                "either done or failed. Exit."
-            )
+            "All jobs associated to this app are "
+            "either done or failed. Exit."
         )
         return
     else:
-        logger.info(
-            (f"Some jobs are still 'submitted' " f"{jobsV2_db=}")
-        )
+        logger.info(f"Some jobs are still 'submitted' {jobsV2_db=}")
         logger.info(
-            (
-                "Graceful shutdown reached its maximum time, "
-                "but some jobs are still submitted"
-            )
+            "Graceful shutdown reached its maximum time, "
+            "but some jobs are still submitted"
         )
 
     for job in jobsV2_db:
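These logging changes are behavior-preserving cleanups: adjacent string literals are concatenated at compile time, so the extra wrapping parentheses added nothing, and the two f-string fragments collapse into one. A quick check:

```python
# Adjacent (f-)string literals are joined at compile time, with or
# without an extra pair of parentheses around the argument.
jobsV2_db = [1, 2]
old = (f"Some jobs are still 'submitted' " f"{jobsV2_db=}")
new = f"Some jobs are still 'submitted' {jobsV2_db=}"
assert old == new  # the old and new logger.info arguments are identical
```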
fractal_server/app/runner/task_files.py

@@ -1,6 +1,4 @@
 from pathlib import Path
-from typing import Optional
-from typing import Union
 
 from pydantic import BaseModel
 
@@ -12,7 +10,7 @@ MULTISUBMIT_PREFIX = "par"
 
 
 def task_subfolder_name(
-    order: Union[int, str],
+    order: int | str,
     task_name: str,
 ) -> str:
     """
@@ -48,8 +46,8 @@ class TaskFiles(BaseModel):
     task_order: int
 
     # Per-single-component
-    component: Optional[str] = None
-    prefix: Optional[str] = None
+    component: str | None = None
+    prefix: str | None = None
 
     def _check_component(self):
         if self.component is None:
@@ -134,14 +132,6 @@ class TaskFiles(BaseModel):
     def metadiff_file_remote(self) -> str:
         return self.metadiff_file_remote_path.as_posix()
 
-    @property
-    def remote_files_dict(self) -> dict[str, str]:
-        return dict(
-            args_file_remote=self.args_file_remote,
-            metadiff_file_remote=self.metadiff_file_remote,
-            log_file_remote=self.log_file_remote,
-        )
-
 
 def enrich_task_files_multisubmit(
     *,
fractal_server/app/runner/v2/_local.py

@@ -1,5 +1,4 @@
 from pathlib import Path
-from typing import Optional
 
 from ...models.v2 import DatasetV2
 from ...models.v2 import WorkflowV2
@@ -16,9 +15,9 @@ def process_workflow(
     dataset: DatasetV2,
     workflow_dir_local: Path,
     job_id: int,
-    workflow_dir_remote: Optional[Path] = None,
-    first_task_index: Optional[int] = None,
-    last_task_index: Optional[int] = None,
+    workflow_dir_remote: Path | None = None,
+    first_task_index: int | None = None,
+    last_task_index: int | None = None,
     logger_name: str,
     job_attribute_filters: AttributeFilters,
     job_type_filters: dict[str, bool],
fractal_server/app/runner/v2/_slurm_ssh.py

@@ -13,11 +13,9 @@
 """
 Slurm Backend
 
-This backend runs fractal workflows in a SLURM cluster using Clusterfutures
-Executor objects.
+This backend runs fractal workflows in a SLURM cluster.
 """
 from pathlib import Path
-from typing import Optional
 
 from ....ssh._fabric import FractalSSH
 from ...models.v2 import DatasetV2
@@ -39,14 +37,14 @@ def process_workflow(
     dataset: DatasetV2,
     workflow_dir_local: Path,
     job_id: int,
-    workflow_dir_remote: Optional[Path] = None,
-    first_task_index: Optional[int] = None,
-    last_task_index: Optional[int] = None,
+    workflow_dir_remote: Path | None = None,
+    first_task_index: int | None = None,
+    last_task_index: int | None = None,
     logger_name: str,
     job_attribute_filters: AttributeFilters,
     job_type_filters: dict[str, bool],
     fractal_ssh: FractalSSH,
-    worker_init: Optional[str] = None,
+    worker_init: str | None = None,
     user_id: int,
     **kwargs,  # not used
 ) -> None:
fractal_server/app/runner/v2/_slurm_sudo.py

@@ -13,11 +13,9 @@
 """
 Slurm Backend
 
-This backend runs fractal workflows in a SLURM cluster using Clusterfutures
-Executor objects.
+This backend runs fractal workflows in a SLURM cluster.
 """
 from pathlib import Path
-from typing import Optional
 
 from ...models.v2 import DatasetV2
 from ...models.v2 import WorkflowV2
@@ -34,18 +32,18 @@ def process_workflow(
     dataset: DatasetV2,
     workflow_dir_local: Path,
     job_id: int,
-    workflow_dir_remote: Optional[Path] = None,
-    first_task_index: Optional[int] = None,
-    last_task_index: Optional[int] = None,
+    workflow_dir_remote: Path | None = None,
+    first_task_index: int | None = None,
+    last_task_index: int | None = None,
     logger_name: str,
     job_attribute_filters: AttributeFilters,
     job_type_filters: dict[str, bool],
     user_id: int,
     # Slurm-specific
-    user_cache_dir: Optional[str] = None,
-    slurm_user: Optional[str] = None,
-    slurm_account: Optional[str] = None,
-    worker_init: Optional[str] = None,
+    user_cache_dir: str | None = None,
+    slurm_user: str | None = None,
+    slurm_account: str | None = None,
+    worker_init: str | None = None,
 ) -> None:
     """
     Process workflow (SLURM backend public interface).
fractal_server/app/runner/v2/runner.py

@@ -1,11 +1,10 @@
 import logging
+from collections.abc import Callable
 from copy import copy
 from copy import deepcopy
 from pathlib import Path
 from typing import Any
-from typing import Callable
 from typing import Literal
-from typing import Optional
 
 from sqlalchemy.orm.attributes import flag_modified
 from sqlmodel import delete
@@ -76,13 +75,13 @@ def execute_tasks_v2(
     user_id: int,
     workflow_dir_local: Path,
     job_id: int,
-    workflow_dir_remote: Optional[Path] = None,
-    logger_name: Optional[str] = None,
+    workflow_dir_remote: Path | None = None,
+    logger_name: str | None = None,
     get_runner_config: Callable[
         [
             WorkflowTaskV2,
             Literal["non_parallel", "parallel"],
-            Optional[Path],
+            Path | None,
         ],
         Any,
     ],
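`typing.Callable` has been a deprecated alias of `collections.abc.Callable` since Python 3.9, and both accept the same subscription syntax, so swapping the import leaves the `get_runner_config` annotation unchanged in meaning. A self-contained restatement of that annotation, with the model class stubbed out:

```python
from collections.abc import Callable
from pathlib import Path
from typing import Any, Literal


class WorkflowTaskV2:
    """Stub standing in for fractal_server's WorkflowTaskV2 model."""


# Same shape as the `get_runner_config` parameter of execute_tasks_v2:
# given a workflow task, which side of the task to configure, and an
# optional remote directory, return a backend-specific config object.
GetRunnerConfig = Callable[
    [WorkflowTaskV2, Literal["non_parallel", "parallel"], Path | None],
    Any,
]
```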
fractal_server/app/runner/v2/runner_functions.py

@@ -1,9 +1,7 @@
-import functools
+from collections.abc import Callable
 from pathlib import Path
 from typing import Any
-from typing import Callable
 from typing import Literal
-from typing import Optional
 
 from pydantic import BaseModel
 from pydantic import ConfigDict
@@ -12,7 +10,6 @@ from ..exceptions import JobExecutionError
 from ..exceptions import TaskOutputValidationError
 from .db_tools import update_status_of_history_unit
 from .deduplicate_list import deduplicate_list
-from .runner_functions_low_level import run_single_task
 from .task_interface import InitTaskOutput
 from .task_interface import TaskOutput
 from fractal_server.app.db import get_sync_db
@@ -131,7 +128,7 @@ def run_v2_task_non_parallel(
         [
             WorkflowTaskV2,
             Literal["non_parallel", "parallel"],
-            Optional[Path],
+            Path | None,
             int,
         ],
         Any,
@@ -207,13 +204,10 @@ def run_v2_task_non_parallel(
     )
 
     result, exception = runner.submit(
-        functools.partial(
-            run_single_task,
-            command=task.command_non_parallel,
-            workflow_task_order=wftask.order,
-            workflow_task_id=wftask.task_id,
-            task_name=wftask.task.name,
-        ),
+        base_command=task.command_non_parallel,
+        workflow_task_order=wftask.order,
+        workflow_task_id=wftask.task_id,
+        task_name=wftask.task.name,
         parameters=function_kwargs,
         task_type=task_type,
         task_files=task_files,
@@ -256,7 +250,7 @@ def run_v2_task_parallel(
         [
             WorkflowTaskV2,
             Literal["non_parallel", "parallel"],
-            Optional[Path],
+            Path | None,
             int,
         ],
         Any,
@@ -335,13 +329,10 @@ def run_v2_task_parallel(
     )
 
     results, exceptions = runner.multisubmit(
-        functools.partial(
-            run_single_task,
-            command=task.command_parallel,
-            workflow_task_order=wftask.order,
-            workflow_task_id=wftask.task_id,
-            task_name=wftask.task.name,
-        ),
+        base_command=task.command_parallel,
+        workflow_task_order=wftask.order,
+        workflow_task_id=wftask.task_id,
+        task_name=wftask.task.name,
         list_parameters=list_function_kwargs,
         task_type="parallel",
         list_task_files=list_task_files,
@@ -390,7 +381,7 @@ def run_v2_task_compound(
         [
             WorkflowTaskV2,
             Literal["non_parallel", "parallel"],
-            Optional[Path],
+            Path | None,
             int,
         ],
         Any,
@@ -456,13 +447,10 @@ def run_v2_task_compound(
         ],
     )
     result, exception = runner.submit(
-        functools.partial(
-            run_single_task,
-            command=task.command_non_parallel,
-            workflow_task_order=wftask.order,
-            workflow_task_id=wftask.task_id,
-            task_name=wftask.task.name,
-        ),
+        base_command=task.command_non_parallel,
+        workflow_task_order=wftask.order,
+        workflow_task_id=wftask.task_id,
+        task_name=wftask.task.name,
        parameters=function_kwargs,
         task_type=task_type,
         task_files=task_files_init,
@@ -559,13 +547,10 @@ def run_v2_task_compound(
     history_unit_ids = [history_unit.id for history_unit in history_units]
 
     results, exceptions = runner.multisubmit(
-        functools.partial(
-            run_single_task,
-            command=task.command_parallel,
-            workflow_task_order=wftask.order,
-            workflow_task_id=wftask.task_id,
-            task_name=wftask.task.name,
-        ),
+        base_command=task.command_parallel,
+        workflow_task_order=wftask.order,
+        workflow_task_id=wftask.task_id,
+        task_name=wftask.task.name,
         list_parameters=list_function_kwargs,
         task_type=task_type,
         list_task_files=list_task_files,
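This is the core refactor of the release: callers no longer pre-bind `run_single_task` with `functools.partial` and hand the runner a ready-made callable; they pass `base_command` plus the task-identification kwargs directly, and command-line construction moves into the runners (note the new `call_command_wrapper.py` and the deleted `runner_functions_low_level.py` in the file list). A toy before/after sketch of the calling convention; the `Runner` classes below are invented for illustration:

```python
import functools
from typing import Any


def run_single_task(parameters: dict[str, Any], *, command: str) -> Any:
    """Old-style low-level helper (removed in 2.14.6)."""
    ...


class OldRunner:
    def submit(self, func, *, parameters: dict[str, Any]) -> Any:
        # Old style: the caller pre-bound everything except `parameters`.
        return func(parameters)


class NewRunner:
    def submit(self, *, base_command: str, parameters: dict[str, Any], **meta) -> Any:
        # New style: the runner itself turns `base_command` + `parameters`
        # into a full command line and executes it.
        ...


# Before: runner.submit(functools.partial(run_single_task, command=cmd), parameters=kwargs)
# After:  runner.submit(base_command=cmd, parameters=kwargs, task_name=name)
```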
fractal_server/app/runner/v2/submit_workflow.py

@@ -8,7 +8,6 @@ the individual backends.
 import os
 import traceback
 from pathlib import Path
-from typing import Optional
 
 from sqlalchemy.orm import Session as DBSyncSession
 
@@ -69,10 +68,10 @@ def submit_workflow(
     job_id: int,
     user_id: int,
     user_settings: UserSettings,
-    worker_init: Optional[str] = None,
-    slurm_user: Optional[str] = None,
-    user_cache_dir: Optional[str] = None,
-    fractal_ssh: Optional[FractalSSH] = None,
+    worker_init: str | None = None,
+    slurm_user: str | None = None,
+    user_cache_dir: str | None = None,
+    fractal_ssh: FractalSSH | None = None,
 ) -> None:
     """
     Prepares a workflow and applies it to a dataset
@@ -109,11 +108,9 @@ def submit_workflow(
 
     with next(DB.get_sync_db()) as db_sync:
         try:
-            job: Optional[JobV2] = db_sync.get(JobV2, job_id)
-            dataset: Optional[DatasetV2] = db_sync.get(DatasetV2, dataset_id)
-            workflow: Optional[WorkflowV2] = db_sync.get(
-                WorkflowV2, workflow_id
-            )
+            job: JobV2 | None = db_sync.get(JobV2, job_id)
+            dataset: DatasetV2 | None = db_sync.get(DatasetV2, dataset_id)
+            workflow: WorkflowV2 | None = db_sync.get(WorkflowV2, workflow_id)
         except Exception as e:
             logger.error(
                 f"Error connecting to the database. Original error: {str(e)}"
fractal_server/app/runner/v2/task_interface.py

@@ -1,5 +1,4 @@
 from typing import Any
-from typing import Optional
 
 from pydantic import BaseModel
 from pydantic import ConfigDict
@@ -57,7 +56,7 @@ class InitTaskOutput(BaseModel):
 
 def _cast_and_validate_TaskOutput(
     task_output: dict[str, Any]
-) -> Optional[TaskOutput]:
+) -> TaskOutput | None:
     try:
         validated_task_output = TaskOutput(**task_output)
         return validated_task_output
@@ -71,7 +70,7 @@ def _cast_and_validate_TaskOutput(
 
 def _cast_and_validate_InitTaskOutput(
     init_task_output: dict[str, Any],
-) -> Optional[InitTaskOutput]:
+) -> InitTaskOutput | None:
     try:
         validated_init_task_output = InitTaskOutput(**init_task_output)
         return validated_init_task_output
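Both helpers follow the same cast-or-fail pattern around a pydantic model; only the return annotations change here. A generic sketch of the pattern (the `except` branch is an assumption, since both hunks truncate before it):

```python
from typing import Any

from pydantic import BaseModel, ValidationError


class TaskOutput(BaseModel):
    ...


class TaskOutputValidationError(ValueError):
    ...


def cast_and_validate(task_output: dict[str, Any]) -> TaskOutput | None:
    # Try to build the model from the raw dict returned by a task;
    # on failure, re-raise as a domain-specific error (assumed branch).
    try:
        return TaskOutput(**task_output)
    except ValidationError as e:
        raise TaskOutputValidationError(
            f"Validation of task output failed. Original error: {e}"
        )
```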
fractal_server/app/runner/versions.py

@@ -1,25 +1,15 @@
 import json
 import sys
-from typing import Union
-
-import cloudpickle
 
 import fractal_server
 
 
-def get_versions() -> dict[str, Union[list[int], str]]:
+def get_versions() -> dict[str, list[int] | str]:
     """
-    Extract versions of Python, fractal-server and cloudpickle.
-
-    This information is useful to check compatibility of two Python
-    interpreters when running tasks: the current interpreter (which prepares
-    the input pickles and orchestrates workflow execution) and a remote
-    interpreter (e.g. the one defined in the `FRACTAL_SLURM_WORKER_PYTHON`
-    configuration variable) that executes the tasks.
+    Extract versions of Python and fractal-server.
     """
     versions = dict(
         python=list(sys.version_info[:3]),
-        cloudpickle=cloudpickle.__version__,
         fractal_server=fractal_server.__VERSION__,
     )
     return versions
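With cloudpickle gone there is no second interpreter whose pickle protocol must match, so the compatibility rationale disappears from the docstring and `get_versions` reports only the Python and fractal-server versions, consistent with its `dict[str, list[int] | str]` annotation. Expected shape of the result (values are illustrative):

```python
import sys

import fractal_server
from fractal_server.app.runner.versions import get_versions

versions = get_versions()
# e.g. {"python": [3, 11, 9], "fractal_server": "2.14.6"}
assert versions["python"] == list(sys.version_info[:3])
assert versions["fractal_server"] == fractal_server.__VERSION__
```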
fractal_server/app/schemas/user.py

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from fastapi_users import schemas
 from pydantic import BaseModel
 from pydantic import ConfigDict
@@ -41,8 +39,8 @@ class UserRead(schemas.BaseUser[int]):
         username:
     """
 
-    username: Optional[str] = None
-    group_ids_names: Optional[list[tuple[int, str]]] = None
+    username: str | None = None
+    group_ids_names: list[tuple[int, str]] | None = None
     oauth_accounts: list[OAuthAccountRead]
 
 
fractal_server/app/schemas/user_group.py

@@ -1,5 +1,4 @@
 from datetime import datetime
-from typing import Optional
 
 from pydantic import BaseModel
 from pydantic import ConfigDict
@@ -33,7 +32,7 @@ class UserGroupRead(BaseModel):
     id: int
     name: str
     timestamp_created: AwareDatetime
-    user_ids: Optional[list[int]] = None
+    user_ids: list[int] | None = None
     viewer_paths: list[str]
 
     @field_serializer("timestamp_created")
fractal_server/app/schemas/user_settings.py

@@ -1,5 +1,3 @@
-from typing import Optional
-
 from pydantic import BaseModel
 from pydantic import ConfigDict
 from pydantic import field_validator
@@ -23,21 +21,21 @@ class UserSettingsRead(BaseModel):
     """
 
     id: int
-    ssh_host: Optional[str] = None
-    ssh_username: Optional[str] = None
-    ssh_private_key_path: Optional[str] = None
-    ssh_tasks_dir: Optional[str] = None
-    ssh_jobs_dir: Optional[str] = None
-    slurm_user: Optional[str] = None
+    ssh_host: str | None = None
+    ssh_username: str | None = None
+    ssh_private_key_path: str | None = None
+    ssh_tasks_dir: str | None = None
+    ssh_jobs_dir: str | None = None
+    slurm_user: str | None = None
     slurm_accounts: list[str]
-    project_dir: Optional[str] = None
+    project_dir: str | None = None
 
 
 class UserSettingsReadStrict(BaseModel):
-    slurm_user: Optional[str] = None
+    slurm_user: str | None = None
     slurm_accounts: list[str]
-    ssh_username: Optional[str] = None
-    project_dir: Optional[str] = None
+    ssh_username: str | None = None
+    project_dir: str | None = None
 
 
 class UserSettingsUpdate(BaseModel):
@@ -47,14 +45,14 @@ class UserSettingsUpdate(BaseModel):
 
     model_config = ConfigDict(extra="forbid")
 
-    ssh_host: Optional[NonEmptyStr] = None
-    ssh_username: Optional[NonEmptyStr] = None
-    ssh_private_key_path: Optional[AbsolutePathStr] = None
-    ssh_tasks_dir: Optional[AbsolutePathStr] = None
-    ssh_jobs_dir: Optional[AbsolutePathStr] = None
-    slurm_user: Optional[NonEmptyStr] = None
-    slurm_accounts: Optional[ListUniqueNonEmptyString] = None
-    project_dir: Optional[AbsolutePathStr] = None
+    ssh_host: NonEmptyStr | None = None
+    ssh_username: NonEmptyStr | None = None
+    ssh_private_key_path: AbsolutePathStr | None = None
+    ssh_tasks_dir: AbsolutePathStr | None = None
+    ssh_jobs_dir: AbsolutePathStr | None = None
+    slurm_user: NonEmptyStr | None = None
+    slurm_accounts: ListUniqueNonEmptyString | None = None
+    project_dir: AbsolutePathStr | None = None
 
     @field_validator("project_dir", mode="after")
     @classmethod
@@ -66,4 +64,4 @@ class UserSettingsUpdate(BaseModel):
 
 class UserSettingsUpdateStrict(BaseModel):
     model_config = ConfigDict(extra="forbid")
-    slurm_accounts: Optional[ListUniqueNonEmptyString] = None
+    slurm_accounts: ListUniqueNonEmptyString | None = None
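`UserSettingsUpdate` shows the convention these schemas share: every field is optional with a `None` default, so a PATCH body can set any subset of them, while `extra="forbid"` rejects unknown keys. A self-contained sketch with plain `str` in place of the constrained types (`NonEmptyStr`, `AbsolutePathStr`, `ListUniqueNonEmptyString`):

```python
from pydantic import BaseModel, ConfigDict, ValidationError


class SettingsUpdate(BaseModel):
    # All-optional fields: clients may send any subset of them.
    model_config = ConfigDict(extra="forbid")

    ssh_host: str | None = None
    slurm_user: str | None = None
    project_dir: str | None = None


update = SettingsUpdate(slurm_user="alice")
# Only explicitly-provided keys survive, which is what a PATCH needs:
assert update.model_dump(exclude_unset=True) == {"slurm_user": "alice"}

# Unknown keys are rejected thanks to extra="forbid":
try:
    SettingsUpdate(slurm_username="alice")
except ValidationError:
    pass
```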
fractal_server/app/schemas/v2/dataset.py

@@ -1,5 +1,4 @@
 from datetime import datetime
-from typing import Optional
 
 from pydantic import BaseModel
 from pydantic import ConfigDict
@@ -19,7 +18,7 @@ class DatasetCreateV2(BaseModel):
 
     name: NonEmptyStr
 
-    zarr_dir: Optional[ZarrDirStr] = None
+    zarr_dir: ZarrDirStr | None = None
 
     attribute_filters: AttributeFilters = Field(default_factory=dict)
 
@@ -44,7 +43,7 @@ class DatasetUpdateV2(BaseModel):
     model_config = ConfigDict(extra="forbid")
 
     name: NonEmptyStr = None
-    zarr_dir: Optional[ZarrDirStr] = None
+    zarr_dir: ZarrDirStr | None = None
 
 
 class DatasetImportV2(BaseModel):