fractal-server 2.0.6__py3-none-any.whl → 2.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/db/__init__.py +1 -1
  3. fractal_server/app/routes/admin/v1.py +2 -4
  4. fractal_server/app/routes/admin/v2.py +2 -4
  5. fractal_server/app/routes/api/v1/_aux_functions.py +24 -0
  6. fractal_server/app/routes/api/v1/job.py +3 -4
  7. fractal_server/app/routes/api/v1/project.py +28 -18
  8. fractal_server/app/routes/api/v2/_aux_functions.py +35 -12
  9. fractal_server/app/routes/api/v2/job.py +3 -4
  10. fractal_server/app/routes/api/v2/project.py +21 -0
  11. fractal_server/app/routes/api/v2/submit.py +36 -15
  12. fractal_server/app/routes/aux/_job.py +3 -1
  13. fractal_server/app/routes/aux/_runner.py +3 -3
  14. fractal_server/app/runner/executors/slurm/executor.py +169 -68
  15. fractal_server/app/runner/shutdown.py +88 -0
  16. fractal_server/app/runner/task_files.py +59 -27
  17. fractal_server/app/runner/v1/__init__.py +113 -64
  18. fractal_server/app/runner/v1/_common.py +53 -51
  19. fractal_server/app/runner/v1/_local/__init__.py +12 -11
  20. fractal_server/app/runner/v1/_local/_submit_setup.py +4 -4
  21. fractal_server/app/runner/v1/_slurm/__init__.py +16 -16
  22. fractal_server/app/runner/v1/_slurm/_submit_setup.py +11 -10
  23. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +6 -6
  24. fractal_server/app/runner/v2/__init__.py +139 -60
  25. fractal_server/app/runner/v2/_local/__init__.py +12 -11
  26. fractal_server/app/runner/v2/_local/_local_config.py +1 -1
  27. fractal_server/app/runner/v2/_local/_submit_setup.py +4 -4
  28. fractal_server/app/runner/v2/_local_experimental/__init__.py +155 -0
  29. fractal_server/app/runner/v2/_local_experimental/_local_config.py +108 -0
  30. fractal_server/app/runner/v2/_local_experimental/_submit_setup.py +42 -0
  31. fractal_server/app/runner/v2/_local_experimental/executor.py +156 -0
  32. fractal_server/app/runner/v2/_slurm/__init__.py +10 -10
  33. fractal_server/app/runner/v2/_slurm/_submit_setup.py +11 -10
  34. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +6 -6
  35. fractal_server/app/runner/v2/runner.py +17 -15
  36. fractal_server/app/runner/v2/runner_functions.py +38 -38
  37. fractal_server/app/runner/v2/runner_functions_low_level.py +12 -6
  38. fractal_server/app/security/__init__.py +4 -5
  39. fractal_server/config.py +73 -19
  40. fractal_server/gunicorn_fractal.py +40 -0
  41. fractal_server/{logger/__init__.py → logger.py} +2 -2
  42. fractal_server/main.py +45 -26
  43. fractal_server/migrations/env.py +1 -1
  44. {fractal_server-2.0.6.dist-info → fractal_server-2.2.0.dist-info}/METADATA +4 -1
  45. {fractal_server-2.0.6.dist-info → fractal_server-2.2.0.dist-info}/RECORD +48 -43
  46. fractal_server/logger/gunicorn_logger.py +0 -19
  47. {fractal_server-2.0.6.dist-info → fractal_server-2.2.0.dist-info}/LICENSE +0 -0
  48. {fractal_server-2.0.6.dist-info → fractal_server-2.2.0.dist-info}/WHEEL +0 -0
  49. {fractal_server-2.0.6.dist-info → fractal_server-2.2.0.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/v2/_local_experimental/_submit_setup.py (new file)
@@ -0,0 +1,42 @@
+ """
+ Submodule to define _local_submit_setup
+ """
+ from pathlib import Path
+ from typing import Literal
+ from typing import Optional
+
+ from ....models.v2 import WorkflowTaskV2
+ from ._local_config import get_local_backend_config
+
+
+ def _local_submit_setup(
+     *,
+     wftask: WorkflowTaskV2,
+     workflow_dir_local: Optional[Path] = None,
+     workflow_dir_remote: Optional[Path] = None,
+     which_type: Literal["non_parallel", "parallel"],
+ ) -> dict[str, object]:
+     """
+     Collect WorkflowTask-specific configuration parameters from different
+     sources, and inject them for execution.
+
+     Arguments:
+         wftask:
+             WorkflowTask for which the configuration is to be assembled
+         workflow_dir_local:
+             Not used in this function.
+         workflow_dir_remote:
+             Not used in this function.
+
+     Returns:
+         submit_setup_dict:
+             A dictionary that will be passed on to
+             `FractalProcessPoolExecutor.submit` and
+             `FractalProcessPoolExecutor.map`, so as to set extra options.
+     """
+
+     local_backend_config = get_local_backend_config(
+         wftask=wftask, which_type=which_type
+     )
+
+     return dict(local_backend_config=local_backend_config)
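
The dictionary returned by `_local_submit_setup` is consumed as extra keyword options by the executor's `submit`/`map` methods (see the executor added below). A minimal self-contained sketch of this setup-dict pattern, using stand-in names (`demo_submit_setup`, `demo_map`) that are not part of fractal-server:

```python
from typing import Any


def demo_submit_setup(*, which_type: str) -> dict[str, Any]:
    # Stands in for _local_submit_setup: gather per-task options in one dict.
    config = {"parallel_tasks_per_job": 2 if which_type == "parallel" else 1}
    return dict(local_backend_config=config)


def demo_map(fn, items, *, local_backend_config=None):
    # Stands in for FractalProcessPoolExecutor.map: accept the extra
    # keyword argument and use (or ignore) it.
    return [fn(item) for item in items]


options = demo_submit_setup(which_type="parallel")
print(demo_map(lambda x: x + 1, [1, 2, 3], **options))  # [2, 3, 4]
```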
fractal_server/app/runner/v2/_local_experimental/executor.py (new file)
@@ -0,0 +1,156 @@
+ """
+ Custom version of Python
+ [ProcessPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.ProcessPoolExecutor).
+ """
+ import threading
+ import time
+ from concurrent.futures import ProcessPoolExecutor
+ from concurrent.futures.process import BrokenProcessPool
+ from pathlib import Path
+ from typing import Callable
+ from typing import Iterable
+ from typing import Optional
+ from typing import Sequence
+
+ import psutil
+
+ from ._local_config import get_default_local_backend_config
+ from ._local_config import LocalBackendConfig
+ from fractal_server.app.runner.exceptions import JobExecutionError
+ from fractal_server.logger import set_logger
+
+ logger = set_logger("FractalProcessPoolExecutor")
+
+
+ class FractalProcessPoolExecutor(ProcessPoolExecutor):
+
+     shutdown_file: Path
+     interval: float
+     _shutdown: bool
+     _shutdown_file_thread: threading.Thread
+
+     def __init__(
+         self, shutdown_file: Path, interval: float = 1.0, *args, **kwargs
+     ):
+         super().__init__(*args, **kwargs)
+         self.shutdown_file = Path(shutdown_file)
+         self.interval = float(interval)
+         logger.debug(
+             f"Start monitoring {shutdown_file} every {interval} seconds"
+         )
+         self._shutdown = False
+         self._shutdown_file_thread = threading.Thread(
+             target=self._run, daemon=True
+         )
+         self._shutdown_file_thread.start()
+
+     def _run(self):
+         """
+         Running on '_shutdown_file_thread'.
+         """
+         while True:
+             if self.shutdown_file.exists() or self._shutdown:
+                 try:
+                     self._terminate_processes()
+                 except Exception as e:
+                     logger.error(
+                         "Terminate processes failed. "
+                         f"Original error: {str(e)}."
+                     )
+                 finally:
+                     return
+             time.sleep(self.interval)
+
+     def _terminate_processes(self):
+         """
+         Running on '_shutdown_file_thread'.
+         """
+
+         logger.info("Start terminating FractalProcessPoolExecutor processes.")
+         # We use 'psutil' in order to easily access the PIDs of the children.
+         if self._processes is not None:
+             for pid in self._processes.keys():
+                 parent = psutil.Process(pid)
+                 children = parent.children(recursive=True)
+                 for child in children:
+                     child.kill()
+                 parent.kill()
+                 logger.info(f"Process {pid} and its children terminated.")
+         logger.info("FractalProcessPoolExecutor processes terminated.")
+
+     def shutdown(self, *args, **kwargs) -> None:
+         self._shutdown = True
+         self._shutdown_file_thread.join()
+         return super().shutdown(*args, **kwargs)
+
+     def submit(
+         self,
+         *args,
+         local_backend_config: Optional[LocalBackendConfig] = None,
+         **kwargs,
+     ):
+         """
+         Compared to the `ProcessPoolExecutor` method, here we accept an
+         additional keyword argument (`local_backend_config`), which is then
+         simply ignored.
+         """
+         return super().submit(*args, **kwargs)
+
+     def map(
+         self,
+         fn: Callable,
+         *iterables: Sequence[Iterable],
+         local_backend_config: Optional[LocalBackendConfig] = None,
+     ):
+         """
+         Custom version of the `Executor.map` method.
+
+         The main change with respect to the original `map` method is that
+         the list of tasks to be executed is split into chunks, and then
+         `super().map` is called (sequentially) on each chunk. The goal of this
+         change is to limit parallelism, e.g. due to limited computational
+         resources.
+
+         Other changes from the `concurrent.futures` `map` method:
+
+         1. Removed `timeout` argument;
+         2. Removed `chunksize`;
+         3. All iterators (both input and output ones) are transformed into
+            lists.
+
+         Args:
+             fn: A callable function.
+             iterables: The argument iterables (one iterable per argument of
+                 `fn`).
+             local_backend_config: The backend configuration, needed to extract
+                 `parallel_tasks_per_job`.
+         """
+         # Preliminary check
+         iterable_lengths = [len(it) for it in iterables]
+         if not len(set(iterable_lengths)) == 1:
+             raise ValueError("Iterables have different lengths.")
+         # Set total number of arguments
+         n_elements = len(iterables[0])
+
+         # Set parallel_tasks_per_job
+         if local_backend_config is None:
+             local_backend_config = get_default_local_backend_config()
+         parallel_tasks_per_job = local_backend_config.parallel_tasks_per_job
+         if parallel_tasks_per_job is None:
+             parallel_tasks_per_job = n_elements
+
+         # Execute tasks, in chunks of size parallel_tasks_per_job
+         results = []
+         for ind_chunk in range(0, n_elements, parallel_tasks_per_job):
+             chunk_iterables = [
+                 it[ind_chunk : ind_chunk + parallel_tasks_per_job]  # noqa
+                 for it in iterables
+             ]
+             map_iter = super().map(fn, *chunk_iterables)
+
+             try:
+                 results.extend(list(map_iter))
+             except BrokenProcessPool as e:
+                 raise JobExecutionError(info=e.args[0])
+
+         return iter(results)
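
The executor above combines two ideas: a daemon thread that watches a shutdown file and kills worker processes when it appears, and a `map` that submits inputs in chunks of `parallel_tasks_per_job` to cap parallelism. A minimal self-contained sketch of the chunking idea, using only the standard library (`chunked_map` and `square` are illustrative names, not fractal-server code):

```python
from concurrent.futures import ProcessPoolExecutor


def square(x: int) -> int:
    return x * x


def chunked_map(executor, fn, inputs, parallel_tasks_per_job):
    # Map each chunk sequentially, so that at most `parallel_tasks_per_job`
    # inputs are submitted to the pool at a time.
    results = []
    for start in range(0, len(inputs), parallel_tasks_per_job):
        chunk = inputs[start : start + parallel_tasks_per_job]
        results.extend(executor.map(fn, chunk))
    return iter(results)


if __name__ == "__main__":
    with ProcessPoolExecutor(max_workers=2) as executor:
        print(list(chunked_map(executor, square, list(range(8)), 3)))
        # [0, 1, 4, 9, 16, 25, 36, 49]
```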
fractal_server/app/runner/v2/_slurm/__init__.py
@@ -40,8 +40,8 @@ def _process_workflow(
      workflow: WorkflowV2,
      dataset: DatasetV2,
      logger_name: str,
-     workflow_dir: Path,
-     workflow_dir_user: Path,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Path,
      first_task_index: int,
      last_task_index: int,
      slurm_user: Optional[str] = None,
@@ -76,8 +76,8 @@ def _process_workflow(
          keep_logs=True,
          slurm_user=slurm_user,
          user_cache_dir=user_cache_dir,
-         working_dir=workflow_dir,
-         working_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
          common_script_lines=worker_init,
          slurm_account=slurm_account,
      ) as executor:
@@ -87,8 +87,8 @@
          ],  # noqa
          dataset=dataset,
          executor=executor,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
          logger_name=logger_name,
          submit_setup_call=_slurm_submit_setup,
      )
@@ -99,8 +99,8 @@ async def process_workflow(
      *,
      workflow: WorkflowV2,
      dataset: DatasetV2,
-     workflow_dir: Path,
-     workflow_dir_user: Optional[Path] = None,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Optional[Path] = None,
      first_task_index: Optional[int] = None,
      last_task_index: Optional[int] = None,
      logger_name: str,
@@ -129,8 +129,8 @@
          workflow=workflow,
          dataset=dataset,
          logger_name=logger_name,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
          first_task_index=first_task_index,
          last_task_index=last_task_index,
          user_cache_dir=user_cache_dir,
fractal_server/app/runner/v2/_slurm/_submit_setup.py
@@ -24,8 +24,8 @@ from fractal_server.app.models.v2 import WorkflowTaskV2
  def _slurm_submit_setup(
      *,
      wftask: WorkflowTaskV2,
-     workflow_dir: Path,
-     workflow_dir_user: Path,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Path,
      which_type: Literal["non_parallel", "parallel"],
  ) -> dict[str, object]:
      """
@@ -44,13 +44,13 @@ def _slurm_submit_setup(
      Arguments:
          wftask:
              WorkflowTask for which the configuration is to be assembled
-         workflow_dir:
+         workflow_dir_local:
              Server-owned directory to store all task-execution-related relevant
              files (inputs, outputs, errors, and all meta files related to the
              job execution). Note: users cannot write directly to this folder.
-         workflow_dir_user:
-             User-side directory with the same scope as `workflow_dir`, and
-             where a user can write.
+         workflow_dir_remote:
+             User-side directory with the same scope as `workflow_dir_local`,
+             and where a user can write.

      Returns:
          submit_setup_dict:
@@ -62,16 +62,17 @@
      # Get SlurmConfig object
      slurm_config = get_slurm_config(
          wftask=wftask,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
          which_type=which_type,
      )

      # Get TaskFiles object
      task_files = get_task_file_paths(
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
          task_order=wftask.order,
+         task_name=wftask.task.name,
      )

      # Prepare and return output dictionary
fractal_server/app/runner/v2/_slurm/get_slurm_config.py
@@ -18,8 +18,8 @@ from fractal_server.app.runner.executors.slurm._slurm_config import (

  def get_slurm_config(
      wftask: WorkflowTaskV2,
-     workflow_dir: Path,
-     workflow_dir_user: Path,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Path,
      which_type: Literal["non_parallel", "parallel"],
      config_path: Optional[Path] = None,
  ) -> SlurmConfig:
@@ -43,13 +43,13 @@
          wftask:
              WorkflowTask for which the SLURM configuration is to be
              prepared.
-         workflow_dir:
+         workflow_dir_local:
              Server-owned directory to store all task-execution-related relevant
              files (inputs, outputs, errors, and all meta files related to the
              job execution). Note: users cannot write directly to this folder.
-         workflow_dir_user:
-             User-side directory with the same scope as `workflow_dir`, and
-             where a user can write.
+         workflow_dir_remote:
+             User-side directory with the same scope as `workflow_dir_local`,
+             and where a user can write.
          config_path:
              Path of a Fractal SLURM configuration file; if `None`, use
              `FRACTAL_SLURM_CONFIG_FILE` variable from settings.
fractal_server/app/runner/v2/runner.py
@@ -32,16 +32,18 @@ def execute_tasks_v2(
      wf_task_list: list[WorkflowTaskV2],
      dataset: DatasetV2,
      executor: ThreadPoolExecutor,
-     workflow_dir: Path,
-     workflow_dir_user: Optional[Path] = None,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Optional[Path] = None,
      logger_name: Optional[str] = None,
      submit_setup_call: Callable = no_op_submit_setup_call,
  ) -> DatasetV2:

      logger = logging.getLogger(logger_name)

-     if not workflow_dir.exists():  # FIXME: this should have already happened
-         workflow_dir.mkdir()
+     if (
+         not workflow_dir_local.exists()
+     ):  # FIXME: this should have already happened
+         workflow_dir_local.mkdir()

      # Initialize local dataset attributes
      zarr_dir = dataset.zarr_dir
@@ -94,8 +96,8 @@
              zarr_dir=zarr_dir,
              wftask=wftask,
              task=task,
-             workflow_dir=workflow_dir,
-             workflow_dir_user=workflow_dir_user,
+             workflow_dir_local=workflow_dir_local,
+             workflow_dir_remote=workflow_dir_remote,
              executor=executor,
              logger_name=logger_name,
              submit_setup_call=submit_setup_call,
@@ -105,8 +107,8 @@
              images=filtered_images,
              wftask=wftask,
              task=task,
-             workflow_dir=workflow_dir,
-             workflow_dir_user=workflow_dir_user,
+             workflow_dir_local=workflow_dir_local,
+             workflow_dir_remote=workflow_dir_remote,
              executor=executor,
              logger_name=logger_name,
              submit_setup_call=submit_setup_call,
@@ -117,8 +119,8 @@
              zarr_dir=zarr_dir,
              wftask=wftask,
              task=task,
-             workflow_dir=workflow_dir,
-             workflow_dir_user=workflow_dir_user,
+             workflow_dir_local=workflow_dir_local,
+             workflow_dir_remote=workflow_dir_remote,
              executor=executor,
              logger_name=logger_name,
              submit_setup_call=submit_setup_call,
@@ -133,8 +135,8 @@
              task_legacy=task_legacy,
              executor=executor,
              logger_name=logger_name,
-             workflow_dir=workflow_dir,
-             workflow_dir_user=workflow_dir_user,
+             workflow_dir_local=workflow_dir_local,
+             workflow_dir_remote=workflow_dir_remote,
              submit_setup_call=submit_setup_call,
          )

@@ -323,11 +325,11 @@
          # temporary files which can be used (1) to retrieve the latest state
          # when the job fails, (2) from within endpoints that need up-to-date
          # information
-         with open(workflow_dir / HISTORY_FILENAME, "w") as f:
+         with open(workflow_dir_local / HISTORY_FILENAME, "w") as f:
              json.dump(tmp_history, f, indent=2)
-         with open(workflow_dir / FILTERS_FILENAME, "w") as f:
+         with open(workflow_dir_local / FILTERS_FILENAME, "w") as f:
              json.dump(tmp_filters, f, indent=2)
-         with open(workflow_dir / IMAGES_FILENAME, "w") as f:
+         with open(workflow_dir_local / IMAGES_FILENAME, "w") as f:
              json.dump(tmp_images, f, indent=2)

          logger.debug(f'END {wftask.order}-th task (name="{task_name}")')
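
The last hunk above writes checkpoint files (`HISTORY_FILENAME`, `FILTERS_FILENAME`, `IMAGES_FILENAME`) after each task, so the latest state survives a job failure. A minimal self-contained sketch of that checkpoint pattern (the directory and `history.json` filename here are placeholders, not fractal-server's actual constants):

```python
import json
from pathlib import Path
from tempfile import mkdtemp

# Placeholder for the job's workflow_dir_local.
workflow_dir_local = Path(mkdtemp())
tmp_history = [{"workflowtask": 0, "status": "done"}]

# Overwrite the checkpoint after every task, so the file always holds the
# latest state even if a later task fails.
with open(workflow_dir_local / "history.json", "w") as f:
    json.dump(tmp_history, f, indent=2)

print((workflow_dir_local / "history.json").read_text())
```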
fractal_server/app/runner/v2/runner_functions.py
@@ -65,8 +65,8 @@ def _cast_and_validate_InitTaskOutput(
  def no_op_submit_setup_call(
      *,
      wftask: WorkflowTaskV2,
-     workflow_dir: Path,
-     workflow_dir_user: Path,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Path,
      which_type: Literal["non_parallel", "parallel"],
  ) -> dict:
      """
@@ -79,16 +79,16 @@ def no_op_submit_setup_call(
  def _get_executor_options(
      *,
      wftask: WorkflowTaskV2,
-     workflow_dir: Path,
-     workflow_dir_user: Path,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Path,
      submit_setup_call: Callable,
      which_type: Literal["non_parallel", "parallel"],
  ) -> dict:
      try:
          options = submit_setup_call(
              wftask=wftask,
-             workflow_dir=workflow_dir,
-             workflow_dir_user=workflow_dir_user,
+             workflow_dir_local=workflow_dir_local,
+             workflow_dir_remote=workflow_dir_remote,
              which_type=which_type,
          )
      except Exception as e:
@@ -115,8 +115,8 @@ def run_v2_task_non_parallel(
      zarr_dir: str,
      task: TaskV2,
      wftask: WorkflowTaskV2,
-     workflow_dir: Path,
-     workflow_dir_user: Optional[Path] = None,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Optional[Path] = None,
      executor: Executor,
      logger_name: Optional[str] = None,
      submit_setup_call: Callable = no_op_submit_setup_call,
@@ -125,17 +125,17 @@
      This runs server-side (see `executor` argument)
      """

-     if workflow_dir_user is None:
-         workflow_dir_user = workflow_dir
+     if workflow_dir_remote is None:
+         workflow_dir_remote = workflow_dir_local
          logging.warning(
-             "In `run_single_task`, workflow_dir_user=None. Is this right?"
+             "In `run_single_task`, workflow_dir_remote=None. Is this right?"
          )
-         workflow_dir_user = workflow_dir
+         workflow_dir_remote = workflow_dir_local

      executor_options = _get_executor_options(
          wftask=wftask,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
          submit_setup_call=submit_setup_call,
          which_type="non_parallel",
      )
@@ -150,8 +150,8 @@
          run_single_task,
          wftask=wftask,
          command=task.command_non_parallel,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
      ),
      function_kwargs,
      **executor_options,
@@ -169,8 +169,8 @@ def run_v2_task_parallel(
      task: TaskV2,
      wftask: WorkflowTaskV2,
      executor: Executor,
-     workflow_dir: Path,
-     workflow_dir_user: Optional[Path] = None,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Optional[Path] = None,
      logger_name: Optional[str] = None,
      submit_setup_call: Callable = no_op_submit_setup_call,
  ) -> TaskOutput:
@@ -182,8 +182,8 @@

      executor_options = _get_executor_options(
          wftask=wftask,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
          submit_setup_call=submit_setup_call,
          which_type="parallel",
      )
@@ -203,8 +203,8 @@
          run_single_task,
          wftask=wftask,
          command=task.command_parallel,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
      ),
      list_function_kwargs,
      **executor_options,
@@ -230,23 +230,23 @@ def run_v2_task_compound(
      task: TaskV2,
      wftask: WorkflowTaskV2,
      executor: Executor,
-     workflow_dir: Path,
-     workflow_dir_user: Optional[Path] = None,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Optional[Path] = None,
      logger_name: Optional[str] = None,
      submit_setup_call: Callable = no_op_submit_setup_call,
  ) -> TaskOutput:

      executor_options_init = _get_executor_options(
          wftask=wftask,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
          submit_setup_call=submit_setup_call,
          which_type="non_parallel",
      )
      executor_options_compute = _get_executor_options(
          wftask=wftask,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
          submit_setup_call=submit_setup_call,
          which_type="parallel",
      )
@@ -262,8 +262,8 @@
          run_single_task,
          wftask=wftask,
          command=task.command_non_parallel,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
      ),
      function_kwargs,
      **executor_options_init,
@@ -298,8 +298,8 @@
          run_single_task,
          wftask=wftask,
          command=task.command_parallel,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
      ),
      list_function_kwargs,
      **executor_options_compute,
@@ -325,8 +325,8 @@ def run_v1_task_parallel(
      task_legacy: TaskV1,
      wftask: WorkflowTaskV2,
      executor: Executor,
-     workflow_dir: Path,
-     workflow_dir_user: Optional[Path] = None,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Optional[Path] = None,
      logger_name: Optional[str] = None,
      submit_setup_call: Callable = no_op_submit_setup_call,
  ) -> TaskOutput:
@@ -335,8 +335,8 @@

      executor_options = _get_executor_options(
          wftask=wftask,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
          submit_setup_call=submit_setup_call,
          which_type="parallel",
      )
@@ -359,8 +359,8 @@
          run_single_task,
          wftask=wftask,
          command=task_legacy.command,
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
          is_task_v1=True,
      ),
      list_function_kwargs,
fractal_server/app/runner/v2/runner_functions_low_level.py
@@ -60,8 +60,8 @@ def run_single_task(
      args: dict[str, Any],
      command: str,
      wftask: WorkflowTaskV2,
-     workflow_dir: Path,
-     workflow_dir_user: Optional[Path] = None,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Optional[Path] = None,
      logger_name: Optional[str] = None,
      is_task_v1: bool = False,
  ) -> dict[str, Any]:
@@ -72,14 +72,20 @@
      logger = logging.getLogger(logger_name)
      logger.debug(f"Now start running {command=}")

-     if not workflow_dir_user:
-         workflow_dir_user = workflow_dir
+     if not workflow_dir_remote:
+         workflow_dir_remote = workflow_dir_local
+
+     if is_task_v1:
+         task_name = wftask.task_legacy.name
+     else:
+         task_name = wftask.task.name

      component = args.pop(_COMPONENT_KEY_, None)
      task_files = get_task_file_paths(
-         workflow_dir=workflow_dir,
-         workflow_dir_user=workflow_dir_user,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
          task_order=wftask.order,
+         task_name=task_name,
          component=component,
      )

fractal_server/app/security/__init__.py
@@ -57,9 +57,8 @@ from sqlmodel import select
  from ...config import get_settings
  from ...syringe import Inject
  from ..db import get_async_db
- from ..models.security import OAuthAccount
- from ..models.security import UserOAuth as User
- from fractal_server.app.models.security import UserOAuth
+ from fractal_server.app.models.security import OAuthAccount
+ from fractal_server.app.models.security import UserOAuth as User
  from fractal_server.app.schemas.user import UserCreate
  from fractal_server.logger import get_logger

@@ -287,8 +286,8 @@ async def _create_first_user(

      if is_superuser is True:
          # If a superuser already exists, exit
-         stm = select(UserOAuth).where(
-             UserOAuth.is_superuser == True  # noqa: E712
+         stm = select(User).where(
+             User.is_superuser == True  # noqa: E712
          )
          res = await session.execute(stm)
          existing_superuser = res.scalars().first()