fractal-server 2.13.0__py3-none-any.whl → 2.14.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/history/__init__.py +4 -0
  3. fractal_server/app/history/image_updates.py +142 -0
  4. fractal_server/app/history/status_enum.py +16 -0
  5. fractal_server/app/models/v2/__init__.py +9 -1
  6. fractal_server/app/models/v2/accounting.py +35 -0
  7. fractal_server/app/models/v2/history.py +53 -0
  8. fractal_server/app/routes/admin/v2/__init__.py +4 -0
  9. fractal_server/app/routes/admin/v2/accounting.py +108 -0
  10. fractal_server/app/routes/admin/v2/impersonate.py +35 -0
  11. fractal_server/app/routes/admin/v2/job.py +5 -13
  12. fractal_server/app/routes/admin/v2/task_group.py +4 -12
  13. fractal_server/app/routes/api/v2/__init__.py +2 -2
  14. fractal_server/app/routes/api/v2/_aux_functions.py +78 -0
  15. fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +3 -3
  16. fractal_server/app/routes/api/v2/dataset.py +12 -9
  17. fractal_server/app/routes/api/v2/history.py +247 -0
  18. fractal_server/app/routes/api/v2/submit.py +1 -0
  19. fractal_server/app/routes/api/v2/task_group.py +2 -5
  20. fractal_server/app/routes/api/v2/workflow.py +18 -3
  21. fractal_server/app/routes/api/v2/workflowtask.py +22 -0
  22. fractal_server/app/routes/aux/__init__.py +0 -20
  23. fractal_server/app/runner/executors/base_runner.py +114 -0
  24. fractal_server/app/runner/{v2/_local → executors/local}/_local_config.py +3 -3
  25. fractal_server/app/runner/executors/local/_submit_setup.py +54 -0
  26. fractal_server/app/runner/executors/local/runner.py +200 -0
  27. fractal_server/app/runner/executors/{slurm → slurm_common}/_batching.py +1 -1
  28. fractal_server/app/runner/executors/{slurm → slurm_common}/_slurm_config.py +3 -3
  29. fractal_server/app/runner/{v2/_slurm_ssh → executors/slurm_common}/_submit_setup.py +13 -12
  30. fractal_server/app/runner/{v2/_slurm_common → executors/slurm_common}/get_slurm_config.py +9 -15
  31. fractal_server/app/runner/executors/{slurm/ssh → slurm_ssh}/_executor_wait_thread.py +1 -1
  32. fractal_server/app/runner/executors/{slurm/ssh → slurm_ssh}/_slurm_job.py +1 -1
  33. fractal_server/app/runner/executors/{slurm/ssh → slurm_ssh}/executor.py +13 -14
  34. fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_check_jobs_status.py +11 -9
  35. fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_executor_wait_thread.py +3 -3
  36. fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_subprocess_run_as_user.py +2 -68
  37. fractal_server/app/runner/executors/slurm_sudo/runner.py +632 -0
  38. fractal_server/app/runner/task_files.py +70 -96
  39. fractal_server/app/runner/v2/__init__.py +9 -19
  40. fractal_server/app/runner/v2/_local.py +84 -0
  41. fractal_server/app/runner/v2/{_slurm_ssh/__init__.py → _slurm_ssh.py} +12 -13
  42. fractal_server/app/runner/v2/{_slurm_sudo/__init__.py → _slurm_sudo.py} +12 -12
  43. fractal_server/app/runner/v2/runner.py +106 -31
  44. fractal_server/app/runner/v2/runner_functions.py +88 -64
  45. fractal_server/app/runner/v2/runner_functions_low_level.py +20 -20
  46. fractal_server/app/schemas/v2/__init__.py +1 -0
  47. fractal_server/app/schemas/v2/accounting.py +18 -0
  48. fractal_server/app/schemas/v2/dataset.py +0 -17
  49. fractal_server/app/schemas/v2/history.py +23 -0
  50. fractal_server/config.py +58 -52
  51. fractal_server/migrations/versions/8223fcef886c_image_status.py +63 -0
  52. fractal_server/migrations/versions/87cd72a537a2_add_historyitem_table.py +68 -0
  53. fractal_server/migrations/versions/af1ef1c83c9b_add_accounting_tables.py +57 -0
  54. fractal_server/tasks/v2/utils_background.py +1 -1
  55. {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/METADATA +1 -1
  56. {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/RECORD +66 -55
  57. fractal_server/app/routes/api/v2/status.py +0 -168
  58. fractal_server/app/runner/executors/slurm/sudo/executor.py +0 -1281
  59. fractal_server/app/runner/v2/_local/__init__.py +0 -129
  60. fractal_server/app/runner/v2/_local/_submit_setup.py +0 -52
  61. fractal_server/app/runner/v2/_local/executor.py +0 -100
  62. fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py +0 -83
  63. fractal_server/app/runner/v2/handle_failed_job.py +0 -59
  64. /fractal_server/app/runner/executors/{slurm → local}/__init__.py +0 -0
  65. /fractal_server/app/runner/executors/{slurm/ssh → slurm_common}/__init__.py +0 -0
  66. /fractal_server/app/runner/executors/{_job_states.py → slurm_common/_job_states.py} +0 -0
  67. /fractal_server/app/runner/executors/{slurm → slurm_common}/remote.py +0 -0
  68. /fractal_server/app/runner/executors/{slurm → slurm_common}/utils_executors.py +0 -0
  69. /fractal_server/app/runner/executors/{slurm/sudo → slurm_ssh}/__init__.py +0 -0
  70. /fractal_server/app/runner/{v2/_slurm_common → executors/slurm_sudo}/__init__.py +0 -0
  71. {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/LICENSE +0 -0
  72. {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/WHEEL +0 -0
  73. {fractal_server-2.13.0.dist-info → fractal_server-2.14.0a0.dist-info}/entry_points.txt +0 -0
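Most of the churn above comes from reorganizing the runner packages: `executors/slurm/*` is split into `slurm_common`, `slurm_ssh` and `slurm_sudo`, and the per-backend packages under `runner/v2/` (`_local/`, `_slurm_ssh/`, `_slurm_sudo/`) collapse into single modules. A minimal sketch of how module paths move, using only paths taken from the rename/replacement entries above (module-level imports only; no symbol names are implied by the file list itself):

# fractal-server 2.13.0 module paths (left-hand side of the renames above):
# import fractal_server.app.runner.executors.slurm._slurm_config
# import fractal_server.app.runner.executors.slurm.sudo.executor   # removed in 2.14.0a0

# fractal-server 2.14.0a0 module paths (right-hand side of the renames above;
# slurm_sudo/runner.py is the new module that replaces slurm/sudo/executor.py):
import fractal_server.app.runner.executors.slurm_common._slurm_config
import fractal_server.app.runner.executors.slurm_sudo.runner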
fractal_server/app/runner/task_files.py
@@ -2,6 +2,8 @@ from pathlib import Path
 from typing import Optional
 from typing import Union
 
+from pydantic import BaseModel
+
 from fractal_server.string_tools import sanitize_string
 
 
@@ -17,108 +19,80 @@ def task_subfolder_name(order: Union[int, str], task_name: str) -> str:
     return f"{order}_{task_name_slug}"
 
 
-class TaskFiles:
+class TaskFiles(BaseModel):
     """
-    Group all file paths pertaining to a task
-
-    Attributes:
-        workflow_dir_local:
-            Server-owned directory to store all task-execution-related relevant
-            files. Note: users cannot write directly to this folder.
-        workflow_dir_remote:
-            User-side directory with the same scope as `workflow_dir_local`,
-            and where a user can write.
-        subfolder_name:
-            Name of task-specific subfolder
-        remote_subfolder:
-            Path to user-side task-specific subfolder
-        task_name:
-            Name of the task
-        task_order:
-            Positional order of the task within a workflow.
-        component:
-            Specific component to run the task for (relevant for tasks to be
-            executed in parallel over many components).
-        file_prefix:
-            Prefix for all task-related files.
-        args:
-            Path for input json file.
-        metadiff:
-            Path for output json file with metadata update.
-        out:
-            Path for task-execution stdout.
-        err:
-            Path for task-execution stderr.
+    Group all file paths pertaining to a task FIXME
     """
 
-    workflow_dir_local: Path
-    workflow_dir_remote: Path
-    remote_subfolder: Path
-    subfolder_name: str
+    # Parent directory
+    root_dir_local: Path
+    root_dir_remote: Path
+
+    # Per-wftask
     task_name: str
-    task_order: Optional[int] = None
+    task_order: int
+
+    # Per-single-component
     component: Optional[str] = None
 
-    file_prefix: str
-    file_prefix_with_subfolder: str
-    args: Path
-    out: Path
-    err: Path
-    log: Path
-    metadiff: Path
-
-    def __init__(
-        self,
-        workflow_dir_local: Path,
-        workflow_dir_remote: Path,
-        task_name: str,
-        task_order: Optional[int] = None,
-        component: Optional[str] = None,
-    ):
-        self.workflow_dir_local = workflow_dir_local
-        self.workflow_dir_remote = workflow_dir_remote
-        self.task_order = task_order
-        self.task_name = task_name
-        self.component = component
-
-        if self.component is not None:
-            component_safe = sanitize_string(str(self.component))
-            component_safe = f"_par_{component_safe}"
-        else:
-            component_safe = ""
-
-        if self.task_order is not None:
-            order = str(self.task_order)
-        else:
-            order = "0"
-        self.file_prefix = f"{order}{component_safe}"
-        self.subfolder_name = task_subfolder_name(
-            order=order, task_name=self.task_name
-        )
-        self.remote_subfolder = self.workflow_dir_remote / self.subfolder_name
-        self.args = self.remote_subfolder / f"{self.file_prefix}.args.json"
-        self.out = self.remote_subfolder / f"{self.file_prefix}.out"
-        self.err = self.remote_subfolder / f"{self.file_prefix}.err"
-        self.log = self.remote_subfolder / f"{self.file_prefix}.log"
-        self.metadiff = (
-            self.remote_subfolder / f"{self.file_prefix}.metadiff.json"
+    def _check_component(self):
+        if self.component is None:
+            raise ValueError("`component` cannot be None")
+
+    @property
+    def subfolder_name(self) -> str:
+        order = str(self.task_order or 0)
+        return task_subfolder_name(
+            order=order,
+            task_name=self.task_name,
         )
 
+    @property
+    def wftask_subfolder_remote(self) -> Path:
+        return self.root_dir_remote / self.subfolder_name
 
-def get_task_file_paths(
-    workflow_dir_local: Path,
-    workflow_dir_remote: Path,
-    task_name: str,
-    task_order: Optional[int] = None,
-    component: Optional[str] = None,
-) -> TaskFiles:
-    """
-    Return the corrisponding TaskFiles object
-    """
-    return TaskFiles(
-        workflow_dir_local=workflow_dir_local,
-        workflow_dir_remote=workflow_dir_remote,
-        task_name=task_name,
-        task_order=task_order,
-        component=component,
-    )
+    @property
+    def wftask_subfolder_local(self) -> Path:
+        return self.root_dir_local / self.subfolder_name
+
+    @property
+    def log_file_local(self) -> str:
+        self._check_component()
+        return (
+            self.wftask_subfolder_local / f"{self.component}-log.txt"
+        ).as_posix()
+
+    @property
+    def log_file_remote(self) -> str:
+        self._check_component()
+        return (
+            self.wftask_subfolder_remote / f"{self.component}-log.txt"
+        ).as_posix()
+
+    @property
+    def args_file_local(self) -> str:
+        self._check_component()
+        return (
+            self.wftask_subfolder_local / f"{self.component}-args.json"
+        ).as_posix()
+
+    @property
+    def args_file_remote(self) -> str:
+        self._check_component()
+        return (
+            self.wftask_subfolder_remote / f"{self.component}-args.json"
+        ).as_posix()
+
+    @property
+    def metadiff_file_local(self) -> str:
+        self._check_component()
+        return (
+            self.wftask_subfolder_local / f"{self.component}-metadiff.json"
+        ).as_posix()
+
+    @property
+    def metadiff_file_remote(self) -> str:
+        self._check_component()
+        return (
+            self.wftask_subfolder_remote / f"{self.component}-metadiff.json"
+        ).as_posix()
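For context, a minimal usage sketch of the reworked TaskFiles model shown above, assuming fractal-server 2.14.0a0 is installed. The sample directories, task name and component are made up, and the exact subfolder name also depends on sanitize_string, which is outside this diff:

from pathlib import Path

from fractal_server.app.runner.task_files import TaskFiles

task_files = TaskFiles(
    root_dir_local=Path("/srv/fractal/artifacts/job_42"),  # server-side base dir (illustrative)
    root_dir_remote=Path("/home/user/fractal/job_42"),     # user-side base dir (illustrative)
    task_name="Cellpose Segmentation",
    task_order=2,
    component="image_0001",  # required before using the *_file_* properties
)

# Derived paths are now computed lazily via properties instead of in __init__:
print(task_files.subfolder_name)          # "2_<sanitized task name>"
print(task_files.wftask_subfolder_local)  # root_dir_local / subfolder_name
print(task_files.args_file_remote)        # ".../<subfolder>/image_0001-args.json"
print(task_files.log_file_local)          # ".../<subfolder>/image_0001-log.txt"

# Per-component properties guard against a missing `component`:
try:
    TaskFiles(
        root_dir_local=Path("/tmp/local"),
        root_dir_remote=Path("/tmp/remote"),
        task_name="example",
        task_order=0,
    ).args_file_local
except ValueError as e:
    print(e)  # "`component` cannot be None"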
fractal_server/app/runner/v2/__init__.py
@@ -27,13 +27,12 @@ from ...models.v2 import WorkflowV2
 from ...schemas.v2 import JobStatusTypeV2
 from ..exceptions import JobExecutionError
 from ..exceptions import TaskExecutionError
-from ..executors.slurm.sudo._subprocess_run_as_user import _mkdir_as_user
+from ..executors.slurm_sudo._subprocess_run_as_user import _mkdir_as_user
 from ..filenames import WORKFLOW_LOG_FILENAME
 from ..task_files import task_subfolder_name
 from ._local import process_workflow as local_process_workflow
 from ._slurm_ssh import process_workflow as slurm_ssh_process_workflow
 from ._slurm_sudo import process_workflow as slurm_sudo_process_workflow
-from .handle_failed_job import mark_last_wftask_as_failed
 from fractal_server import __VERSION__
 from fractal_server.app.models import UserSettings
 
@@ -70,6 +69,7 @@ def submit_workflow(
     workflow_id: int,
     dataset_id: int,
     job_id: int,
+    user_id: int,
     user_settings: UserSettings,
     worker_init: Optional[str] = None,
     slurm_user: Optional[str] = None,
@@ -90,6 +90,8 @@ def submit_workflow(
         job_id:
            Id of the job record which stores the state for the current
            workflow application.
+        user_id:
+            User ID.
         worker_init:
            Custom executor parameters that get parsed before the execution of
            each task.
@@ -198,7 +200,7 @@ def submit_workflow(
                f"{settings.FRACTAL_RUNNER_BACKEND}."
            )
 
-        # Create all tasks subfolders
+        # Create all tasks subfolders # FIXME: do this with Runner
         for order in range(job.first_task_index, job.last_task_index + 1):
            this_wftask = workflow.task_list[order]
            task_name = this_wftask.task.name
@@ -216,10 +218,7 @@ def submit_workflow(
                    folder=str(WORKFLOW_DIR_REMOTE / subfolder_name),
                    user=slurm_user,
                )
-            else:
-                # Create local subfolder (with standard permission set)
-                (WORKFLOW_DIR_LOCAL / subfolder_name).mkdir()
-                logger.info("Skip remote-subfolder creation")
+
     except Exception as e:
         error_type = type(e).__name__
         fail_job(
@@ -312,6 +311,7 @@ def submit_workflow(
         process_workflow(
            workflow=workflow,
            dataset=dataset,
+            user_id=user_id,
            workflow_dir_local=WORKFLOW_DIR_LOCAL,
            workflow_dir_remote=WORKFLOW_DIR_REMOTE,
            logger_name=logger_name,
@@ -341,10 +341,6 @@ def submit_workflow(
         logger.debug(f'FAILED workflow "{workflow.name}", TaskExecutionError.')
         logger.info(f'Workflow "{workflow.name}" failed (TaskExecutionError).')
 
-        mark_last_wftask_as_failed(
-            dataset_id=dataset_id,
-            logger_name=logger_name,
-        )
         exception_args_string = "\n".join(e.args)
         log_msg = (
            f"TASK ERROR: "
@@ -357,10 +353,7 @@ def submit_workflow(
     except JobExecutionError as e:
         logger.debug(f'FAILED workflow "{workflow.name}", JobExecutionError.')
         logger.info(f'Workflow "{workflow.name}" failed (JobExecutionError).')
-        mark_last_wftask_as_failed(
-            dataset_id=dataset_id,
-            logger_name=logger_name,
-        )
+
         fail_job(
            db=db_sync,
            job=job,
@@ -374,10 +367,7 @@ def submit_workflow(
     except Exception:
         logger.debug(f'FAILED workflow "{workflow.name}", unknown error.')
         logger.info(f'Workflow "{workflow.name}" failed (unkwnon error).')
-        mark_last_wftask_as_failed(
-            dataset_id=dataset_id,
-            logger_name=logger_name,
-        )
+
         current_traceback = traceback.format_exc()
         fail_job(
            db=db_sync,
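The new user_id argument of submit_workflow is forwarded unchanged to the backend process_workflow functions shown in the files below. A hedged sketch of the resulting call shape, limited to the arguments visible in these hunks; the variables on the right-hand side are placeholders, not code from this diff:

# Illustrative call site (the actual one is in the job-submission route, not shown here).
submit_workflow(
    workflow_id=workflow_id,
    dataset_id=dataset_id,
    job_id=job_id,
    user_id=user_id,  # new in 2.14.0a0
    user_settings=user_settings,
    worker_init=worker_init,
    slurm_user=slurm_user,
)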
fractal_server/app/runner/v2/_local.py
@@ -0,0 +1,84 @@
+from pathlib import Path
+from typing import Optional
+
+from ...models.v2 import DatasetV2
+from ...models.v2 import WorkflowV2
+from ..executors.local._submit_setup import _local_submit_setup
+from ..executors.local.runner import LocalRunner
+from ..set_start_and_last_task_index import set_start_and_last_task_index
+from .runner import execute_tasks_v2
+from fractal_server.images.models import AttributeFiltersType
+
+
+def process_workflow(
+    *,
+    workflow: WorkflowV2,
+    dataset: DatasetV2,
+    workflow_dir_local: Path,
+    workflow_dir_remote: Optional[Path] = None,
+    first_task_index: Optional[int] = None,
+    last_task_index: Optional[int] = None,
+    logger_name: str,
+    job_attribute_filters: AttributeFiltersType,
+    user_id: int,
+    **kwargs,
+) -> None:
+    """
+    Run a workflow through
+
+    Args:
+        workflow:
+            The workflow to be run
+        dataset:
+            Initial dataset.
+        workflow_dir_local:
+            Working directory for this run.
+        workflow_dir_remote:
+            Working directory for this run, on the user side. This argument is
+            present for compatibility with the standard backend interface, but
+            for the `local` backend it cannot be different from
+            `workflow_dir_local`.
+        first_task_index:
+            Positional index of the first task to execute; if `None`, start
+            from `0`.
+        last_task_index:
+            Positional index of the last task to execute; if `None`, proceed
+            until the last task.
+        logger_name: Logger name
+        user_id:
+
+    Raises:
+        TaskExecutionError: wrapper for errors raised during tasks' execution
+            (positive exit codes).
+        JobExecutionError: wrapper for errors raised by the tasks' executors
+            (negative exit codes).
+    """
+
+    if workflow_dir_remote and (workflow_dir_remote != workflow_dir_local):
+        raise NotImplementedError(
+            "Local backend does not support different directories "
+            f"{workflow_dir_local=} and {workflow_dir_remote=}"
+        )
+
+    # Set values of first_task_index and last_task_index
+    num_tasks = len(workflow.task_list)
+    first_task_index, last_task_index = set_start_and_last_task_index(
+        num_tasks,
+        first_task_index=first_task_index,
+        last_task_index=last_task_index,
+    )
+
+    with LocalRunner(root_dir_local=workflow_dir_local) as runner:
+        execute_tasks_v2(
+            wf_task_list=workflow.task_list[
+                first_task_index : (last_task_index + 1)
+            ],
+            dataset=dataset,
+            runner=runner,
+            workflow_dir_local=workflow_dir_local,
+            workflow_dir_remote=workflow_dir_local,
+            logger_name=logger_name,
+            submit_setup_call=_local_submit_setup,
+            job_attribute_filters=job_attribute_filters,
+            user_id=user_id,
+        )
fractal_server/app/runner/v2/{_slurm_ssh/__init__.py → _slurm_ssh.py}
@@ -19,14 +19,14 @@ Executor objects.
 from pathlib import Path
 from typing import Optional
 
-from .....ssh._fabric import FractalSSH
-from ....models.v2 import DatasetV2
-from ....models.v2 import WorkflowV2
-from ...exceptions import JobExecutionError
-from ...executors.slurm.ssh.executor import FractalSlurmSSHExecutor
-from ...set_start_and_last_task_index import set_start_and_last_task_index
-from ..runner import execute_tasks_v2
-from ._submit_setup import _slurm_submit_setup
+from ....ssh._fabric import FractalSSH
+from ...models.v2 import DatasetV2
+from ...models.v2 import WorkflowV2
+from ..exceptions import JobExecutionError
+from ..executors.slurm_common._submit_setup import _slurm_submit_setup
+from ..executors.slurm_ssh.executor import FractalSlurmSSHExecutor
+from ..set_start_and_last_task_index import set_start_and_last_task_index
+from .runner import execute_tasks_v2
 from fractal_server.images.models import AttributeFiltersType
 from fractal_server.logger import set_logger
 
@@ -45,10 +45,8 @@ def process_workflow(
     job_attribute_filters: AttributeFiltersType,
     fractal_ssh: FractalSSH,
     worker_init: Optional[str] = None,
-    # Not used
-    user_cache_dir: Optional[str] = None,
-    slurm_user: Optional[str] = None,
-    slurm_account: Optional[str] = None,
+    user_id: int,
+    **kwargs, # not used
 ) -> None:
     """
     Process workflow (SLURM backend public interface)
@@ -88,10 +86,11 @@ def process_workflow(
                first_task_index : (last_task_index + 1)
            ],
            dataset=dataset,
-            executor=executor,
+            runner=executor,
            workflow_dir_local=workflow_dir_local,
            workflow_dir_remote=workflow_dir_remote,
            logger_name=logger_name,
            submit_setup_call=_slurm_submit_setup,
            job_attribute_filters=job_attribute_filters,
+            user_id=user_id,
        )
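The signature change above drops the explicitly listed, unused sudo-specific parameters in favor of **kwargs, which keeps the SSH backend call-compatible with a dispatcher that passes one common set of keyword arguments to whichever backend is configured. An illustrative pattern only, not the actual dispatch code (which lives in fractal_server/app/runner/v2/__init__.py and is not fully shown here):

def process_workflow_ssh(*, user_id: int, worker_init=None, **kwargs) -> None:
    # Sudo-only keywords (e.g. slurm_user, user_cache_dir) are absorbed and ignored.
    print(f"ssh backend: {user_id=}, ignored={sorted(kwargs)}")


def process_workflow_sudo(
    *, user_id: int, slurm_user=None, user_cache_dir=None, worker_init=None, **kwargs
) -> None:
    print(f"sudo backend: {user_id=}, {slurm_user=}")


common_kwargs = dict(
    user_id=1, worker_init="module load x", slurm_user="alice", user_cache_dir="/tmp/c"
)
process_workflow_ssh(**common_kwargs)   # extra keywords land in **kwargs
process_workflow_sudo(**common_kwargs)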
fractal_server/app/runner/v2/{_slurm_sudo/__init__.py → _slurm_sudo.py}
@@ -19,12 +19,12 @@ Executor objects.
 from pathlib import Path
 from typing import Optional
 
-from ....models.v2 import DatasetV2
-from ....models.v2 import WorkflowV2
-from ...executors.slurm.sudo.executor import FractalSlurmSudoExecutor
-from ...set_start_and_last_task_index import set_start_and_last_task_index
-from ..runner import execute_tasks_v2
-from ._submit_setup import _slurm_submit_setup
+from ...models.v2 import DatasetV2
+from ...models.v2 import WorkflowV2
+from ..executors.slurm_common._submit_setup import _slurm_submit_setup
+from ..executors.slurm_sudo.runner import RunnerSlurmSudo
+from ..set_start_and_last_task_index import set_start_and_last_task_index
+from .runner import execute_tasks_v2
 from fractal_server.images.models import AttributeFiltersType
 
 
@@ -38,6 +38,7 @@ def process_workflow(
     last_task_index: Optional[int] = None,
     logger_name: str,
     job_attribute_filters: AttributeFiltersType,
+    user_id: int,
     # Slurm-specific
     user_cache_dir: Optional[str] = None,
     slurm_user: Optional[str] = None,
@@ -64,13 +65,11 @@ def process_workflow(
     if isinstance(worker_init, str):
         worker_init = worker_init.split("\n")
 
-    with FractalSlurmSudoExecutor(
-        debug=True,
-        keep_logs=True,
+    with RunnerSlurmSudo(
         slurm_user=slurm_user,
         user_cache_dir=user_cache_dir,
-        workflow_dir_local=workflow_dir_local,
-        workflow_dir_remote=workflow_dir_remote,
+        root_dir_local=workflow_dir_local,
+        root_dir_remote=workflow_dir_remote,
         common_script_lines=worker_init,
         slurm_account=slurm_account,
     ) as executor:
@@ -79,10 +78,11 @@ def process_workflow(
                first_task_index : (last_task_index + 1)
            ],
            dataset=dataset,
-            executor=executor,
+            runner=executor,
            workflow_dir_local=workflow_dir_local,
            workflow_dir_remote=workflow_dir_remote,
            logger_name=logger_name,
            submit_setup_call=_slurm_submit_setup,
            job_attribute_filters=job_attribute_filters,
+            user_id=user_id,
        )