fractal-server 2.14.4a0__py3-none-any.whl → 2.14.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/runner/exceptions.py +1 -6
- fractal_server/app/runner/executors/base_runner.py +16 -4
- fractal_server/app/runner/executors/call_command_wrapper.py +52 -0
- fractal_server/app/runner/executors/local/runner.py +52 -13
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +87 -52
- fractal_server/app/runner/executors/slurm_common/remote.py +47 -92
- fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py +20 -19
- fractal_server/app/runner/executors/slurm_ssh/runner.py +1 -2
- fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py +1 -4
- fractal_server/app/runner/executors/slurm_sudo/runner.py +3 -11
- fractal_server/app/runner/task_files.py +0 -8
- fractal_server/app/runner/v2/_slurm_ssh.py +1 -2
- fractal_server/app/runner/v2/_slurm_sudo.py +1 -2
- fractal_server/app/runner/v2/runner_functions.py +16 -30
- fractal_server/app/runner/versions.py +2 -11
- fractal_server/config.py +0 -9
- fractal_server/ssh/_fabric.py +4 -1
- {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.5.dist-info}/METADATA +1 -7
- {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.5.dist-info}/RECORD +23 -24
- fractal_server/app/runner/executors/slurm_common/utils_executors.py +0 -58
- fractal_server/app/runner/v2/runner_functions_low_level.py +0 -122
- {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.5.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.5.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.4a0.dist-info → fractal_server-2.14.5.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.14.4a0"
+__VERSION__ = "2.14.5"
fractal_server/app/runner/exceptions.py
CHANGED
@@ -46,12 +46,7 @@ class JobExecutionError(RuntimeError):
    Forwards errors in the execution of a task that are due to external factors

    This error wraps and forwards errors occurred during the execution of
-    tasks, but related to external factors like
-
-    1. A negative exit code (e.g. because the task received a TERM or KILL
-       signal);
-    2. An error on the executor side (e.g. the SLURM executor could not
-       find the pickled file with task output).
+    tasks, but related to external factors like an error on the executor side.

    This error also adds information that is useful to track down and debug the
    failing task within a workflow.
fractal_server/app/runner/executors/base_runner.py
CHANGED
@@ -26,7 +26,10 @@ class BaseRunner(object):

    def submit(
        self,
-
+        base_command: str,
+        workflow_task_order: int,
+        workflow_task_id: int,
+        task_name: str,
        parameters: dict[str, Any],
        history_unit_id: int,
        task_type: TaskTypeType,
@@ -38,7 +41,10 @@ class BaseRunner(object):
        Run a single fractal task.

        Args:
-
+            base_command:
+            workflow_task_order:
+            workflow_task_id:
+            task_name:
            parameters: Dictionary of parameters.
            history_unit_id:
                Database ID of the corresponding `HistoryUnit` entry.
@@ -51,7 +57,10 @@ class BaseRunner(object):

    def multisubmit(
        self,
-
+        base_command: str,
+        workflow_task_order: int,
+        workflow_task_id: int,
+        task_name: str,
        list_parameters: list[dict[str, Any]],
        history_unit_ids: list[int],
        list_task_files: list[TaskFiles],
@@ -63,7 +72,10 @@ class BaseRunner(object):
        Run a parallel fractal task.

        Args:
-
+            base_command:
+            workflow_task_order:
+            workflow_task_id:
+            task_name:
            parameters:
                Dictionary of parameters. Must include `zarr_urls` key.
            history_unit_ids:
fractal_server/app/runner/executors/call_command_wrapper.py
ADDED
@@ -0,0 +1,52 @@
+import os
+import shlex
+import shutil
+import subprocess  # nosec
+
+from fractal_server.app.runner.exceptions import TaskExecutionError
+from fractal_server.string_tools import validate_cmd
+
+
+def call_command_wrapper(*, cmd: str, log_path: str) -> None:
+    """
+    Call a command and write its stdout and stderr to files
+
+    Args:
+        cmd:
+        log_path:
+    """
+    try:
+        validate_cmd(cmd)
+    except ValueError as e:
+        raise TaskExecutionError(f"Invalid command. Original error: {str(e)}")
+
+    split_cmd = shlex.split(cmd)
+
+    # Verify that task command is executable
+    if shutil.which(split_cmd[0]) is None:
+        msg = (
+            f'Command "{split_cmd[0]}" is not valid. '
+            "Hint: make sure that it is executable."
+        )
+        raise TaskExecutionError(msg)
+
+    with open(log_path, "w") as fp_log:
+        try:
+            result = subprocess.run(  # nosec
+                split_cmd,
+                stderr=fp_log,
+                stdout=fp_log,
+            )
+        except Exception as e:
+            # This is likely unreachable
+            raise e
+
+    if result.returncode != 0:
+        stderr = ""
+        if os.path.isfile(log_path):
+            with open(log_path, "r") as fp_stderr:
+                stderr = fp_stderr.read()
+        raise TaskExecutionError(
+            f"Task failed with returncode={result.returncode}.\n"
+            f"STDERR: {stderr}"
+        )
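The local runner (next diff) routes every task command through this helper, which writes the command's stdout/stderr to a log file and turns a non-zero return code into a `TaskExecutionError`. A minimal usage sketch, with a purely hypothetical command and log path:

    from fractal_server.app.runner.exceptions import TaskExecutionError
    from fractal_server.app.runner.executors.call_command_wrapper import (
        call_command_wrapper,
    )

    try:
        # stdout/stderr of the command end up in the log file; a non-zero
        # return code is re-raised as a TaskExecutionError that includes the
        # log content.
        call_command_wrapper(
            cmd="python3 /tasks/my_task.py --args-json /tmp/args.json",
            log_path="/tmp/my_task.log",
        )
    except TaskExecutionError as e:
        print(f"Task failed:\n{e}")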
fractal_server/app/runner/executors/local/runner.py
CHANGED
@@ -1,9 +1,11 @@
+import json
 from concurrent.futures import Future
 from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path
 from typing import Any
 from typing import Literal

+from ..call_command_wrapper import call_command_wrapper
 from .get_local_config import LocalBackendConfig
 from fractal_server.app.db import get_sync_db
 from fractal_server.app.runner.exceptions import TaskExecutionError
@@ -16,10 +18,40 @@ from fractal_server.app.runner.v2.db_tools import update_status_of_history_unit
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
 from fractal_server.logger import set_logger

-
 logger = set_logger(__name__)


+def run_single_task(
+    base_command: str,
+    parameters: dict[str, Any],
+    task_files: TaskFiles,
+):
+
+    # Write args.json file
+    with open(task_files.args_file_local, "w") as f:
+        json.dump(parameters, f)
+
+    # Run command
+    full_command = (
+        f"{base_command} "
+        f"--args-json {task_files.args_file_local} "
+        f"--out-json {task_files.metadiff_file_local}"
+    )
+
+    call_command_wrapper(
+        cmd=full_command,
+        log_path=task_files.log_file_local,
+    )
+
+    try:
+        with open(task_files.metadiff_file_local, "r") as f:
+            out_meta = json.load(f)
+        return out_meta
+    except FileNotFoundError:
+        # Command completed, but it produced no metadiff file
+        return None
+
+
 class LocalRunner(BaseRunner):
     executor: ThreadPoolExecutor
     root_dir_local: Path
@@ -47,17 +79,20 @@ class LocalRunner(BaseRunner):

     def submit(
         self,
-
+        base_command: str,
+        workflow_task_order: int,
+        workflow_task_id: int,
+        task_name: str,
         parameters: dict[str, Any],
         history_unit_id: int,
         task_files: TaskFiles,
+        config: LocalBackendConfig,
         task_type: Literal[
             "non_parallel",
             "converter_non_parallel",
             "compound",
             "converter_compound",
         ],
-        config: LocalBackendConfig,
         user_id: int,
     ) -> tuple[Any, Exception]:
         logger.debug("[submit] START")
@@ -69,9 +104,10 @@ class LocalRunner(BaseRunner):

             # SUBMISSION PHASE
             future = self.executor.submit(
-
+                run_single_task,
+                base_command=base_command,
                 parameters=parameters,
-
+                task_files=task_files,
             )
         except Exception as e:
             logger.error(
@@ -111,7 +147,10 @@ class LocalRunner(BaseRunner):

     def multisubmit(
         self,
-
+        base_command: str,
+        workflow_task_order: int,
+        workflow_task_id: int,
+        task_name: str,
         list_parameters: list[dict],
         history_unit_ids: list[int],
         list_task_files: list[TaskFiles],
@@ -139,8 +178,9 @@ class LocalRunner(BaseRunner):
         )

         workdir_local = list_task_files[0].wftask_subfolder_local
-
-
+        # Note: the `mkdir` is not needed for compound tasks, but it is
+        # needed for parallel tasks
+        workdir_local.mkdir(exist_ok=True)

         # Set `n_elements` and `parallel_tasks_per_job`
         n_elements = len(list_parameters)
@@ -178,11 +218,10 @@ class LocalRunner(BaseRunner):
                 positional_index = ind_chunk + ind_within_chunk
                 try:
                     future = self.executor.submit(
-
-
-
-
-                        ].remote_files_dict,
+                        run_single_task,
+                        base_command=base_command,
+                        parameters=list_parameters[positional_index],
+                        task_files=list_task_files[positional_index],
                     )
                     active_futures[positional_index] = future
                 except Exception as e:
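In short, the local backend now serializes task parameters to an args file, invokes the task command through `call_command_wrapper`, and reads back an optional metadiff file. A rough, self-contained sketch of that flow (directory, paths, and command below are hypothetical):

    import json
    import os

    # Hypothetical working directory and task command, for illustration only.
    workdir = "/tmp/fractal_example"
    os.makedirs(workdir, exist_ok=True)
    args_file = f"{workdir}/args.json"
    metadiff_file = f"{workdir}/metadiff.json"
    base_command = "python3 /tasks/my_task.py"

    # 1. Task parameters are dumped to an args file ...
    with open(args_file, "w") as f:
        json.dump({"zarr_url": "/data/plate.zarr/A/01/0"}, f)

    # 2. ... the command is extended with --args-json/--out-json and executed
    #    (through call_command_wrapper in the actual runner) ...
    full_command = (
        f"{base_command} --args-json {args_file} --out-json {metadiff_file}"
    )
    print(full_command)

    # 3. ... and, if the task wrote a metadiff file, it is loaded back as the
    #    task's return value (otherwise None).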
fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py
CHANGED
@@ -7,7 +7,8 @@ from typing import Any
 from typing import Literal
 from typing import Optional

-import cloudpickle
+from pydantic import BaseModel
+from pydantic import ConfigDict

 from ..slurm_common._slurm_config import SlurmConfig
 from ..slurm_common.slurm_job_task_models import SlurmJob
@@ -36,6 +37,17 @@ SHUTDOWN_EXCEPTION = JobExecutionError(SHUTDOWN_ERROR_MESSAGE)
 logger = set_logger(__name__)


+class RemoteInputData(BaseModel):
+    model_config = ConfigDict(extra="forbid")
+
+    python_version: tuple[int, int, int]
+    fractal_server_version: str
+    full_command: str
+
+    metadiff_file_remote: str
+    log_file_remote: str
+
+
 def create_accounting_record_slurm(
     *,
     user_id: int,
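`RemoteInputData` describes the JSON payload that replaces the old pickled input for each SLURM task (see the `_submit_single_sbatch` hunk below). A sketch of what one such payload might look like, with made-up values:

    from fractal_server.app.runner.executors.slurm_common.base_slurm_runner import (
        RemoteInputData,
    )

    # Illustrative values only; real paths come from each task's TaskFiles.
    data = RemoteInputData(
        python_version=(3, 11, 9),
        fractal_server_version="2.14.5",
        full_command=(
            "/remote/venv/bin/python /remote/tasks/my_task.py "
            "--args-json /remote/job/args.json "
            "--out-json /remote/job/metadiff.json"
        ),
        metadiff_file_remote="/remote/job/metadiff.json",
        log_file_remote="/remote/job/log.txt",
    )
    print(data.model_dump_json(indent=2))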
@@ -121,7 +133,6 @@ class BaseSlurmRunner(BaseRunner):
         raise NotImplementedError("Implement in child class.")

     def _get_finished_jobs(self, job_ids: list[str]) -> set[str]:
-
         # If there is no Slurm job to check, return right away
         if not job_ids:
             return set()
@@ -168,58 +179,74 @@

     def _submit_single_sbatch(
         self,
-
+        *,
+        base_command: str,
         slurm_job: SlurmJob,
         slurm_config: SlurmConfig,
     ) -> str:
         logger.debug("[_submit_single_sbatch] START")
-
-        versions = dict(
-            python=sys.version_info[:3],
-            cloudpickle=cloudpickle.__version__,
-            fractal_server=__VERSION__,
-        )
+
         for task in slurm_job.tasks:
-            # Write input
-
-
-
-
+            # Write input file
+            if self.slurm_runner_type == "ssh":
+                args_file_remote = task.task_files.args_file_remote
+            else:
+                args_file_remote = task.task_files.args_file_local
+            metadiff_file_remote = task.task_files.metadiff_file_remote
+            full_command = (
+                f"{base_command} "
+                f"--args-json {args_file_remote} "
+                f"--out-json {metadiff_file_remote}"
             )
-
-
-
+
+            input_data = RemoteInputData(
+                full_command=full_command,
+                python_version=sys.version_info[:3],
+                fractal_server_version=__VERSION__,
+                metadiff_file_remote=task.task_files.metadiff_file_remote,
+                log_file_remote=task.task_files.log_file_remote,
+            )
+
+            with open(task.input_file_local, "w") as f:
+                json.dump(input_data.model_dump(), f, indent=2)
+
+            with open(task.task_files.args_file_local, "w") as f:
+                json.dump(task.parameters, f, indent=2)
+
             logger.debug(
-                "[_submit_single_sbatch] Written "
-                f"{task.input_pickle_file_local=}"
+                "[_submit_single_sbatch] Written " f"{task.input_file_local=}"
             )

             if self.slurm_runner_type == "ssh":
-                # Send input
+                # Send input file (only relevant for SSH)
                 self.fractal_ssh.send_file(
-                    local=task.
-                    remote=task.
+                    local=task.input_file_local,
+                    remote=task.input_file_remote,
+                )
+                self.fractal_ssh.send_file(
+                    local=task.task_files.args_file_local,
+                    remote=task.task_files.args_file_remote,
                 )
                 logger.debug(
                     "[_submit_single_sbatch] Transferred "
-                    f"{task.
+                    f"{task.input_file_local=}"
                 )

         # Prepare commands to be included in SLURM submission script
         cmdlines = []
         for task in slurm_job.tasks:
             if self.slurm_runner_type == "ssh":
-
+                input_file = task.input_file_remote
             else:
-
-
+                input_file = task.input_file_local
+            output_file = task.output_file_remote
             cmdlines.append(
                 (
                     f"{self.python_worker_interpreter}"
                     " -m fractal_server.app.runner."
                     "executors.slurm_common.remote "
-                    f"--input-file {
-                    f"--output-file {
+                    f"--input-file {input_file} "
+                    f"--output-file {output_file}"
                 )
             )

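Each task is still executed through the `slurm_common.remote` worker module inside the sbatch script; only the exchanged files changed from pickles to JSON. The command line built by the loop above comes out roughly like this (interpreter and file paths are hypothetical):

    # Hypothetical reconstruction of one entry of `cmdlines`.
    python_worker_interpreter = "/opt/fractal/venv/bin/python"
    input_file = "/remote/job/task-input.json"
    output_file = "/remote/job/task-output.json"

    cmdline = (
        f"{python_worker_interpreter}"
        " -m fractal_server.app.runner."
        "executors.slurm_common.remote "
        f"--input-file {input_file} "
        f"--output-file {output_file}"
    )
    print(cmdline)
    # /opt/fractal/venv/bin/python -m fractal_server.app.runner.executors.slurm_common.remote \
    #     --input-file /remote/job/task-input.json --output-file /remote/job/task-output.json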
@@ -363,12 +390,12 @@
         was_job_scancelled: bool = False,
     ) -> tuple[Any, Exception]:
         try:
-            with open(task.
-
-            success
+            with open(task.output_file_local, "r") as f:
+                output = json.load(f)
+            success = output[0]
             if success:
                 # Task succeeded
-                result = output
+                result = output[1]
                 return (result, None)
             else:
                 # Task failed in a controlled way, and produced an `output`
@@ -376,21 +403,18 @@
                 # `exc_type_name` and `traceback_string` and with optional
                 # keys `workflow_task_order`, `workflow_task_id` and
                 # `task_name`.
-
+                exc_proxy = output[1]
+                exc_type_name = exc_proxy.get("exc_type_name")
                 logger.debug(
-                    f"Output
+                    f"Output file contains a '{exc_type_name}' exception."
+                )
+                traceback_string = output[1].get("traceback_string")
+                exception = TaskExecutionError(
+                    traceback_string,
+                    workflow_task_id=task.workflow_task_id,
+                    workflow_task_order=task.workflow_task_order,
+                    task_name=task.task_name,
                 )
-                traceback_string = output.get("traceback_string")
-                kwargs = {
-                    key: output[key]
-                    for key in [
-                        "workflow_task_order",
-                        "workflow_task_id",
-                        "task_name",
-                    ]
-                    if key in output.keys()
-                }
-                exception = TaskExecutionError(traceback_string, **kwargs)
                 return (None, exception)

             except Exception as e:
@@ -405,8 +429,8 @@
             exception = SHUTDOWN_EXCEPTION
             return (None, exception)
         finally:
-            Path(task.
-            Path(task.
+            Path(task.input_file_local).unlink(missing_ok=True)
+            Path(task.output_file_local).unlink(missing_ok=True)

     def is_shutdown(self) -> bool:
         return self.shutdown_file.exists()
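The output file read back here is a two-element JSON list: `[true, result]` when the task succeeded, or `[false, proxy]` where `proxy` is a small dict with `exc_type_name` and `traceback_string` keys. A hedged parsing sketch with illustrative content:

    import json

    # Illustrative output-file contents (the real files are written by the
    # slurm_common.remote worker).
    success_example = json.dumps([True, {"some": "metadiff"}])
    failure_example = json.dumps(
        [
            False,
            {
                "exc_type_name": "ValueError",
                "traceback_string": "Traceback (most recent call last): ...",
            },
        ]
    )

    for raw in (success_example, failure_example):
        output = json.loads(raw)
        success = output[0]
        if success:
            print("result:", output[1])
        else:
            print("failed with:", output[1]["exc_type_name"])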
@@ -451,7 +475,10 @@

     def submit(
         self,
-
+        base_command: str,
+        workflow_task_order: int,
+        workflow_task_id: int,
+        task_name: str,
         parameters: dict[str, Any],
         history_unit_id: int,
         task_files: TaskFiles,
@@ -507,13 +534,16 @@
                         workdir_remote=workdir_remote,
                         workdir_local=workdir_local,
                         task_files=task_files,
+                        workflow_task_order=workflow_task_order,
+                        workflow_task_id=workflow_task_id,
+                        task_name=task_name,
                     )
                 ],
             )

         config.parallel_tasks_per_job = 1
         self._submit_single_sbatch(
-
+            base_command=base_command,
             slurm_job=slurm_job,
             slurm_config=config,
         )
@@ -586,7 +616,10 @@

     def multisubmit(
         self,
-
+        base_command: str,
+        workflow_task_order: int,
+        workflow_task_id: int,
+        task_name: str,
         list_parameters: list[dict],
         history_unit_ids: list[int],
         list_task_files: list[TaskFiles],
@@ -602,7 +635,6 @@

         logger.debug(f"[multisubmit] START, {len(list_parameters)=}")
         try:
-
             if self.is_shutdown():
                 if task_type == "parallel":
                     with next(get_sync_db()) as db:
@@ -672,6 +704,9 @@
                         parameters=parameters,
                         zarr_url=parameters["zarr_url"],
                         task_files=list_task_files[index],
+                        workflow_task_order=workflow_task_order,
+                        workflow_task_id=workflow_task_id,
+                        task_name=task_name,
                     ),
                 )
                 jobs_to_submit.append(
@@ -687,7 +722,7 @@
             logger.debug("[multisubmit] Transfer files and submit jobs.")
             for slurm_job in jobs_to_submit:
                 self._submit_single_sbatch(
-
+                    base_command=base_command,
                     slurm_job=slurm_job,
                     slurm_config=config,
                 )