fractal-server 2.14.0a33__py3-none-any.whl → 2.14.0a35__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -54,18 +54,21 @@ def _check_versions_mismatch(
54
54
  do not match with the ones on the server
55
55
  """
56
56
 
57
- server_python_version = server_versions["python"]
58
- worker_python_version = sys.version_info[:3]
57
+ server_python_version = list(server_versions["python"])
58
+ worker_python_version = list(sys.version_info[:3])
59
59
  if worker_python_version != server_python_version:
60
- # FIXME: turn this into an error, after fixing a broader CI issue, see
61
- # https://github.com/fractal-analytics-platform/fractal-server/issues/375
62
- logging.warning(
63
- f"{server_python_version=} but {worker_python_version=}. "
64
- "cloudpickle is not guaranteed to correctly load "
65
- "pickle files created with different python versions. "
66
- "Note, however, that if you reached this line it means that "
67
- "the pickle file was likely loaded correctly."
68
- )
60
+ if worker_python_version[:2] != server_python_version[:2]:
61
+ # FIXME: Turn this into an error, in some version post 2.14.
62
+ logging.error(
63
+ f"{server_python_version=} but {worker_python_version=}. "
64
+ "This configuration will be deprecated in a future version, "
65
+ "please contact the admin of this Fractal instance."
66
+ )
67
+ else:
68
+ # Major.minor versions match, patch versions differ
69
+ logging.warning(
70
+ f"{server_python_version=} but {worker_python_version=}."
71
+ )
69
72
 
70
73
  server_cloudpickle_version = server_versions["cloudpickle"]
71
74
  worker_cloudpickle_version = cloudpickle.__version__
@@ -9,6 +9,8 @@ from fractal_server.app.runner.extract_archive import extract_archive
9
9
  from fractal_server.config import get_settings
10
10
  from fractal_server.logger import set_logger
11
11
  from fractal_server.ssh._fabric import FractalSSH
12
+ from fractal_server.ssh._fabric import FractalSSHCommandError
13
+ from fractal_server.ssh._fabric import FractalSSHTimeoutError
12
14
  from fractal_server.syringe import Inject
13
15
 
14
16
 
@@ -111,7 +113,7 @@ class SlurmSSHRunner(BaseSlurmRunner):
111
113
  elapsed = time.perf_counter() - t_0
112
114
  logger.debug(
113
115
  "[_fetch_artifacts] Created filelist "
114
- f"({len(filelist)=}, from start: {elapsed:.3f} s)."
116
+ f"({len(filelist)=}, from start: {elapsed=:.3f} s)."
115
117
  )
116
118
 
117
119
  # Write filelist to file remotely
@@ -123,7 +125,7 @@ class SlurmSSHRunner(BaseSlurmRunner):
123
125
  elapsed = time.perf_counter() - t_0
124
126
  logger.debug(
125
127
  f"[_fetch_artifacts] File list written to {tmp_filelist_path} "
126
- f"(from start: {elapsed:.3f} s)."
128
+ f"(from start: {elapsed=:.3f} s)."
127
129
  )
128
130
 
129
131
  # Create remote tarfile
@@ -138,7 +140,7 @@ class SlurmSSHRunner(BaseSlurmRunner):
138
140
  t_1_tar = time.perf_counter()
139
141
  logger.info(
140
142
  f"[_fetch_artifacts] Remote archive {tarfile_path_remote} created"
141
- f" - elapsed: {t_1_tar - t_0_tar:.3f} s"
143
+ f" - elapsed={t_1_tar - t_0_tar:.3f} s"
142
144
  )
143
145
 
144
146
  # Fetch tarfile
@@ -151,7 +153,7 @@ class SlurmSSHRunner(BaseSlurmRunner):
151
153
  logger.info(
152
154
  "[_fetch_artifacts] Subfolder archive transferred back "
153
155
  f"to {tarfile_path_local}"
154
- f" - elapsed: {t_1_get - t_0_get:.3f} s"
156
+ f" - elapsed={t_1_get - t_0_get:.3f} s"
155
157
  )
156
158
 
157
159
  # Extract tarfile locally
@@ -161,7 +163,7 @@ class SlurmSSHRunner(BaseSlurmRunner):
161
163
  Path(tarfile_path_local).unlink(missing_ok=True)
162
164
 
163
165
  t_1 = time.perf_counter()
164
- logger.info(f"[_fetch_artifacts] End - elapsed: {t_1 - t_0:.3f} s")
166
+ logger.info(f"[_fetch_artifacts] End - elapsed={t_1 - t_0:.3f} s")
165
167
 
166
168
  def _send_inputs(self, jobs: list[SlurmJob]) -> None:
167
169
  """
@@ -188,7 +190,7 @@ class SlurmSSHRunner(BaseSlurmRunner):
188
190
  t_1_put = time.perf_counter()
189
191
  logger.info(
190
192
  f"Subfolder archive transferred to {tarfile_path_remote}"
191
- f" - elapsed: {t_1_put - t_0_put:.3f} s"
193
+ f" - elapsed={t_1_put - t_0_put:.3f} s"
192
194
  )
193
195
 
194
196
  # Remove local archive
@@ -206,3 +208,61 @@ class SlurmSSHRunner(BaseSlurmRunner):
206
208
  def _run_remote_cmd(self, cmd: str) -> str:
207
209
  stdout = self.fractal_ssh.run_command(cmd=cmd)
208
210
  return stdout
211
+
212
+ def run_squeue(
213
+ self,
214
+ *,
215
+ job_ids: list[str],
216
+ base_interval: float = 2.0,
217
+ max_attempts: int = 7,
218
+ ) -> str:
219
+ """
220
+ Run `squeue` for a set of SLURM job IDs.
221
+
222
+ Different scenarios:
223
+
224
+ 1. When `squeue -j` succeeds (with exit code 0), return its stdout.
225
+ 2. When `squeue -j` fails (typical example:
226
+ `squeue -j {invalid_job_id}` fails with exit code 1), re-raise.
227
+ The error will be handled upstream.
228
+ 3. When the SSH command fails because another thread is keeping the
229
+ lock of the `FractalSSH` object for a long time, mock the standard
230
+ output of the `squeue` command so that it looks like jobs are not
231
+ completed yet.
232
+ 4. When the SSH command fails for other reasons, despite a forgiving
233
+ setup (7 connection attempts with base waiting interval of 2
234
+ seconds, with a cumulative timeout of 126 seconds), return an empty
235
+ string. This will be treated upstream as an empty `squeue` output,
236
+ indirectly resulting in marking the job as completed.
237
+ """
238
+
239
+ if len(job_ids) == 0:
240
+ return ""
241
+
242
+ job_id_single_str = ",".join([str(j) for j in job_ids])
243
+ cmd = (
244
+ "squeue --noheader --format='%i %T' --states=all "
245
+ f"--jobs={job_id_single_str}"
246
+ )
247
+
248
+ try:
249
+ stdout = self.fractal_ssh.run_command(
250
+ cmd=cmd,
251
+ base_interval=base_interval,
252
+ max_attempts=max_attempts,
253
+ )
254
+ return stdout
255
+ except FractalSSHCommandError as e:
256
+ raise e
257
+ except FractalSSHTimeoutError:
258
+ logger.warning(
259
+ "[run_squeue] Could not acquire lock, use stdout placeholder."
260
+ )
261
+ FAKE_STATUS = "FRACTAL_STATUS_PLACEHOLDER"
262
+ placeholder_stdout = "\n".join(
263
+ [f"{job_id} {FAKE_STATUS}" for job_id in job_ids]
264
+ )
265
+ return placeholder_stdout
266
+ except Exception as e:
267
+ logger.error(f"Ignoring `squeue` command failure {e}")
268
+ return ""
@@ -88,18 +88,3 @@ def _mkdir_as_user(*, folder: str, user: str) -> None:
88
88
 
89
89
  cmd = f"mkdir -p {folder}"
90
90
  _run_command_as_user(cmd=cmd, user=user, check=True)
91
-
92
-
93
- def _path_exists_as_user(*, path: str, user: Optional[str] = None) -> bool:
94
- """
95
- Impersonate a user and check if `path` exists via `ls`
96
-
97
- Arguments:
98
- path: Absolute file/folder path
99
- user: If not `None`, user to be impersonated
100
- """
101
- res = _run_command_as_user(cmd=f"ls {path}", user=user)
102
- if res.returncode == 0:
103
- return True
104
- else:
105
- return False
@@ -176,6 +176,18 @@ class SudoSlurmRunner(BaseSlurmRunner):
176
176
  )
177
177
  return res.stdout
178
178
 
179
- def _run_local_cmd(self, cmd: str) -> str:
179
+ def run_squeue(self, job_ids: list[str]) -> str:
180
+ """
181
+ Run `squeue` for a set of SLURM job IDs.
182
+ """
183
+
184
+ if len(job_ids) == 0:
185
+ return ""
186
+
187
+ job_id_single_str = ",".join([str(j) for j in job_ids])
188
+ cmd = (
189
+ "squeue --noheader --format='%i %T' --states=all "
190
+ f"--jobs {job_id_single_str}"
191
+ )
180
192
  res = _subprocess_run_or_raise(cmd)
181
193
  return res.stdout
@@ -144,6 +144,7 @@ def execute_tasks_v2(
144
144
  get_runner_config=get_runner_config,
145
145
  history_run_id=history_run_id,
146
146
  dataset_id=dataset.id,
147
+ user_id=user_id,
147
148
  task_type=task.type,
148
149
  )
149
150
  elif task.type == "parallel":
@@ -157,6 +158,7 @@ def execute_tasks_v2(
157
158
  get_runner_config=get_runner_config,
158
159
  history_run_id=history_run_id,
159
160
  dataset_id=dataset.id,
161
+ user_id=user_id,
160
162
  )
161
163
  elif task.type in ["compound", "converter_compound"]:
162
164
  outcomes_dict, num_tasks = run_v2_task_compound(
@@ -171,6 +173,7 @@ def execute_tasks_v2(
171
173
  history_run_id=history_run_id,
172
174
  dataset_id=dataset.id,
173
175
  task_type=task.type,
176
+ user_id=user_id,
174
177
  )
175
178
  else:
176
179
  raise ValueError(f"Unexpected error: Invalid {task.type=}.")
@@ -139,6 +139,7 @@ def run_v2_task_non_parallel(
139
139
  dataset_id: int,
140
140
  history_run_id: int,
141
141
  task_type: Literal["non_parallel", "converter_non_parallel"],
142
+ user_id: int,
142
143
  ) -> tuple[dict[int, SubmissionOutcome], int]:
143
144
  """
144
145
  This runs server-side (see `executor` argument)
@@ -218,6 +219,7 @@ def run_v2_task_non_parallel(
218
219
  task_files=task_files,
219
220
  history_unit_id=history_unit_id,
220
221
  config=runner_config,
222
+ user_id=user_id,
221
223
  )
222
224
 
223
225
  positional_index = 0
@@ -261,6 +263,7 @@ def run_v2_task_parallel(
261
263
  ],
262
264
  dataset_id: int,
263
265
  history_run_id: int,
266
+ user_id: int,
264
267
  ) -> tuple[dict[int, SubmissionOutcome], int]:
265
268
  if len(images) == 0:
266
269
  return {}, 0
@@ -344,6 +347,7 @@ def run_v2_task_parallel(
344
347
  list_task_files=list_task_files,
345
348
  history_unit_ids=history_unit_ids,
346
349
  config=runner_config,
350
+ user_id=user_id,
347
351
  )
348
352
 
349
353
  outcome = {}
@@ -394,6 +398,7 @@ def run_v2_task_compound(
394
398
  dataset_id: int,
395
399
  history_run_id: int,
396
400
  task_type: Literal["compound", "converter_compound"],
401
+ user_id: int,
397
402
  ) -> tuple[dict[int, SubmissionOutcome], int]:
398
403
  # Get TaskFiles object
399
404
  task_files_init = TaskFiles(
@@ -463,6 +468,7 @@ def run_v2_task_compound(
463
468
  task_files=task_files_init,
464
469
  history_unit_id=init_history_unit_id,
465
470
  config=runner_config_init,
471
+ user_id=user_id,
466
472
  )
467
473
 
468
474
  init_outcome = _process_init_task_output(
@@ -565,6 +571,7 @@ def run_v2_task_compound(
565
571
  list_task_files=list_task_files,
566
572
  history_unit_ids=history_unit_ids,
567
573
  config=runner_config_compute,
574
+ user_id=user_id,
568
575
  )
569
576
 
570
577
  compute_outcomes: dict[int, SubmissionOutcome] = {}
@@ -23,6 +23,18 @@ class FractalSSHTimeoutError(RuntimeError):
23
23
  pass
24
24
 
25
25
 
26
+ class FractalSSHConnectionError(RuntimeError):
27
+ pass
28
+
29
+
30
+ class FractalSSHCommandError(RuntimeError):
31
+ pass
32
+
33
+
34
+ class FractalSSHUnknownError(RuntimeError):
35
+ pass
36
+
37
+
26
38
  logger = set_logger(__name__)
27
39
 
28
40
 
@@ -170,7 +182,6 @@ class FractalSSH(object):
170
182
  label="read_remote_json_file",
171
183
  timeout=self.default_lock_timeout,
172
184
  ):
173
-
174
185
  try:
175
186
  with self._sftp_unsafe().open(filepath, "r") as f:
176
187
  data = json.load(f)
@@ -263,7 +274,7 @@ class FractalSSH(object):
263
274
  cmd: str,
264
275
  allow_char: Optional[str] = None,
265
276
  max_attempts: Optional[int] = None,
266
- base_interval: Optional[int] = None,
277
+ base_interval: Optional[float] = None,
267
278
  lock_timeout: Optional[int] = None,
268
279
  ) -> str:
269
280
  """
@@ -311,7 +322,7 @@ class FractalSSH(object):
311
322
  t_1 = time.perf_counter()
312
323
  self.logger.info(
313
324
  f"{prefix} END running '{cmd}' over SSH, "
314
- f"elapsed {t_1-t_0:.3f}"
325
+ f"elapsed={t_1 - t_0:.3f}"
315
326
  )
316
327
  self.logger.debug("STDOUT:")
317
328
  self.logger.debug(res.stdout)
@@ -329,12 +340,16 @@ class FractalSSH(object):
329
340
  sleeptime = actual_base_interval**ind_attempt
330
341
  self.logger.warning(
331
342
  f"{prefix} Now sleep {sleeptime:.3f} "
332
- "seconds and continue."
343
+ "seconds and retry."
333
344
  )
334
345
  time.sleep(sleeptime)
335
346
  else:
336
347
  self.logger.error(f"{prefix} Reached last attempt")
337
- break
348
+ raise FractalSSHConnectionError(
349
+ f"Reached last attempt "
350
+ f"({max_attempts=}) for running "
351
+ f"'{cmd}' over SSH"
352
+ )
338
353
  except UnexpectedExit as e:
339
354
  # Case 3: Command fails with an actual error
340
355
  error_msg = (
@@ -342,18 +357,15 @@ class FractalSSH(object):
342
357
  f"Original error:\n{str(e)}."
343
358
  )
344
359
  self.logger.error(error_msg)
345
- raise RuntimeError(error_msg)
360
+ raise FractalSSHCommandError(error_msg)
361
+ except FractalSSHTimeoutError as e:
362
+ raise e
346
363
  except Exception as e:
347
364
  self.logger.error(
348
365
  f"Running command `{cmd}` over SSH failed.\n"
349
366
  f"Original Error:\n{str(e)}."
350
367
  )
351
- raise e
352
-
353
- raise RuntimeError(
354
- f"Reached last attempt ({max_attempts=}) for running "
355
- f"'{cmd}' over SSH"
356
- )
368
+ raise FractalSSHUnknownError(f"{type(e)}: {str(e)}")
357
369
 
358
370
  def send_file(
359
371
  self,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: fractal-server
3
- Version: 2.14.0a33
3
+ Version: 2.14.0a35
4
4
  Summary: Backend component of the Fractal analytics platform
5
5
  License: BSD-3-Clause
6
6
  Author: Tommaso Comparin
@@ -1,4 +1,4 @@
1
- fractal_server/__init__.py,sha256=Tz70KF4J2Mmct3ALQfMgq7zpBD0SmKxrn48VV5J-ow8,26
1
+ fractal_server/__init__.py,sha256=uaL7SX1hlVu1EXJeVBSkUeSUZpES1i7jDygIA1DFLN0,26
2
2
  fractal_server/__main__.py,sha256=rkM8xjY1KeS3l63irB8yCrlVobR-73uDapC4wvrIlxI,6957
3
3
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
4
4
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -70,24 +70,24 @@ fractal_server/app/runner/components.py,sha256=-Ii5l8d_V6f5DFOd-Zsr8VYmOsyqw0Hox
70
70
  fractal_server/app/runner/compress_folder.py,sha256=DX-4IYlSXlMd0EmXDD8M8FxisfKLbooSTrdNtzYAQAM,4876
71
71
  fractal_server/app/runner/exceptions.py,sha256=JC5ufHyeA1hYD_rkZUscI30DD8D903ncag7Z3AArmUY,4215
72
72
  fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
73
- fractal_server/app/runner/executors/base_runner.py,sha256=knWOERUwRLhsd9eq5GwGxH2ZVsvPOZRRjQPGbiExqcU,5052
73
+ fractal_server/app/runner/executors/base_runner.py,sha256=4xxMpYycIeAOz5niaJj2xtVW_Cq-shCxP1qk4g-KwOM,5137
74
74
  fractal_server/app/runner/executors/local/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
75
75
  fractal_server/app/runner/executors/local/get_local_config.py,sha256=KiakXxOahaLgWvQJ1LVGYGXht6DMGR9x8Xu-TuT9aY4,3628
76
- fractal_server/app/runner/executors/local/runner.py,sha256=pcwQ-ow4pJk4mkUg6mODMmfzGiMWX18vPxybrly_evY,6962
76
+ fractal_server/app/runner/executors/local/runner.py,sha256=AfJ2KDUBdLqkeJTdRzYCkfJh4LiGbdnsHROko_Pk9vA,9587
77
77
  fractal_server/app/runner/executors/slurm_common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
78
78
  fractal_server/app/runner/executors/slurm_common/_batching.py,sha256=ZY020JZlDS5mfpgpWTChQkyHU7iLE5kx2HVd57_C6XA,8850
79
79
  fractal_server/app/runner/executors/slurm_common/_job_states.py,sha256=nuV-Zba38kDrRESOVB3gaGbrSPZc4q7YGichQaeqTW0,238
80
80
  fractal_server/app/runner/executors/slurm_common/_slurm_config.py,sha256=_feRRnVVnvQa3AsOQqfULfOgaoj2o6Ze0-fwXwic8p4,15795
81
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py,sha256=3LHrNmJ8VuBSeFI07q4tq41DWtcYTzYJfHvsaezDyoI,30355
81
+ fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py,sha256=kmou-asQJ7SHBR0VPPiQrMLP9gv_NZG3s9t2yoszGhY,33870
82
82
  fractal_server/app/runner/executors/slurm_common/get_slurm_config.py,sha256=BW6fDpPyB0VH5leVxvwzkVH3r3hC7DuSyoWmRzHITWg,7305
83
- fractal_server/app/runner/executors/slurm_common/remote.py,sha256=EB2uASKjrBIr25oc13XvSwf8x-TpTBr9WuaLMwNr2y4,5850
83
+ fractal_server/app/runner/executors/slurm_common/remote.py,sha256=L5llMccL6ctdFpDQvynJl5KbxtATX2wzpq13_3ppw-I,5929
84
84
  fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py,sha256=RoxHLKOn0_wGjnY0Sv0a9nDSiqxYZHKRoMkT3p9_G1E,3607
85
85
  fractal_server/app/runner/executors/slurm_common/utils_executors.py,sha256=naPyJI0I3lD-sYHbSXbMFGUBK4h_SggA5V91Z1Ch1Xg,1416
86
86
  fractal_server/app/runner/executors/slurm_ssh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
87
- fractal_server/app/runner/executors/slurm_ssh/runner.py,sha256=LeEt8a4knm9OCULxhhLkMPBanMW_65ZvL1O-HEA9QMw,7151
87
+ fractal_server/app/runner/executors/slurm_ssh/runner.py,sha256=yKK_cjskHDiasn_QQ-k14GhplP3tNaK7Kp4yiVn44Y0,9437
88
88
  fractal_server/app/runner/executors/slurm_sudo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
89
- fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py,sha256=O1bNg1DiSDJmQE0RmOk2Ii47DagiXp5ryd0R6KxO2OM,3177
90
- fractal_server/app/runner/executors/slurm_sudo/runner.py,sha256=iFaE3EMepbXzmKwqydwYMGJu7D1ak4RhbA43rkVUWZo,5962
89
+ fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py,sha256=BlOz4NElv3v7rUYefyeki33uaJxcSDk6rPuVZx9ocdw,2776
90
+ fractal_server/app/runner/executors/slurm_sudo/runner.py,sha256=lPWkRT499mChP3dNLrdDjMT-nw7-LWv6g58kdF_sMRw,6290
91
91
  fractal_server/app/runner/extract_archive.py,sha256=I7UGIHXXuFvlgVPsP7GMWPu2-DiS1EiyBs7a1bvgkxI,2458
92
92
  fractal_server/app/runner/filenames.py,sha256=lPnxKHtdRizr6FqG3zOdjDPyWA7GoaJGTtiuJV0gA8E,70
93
93
  fractal_server/app/runner/run_subprocess.py,sha256=c3JbYXq3hX2aaflQU19qJ5Xs6J6oXGNvnTEoAfv2bxc,959
@@ -101,8 +101,8 @@ fractal_server/app/runner/v2/_slurm_sudo.py,sha256=TVihkQKMX6YWEWxXJjQo0WEQOjVy7
101
101
  fractal_server/app/runner/v2/db_tools.py,sha256=du5dKhMMFMErQXbGIgu9JvO_vtMensodyPsyDeqz1yQ,3324
102
102
  fractal_server/app/runner/v2/deduplicate_list.py,sha256=IVTE4abBU1bUprFTkxrTfYKnvkNTanWQ-KWh_etiT08,645
103
103
  fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
104
- fractal_server/app/runner/v2/runner.py,sha256=B4kAF1S-zHf2PbyHedfuiaNpu4oslVDp33KgXYcoXIk,15706
105
- fractal_server/app/runner/v2/runner_functions.py,sha256=2W6CFkezUsQ_k8YuC2oOEMtB_-7M9ensyhwCFvlS2No,19096
104
+ fractal_server/app/runner/v2/runner.py,sha256=DUJxltI91v0efnTaTEFCQp7CV5Mtz2KEE0rezJt4mHY,15805
105
+ fractal_server/app/runner/v2/runner_functions.py,sha256=AzsE7VF6NMz_5qc0htQkfow5_2rr-wkx50vFJTndj8I,19250
106
106
  fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=_h_OOffq3d7V0uHa8Uvs0mj31y1GSZBUXjDDF3WjVjY,3620
107
107
  fractal_server/app/runner/v2/submit_workflow.py,sha256=QywUGIoHAHnrWgfnyX8W9kVqKY-RvVyNLpzrbsXZOZ4,13075
108
108
  fractal_server/app/runner/v2/task_interface.py,sha256=IXdQTI8rXFgXv1Ez0js4CjKFf3QwO2GCHRTuwiFtiTQ,2891
@@ -179,7 +179,7 @@ fractal_server/migrations/versions/f384e1c0cf5d_drop_task_default_args_columns.p
179
179
  fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py,sha256=TDWCaIoM0Q4SpRWmR9zr_rdp3lJXhCfBPTMhtrP5xYE,3950
180
180
  fractal_server/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
181
181
  fractal_server/ssh/__init__.py,sha256=sVUmzxf7_DuXG1xoLQ1_00fo5NPhi2LJipSmU5EAkPs,124
182
- fractal_server/ssh/_fabric.py,sha256=gnSv_DaQ8uYLS35Rqb84wo3HRkMazXGVd-D19fo9zqA,22967
182
+ fractal_server/ssh/_fabric.py,sha256=jF7Nny0r3_PL1WjM1Zlw1I73Uqerx-mTaDWQlOaOpa0,23324
183
183
  fractal_server/string_tools.py,sha256=niViRrrZAOo0y6pEFI9L_eUYS1PoOiQZUBtngiLc2_k,1877
184
184
  fractal_server/syringe.py,sha256=3qSMW3YaMKKnLdgnooAINOPxnCOxP7y2jeAQYB21Gdo,2786
185
185
  fractal_server/tasks/__init__.py,sha256=kadmVUoIghl8s190_Tt-8f-WBqMi8u8oU4Pvw39NHE8,23
@@ -209,8 +209,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=Kc_nSzdlV6KIsO0CQSPs1w70zLyENP
209
209
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
210
210
  fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
211
211
  fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
212
- fractal_server-2.14.0a33.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
213
- fractal_server-2.14.0a33.dist-info/METADATA,sha256=GSqnRaq2UDONr0nfs3PW8XvMCbA6IS-Yq9KONzE1Y54,4563
214
- fractal_server-2.14.0a33.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
215
- fractal_server-2.14.0a33.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
216
- fractal_server-2.14.0a33.dist-info/RECORD,,
212
+ fractal_server-2.14.0a35.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
213
+ fractal_server-2.14.0a35.dist-info/METADATA,sha256=_zw2Yj6UEBCiw25HwYUZ8Pp5FVQyVFP1NyYqrmmHPjQ,4563
214
+ fractal_server-2.14.0a35.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
215
+ fractal_server-2.14.0a35.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
216
+ fractal_server-2.14.0a35.dist-info/RECORD,,