fractal-server 2.9.0a11__py3-none-any.whl → 2.9.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- a/fractal_server/__init__.py
+++ b/fractal_server/__init__.py
@@ -1 +1 @@
- __VERSION__ = "2.9.0a11"
+ __VERSION__ = "2.9.1"
--- a/fractal_server/app/routes/auth/group.py
+++ b/fractal_server/app/routes/auth/group.py
@@ -10,6 +10,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
  from sqlmodel import select

  from . import current_active_superuser
+ from ._aux_auth import _get_default_usergroup_id
  from ._aux_auth import _get_single_usergroup_with_user_ids
  from ._aux_auth import _user_or_404
  from ._aux_auth import _usergroup_or_404
@@ -234,16 +235,35 @@ async def remove_user_from_group(
      superuser: UserOAuth = Depends(current_active_superuser),
      db: AsyncSession = Depends(get_async_db),
  ) -> UserGroupRead:
+
+     # Check that user and group exist
      await _usergroup_or_404(group_id, db)
      user = await _user_or_404(user_id, db)
+
+     # Check that group is not the default one
+     default_user_group_id = await _get_default_usergroup_id(db=db)
+     if default_user_group_id == group_id:
+         raise HTTPException(
+             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+             detail=(
+                 f"Cannot remove user from '{FRACTAL_DEFAULT_GROUP_NAME}' "
+                 "group.",
+             ),
+         )
+
      link = await db.get(LinkUserGroup, (group_id, user_id))
      if link is None:
+         # If user and group are not linked, fail
          raise HTTPException(
              status_code=422,
              detail=f"User '{user.email}' is not a member of group {group_id}.",
          )
      else:
+         # If user and group are linked, delete the link
          await db.delete(link)
          await db.commit()
+
+     # Enrich the response object with user_ids
      group = await _get_single_usergroup_with_user_ids(group_id=group_id, db=db)
+
      return group
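The new guard changes the API contract: removing a user from the default user group now fails instead of silently succeeding. A minimal sketch of the observable behavior; the test-client fixtures and the route path are assumptions based on the surrounding module, not part of the diff:

```python
from fastapi.testclient import TestClient


def test_cannot_remove_user_from_default_group(
    superuser_client: TestClient,  # hypothetical authenticated client
    default_group_id: int,  # hypothetical: id of the default user group
    user_id: int,  # hypothetical: a member of the default group
):
    # Route path assumed from the fractal-server 2.9 auth router layout.
    res = superuser_client.post(
        f"/auth/group/{default_group_id}/remove-user/{user_id}/"
    )
    # The new guard rejects removal from the default group with 422.
    assert res.status_code == 422
    assert "Cannot remove user" in str(res.json()["detail"])
```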
--- a/fractal_server/app/runner/executors/slurm/_slurm_config.py
+++ b/fractal_server/app/runner/executors/slurm/_slurm_config.py
@@ -456,20 +456,3 @@ def _parse_mem_value(raw_mem: Union[str, int]) -> int:

      logger.debug(f"{info}, return {mem_MB}")
      return mem_MB
-
-
- def get_default_slurm_config():
-     """
-     Return a default `SlurmConfig` configuration object
-     """
-     return SlurmConfig(
-         partition="main",
-         cpus_per_task=1,
-         mem_per_task_MB=100,
-         target_cpus_per_job=1,
-         max_cpus_per_job=2,
-         target_mem_per_job=100,
-         max_mem_per_job=500,
-         target_num_jobs=2,
-         max_num_jobs=4,
-     )
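With `get_default_slurm_config()` deleted, the implicit fallback configuration is gone and every call site must pass an explicit `SlurmConfig`. A minimal sketch of the explicit construction, reusing the field values from the deleted function purely for illustration (real callers derive these from workflow settings):

```python
from fractal_server.app.runner.executors.slurm._slurm_config import (
    SlurmConfig,
)

# Explicit construction replaces the removed implicit default.
slurm_config = SlurmConfig(
    partition="main",
    cpus_per_task=1,
    mem_per_task_MB=100,
    target_cpus_per_job=1,
    max_cpus_per_job=2,
    target_mem_per_job=100,
    max_mem_per_job=500,
    target_num_jobs=2,
    max_num_jobs=4,
)
```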
--- a/fractal_server/app/runner/executors/slurm/ssh/executor.py
+++ b/fractal_server/app/runner/executors/slurm/ssh/executor.py
@@ -31,9 +31,11 @@ from ....filenames import SHUTDOWN_FILENAME
  from ....task_files import get_task_file_paths
  from ....task_files import TaskFiles
  from ....versions import get_versions
- from ...slurm._slurm_config import get_default_slurm_config
  from ...slurm._slurm_config import SlurmConfig
  from .._batching import heuristics
+ from ..utils_executors import get_pickle_file_path
+ from ..utils_executors import get_slurm_file_path
+ from ..utils_executors import get_slurm_script_file_path
  from ._executor_wait_thread import FractalSlurmWaitThread
  from fractal_server.app.runner.components import _COMPONENT_KEY_
  from fractal_server.app.runner.compress_folder import compress_folder
@@ -223,132 +225,12 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
          with self.jobs_lock:
              self.map_jobid_to_slurm_files_local.pop(jobid)

-     def get_input_pickle_file_path_local(
-         self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
-     ) -> Path:
-
-         prefix = prefix or "cfut"
-         output = (
-             self.workflow_dir_local
-             / subfolder_name
-             / f"{prefix}_in_{arg}.pickle"
-         )
-         return output
-
-     def get_input_pickle_file_path_remote(
-         self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
-     ) -> Path:
-
-         prefix = prefix or "cfut"
-         output = (
-             self.workflow_dir_remote
-             / subfolder_name
-             / f"{prefix}_in_{arg}.pickle"
-         )
-         return output
-
-     def get_output_pickle_file_path_local(
-         self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
-     ) -> Path:
-         prefix = prefix or "cfut"
-         return (
-             self.workflow_dir_local
-             / subfolder_name
-             / f"{prefix}_out_{arg}.pickle"
-         )
-
-     def get_output_pickle_file_path_remote(
-         self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
-     ) -> Path:
-         prefix = prefix or "cfut"
-         return (
-             self.workflow_dir_remote
-             / subfolder_name
-             / f"{prefix}_out_{arg}.pickle"
-         )
-
-     def get_slurm_script_file_path_local(
-         self, *, subfolder_name: str, prefix: Optional[str] = None
-     ) -> Path:
-         prefix = prefix or "_temp"
-         return (
-             self.workflow_dir_local
-             / subfolder_name
-             / f"{prefix}_slurm_submit.sbatch"
-         )
-
-     def get_slurm_script_file_path_remote(
-         self, *, subfolder_name: str, prefix: Optional[str] = None
-     ) -> Path:
-         prefix = prefix or "_temp"
-         return (
-             self.workflow_dir_remote
-             / subfolder_name
-             / f"{prefix}_slurm_submit.sbatch"
-         )
-
-     def get_slurm_stdout_file_path_local(
-         self,
-         *,
-         subfolder_name: str,
-         arg: str = "%j",
-         prefix: Optional[str] = None,
-     ) -> Path:
-         prefix = prefix or "slurmpy.stdout"
-         return (
-             self.workflow_dir_local
-             / subfolder_name
-             / f"{prefix}_slurm_{arg}.out"
-         )
-
-     def get_slurm_stdout_file_path_remote(
-         self,
-         *,
-         subfolder_name: str,
-         arg: str = "%j",
-         prefix: Optional[str] = None,
-     ) -> Path:
-         prefix = prefix or "slurmpy.stdout"
-         return (
-             self.workflow_dir_remote
-             / subfolder_name
-             / f"{prefix}_slurm_{arg}.out"
-         )
-
-     def get_slurm_stderr_file_path_local(
-         self,
-         *,
-         subfolder_name: str,
-         arg: str = "%j",
-         prefix: Optional[str] = None,
-     ) -> Path:
-         prefix = prefix or "slurmpy.stderr"
-         return (
-             self.workflow_dir_local
-             / subfolder_name
-             / f"{prefix}_slurm_{arg}.err"
-         )
-
-     def get_slurm_stderr_file_path_remote(
-         self,
-         *,
-         subfolder_name: str,
-         arg: str = "%j",
-         prefix: Optional[str] = None,
-     ) -> Path:
-         prefix = prefix or "slurmpy.stderr"
-         return (
-             self.workflow_dir_remote
-             / subfolder_name
-             / f"{prefix}_slurm_{arg}.err"
-         )
-
      def submit(
          self,
          fun: Callable[..., Any],
          *fun_args: Sequence[Any],
-         slurm_config: Optional[SlurmConfig] = None,
-         task_files: Optional[TaskFiles] = None,
+         slurm_config: SlurmConfig,
+         task_files: TaskFiles,
          **fun_kwargs: dict,
      ) -> Future:
          """
@@ -359,11 +241,9 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
              fun_args: Function positional arguments
              fun_kwargs: Function keyword arguments
              slurm_config:
-                 A `SlurmConfig` object; if `None`, use
-                 `get_default_slurm_config()`.
+                 A `SlurmConfig` object.
              task_files:
-                 A `TaskFiles` object; if `None`, use
-                 `self.get_default_task_files()`.
+                 A `TaskFiles` object.

          Returns:
              Future representing the execution of the current SLURM job.
@@ -375,12 +255,6 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
              logger.warning(error_msg)
              raise JobExecutionError(info=error_msg)

-         # Set defaults, if needed
-         if slurm_config is None:
-             slurm_config = get_default_slurm_config()
-         if task_files is None:
-             task_files = self.get_default_task_files()
-
          # Set slurm_file_prefix
          slurm_file_prefix = task_files.file_prefix

@@ -418,8 +292,8 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
          fn: Callable[..., Any],
          iterable: list[Sequence[Any]],
          *,
-         slurm_config: Optional[SlurmConfig] = None,
-         task_files: Optional[TaskFiles] = None,
+         slurm_config: SlurmConfig,
+         task_files: TaskFiles,
      ):
          """
          Return an iterator with the results of several execution of a function
@@ -442,12 +316,9 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
                  An iterable such that each element is the list of arguments to
                  be passed to `fn`, as in `fn(*args)`.
              slurm_config:
-                 A `SlurmConfig` object; if `None`, use
-                 `get_default_slurm_config()`.
+                 A `SlurmConfig` object.
              task_files:
-                 A `TaskFiles` object; if `None`, use
-                 `self.get_default_task_files()`.
-
+                 A `TaskFiles` object.
          """

          # Do not continue if auxiliary thread was shut down
@@ -472,12 +343,6 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
          # self._exception
          del fut

-         # Set defaults, if needed
-         if not slurm_config:
-             slurm_config = get_default_slurm_config()
-         if task_files is None:
-             task_files = self.get_default_task_files()
-
          # Include common_script_lines in extra_lines
          logger.debug(
              f"Adding {self.common_script_lines=} to "
@@ -710,63 +575,80 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
                  f"Missing folder {subfolder_path.as_posix()}."
              )

-         # Define I/O pickle file local/remote paths
          job.input_pickle_files_local = tuple(
-             self.get_input_pickle_file_path_local(
+             get_pickle_file_path(
                  arg=job.workerids[ind],
+                 workflow_dir=self.workflow_dir_local,
                  subfolder_name=job.wftask_subfolder_name,
+                 in_or_out="in",
                  prefix=job.wftask_file_prefixes[ind],
              )
              for ind in range(job.num_tasks_tot)
          )
+
          job.input_pickle_files_remote = tuple(
-             self.get_input_pickle_file_path_remote(
+             get_pickle_file_path(
                  arg=job.workerids[ind],
+                 workflow_dir=self.workflow_dir_remote,
                  subfolder_name=job.wftask_subfolder_name,
+                 in_or_out="in",
                  prefix=job.wftask_file_prefixes[ind],
              )
              for ind in range(job.num_tasks_tot)
          )
          job.output_pickle_files_local = tuple(
-             self.get_output_pickle_file_path_local(
+             get_pickle_file_path(
                  arg=job.workerids[ind],
+                 workflow_dir=self.workflow_dir_local,
                  subfolder_name=job.wftask_subfolder_name,
+                 in_or_out="out",
                  prefix=job.wftask_file_prefixes[ind],
              )
              for ind in range(job.num_tasks_tot)
          )
          job.output_pickle_files_remote = tuple(
-             self.get_output_pickle_file_path_remote(
+             get_pickle_file_path(
                  arg=job.workerids[ind],
+                 workflow_dir=self.workflow_dir_remote,
                  subfolder_name=job.wftask_subfolder_name,
+                 in_or_out="out",
                  prefix=job.wftask_file_prefixes[ind],
              )
              for ind in range(job.num_tasks_tot)
          )
-
-         # Define SLURM-job file local/remote paths
-         job.slurm_script_local = self.get_slurm_script_file_path_local(
+         # define slurm-job file local/remote paths
+         job.slurm_script_local = get_slurm_script_file_path(
+             workflow_dir=self.workflow_dir_local,
              subfolder_name=job.wftask_subfolder_name,
              prefix=job.slurm_file_prefix,
          )
-         job.slurm_script_remote = self.get_slurm_script_file_path_remote(
+         job.slurm_script_remote = get_slurm_script_file_path(
+             workflow_dir=self.workflow_dir_remote,
              subfolder_name=job.wftask_subfolder_name,
              prefix=job.slurm_file_prefix,
          )
-         job.slurm_stdout_local = self.get_slurm_stdout_file_path_local(
+         job.slurm_stdout_local = get_slurm_file_path(
+             workflow_dir=self.workflow_dir_local,
              subfolder_name=job.wftask_subfolder_name,
+             out_or_err="out",
              prefix=job.slurm_file_prefix,
          )
-         job.slurm_stdout_remote = self.get_slurm_stdout_file_path_remote(
+         job.slurm_stdout_remote = get_slurm_file_path(
+             workflow_dir=self.workflow_dir_remote,
              subfolder_name=job.wftask_subfolder_name,
+             out_or_err="out",
              prefix=job.slurm_file_prefix,
          )
-         job.slurm_stderr_local = self.get_slurm_stderr_file_path_local(
+         job.slurm_stderr_local = get_slurm_file_path(
+             workflow_dir=self.workflow_dir_local,
              subfolder_name=job.wftask_subfolder_name,
+             out_or_err="err",
              prefix=job.slurm_file_prefix,
          )
-         job.slurm_stderr_remote = self.get_slurm_stderr_file_path_remote(
+         job.slurm_stderr_remote = get_slurm_file_path(
+             workflow_dir=self.workflow_dir_remote,
              subfolder_name=job.wftask_subfolder_name,
+             out_or_err="err",
              prefix=job.slurm_file_prefix,
          )

@@ -1294,7 +1176,6 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
          slurm_err_path: str,
          slurm_config: SlurmConfig,
      ):
-
          num_tasks_max_running = slurm_config.parallel_tasks_per_job
          mem_per_task_MB = slurm_config.mem_per_task_MB

@@ -1346,19 +1227,6 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
          script = "\n".join(script_lines)
          return script

-     def get_default_task_files(self) -> TaskFiles:
-         """
-         This will be called when self.submit or self.map are called from
-         outside fractal-server, and then lack some optional arguments.
-         """
-         task_files = TaskFiles(
-             workflow_dir_local=self.workflow_dir_local,
-             workflow_dir_remote=self.workflow_dir_remote,
-             task_order=None,
-             task_name="name",
-         )
-         return task_files
-
      def shutdown(self, wait=True, *, cancel_futures=False):
          """
          Clean up all executor variables. Note that this function is executed on
--- a/fractal_server/app/runner/executors/slurm/sudo/executor.py
+++ b/fractal_server/app/runner/executors/slurm/sudo/executor.py
@@ -37,9 +37,11 @@ from ....exceptions import TaskExecutionError
  from ....filenames import SHUTDOWN_FILENAME
  from ....task_files import get_task_file_paths
  from ....task_files import TaskFiles
- from ...slurm._slurm_config import get_default_slurm_config
  from ...slurm._slurm_config import SlurmConfig
  from .._batching import heuristics
+ from ..utils_executors import get_pickle_file_path
+ from ..utils_executors import get_slurm_file_path
+ from ..utils_executors import get_slurm_script_file_path
  from ._executor_wait_thread import FractalSlurmWaitThread
  from ._subprocess_run_as_user import _glob_as_user
  from ._subprocess_run_as_user import _glob_as_user_strict
@@ -305,72 +307,12 @@ class FractalSlurmExecutor(SlurmExecutor):
          with self.jobs_lock:
              self.map_jobid_to_slurm_files.pop(jobid)

-     def get_input_pickle_file_path(
-         self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
-     ) -> Path:
-
-         prefix = prefix or "cfut"
-         output = (
-             self.workflow_dir_local
-             / subfolder_name
-             / f"{prefix}_in_{arg}.pickle"
-         )
-         return output
-
-     def get_output_pickle_file_path(
-         self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
-     ) -> Path:
-         prefix = prefix or "cfut"
-         return (
-             self.workflow_dir_remote
-             / subfolder_name
-             / f"{prefix}_out_{arg}.pickle"
-         )
-
-     def get_slurm_script_file_path(
-         self, *, subfolder_name: str, prefix: Optional[str] = None
-     ) -> Path:
-         prefix = prefix or "_temp"
-         return (
-             self.workflow_dir_local
-             / subfolder_name
-             / f"{prefix}_slurm_submit.sbatch"
-         )
-
-     def get_slurm_stdout_file_path(
-         self,
-         *,
-         subfolder_name: str,
-         arg: str = "%j",
-         prefix: Optional[str] = None,
-     ) -> Path:
-         prefix = prefix or "slurmpy.stdout"
-         return (
-             self.workflow_dir_remote
-             / subfolder_name
-             / f"{prefix}_slurm_{arg}.out"
-         )
-
-     def get_slurm_stderr_file_path(
-         self,
-         *,
-         subfolder_name: str,
-         arg: str = "%j",
-         prefix: Optional[str] = None,
-     ) -> Path:
-         prefix = prefix or "slurmpy.stderr"
-         return (
-             self.workflow_dir_remote
-             / subfolder_name
-             / f"{prefix}_slurm_{arg}.err"
-         )
-
      def submit(
          self,
          fun: Callable[..., Any],
          *fun_args: Sequence[Any],
-         slurm_config: Optional[SlurmConfig] = None,
-         task_files: Optional[TaskFiles] = None,
+         slurm_config: SlurmConfig,
+         task_files: TaskFiles,
          **fun_kwargs: dict,
      ) -> Future:
          """
@@ -381,22 +323,14 @@ class FractalSlurmExecutor(SlurmExecutor):
              fun_args: Function positional arguments
              fun_kwargs: Function keyword arguments
              slurm_config:
-                 A `SlurmConfig` object; if `None`, use
-                 `get_default_slurm_config()`.
+                 A `SlurmConfig` object.
              task_files:
-                 A `TaskFiles` object; if `None`, use
-                 `self.get_default_task_files()`.
+                 A `TaskFiles` object.

          Returns:
              Future representing the execution of the current SLURM job.
          """

-         # Set defaults, if needed
-         if slurm_config is None:
-             slurm_config = get_default_slurm_config()
-         if task_files is None:
-             task_files = self.get_default_task_files()
-
          # Set slurm_file_prefix
          slurm_file_prefix = task_files.file_prefix

@@ -431,8 +365,8 @@ class FractalSlurmExecutor(SlurmExecutor):
          fn: Callable[..., Any],
          iterable: list[Sequence[Any]],
          *,
-         slurm_config: Optional[SlurmConfig] = None,
-         task_files: Optional[TaskFiles] = None,
+         slurm_config: SlurmConfig,
+         task_files: TaskFiles,
      ):
          """
          Return an iterator with the results of several execution of a function
@@ -455,11 +389,9 @@ class FractalSlurmExecutor(SlurmExecutor):
                  An iterable such that each element is the list of arguments to
                  be passed to `fn`, as in `fn(*args)`.
              slurm_config:
-                 A `SlurmConfig` object; if `None`, use
-                 `get_default_slurm_config()`.
+                 A `SlurmConfig` object.
              task_files:
-                 A `TaskFiles` object; if `None`, use
-                 `self.get_default_task_files()`.
+                 A `TaskFiles` object.

          """

@@ -479,12 +411,6 @@ class FractalSlurmExecutor(SlurmExecutor):
          # self._exception
          del fut

-         # Set defaults, if needed
-         if not slurm_config:
-             slurm_config = get_default_slurm_config()
-         if task_files is None:
-             task_files = self.get_default_task_files()
-
          # Include common_script_lines in extra_lines
          logger.debug(
              f"Adding {self.common_script_lines=} to "
@@ -700,39 +626,46 @@ class FractalSlurmExecutor(SlurmExecutor):
                  f"Missing folder {subfolder_path.as_posix()}."
              )

-         # Define I/O pickle file names/paths
          job.input_pickle_files = tuple(
-             self.get_input_pickle_file_path(
+             get_pickle_file_path(
                  arg=job.workerids[ind],
+                 workflow_dir=self.workflow_dir_local,
                  subfolder_name=job.wftask_subfolder_name,
+                 in_or_out="in",
                  prefix=job.wftask_file_prefixes[ind],
              )
              for ind in range(job.num_tasks_tot)
          )
          job.output_pickle_files = tuple(
-             self.get_output_pickle_file_path(
+             get_pickle_file_path(
                  arg=job.workerids[ind],
+                 workflow_dir=self.workflow_dir_remote,
                  subfolder_name=job.wftask_subfolder_name,
+                 in_or_out="out",
                  prefix=job.wftask_file_prefixes[ind],
              )
              for ind in range(job.num_tasks_tot)
          )
-
          # Define SLURM-job file names/paths
-         job.slurm_script = self.get_slurm_script_file_path(
+         job.slurm_script = get_slurm_script_file_path(
+             workflow_dir=self.workflow_dir_local,
              subfolder_name=job.wftask_subfolder_name,
              prefix=job.slurm_file_prefix,
          )
-         job.slurm_stdout = self.get_slurm_stdout_file_path(
+         job.slurm_stdout = get_slurm_file_path(
+             workflow_dir=self.workflow_dir_remote,
              subfolder_name=job.wftask_subfolder_name,
+             out_or_err="out",
              prefix=job.slurm_file_prefix,
          )
-         job.slurm_stderr = self.get_slurm_stderr_file_path(
+         job.slurm_stderr = get_slurm_file_path(
+             workflow_dir=self.workflow_dir_remote,
              subfolder_name=job.wftask_subfolder_name,
+             out_or_err="err",
              prefix=job.slurm_file_prefix,
          )

-         # Dump serialized versions+function+args+kwargs to pickle file
+         # Dump serialized versions+function+args+kwargs to pickle
          versions = dict(
              python=sys.version_info[:3],
              cloudpickle=cloudpickle.__version__,
@@ -824,7 +757,6 @@ class FractalSlurmExecutor(SlurmExecutor):
          """
          # Handle all uncaught exceptions in this broad try/except block
          try:
-
              # Retrieve job
              with self.jobs_lock:
                  try:
@@ -1039,7 +971,6 @@ class FractalSlurmExecutor(SlurmExecutor):
              )

          for prefix in prefixes:
-
              if prefix == job.slurm_file_prefix:
                  files_to_copy = _glob_as_user(
                      folder=str(self.workflow_dir_remote / subfolder_name),
@@ -1177,7 +1108,6 @@ class FractalSlurmExecutor(SlurmExecutor):
          slurm_err_path: str,
          slurm_config: SlurmConfig,
      ):
-
          num_tasks_max_running = slurm_config.parallel_tasks_per_job
          mem_per_task_MB = slurm_config.mem_per_task_MB

@@ -1229,19 +1159,6 @@ class FractalSlurmExecutor(SlurmExecutor):
          script = "\n".join(script_lines)
          return script

-     def get_default_task_files(self) -> TaskFiles:
-         """
-         This will be called when self.submit or self.map are called from
-         outside fractal-server, and then lack some optional arguments.
-         """
-         task_files = TaskFiles(
-             workflow_dir_local=self.workflow_dir_local,
-             workflow_dir_remote=self.workflow_dir_remote,
-             task_order=None,
-             task_name="name",
-         )
-         return task_files
-
      def shutdown(self, wait=True, *, cancel_futures=False):
          """
          Clean up all executor variables. Note that this function is executed on
--- /dev/null
+++ b/fractal_server/app/runner/executors/slurm/utils_executors.py
@@ -0,0 +1,58 @@
+ from pathlib import Path
+ from typing import Literal
+ from typing import Optional
+
+
+ def get_pickle_file_path(
+     *,
+     arg: str,
+     workflow_dir: Path,
+     subfolder_name: str,
+     in_or_out: Literal["in", "out"],
+     prefix: str,
+ ) -> Path:
+     if in_or_out in ["in", "out"]:
+         output = (
+             workflow_dir
+             / subfolder_name
+             / f"{prefix}_{in_or_out}_{arg}.pickle"
+         )
+         return output
+     else:
+         raise ValueError(
+             f"Missing or unexpected value in_or_out argument, {in_or_out=}"
+         )
+
+
+ def get_slurm_script_file_path(
+     *, workflow_dir: Path, subfolder_name: str, prefix: Optional[str] = None
+ ) -> Path:
+     prefix = prefix or "_temp"
+     return workflow_dir / subfolder_name / f"{prefix}_slurm_submit.sbatch"
+
+
+ def get_slurm_file_path(
+     *,
+     workflow_dir: Path,
+     subfolder_name: str,
+     arg: str = "%j",
+     out_or_err: Literal["out", "err"],
+     prefix: str,
+ ) -> Path:
+     if out_or_err == "out":
+         return (
+             workflow_dir
+             / subfolder_name
+             / f"{prefix}_slurm_{arg}.{out_or_err}"
+         )
+     elif out_or_err == "err":
+         return (
+             workflow_dir
+             / subfolder_name
+             / f"{prefix}_slurm_{arg}.{out_or_err}"
+         )
+     else:
+         raise ValueError(
+             "Missing or unexpected value out_or_err argument, "
+             f"{out_or_err=}"
+         )
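These three helpers concentrate the path construction that was previously duplicated across the local/remote method pairs of both executors: the caller now selects the directory via `workflow_dir` and the direction via `in_or_out`/`out_or_err`. The expected outputs follow directly from the f-strings above:

```python
from pathlib import Path

from fractal_server.app.runner.executors.slurm.utils_executors import (
    get_pickle_file_path,
    get_slurm_file_path,
    get_slurm_script_file_path,
)

wd = Path("/tmp/workflow")

assert get_pickle_file_path(
    arg="7",
    workflow_dir=wd,
    subfolder_name="0_task",
    in_or_out="in",
    prefix="cfut",
) == wd / "0_task" / "cfut_in_7.pickle"

assert get_slurm_script_file_path(
    workflow_dir=wd, subfolder_name="0_task"
) == wd / "0_task" / "_temp_slurm_submit.sbatch"

# "%j" is SLURM's job-ID placeholder, substituted by SLURM itself.
assert get_slurm_file_path(
    workflow_dir=wd,
    subfolder_name="0_task",
    out_or_err="err",
    prefix="slurmpy.stderr",
) == wd / "0_task" / "slurmpy.stderr_slurm_%j.err"
```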
--- a/fractal_server/app/security/__init__.py
+++ b/fractal_server/app/security/__init__.py
@@ -43,6 +43,9 @@ from fastapi_users.exceptions import UserAlreadyExists
  from fastapi_users.models import ID
  from fastapi_users.models import OAP
  from fastapi_users.models import UP
+ from fastapi_users.password import PasswordHelper
+ from pwdlib import PasswordHash
+ from pwdlib.hashers.bcrypt import BcryptHasher
  from sqlalchemy.ext.asyncio import AsyncSession
  from sqlalchemy.orm import selectinload
  from sqlmodel import func
@@ -177,7 +180,21 @@ async def get_user_db(
      yield SQLModelUserDatabaseAsync(session, UserOAuth, OAuthAccount)


+ password_hash = PasswordHash(hashers=(BcryptHasher(),))
+ password_helper = PasswordHelper(password_hash=password_hash)
+
+
  class UserManager(IntegerIDMixin, BaseUserManager[UserOAuth, int]):
+     def __init__(self, user_db):
+         """
+         Override `__init__` of `BaseUserManager` to define custom
+         `password_helper`.
+         """
+         super().__init__(
+             user_db=user_db,
+             password_helper=password_helper,
+         )
+
      async def validate_password(self, password: str, user: UserOAuth) -> None:
          # check password length
          min_length = 4
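These two hunks track the fastapi-users v14 migration visible elsewhere in this diff: v14 replaced `passlib` with `pwdlib`, and the explicit `PasswordHelper` keeps bcrypt as the hashing backend (consistent with the `bcrypt==4.0.1` pin being dropped from METADATA below). A small sketch of what the configured helper does, assuming the standard fastapi-users `PasswordHelper` API (`hash` and `verify_and_update`):

```python
from fastapi_users.password import PasswordHelper
from pwdlib import PasswordHash
from pwdlib.hashers.bcrypt import BcryptHasher

# Same construction as in the diff above.
password_helper = PasswordHelper(
    password_hash=PasswordHash(hashers=(BcryptHasher(),))
)

hashed = password_helper.hash("correct-horse")  # bcrypt digest
ok, updated_hash = password_helper.verify_and_update("correct-horse", hashed)
assert ok is True
# `updated_hash` is a fresh digest when the stored one should be
# re-hashed (e.g. after the hasher list changes), otherwise None.
```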
--- a/fractal_server/tasks/v2/ssh/collect.py
+++ b/fractal_server/tasks/v2/ssh/collect.py
@@ -69,7 +69,6 @@ def collect_ssh(
      )

      with next(get_sync_db()) as db:
-
          # Get main objects from db
          activity = db.get(TaskGroupActivityV2, task_group_activity_id)
          task_group = db.get(TaskGroupV2, task_group_id)
@@ -117,6 +116,25 @@ def collect_ssh(
              return

          try:
+             script_dir_remote = (
+                 Path(task_group.path) / SCRIPTS_SUBFOLDER
+             ).as_posix()
+             # Create remote `task_group.path` and `script_dir_remote`
+             # folders (note that because of `parents=True` we are in
+             # the `no error if existing, make parent directories as
+             # needed` scenario for `mkdir`)
+             fractal_ssh.mkdir(folder=task_group.path, parents=True)
+             fractal_ssh.mkdir(folder=script_dir_remote, parents=True)
+
+             # Copy wheel file into task group path
+             if task_group.wheel_path:
+                 new_wheel_path = _copy_wheel_file_ssh(
+                     task_group=task_group,
+                     fractal_ssh=fractal_ssh,
+                     logger_name=LOGGER_NAME,
+                 )
+                 task_group.wheel_path = new_wheel_path
+                 task_group = add_commit_refresh(obj=task_group, db=db)

              # Prepare replacements for templates
              replacements = get_collection_replacements(
@@ -127,9 +145,6 @@ def collect_ssh(
              )

              # Prepare common arguments for `_customize_and_run_template``
-             script_dir_remote = (
-                 Path(task_group.path) / SCRIPTS_SUBFOLDER
-             ).as_posix()
              common_args = dict(
                  replacements=replacements,
                  script_dir_local=(
@@ -144,23 +159,6 @@ def collect_ssh(
                  logger_name=LOGGER_NAME,
              )

-             # Create remote `task_group.path` and `script_dir_remote`
-             # folders (note that because of `parents=True` we are in
-             # the `no error if existing, make parent directories as
-             # needed` scenario for `mkdir`)
-             fractal_ssh.mkdir(folder=task_group.path, parents=True)
-             fractal_ssh.mkdir(folder=script_dir_remote, parents=True)
-
-             # Copy wheel file into task group path
-             if task_group.wheel_path:
-                 new_wheel_path = _copy_wheel_file_ssh(
-                     task_group=task_group,
-                     fractal_ssh=fractal_ssh,
-                     logger_name=LOGGER_NAME,
-                 )
-                 task_group.wheel_path = new_wheel_path
-                 task_group = add_commit_refresh(obj=task_group, db=db)
-
              logger.debug("installing - START")

              # Set status to ONGOING and refresh logs
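The relocation also changes ordering: the remote folders are now created and the wheel file copied (updating `task_group.wheel_path`) before `get_collection_replacements` computes the template replacements, presumably so that the replacements see the final wheel path. As for the `parents=True` remark in the moved comment, the intended semantics are those of `mkdir -p`; a local-filesystem analogue with `pathlib` (the remote call itself is fractal-server's own `FractalSSH.mkdir` wrapper):

```python
from pathlib import Path

# Local analogue of the two remote mkdir calls: create the directory
# and any missing parents, without failing if it already exists.
Path("/tmp/task-group/scripts").mkdir(parents=True, exist_ok=True)
```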
--- a/fractal_server-2.9.0a11.dist-info/METADATA
+++ b/fractal_server-2.9.1.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fractal-server
- Version: 2.9.0a11
+ Version: 2.9.1
  Summary: Server component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause
@@ -13,12 +13,11 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Requires-Dist: alembic (>=1.13.1,<2.0.0)
- Requires-Dist: bcrypt (==4.0.1)
  Requires-Dist: cloudpickle (>=3.0.0,<3.1.0)
  Requires-Dist: clusterfutures (>=0.5,<0.6)
  Requires-Dist: fabric (>=3.2.2,<4.0.0)
  Requires-Dist: fastapi (>=0.115.0,<0.116.0)
- Requires-Dist: fastapi-users[oauth] (>=12.1.0,<13.0.0)
+ Requires-Dist: fastapi-users[oauth] (>=14,<15)
  Requires-Dist: gunicorn (>=21.2,<23.0)
  Requires-Dist: packaging (>=23.2,<24.0)
  Requires-Dist: psutil (>=5.9.8,<6.0.0)
--- a/fractal_server-2.9.0a11.dist-info/RECORD
+++ b/fractal_server-2.9.1.dist-info/RECORD
@@ -1,4 +1,4 @@
- fractal_server/__init__.py,sha256=awxUTr6llNkjH1-5c-_yED0mpNs4APblXMX5R7fA0Qs,25
+ fractal_server/__init__.py,sha256=PV3pFNkPgVbTBTGekRDdEwLUrBsGNXOb1hGr6F9R4OE,22
  fractal_server/__main__.py,sha256=dEkCfzLLQrIlxsGC-HBfoR-RBMWnJDgNrxYTyzmE9c0,6146
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -63,7 +63,7 @@ fractal_server/app/routes/api/v2/workflowtask.py,sha256=ciHTwXXFiFnMF7ZpJ3Xs0q6Y
  fractal_server/app/routes/auth/__init__.py,sha256=fao6CS0WiAjHDTvBzgBVV_bSXFpEAeDBF6Z6q7rRkPc,1658
  fractal_server/app/routes/auth/_aux_auth.py,sha256=ifkNocTYatBSMYGwiR14qohmvR9SfMldceiEj6uJBrU,4783
  fractal_server/app/routes/auth/current_user.py,sha256=I3aVY5etWAJ_SH6t65Mj5TjvB2X8sAGuu1KG7FxLyPU,5883
- fractal_server/app/routes/auth/group.py,sha256=cS9I6pCIWGbOWc3gUBYmQq6yjFYzm6rVQDukWF_9L90,7721
+ fractal_server/app/routes/auth/group.py,sha256=EBwR-eiTfHSZUbsbrhjKTWTiwPkGPLFYhuHi7ifDbfY,8358
  fractal_server/app/routes/auth/login.py,sha256=tSu6OBLOieoBtMZB4JkBAdEgH2Y8KqPGSbwy7NIypIo,566
  fractal_server/app/routes/auth/oauth.py,sha256=AnFHbjqL2AgBX3eksI931xD6RTtmbciHBEuGf9YJLjU,1895
  fractal_server/app/routes/auth/register.py,sha256=DlHq79iOvGd_gt2v9uwtsqIKeO6i_GKaW59VIkllPqY,587
@@ -82,17 +82,18 @@ fractal_server/app/runner/exceptions.py,sha256=_qZ_t8O4umAdJ1ikockiF5rDJuxnEskrG
  fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/runner/executors/slurm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/runner/executors/slurm/_batching.py,sha256=3mfeFuYm3UA4EXh4VWuqZTF-dcINECZgTHoPOaOszDo,8840
- fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=iyhtDi1qveqq7I4S1tycVKsp3VfyocvBgGugYDpOzAs,16069
+ fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=RkFrp9bltfVxrp5Ei2KuCMEft6q3mBArTvSBvtHA2n4,15682
  fractal_server/app/runner/executors/slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
  fractal_server/app/runner/executors/slurm/ssh/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
  fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py,sha256=bKo5Ja0IGxJWpPWyh9dN0AG-PwzTDZzD5LyaEHB3YU4,3742
  fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py,sha256=rwlqZzoGo4SAb4nSlFjsQJdaCgfM1J6YGcjb8yYxlqc,4506
- fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=rVKAMVV7zb0x5T8R7NQlyeJZUOWDs7FRQuXEwCJbeNo,57899
+ fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=yRn5v0ZUX_dQdN1MN8gjRBMCXVWZ_PZgcI2wnWXIAO8,54070
  fractal_server/app/runner/executors/slurm/sudo/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
  fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py,sha256=wAgwpVcr6JIslKHOuS0FhRa_6T1KCManyRJqA-fifzw,1909
  fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py,sha256=z5LlhaiqAb8pHsF1WwdzXN39C5anQmwjo1rSQgtRAYE,4422
  fractal_server/app/runner/executors/slurm/sudo/_subprocess_run_as_user.py,sha256=g8wqUjSicN17UZVXlfaMomYZ-xOIbBu1oE7HdJTzfvw,5218
- fractal_server/app/runner/executors/slurm/sudo/executor.py,sha256=mTqTSvoRukvocyMLvlGuGWMaL6hy3McYLx_7bUW1pEM,48918
+ fractal_server/app/runner/executors/slurm/sudo/executor.py,sha256=CAIPFMmsjQLxmjN8Kdpq0OlZIX9PZIiRo0XO1quKWEM,46495
+ fractal_server/app/runner/executors/slurm/utils_executors.py,sha256=naPyJI0I3lD-sYHbSXbMFGUBK4h_SggA5V91Z1Ch1Xg,1416
  fractal_server/app/runner/extract_archive.py,sha256=tLpjDrX47OjTNhhoWvm6iNukg8KoieWyTb7ZfvE9eWU,2483
  fractal_server/app/runner/filenames.py,sha256=9lwu3yB4C67yiijYw8XIKaLFn3mJUt6_TCyVFM_aZUQ,206
  fractal_server/app/runner/run_subprocess.py,sha256=c3JbYXq3hX2aaflQU19qJ5Xs6J6oXGNvnTEoAfv2bxc,959
@@ -160,7 +161,7 @@ fractal_server/app/schemas/v2/task_collection.py,sha256=yHpCRxoj6tKqCiQfUjaTj8Sf
  fractal_server/app/schemas/v2/task_group.py,sha256=fSjdLbClrpmrPj5hFZMu9DoJW4Y33EnbOh0HjMBsGVc,3784
  fractal_server/app/schemas/v2/workflow.py,sha256=-KWvXnbHBFA3pj5n7mfSyLKJQSqkJmoziIEe7mpLl3M,1875
  fractal_server/app/schemas/v2/workflowtask.py,sha256=vDdMktYbHeYBgB5OuWSv6wRPRXWqvetkeqQ7IC5YtfA,5751
- fractal_server/app/security/__init__.py,sha256=8Xd4GxumZgvxEH1Vli3ULehwdesEPiaAbtffJvAEgNo,12509
+ fractal_server/app/security/__init__.py,sha256=MlWVrLFPj9M2Gug-k8yATM-Cw066RugVU4KK6kMRbnQ,13019
  fractal_server/app/user_settings.py,sha256=aZgQ3i0JkHfgwLGW1ee6Gzr1ae3IioFfJKKSsSS8Svk,1312
  fractal_server/config.py,sha256=Bk6EFKnU07sjgThf2NVEqrFAx9F4s0BfCvDKtWHzJTc,23217
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
@@ -221,7 +222,7 @@ fractal_server/tasks/v2/local/deactivate.py,sha256=XR1nvJY3mKCRqwPwV79rVaQmtb3J8
  fractal_server/tasks/v2/local/reactivate.py,sha256=R3rArAzUpMGf6xa3dGVwwXHW9WVDi5ia28AFisZsqNc,6112
  fractal_server/tasks/v2/ssh/__init__.py,sha256=aSQbVi6Ummt9QzcSLWNmSqYjfdxrn9ROmqgH6bDpI7k,135
  fractal_server/tasks/v2/ssh/_utils.py,sha256=2E-F_862zM6FZA-im-E8t8kjptWRIhBj1IDHC6QD1H8,2818
- fractal_server/tasks/v2/ssh/collect.py,sha256=ZOpz-v2t2kOAbbpdlsH_P_XjNtEh2TaC1dIZ1bBHwxw,12941
+ fractal_server/tasks/v2/ssh/collect.py,sha256=FkTfyhdwAp4qa4W_dqjT0CmuDMFuCBSOYjg_y1Kq2Bs,12939
  fractal_server/tasks/v2/ssh/deactivate.py,sha256=Ffk_UuQSBUBNBCiviuKNhEUGyZPQa4_erJKFdwgMcE8,10616
  fractal_server/tasks/v2/ssh/reactivate.py,sha256=jdO8iyzavzSVPcOpIZrYSEkGPYTvz5XJ5h_5-nz9yzA,7896
  fractal_server/tasks/v2/templates/1_create_venv.sh,sha256=PK0jdHKtQpda1zULebBaVPORt4t6V17wa4N1ohcj5ac,548
@@ -238,8 +239,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=C5WLuY3uGG2s53OEL-__H35-fmSlgu
  fractal_server/urls.py,sha256=5o_qq7PzKKbwq12NHSQZDmDitn5RAOeQ4xufu-2v9Zk,448
  fractal_server/utils.py,sha256=utvmBx8K9I8hRWFquxna2pBaOqe0JifDL_NVPmihEJI,3525
  fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
- fractal_server-2.9.0a11.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.9.0a11.dist-info/METADATA,sha256=0SUdcGO7gPL9_VuNTLt10VitKNIgZTgQ5VL4FV2xjX8,4586
- fractal_server-2.9.0a11.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- fractal_server-2.9.0a11.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.9.0a11.dist-info/RECORD,,
+ fractal_server-2.9.1.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.9.1.dist-info/METADATA,sha256=2XOrXmdvOE1zcrKJoCr4VemPcRk3PpZJOUg_0yXxeDc,4543
+ fractal_server-2.9.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ fractal_server-2.9.1.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.9.1.dist-info/RECORD,,