fractal-server 2.8.0__py3-none-any.whl → 2.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/db/__init__.py +2 -35
  3. fractal_server/app/models/v2/__init__.py +3 -3
  4. fractal_server/app/models/v2/task.py +0 -72
  5. fractal_server/app/models/v2/task_group.py +113 -0
  6. fractal_server/app/routes/admin/v1.py +13 -30
  7. fractal_server/app/routes/admin/v2/__init__.py +4 -0
  8. fractal_server/app/routes/admin/v2/job.py +13 -24
  9. fractal_server/app/routes/admin/v2/task.py +13 -0
  10. fractal_server/app/routes/admin/v2/task_group.py +75 -14
  11. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +267 -0
  12. fractal_server/app/routes/api/v1/project.py +7 -19
  13. fractal_server/app/routes/api/v2/__init__.py +11 -2
  14. fractal_server/app/routes/api/v2/{_aux_functions_task_collection.py → _aux_functions_task_lifecycle.py} +83 -0
  15. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +27 -17
  16. fractal_server/app/routes/api/v2/submit.py +19 -24
  17. fractal_server/app/routes/api/v2/task_collection.py +33 -65
  18. fractal_server/app/routes/api/v2/task_collection_custom.py +3 -3
  19. fractal_server/app/routes/api/v2/task_group.py +86 -14
  20. fractal_server/app/routes/api/v2/task_group_lifecycle.py +272 -0
  21. fractal_server/app/routes/api/v2/workflow.py +1 -1
  22. fractal_server/app/routes/api/v2/workflow_import.py +2 -2
  23. fractal_server/app/routes/auth/current_user.py +60 -17
  24. fractal_server/app/routes/auth/group.py +67 -39
  25. fractal_server/app/routes/auth/users.py +97 -99
  26. fractal_server/app/routes/aux/__init__.py +20 -0
  27. fractal_server/app/runner/executors/slurm/_slurm_config.py +0 -17
  28. fractal_server/app/runner/executors/slurm/ssh/executor.py +49 -204
  29. fractal_server/app/runner/executors/slurm/sudo/executor.py +26 -109
  30. fractal_server/app/runner/executors/slurm/utils_executors.py +58 -0
  31. fractal_server/app/runner/v2/_local_experimental/executor.py +2 -1
  32. fractal_server/app/schemas/_validators.py +1 -16
  33. fractal_server/app/schemas/user.py +16 -10
  34. fractal_server/app/schemas/user_group.py +0 -11
  35. fractal_server/app/schemas/v1/applyworkflow.py +0 -8
  36. fractal_server/app/schemas/v1/dataset.py +0 -5
  37. fractal_server/app/schemas/v1/project.py +0 -5
  38. fractal_server/app/schemas/v1/state.py +0 -5
  39. fractal_server/app/schemas/v1/workflow.py +0 -5
  40. fractal_server/app/schemas/v2/__init__.py +4 -2
  41. fractal_server/app/schemas/v2/dataset.py +1 -7
  42. fractal_server/app/schemas/v2/job.py +0 -8
  43. fractal_server/app/schemas/v2/project.py +0 -5
  44. fractal_server/app/schemas/v2/task_collection.py +13 -31
  45. fractal_server/app/schemas/v2/task_group.py +59 -8
  46. fractal_server/app/schemas/v2/workflow.py +0 -5
  47. fractal_server/app/security/__init__.py +17 -0
  48. fractal_server/config.py +61 -59
  49. fractal_server/migrations/versions/d256a7379ab8_taskgroup_activity_and_venv_info_to_.py +117 -0
  50. fractal_server/ssh/_fabric.py +156 -83
  51. fractal_server/string_tools.py +10 -3
  52. fractal_server/tasks/utils.py +2 -12
  53. fractal_server/tasks/v2/local/__init__.py +3 -0
  54. fractal_server/tasks/v2/local/_utils.py +70 -0
  55. fractal_server/tasks/v2/local/collect.py +291 -0
  56. fractal_server/tasks/v2/local/deactivate.py +218 -0
  57. fractal_server/tasks/v2/local/reactivate.py +159 -0
  58. fractal_server/tasks/v2/ssh/__init__.py +3 -0
  59. fractal_server/tasks/v2/ssh/_utils.py +87 -0
  60. fractal_server/tasks/v2/ssh/collect.py +311 -0
  61. fractal_server/tasks/v2/ssh/deactivate.py +253 -0
  62. fractal_server/tasks/v2/ssh/reactivate.py +202 -0
  63. fractal_server/tasks/v2/templates/{_2_preliminary_pip_operations.sh → 1_create_venv.sh} +6 -7
  64. fractal_server/tasks/v2/templates/{_3_pip_install.sh → 2_pip_install.sh} +8 -1
  65. fractal_server/tasks/v2/templates/{_4_pip_freeze.sh → 3_pip_freeze.sh} +0 -7
  66. fractal_server/tasks/v2/templates/{_5_pip_show.sh → 4_pip_show.sh} +5 -6
  67. fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh +10 -0
  68. fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh +35 -0
  69. fractal_server/tasks/v2/utils_background.py +42 -127
  70. fractal_server/tasks/v2/utils_templates.py +32 -2
  71. fractal_server/utils.py +4 -2
  72. fractal_server/zip_tools.py +21 -4
  73. {fractal_server-2.8.0.dist-info → fractal_server-2.9.0.dist-info}/METADATA +3 -5
  74. {fractal_server-2.8.0.dist-info → fractal_server-2.9.0.dist-info}/RECORD +78 -65
  75. fractal_server/app/models/v2/collection_state.py +0 -22
  76. fractal_server/tasks/v2/collection_local.py +0 -357
  77. fractal_server/tasks/v2/collection_ssh.py +0 -352
  78. fractal_server/tasks/v2/templates/_1_create_venv.sh +0 -42
  79. /fractal_server/tasks/v2/{database_operations.py → utils_database.py} +0 -0
  80. {fractal_server-2.8.0.dist-info → fractal_server-2.9.0.dist-info}/LICENSE +0 -0
  81. {fractal_server-2.8.0.dist-info → fractal_server-2.9.0.dist-info}/WHEEL +0 -0
  82. {fractal_server-2.8.0.dist-info → fractal_server-2.9.0.dist-info}/entry_points.txt +0 -0
@@ -37,9 +37,11 @@ from ....exceptions import TaskExecutionError
37
37
  from ....filenames import SHUTDOWN_FILENAME
38
38
  from ....task_files import get_task_file_paths
39
39
  from ....task_files import TaskFiles
40
- from ...slurm._slurm_config import get_default_slurm_config
41
40
  from ...slurm._slurm_config import SlurmConfig
42
41
  from .._batching import heuristics
42
+ from ..utils_executors import get_pickle_file_path
43
+ from ..utils_executors import get_slurm_file_path
44
+ from ..utils_executors import get_slurm_script_file_path
43
45
  from ._executor_wait_thread import FractalSlurmWaitThread
44
46
  from ._subprocess_run_as_user import _glob_as_user
45
47
  from ._subprocess_run_as_user import _glob_as_user_strict
@@ -305,72 +307,12 @@ class FractalSlurmExecutor(SlurmExecutor):
305
307
  with self.jobs_lock:
306
308
  self.map_jobid_to_slurm_files.pop(jobid)
307
309
 
308
- def get_input_pickle_file_path(
309
- self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
310
- ) -> Path:
311
-
312
- prefix = prefix or "cfut"
313
- output = (
314
- self.workflow_dir_local
315
- / subfolder_name
316
- / f"{prefix}_in_{arg}.pickle"
317
- )
318
- return output
319
-
320
- def get_output_pickle_file_path(
321
- self, *, arg: str, subfolder_name: str, prefix: Optional[str] = None
322
- ) -> Path:
323
- prefix = prefix or "cfut"
324
- return (
325
- self.workflow_dir_remote
326
- / subfolder_name
327
- / f"{prefix}_out_{arg}.pickle"
328
- )
329
-
330
- def get_slurm_script_file_path(
331
- self, *, subfolder_name: str, prefix: Optional[str] = None
332
- ) -> Path:
333
- prefix = prefix or "_temp"
334
- return (
335
- self.workflow_dir_local
336
- / subfolder_name
337
- / f"{prefix}_slurm_submit.sbatch"
338
- )
339
-
340
- def get_slurm_stdout_file_path(
341
- self,
342
- *,
343
- subfolder_name: str,
344
- arg: str = "%j",
345
- prefix: Optional[str] = None,
346
- ) -> Path:
347
- prefix = prefix or "slurmpy.stdout"
348
- return (
349
- self.workflow_dir_remote
350
- / subfolder_name
351
- / f"{prefix}_slurm_{arg}.out"
352
- )
353
-
354
- def get_slurm_stderr_file_path(
355
- self,
356
- *,
357
- subfolder_name: str,
358
- arg: str = "%j",
359
- prefix: Optional[str] = None,
360
- ) -> Path:
361
- prefix = prefix or "slurmpy.stderr"
362
- return (
363
- self.workflow_dir_remote
364
- / subfolder_name
365
- / f"{prefix}_slurm_{arg}.err"
366
- )
367
-
368
310
  def submit(
369
311
  self,
370
312
  fun: Callable[..., Any],
371
313
  *fun_args: Sequence[Any],
372
- slurm_config: Optional[SlurmConfig] = None,
373
- task_files: Optional[TaskFiles] = None,
314
+ slurm_config: SlurmConfig,
315
+ task_files: TaskFiles,
374
316
  **fun_kwargs: dict,
375
317
  ) -> Future:
376
318
  """
@@ -381,22 +323,14 @@ class FractalSlurmExecutor(SlurmExecutor):
381
323
  fun_args: Function positional arguments
382
324
  fun_kwargs: Function keyword arguments
383
325
  slurm_config:
384
- A `SlurmConfig` object; if `None`, use
385
- `get_default_slurm_config()`.
326
+ A `SlurmConfig` object.
386
327
  task_files:
387
- A `TaskFiles` object; if `None`, use
388
- `self.get_default_task_files()`.
328
+ A `TaskFiles` object.
389
329
 
390
330
  Returns:
391
331
  Future representing the execution of the current SLURM job.
392
332
  """
393
333
 
394
- # Set defaults, if needed
395
- if slurm_config is None:
396
- slurm_config = get_default_slurm_config()
397
- if task_files is None:
398
- task_files = self.get_default_task_files()
399
-
400
334
  # Set slurm_file_prefix
401
335
  slurm_file_prefix = task_files.file_prefix
402
336
 
@@ -431,8 +365,8 @@ class FractalSlurmExecutor(SlurmExecutor):
431
365
  fn: Callable[..., Any],
432
366
  iterable: list[Sequence[Any]],
433
367
  *,
434
- slurm_config: Optional[SlurmConfig] = None,
435
- task_files: Optional[TaskFiles] = None,
368
+ slurm_config: SlurmConfig,
369
+ task_files: TaskFiles,
436
370
  ):
437
371
  """
438
372
  Return an iterator with the results of several execution of a function
@@ -455,11 +389,9 @@ class FractalSlurmExecutor(SlurmExecutor):
455
389
  An iterable such that each element is the list of arguments to
456
390
  be passed to `fn`, as in `fn(*args)`.
457
391
  slurm_config:
458
- A `SlurmConfig` object; if `None`, use
459
- `get_default_slurm_config()`.
392
+ A `SlurmConfig` object.
460
393
  task_files:
461
- A `TaskFiles` object; if `None`, use
462
- `self.get_default_task_files()`.
394
+ A `TaskFiles` object.
463
395
 
464
396
  """
465
397
 
@@ -479,12 +411,6 @@ class FractalSlurmExecutor(SlurmExecutor):
479
411
  # self._exception
480
412
  del fut
481
413
 
482
- # Set defaults, if needed
483
- if not slurm_config:
484
- slurm_config = get_default_slurm_config()
485
- if task_files is None:
486
- task_files = self.get_default_task_files()
487
-
488
414
  # Include common_script_lines in extra_lines
489
415
  logger.debug(
490
416
  f"Adding {self.common_script_lines=} to "
@@ -700,39 +626,46 @@ class FractalSlurmExecutor(SlurmExecutor):
700
626
  f"Missing folder {subfolder_path.as_posix()}."
701
627
  )
702
628
 
703
- # Define I/O pickle file names/paths
704
629
  job.input_pickle_files = tuple(
705
- self.get_input_pickle_file_path(
630
+ get_pickle_file_path(
706
631
  arg=job.workerids[ind],
632
+ workflow_dir=self.workflow_dir_local,
707
633
  subfolder_name=job.wftask_subfolder_name,
634
+ in_or_out="in",
708
635
  prefix=job.wftask_file_prefixes[ind],
709
636
  )
710
637
  for ind in range(job.num_tasks_tot)
711
638
  )
712
639
  job.output_pickle_files = tuple(
713
- self.get_output_pickle_file_path(
640
+ get_pickle_file_path(
714
641
  arg=job.workerids[ind],
642
+ workflow_dir=self.workflow_dir_remote,
715
643
  subfolder_name=job.wftask_subfolder_name,
644
+ in_or_out="out",
716
645
  prefix=job.wftask_file_prefixes[ind],
717
646
  )
718
647
  for ind in range(job.num_tasks_tot)
719
648
  )
720
-
721
649
  # Define SLURM-job file names/paths
722
- job.slurm_script = self.get_slurm_script_file_path(
650
+ job.slurm_script = get_slurm_script_file_path(
651
+ workflow_dir=self.workflow_dir_local,
723
652
  subfolder_name=job.wftask_subfolder_name,
724
653
  prefix=job.slurm_file_prefix,
725
654
  )
726
- job.slurm_stdout = self.get_slurm_stdout_file_path(
655
+ job.slurm_stdout = get_slurm_file_path(
656
+ workflow_dir=self.workflow_dir_remote,
727
657
  subfolder_name=job.wftask_subfolder_name,
658
+ out_or_err="out",
728
659
  prefix=job.slurm_file_prefix,
729
660
  )
730
- job.slurm_stderr = self.get_slurm_stderr_file_path(
661
+ job.slurm_stderr = get_slurm_file_path(
662
+ workflow_dir=self.workflow_dir_remote,
731
663
  subfolder_name=job.wftask_subfolder_name,
664
+ out_or_err="err",
732
665
  prefix=job.slurm_file_prefix,
733
666
  )
734
667
 
735
- # Dump serialized versions+function+args+kwargs to pickle file
668
+ # Dump serialized versions+function+args+kwargs to pickle
736
669
  versions = dict(
737
670
  python=sys.version_info[:3],
738
671
  cloudpickle=cloudpickle.__version__,
@@ -824,7 +757,6 @@ class FractalSlurmExecutor(SlurmExecutor):
824
757
  """
825
758
  # Handle all uncaught exceptions in this broad try/except block
826
759
  try:
827
-
828
760
  # Retrieve job
829
761
  with self.jobs_lock:
830
762
  try:
@@ -1039,7 +971,6 @@ class FractalSlurmExecutor(SlurmExecutor):
1039
971
  )
1040
972
 
1041
973
  for prefix in prefixes:
1042
-
1043
974
  if prefix == job.slurm_file_prefix:
1044
975
  files_to_copy = _glob_as_user(
1045
976
  folder=str(self.workflow_dir_remote / subfolder_name),
@@ -1177,7 +1108,6 @@ class FractalSlurmExecutor(SlurmExecutor):
1177
1108
  slurm_err_path: str,
1178
1109
  slurm_config: SlurmConfig,
1179
1110
  ):
1180
-
1181
1111
  num_tasks_max_running = slurm_config.parallel_tasks_per_job
1182
1112
  mem_per_task_MB = slurm_config.mem_per_task_MB
1183
1113
 
@@ -1229,19 +1159,6 @@ class FractalSlurmExecutor(SlurmExecutor):
1229
1159
  script = "\n".join(script_lines)
1230
1160
  return script
1231
1161
 
1232
- def get_default_task_files(self) -> TaskFiles:
1233
- """
1234
- This will be called when self.submit or self.map are called from
1235
- outside fractal-server, and then lack some optional arguments.
1236
- """
1237
- task_files = TaskFiles(
1238
- workflow_dir_local=self.workflow_dir_local,
1239
- workflow_dir_remote=self.workflow_dir_remote,
1240
- task_order=None,
1241
- task_name="name",
1242
- )
1243
- return task_files
1244
-
1245
1162
  def shutdown(self, wait=True, *, cancel_futures=False):
1246
1163
  """
1247
1164
  Clean up all executor variables. Note that this function is executed on
@@ -0,0 +1,58 @@
1
+ from pathlib import Path
2
+ from typing import Literal
3
+ from typing import Optional
4
+
5
+
6
+ def get_pickle_file_path(
7
+ *,
8
+ arg: str,
9
+ workflow_dir: Path,
10
+ subfolder_name: str,
11
+ in_or_out: Literal["in", "out"],
12
+ prefix: str,
13
+ ) -> Path:
14
+ if in_or_out in ["in", "out"]:
15
+ output = (
16
+ workflow_dir
17
+ / subfolder_name
18
+ / f"{prefix}_{in_or_out}_{arg}.pickle"
19
+ )
20
+ return output
21
+ else:
22
+ raise ValueError(
23
+ f"Missing or unexpected value in_or_out argument, {in_or_out=}"
24
+ )
25
+
26
+
27
+ def get_slurm_script_file_path(
28
+ *, workflow_dir: Path, subfolder_name: str, prefix: Optional[str] = None
29
+ ) -> Path:
30
+ prefix = prefix or "_temp"
31
+ return workflow_dir / subfolder_name / f"{prefix}_slurm_submit.sbatch"
32
+
33
+
34
+ def get_slurm_file_path(
35
+ *,
36
+ workflow_dir: Path,
37
+ subfolder_name: str,
38
+ arg: str = "%j",
39
+ out_or_err: Literal["out", "err"],
40
+ prefix: str,
41
+ ) -> Path:
42
+ if out_or_err == "out":
43
+ return (
44
+ workflow_dir
45
+ / subfolder_name
46
+ / f"{prefix}_slurm_{arg}.{out_or_err}"
47
+ )
48
+ elif out_or_err == "err":
49
+ return (
50
+ workflow_dir
51
+ / subfolder_name
52
+ / f"{prefix}_slurm_{arg}.{out_or_err}"
53
+ )
54
+ else:
55
+ raise ValueError(
56
+ "Missing or unexpected value out_or_err argument, "
57
+ f"{out_or_err=}"
58
+ )
@@ -2,6 +2,7 @@
2
2
  Custom version of Python
3
3
  [ProcessPoolExecutor](https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.ProcessPoolExecutor)).
4
4
  """
5
+ import multiprocessing as mp
5
6
  import threading
6
7
  import time
7
8
  from concurrent.futures import ProcessPoolExecutor
@@ -32,7 +33,7 @@ class FractalProcessPoolExecutor(ProcessPoolExecutor):
32
33
  def __init__(
33
34
  self, shutdown_file: Path, interval: float = 1.0, *args, **kwargs
34
35
  ):
35
- super().__init__(*args, **kwargs)
36
+ super().__init__(*args, **kwargs, mp_context=mp.get_context("spawn"))
36
37
  self.shutdown_file = Path(shutdown_file)
37
38
  self.interval = float(interval)
38
39
  logger.debug(
@@ -1,6 +1,4 @@
1
1
  import os
2
- from datetime import datetime
3
- from datetime import timezone
4
2
  from typing import Any
5
3
  from typing import Optional
6
4
 
@@ -74,7 +72,7 @@ def val_absolute_path(attribute: str, accept_none: bool = False):
74
72
  Check that a string attribute is an absolute path
75
73
  """
76
74
 
77
- def val(string: Optional[str]) -> str:
75
+ def val(string: Optional[str]) -> Optional[str]:
78
76
  if string is None:
79
77
  if accept_none:
80
78
  return string
@@ -103,16 +101,3 @@ def val_unique_list(attribute: str):
103
101
  return must_be_unique
104
102
 
105
103
  return val
106
-
107
-
108
- def valutc(attribute: str):
109
- def val(timestamp: Optional[datetime]) -> Optional[datetime]:
110
- """
111
- Replacing `tzinfo` with `timezone.utc` is just required by SQLite data.
112
- If using Postgres, this function leaves the datetime exactly as it is.
113
- """
114
- if timestamp is not None:
115
- return timestamp.replace(tzinfo=timezone.utc)
116
- return None
117
-
118
- return val
@@ -3,6 +3,7 @@ from typing import Optional
3
3
  from fastapi_users import schemas
4
4
  from pydantic import BaseModel
5
5
  from pydantic import Extra
6
+ from pydantic import Field
6
7
  from pydantic import validator
7
8
 
8
9
  from ._validators import val_unique_list
@@ -11,8 +12,8 @@ from ._validators import valstr
11
12
  __all__ = (
12
13
  "UserRead",
13
14
  "UserUpdate",
15
+ "UserUpdateGroups",
14
16
  "UserCreate",
15
- "UserUpdateWithNewGroupIds",
16
17
  )
17
18
 
18
19
 
@@ -45,7 +46,7 @@ class UserRead(schemas.BaseUser[int]):
45
46
  oauth_accounts: list[OAuthAccountRead]
46
47
 
47
48
 
48
- class UserUpdate(schemas.BaseUserUpdate):
49
+ class UserUpdate(schemas.BaseUserUpdate, extra=Extra.forbid):
49
50
  """
50
51
  Schema for `User` update.
51
52
 
@@ -82,14 +83,6 @@ class UserUpdateStrict(BaseModel, extra=Extra.forbid):
82
83
  pass
83
84
 
84
85
 
85
- class UserUpdateWithNewGroupIds(UserUpdate):
86
- new_group_ids: Optional[list[int]] = None
87
-
88
- _val_unique = validator("new_group_ids", allow_reuse=True)(
89
- val_unique_list("new_group_ids")
90
- )
91
-
92
-
93
86
  class UserCreate(schemas.BaseUserCreate):
94
87
  """
95
88
  Schema for `User` creation.
@@ -103,3 +96,16 @@ class UserCreate(schemas.BaseUserCreate):
103
96
  # Validators
104
97
 
105
98
  _username = validator("username", allow_reuse=True)(valstr("username"))
99
+
100
+
101
+ class UserUpdateGroups(BaseModel, extra=Extra.forbid):
102
+ """
103
+ Schema for `POST /auth/users/{user_id}/set-groups/`
104
+
105
+ """
106
+
107
+ group_ids: list[int] = Field(min_items=1)
108
+
109
+ _group_ids = validator("group_ids", allow_reuse=True)(
110
+ val_unique_list("group_ids")
111
+ )
@@ -59,21 +59,10 @@ class UserGroupCreate(BaseModel, extra=Extra.forbid):
59
59
  class UserGroupUpdate(BaseModel, extra=Extra.forbid):
60
60
  """
61
61
  Schema for `UserGroup` update
62
-
63
- NOTE: `new_user_ids` does not correspond to a column of the `UserGroup`
64
- table, but it is rather used to create new `LinkUserGroup` rows.
65
-
66
- Attributes:
67
- new_user_ids: IDs of groups to be associated to user.
68
62
  """
69
63
 
70
- new_user_ids: list[int] = Field(default_factory=list)
71
64
  viewer_paths: Optional[list[str]] = None
72
65
 
73
- _val_unique = validator("new_user_ids", allow_reuse=True)(
74
- val_unique_list("new_user_ids")
75
- )
76
-
77
66
  @validator("viewer_paths")
78
67
  def viewer_paths_validator(cls, value):
79
68
  for i, path in enumerate(value):
@@ -7,7 +7,6 @@ from pydantic import validator
7
7
  from pydantic.types import StrictStr
8
8
 
9
9
  from .._validators import valstr
10
- from .._validators import valutc
11
10
  from .dumps import DatasetDumpV1
12
11
  from .dumps import ProjectDumpV1
13
12
  from .dumps import WorkflowDumpV1
@@ -150,13 +149,6 @@ class ApplyWorkflowReadV1(_ApplyWorkflowBaseV1):
150
149
  first_task_index: Optional[int]
151
150
  last_task_index: Optional[int]
152
151
 
153
- _start_timestamp = validator("start_timestamp", allow_reuse=True)(
154
- valutc("start_timestamp")
155
- )
156
- _end_timestamp = validator("end_timestamp", allow_reuse=True)(
157
- valutc("end_timestamp")
158
- )
159
-
160
152
 
161
153
  class ApplyWorkflowUpdateV1(BaseModel):
162
154
  """
@@ -8,7 +8,6 @@ from pydantic import validator
8
8
 
9
9
  from .._validators import val_absolute_path
10
10
  from .._validators import valstr
11
- from .._validators import valutc
12
11
  from .dumps import WorkflowTaskDumpV1
13
12
  from .project import ProjectReadV1
14
13
  from .workflow import WorkflowTaskStatusTypeV1
@@ -151,10 +150,6 @@ class DatasetReadV1(_DatasetBaseV1):
151
150
  project: ProjectReadV1
152
151
  timestamp_created: datetime
153
152
 
154
- _timestamp_created = validator("timestamp_created", allow_reuse=True)(
155
- valutc("timestamp_created")
156
- )
157
-
158
153
 
159
154
  class DatasetStatusReadV1(BaseModel):
160
155
  """
@@ -5,7 +5,6 @@ from pydantic import BaseModel
5
5
  from pydantic import validator
6
6
 
7
7
  from .._validators import valstr
8
- from .._validators import valutc
9
8
 
10
9
 
11
10
  __all__ = (
@@ -50,10 +49,6 @@ class ProjectReadV1(_ProjectBaseV1):
50
49
  id: int
51
50
  timestamp_created: datetime
52
51
 
53
- _timestamp_created = validator("timestamp_created", allow_reuse=True)(
54
- valutc("timestamp_created")
55
- )
56
-
57
52
 
58
53
  class ProjectUpdateV1(_ProjectBaseV1):
59
54
  """
@@ -3,9 +3,6 @@ from typing import Any
3
3
  from typing import Optional
4
4
 
5
5
  from pydantic import BaseModel
6
- from pydantic import validator
7
-
8
- from fractal_server.app.schemas._validators import valutc
9
6
 
10
7
 
11
8
  class StateRead(BaseModel):
@@ -19,5 +16,3 @@ class StateRead(BaseModel):
19
16
  id: Optional[int]
20
17
  data: dict[str, Any]
21
18
  timestamp: datetime
22
-
23
- _timestamp = validator("timestamp", allow_reuse=True)(valutc("timestamp"))
@@ -8,7 +8,6 @@ from pydantic import validator
8
8
 
9
9
  from .._validators import valint
10
10
  from .._validators import valstr
11
- from .._validators import valutc
12
11
  from .project import ProjectReadV1
13
12
  from .task import TaskExportV1
14
13
  from .task import TaskImportV1
@@ -135,10 +134,6 @@ class WorkflowReadV1(_WorkflowBaseV1):
135
134
  project: ProjectReadV1
136
135
  timestamp_created: datetime
137
136
 
138
- _timestamp_created = validator("timestamp_created", allow_reuse=True)(
139
- valutc("timestamp_created")
140
- )
141
-
142
137
 
143
138
  class WorkflowCreateV1(_WorkflowBaseV1):
144
139
  """
@@ -23,11 +23,13 @@ from .task import TaskImportV2 # noqa F401
23
23
  from .task import TaskImportV2Legacy # noqa F401
24
24
  from .task import TaskReadV2 # noqa F401
25
25
  from .task import TaskUpdateV2 # noqa F401
26
- from .task_collection import CollectionStateReadV2 # noqa F401
27
- from .task_collection import CollectionStatusV2 # noqa F401
28
26
  from .task_collection import TaskCollectCustomV2 # noqa F401
29
27
  from .task_collection import TaskCollectPipV2 # noqa F401
28
+ from .task_group import TaskGroupActivityActionV2 # noqa F401
29
+ from .task_group import TaskGroupActivityStatusV2 # noqa F401
30
+ from .task_group import TaskGroupActivityV2Read # noqa F401
30
31
  from .task_group import TaskGroupCreateV2 # noqa F401
32
+ from .task_group import TaskGroupCreateV2Strict # noqa F401
31
33
  from .task_group import TaskGroupReadV2 # noqa F401
32
34
  from .task_group import TaskGroupUpdateV2 # noqa F401
33
35
  from .task_group import TaskGroupV2OriginEnum # noqa F401
@@ -7,7 +7,6 @@ from pydantic import Field
7
7
  from pydantic import validator
8
8
 
9
9
  from .._validators import valstr
10
- from .._validators import valutc
11
10
  from .dumps import WorkflowTaskDumpV2
12
11
  from .project import ProjectReadV2
13
12
  from .workflowtask import WorkflowTaskStatusTypeV2
@@ -39,7 +38,7 @@ class DatasetCreateV2(BaseModel, extra=Extra.forbid):
39
38
 
40
39
  # Validators
41
40
  @validator("zarr_dir")
42
- def normalize_zarr_dir(cls, v: str) -> str:
41
+ def normalize_zarr_dir(cls, v: Optional[str]) -> Optional[str]:
43
42
  if v is not None:
44
43
  return normalize_url(v)
45
44
  return v
@@ -62,11 +61,6 @@ class DatasetReadV2(BaseModel):
62
61
  zarr_dir: str
63
62
  filters: Filters = Field(default_factory=Filters)
64
63
 
65
- # Validators
66
- _timestamp_created = validator("timestamp_created", allow_reuse=True)(
67
- valutc("timestamp_created")
68
- )
69
-
70
64
 
71
65
  class DatasetUpdateV2(BaseModel, extra=Extra.forbid):
72
66
 
@@ -8,7 +8,6 @@ from pydantic import validator
8
8
  from pydantic.types import StrictStr
9
9
 
10
10
  from .._validators import valstr
11
- from .._validators import valutc
12
11
  from .dumps import DatasetDumpV2
13
12
  from .dumps import ProjectDumpV2
14
13
  from .dumps import WorkflowDumpV2
@@ -101,13 +100,6 @@ class JobReadV2(BaseModel):
101
100
  last_task_index: Optional[int]
102
101
  worker_init: Optional[str]
103
102
 
104
- _start_timestamp = validator("start_timestamp", allow_reuse=True)(
105
- valutc("start_timestamp")
106
- )
107
- _end_timestamp = validator("end_timestamp", allow_reuse=True)(
108
- valutc("end_timestamp")
109
- )
110
-
111
103
 
112
104
  class JobUpdateV2(BaseModel, extra=Extra.forbid):
113
105
 
@@ -6,7 +6,6 @@ from pydantic import Extra
6
6
  from pydantic import validator
7
7
 
8
8
  from .._validators import valstr
9
- from .._validators import valutc
10
9
 
11
10
 
12
11
  class ProjectCreateV2(BaseModel, extra=Extra.forbid):
@@ -21,10 +20,6 @@ class ProjectReadV2(BaseModel):
21
20
  id: int
22
21
  name: str
23
22
  timestamp_created: datetime
24
- # Validators
25
- _timestamp_created = validator("timestamp_created", allow_reuse=True)(
26
- valutc("timestamp_created")
27
- )
28
23
 
29
24
 
30
25
  class ProjectUpdateV2(BaseModel, extra=Extra.forbid):