fractal-server 2.12.0a1__py3-none-any.whl → 2.13.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/__main__.py +17 -63
  3. fractal_server/app/models/security.py +9 -12
  4. fractal_server/app/models/v2/dataset.py +2 -2
  5. fractal_server/app/models/v2/job.py +11 -9
  6. fractal_server/app/models/v2/task.py +2 -3
  7. fractal_server/app/models/v2/task_group.py +6 -2
  8. fractal_server/app/models/v2/workflowtask.py +15 -8
  9. fractal_server/app/routes/admin/v2/task.py +1 -1
  10. fractal_server/app/routes/admin/v2/task_group.py +1 -1
  11. fractal_server/app/routes/api/v2/dataset.py +4 -4
  12. fractal_server/app/routes/api/v2/images.py +11 -23
  13. fractal_server/app/routes/api/v2/project.py +2 -2
  14. fractal_server/app/routes/api/v2/status.py +1 -1
  15. fractal_server/app/routes/api/v2/submit.py +8 -6
  16. fractal_server/app/routes/api/v2/task.py +4 -2
  17. fractal_server/app/routes/api/v2/task_collection.py +3 -2
  18. fractal_server/app/routes/api/v2/task_group.py +2 -2
  19. fractal_server/app/routes/api/v2/workflow.py +3 -3
  20. fractal_server/app/routes/api/v2/workflow_import.py +3 -3
  21. fractal_server/app/routes/api/v2/workflowtask.py +3 -1
  22. fractal_server/app/routes/auth/_aux_auth.py +4 -1
  23. fractal_server/app/routes/auth/current_user.py +3 -5
  24. fractal_server/app/routes/auth/group.py +1 -1
  25. fractal_server/app/routes/auth/users.py +2 -4
  26. fractal_server/app/routes/aux/_runner.py +1 -1
  27. fractal_server/app/routes/aux/validate_user_settings.py +1 -2
  28. fractal_server/app/runner/executors/_job_states.py +13 -0
  29. fractal_server/app/runner/executors/slurm/_slurm_config.py +26 -18
  30. fractal_server/app/runner/executors/slurm/ssh/__init__.py +0 -3
  31. fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py +31 -22
  32. fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py +2 -6
  33. fractal_server/app/runner/executors/slurm/ssh/executor.py +35 -50
  34. fractal_server/app/runner/executors/slurm/sudo/__init__.py +0 -3
  35. fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py +1 -2
  36. fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py +37 -47
  37. fractal_server/app/runner/executors/slurm/sudo/executor.py +77 -41
  38. fractal_server/app/runner/v2/__init__.py +0 -9
  39. fractal_server/app/runner/v2/_local/_local_config.py +5 -4
  40. fractal_server/app/runner/v2/_slurm_common/get_slurm_config.py +4 -4
  41. fractal_server/app/runner/v2/_slurm_sudo/__init__.py +2 -2
  42. fractal_server/app/runner/v2/deduplicate_list.py +1 -1
  43. fractal_server/app/runner/v2/runner.py +9 -4
  44. fractal_server/app/runner/v2/task_interface.py +15 -7
  45. fractal_server/app/schemas/_filter_validators.py +6 -3
  46. fractal_server/app/schemas/_validators.py +7 -5
  47. fractal_server/app/schemas/user.py +23 -18
  48. fractal_server/app/schemas/user_group.py +25 -11
  49. fractal_server/app/schemas/user_settings.py +31 -24
  50. fractal_server/app/schemas/v2/dataset.py +48 -35
  51. fractal_server/app/schemas/v2/dumps.py +16 -14
  52. fractal_server/app/schemas/v2/job.py +49 -29
  53. fractal_server/app/schemas/v2/manifest.py +32 -28
  54. fractal_server/app/schemas/v2/project.py +18 -8
  55. fractal_server/app/schemas/v2/task.py +86 -75
  56. fractal_server/app/schemas/v2/task_collection.py +41 -30
  57. fractal_server/app/schemas/v2/task_group.py +39 -20
  58. fractal_server/app/schemas/v2/workflow.py +24 -12
  59. fractal_server/app/schemas/v2/workflowtask.py +63 -61
  60. fractal_server/app/security/__init__.py +7 -4
  61. fractal_server/app/security/signup_email.py +21 -12
  62. fractal_server/config.py +123 -75
  63. fractal_server/images/models.py +18 -12
  64. fractal_server/main.py +13 -10
  65. fractal_server/migrations/env.py +16 -63
  66. fractal_server/tasks/v2/local/collect.py +9 -8
  67. fractal_server/tasks/v2/local/deactivate.py +3 -0
  68. fractal_server/tasks/v2/local/reactivate.py +3 -0
  69. fractal_server/tasks/v2/ssh/collect.py +8 -8
  70. fractal_server/tasks/v2/ssh/deactivate.py +3 -0
  71. fractal_server/tasks/v2/ssh/reactivate.py +9 -6
  72. fractal_server/tasks/v2/utils_background.py +1 -1
  73. fractal_server/tasks/v2/utils_database.py +1 -1
  74. {fractal_server-2.12.0a1.dist-info → fractal_server-2.13.0.dist-info}/METADATA +10 -11
  75. {fractal_server-2.12.0a1.dist-info → fractal_server-2.13.0.dist-info}/RECORD +78 -81
  76. fractal_server/app/runner/v2/_local_experimental/__init__.py +0 -121
  77. fractal_server/app/runner/v2/_local_experimental/_local_config.py +0 -108
  78. fractal_server/app/runner/v2/_local_experimental/_submit_setup.py +0 -42
  79. fractal_server/app/runner/v2/_local_experimental/executor.py +0 -157
  80. {fractal_server-2.12.0a1.dist-info → fractal_server-2.13.0.dist-info}/LICENSE +0 -0
  81. {fractal_server-2.12.0a1.dist-info → fractal_server-2.13.0.dist-info}/WHEEL +0 -0
  82. {fractal_server-2.12.0a1.dist-info → fractal_server-2.13.0.dist-info}/entry_points.txt +0 -0
fractal_server/app/schemas/user.py
@@ -2,9 +2,10 @@ from typing import Optional

 from fastapi_users import schemas
 from pydantic import BaseModel
-from pydantic import Extra
+from pydantic import ConfigDict
 from pydantic import Field
-from pydantic import validator
+from pydantic import field_validator
+from pydantic import ValidationInfo

 from ._validators import val_unique_list
 from ._validators import valstr
@@ -41,12 +42,12 @@ class UserRead(schemas.BaseUser[int]):
         username:
     """

-    username: Optional[str]
+    username: Optional[str] = None
     group_ids_names: Optional[list[tuple[int, str]]] = None
     oauth_accounts: list[OAuthAccountRead]


-class UserUpdate(schemas.BaseUserUpdate, extra=Extra.forbid):
+class UserUpdate(schemas.BaseUserUpdate):
     """
     Schema for `User` update.

@@ -54,33 +55,35 @@ class UserUpdate(schemas.BaseUserUpdate, extra=Extra.forbid):
         username:
     """

-    username: Optional[str]
+    model_config = ConfigDict(extra="forbid")
+
+    username: Optional[str] = None

     # Validators
-    _username = validator("username", allow_reuse=True)(valstr("username"))
+    _username = field_validator("username")(classmethod(valstr("username")))

-    @validator(
+    @field_validator(
         "is_active",
         "is_verified",
         "is_superuser",
         "email",
         "password",
-        always=False,
     )
-    def cant_set_none(cls, v, field):
+    @classmethod
+    def cant_set_none(cls, v, info: ValidationInfo):
         if v is None:
-            raise ValueError(f"Cannot set {field.name}=None")
+            raise ValueError(f"Cannot set {info.field_name}=None")
         return v


-class UserUpdateStrict(BaseModel, extra=Extra.forbid):
+class UserUpdateStrict(BaseModel):
     """
     Schema for `User` self-editing.

     Attributes:
     """

-    pass
+    model_config = ConfigDict(extra="forbid")


 class UserCreate(schemas.BaseUserCreate):
@@ -91,21 +94,23 @@ class UserCreate(schemas.BaseUserCreate):
         username:
     """

-    username: Optional[str]
+    username: Optional[str] = None

     # Validators

-    _username = validator("username", allow_reuse=True)(valstr("username"))
+    _username = field_validator("username")(classmethod(valstr("username")))


-class UserUpdateGroups(BaseModel, extra=Extra.forbid):
+class UserUpdateGroups(BaseModel):
     """
     Schema for `POST /auth/users/{user_id}/set-groups/`

     """

-    group_ids: list[int] = Field(min_items=1)
+    model_config = ConfigDict(extra="forbid")
+
+    group_ids: list[int] = Field(min_length=1)

-    _group_ids = validator("group_ids", allow_reuse=True)(
-        val_unique_list("group_ids")
+    _group_ids = field_validator("group_ids")(
+        classmethod(val_unique_list("group_ids"))
     )
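
Most of the `user.py` changes follow the standard Pydantic v1 → v2 migration: `extra=Extra.forbid` moves into `model_config = ConfigDict(extra="forbid")`, `validator(..., allow_reuse=True)` becomes `field_validator(...)` with the reused function wrapped in `classmethod()`, and `Optional` fields gain an explicit `= None` default. Below is a minimal, self-contained sketch of the pattern; the `valstr` body is only a stand-in for `fractal_server.app.schemas._validators.valstr`, whose implementation is not shown in this diff.

```python
from typing import Optional

from pydantic import BaseModel, ConfigDict, ValidationError, field_validator


def valstr(attribute: str):
    # Illustrative stand-in for fractal-server's reusable string validator:
    # strip whitespace and reject empty strings.
    def val(cls, value: Optional[str]) -> Optional[str]:
        if value is None:
            return value
        stripped = value.strip()
        if not stripped:
            raise ValueError(f"String attribute '{attribute}' cannot be empty")
        return stripped

    return val


class UserUpdateExample(BaseModel):
    # Pydantic v2 replaces `extra=Extra.forbid` with model_config
    model_config = ConfigDict(extra="forbid")

    # Optional fields need an explicit default in v2
    username: Optional[str] = None

    # v1: validator("username", allow_reuse=True)(valstr("username"))
    # v2: wrap the reused function in classmethod() before registering it
    _username = field_validator("username")(classmethod(valstr("username")))


if __name__ == "__main__":
    print(UserUpdateExample(username="  alice  "))  # username='alice'
    try:
        UserUpdateExample(unknown=1)  # rejected by extra="forbid"
    except ValidationError as err:
        print(err.errors()[0]["type"])  # extra_forbidden
```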
fractal_server/app/schemas/user_group.py
@@ -2,9 +2,11 @@ from datetime import datetime
 from typing import Optional

 from pydantic import BaseModel
-from pydantic import Extra
+from pydantic import ConfigDict
 from pydantic import Field
-from pydantic import validator
+from pydantic import field_serializer
+from pydantic import field_validator
+from pydantic.types import AwareDatetime

 from ._validators import val_absolute_path
 from ._validators import val_unique_list
@@ -32,12 +34,16 @@ class UserGroupRead(BaseModel):

     id: int
     name: str
-    timestamp_created: datetime
+    timestamp_created: AwareDatetime
     user_ids: Optional[list[int]] = None
     viewer_paths: list[str]

+    @field_serializer("timestamp_created")
+    def serialize_datetime(v: datetime) -> str:
+        return v.isoformat()

-class UserGroupCreate(BaseModel, extra=Extra.forbid):
+
+class UserGroupCreate(BaseModel):
     """
     Schema for `UserGroup` creation

@@ -45,27 +51,35 @@ class UserGroupCreate(BaseModel, extra=Extra.forbid):
         name: Group name
     """

+    model_config = ConfigDict(extra="forbid")
+
     name: str
     viewer_paths: list[str] = Field(default_factory=list)

-    @validator("viewer_paths")
+    @field_validator("viewer_paths")
+    @classmethod
     def viewer_paths_validator(cls, value):
         for i, path in enumerate(value):
-            value[i] = val_absolute_path(f"viewer_paths[{i}]")(path)
-        value = val_unique_list("viewer_paths")(value)
+            value[i] = val_absolute_path(f"viewer_paths[{i}]")(cls, path)
+        value = val_unique_list("viewer_paths")(cls, value)
         return value


-class UserGroupUpdate(BaseModel, extra=Extra.forbid):
+class UserGroupUpdate(BaseModel):
     """
     Schema for `UserGroup` update
     """

+    model_config = ConfigDict(extra="forbid")
+
     viewer_paths: Optional[list[str]] = None

-    @validator("viewer_paths")
+    @field_validator("viewer_paths")
+    @classmethod
     def viewer_paths_validator(cls, value):
+        if value is None:
+            raise ValueError("Cannot set `viewer_paths=None`.")
         for i, path in enumerate(value):
-            value[i] = val_absolute_path(f"viewer_paths[{i}]")(path)
-        value = val_unique_list("viewer_paths")(value)
+            value[i] = val_absolute_path(f"viewer_paths[{i}]")(cls, path)
+        value = val_unique_list("viewer_paths")(cls, value)
         return value
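
`UserGroupRead.timestamp_created` switches from `datetime` to `AwareDatetime` and gains a `field_serializer` that turns the value into an ISO-8601 string on dump. A minimal sketch of that combination (the model name is illustrative and the serializer is written as a conventional instance method):

```python
from datetime import datetime, timezone

from pydantic import BaseModel, ValidationError, field_serializer
from pydantic.types import AwareDatetime


class GroupReadExample(BaseModel):
    name: str
    # AwareDatetime rejects naive datetimes at validation time
    timestamp_created: AwareDatetime

    @field_serializer("timestamp_created")
    def serialize_datetime(self, value: datetime) -> str:
        # Emit an ISO-8601 string instead of a datetime object
        return value.isoformat()


if __name__ == "__main__":
    group = GroupReadExample(
        name="All", timestamp_created=datetime(2025, 1, 1, tzinfo=timezone.utc)
    )
    print(group.model_dump())  # timestamp_created='2025-01-01T00:00:00+00:00'
    try:
        GroupReadExample(name="All", timestamp_created=datetime(2025, 1, 1))
    except ValidationError:
        print("naive datetime rejected")
```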
fractal_server/app/schemas/user_settings.py
@@ -1,8 +1,8 @@
 from typing import Optional

 from pydantic import BaseModel
-from pydantic import Extra
-from pydantic import validator
+from pydantic import ConfigDict
+from pydantic import field_validator
 from pydantic.types import StrictStr

 from ._validators import val_absolute_path
@@ -41,11 +41,13 @@ class UserSettingsReadStrict(BaseModel):
     project_dir: Optional[str] = None


-class UserSettingsUpdate(BaseModel, extra=Extra.forbid):
+class UserSettingsUpdate(BaseModel):
     """
     Schema reserved for superusers
     """

+    model_config = ConfigDict(extra="forbid")
+
     ssh_host: Optional[str] = None
     ssh_username: Optional[str] = None
     ssh_private_key_path: Optional[str] = None
@@ -55,46 +57,51 @@ class UserSettingsUpdate(BaseModel, extra=Extra.forbid):
     slurm_accounts: Optional[list[StrictStr]] = None
     project_dir: Optional[str] = None

-    _ssh_host = validator("ssh_host", allow_reuse=True)(
-        valstr("ssh_host", accept_none=True)
+    _ssh_host = field_validator("ssh_host")(
+        classmethod(valstr("ssh_host", accept_none=True))
+    )
+    _ssh_username = field_validator("ssh_username")(
+        classmethod(valstr("ssh_username", accept_none=True))
     )
-    _ssh_username = validator("ssh_username", allow_reuse=True)(
-        valstr("ssh_username", accept_none=True)
+    _ssh_private_key_path = field_validator("ssh_private_key_path")(
+        classmethod(
+            val_absolute_path("ssh_private_key_path", accept_none=True)
+        )
     )
-    _ssh_private_key_path = validator(
-        "ssh_private_key_path", allow_reuse=True
-    )(val_absolute_path("ssh_private_key_path", accept_none=True))

-    _ssh_tasks_dir = validator("ssh_tasks_dir", allow_reuse=True)(
-        val_absolute_path("ssh_tasks_dir", accept_none=True)
+    _ssh_tasks_dir = field_validator("ssh_tasks_dir")(
+        classmethod(val_absolute_path("ssh_tasks_dir", accept_none=True))
     )
-    _ssh_jobs_dir = validator("ssh_jobs_dir", allow_reuse=True)(
-        val_absolute_path("ssh_jobs_dir", accept_none=True)
+    _ssh_jobs_dir = field_validator("ssh_jobs_dir")(
+        classmethod(val_absolute_path("ssh_jobs_dir", accept_none=True))
     )

-    _slurm_user = validator("slurm_user", allow_reuse=True)(
-        valstr("slurm_user", accept_none=True)
+    _slurm_user = field_validator("slurm_user")(
+        classmethod(valstr("slurm_user", accept_none=True))
     )

-    @validator("slurm_accounts")
+    @field_validator("slurm_accounts")
+    @classmethod
    def slurm_accounts_validator(cls, value):
         if value is None:
             return value
         for i, item in enumerate(value):
-            value[i] = valstr(f"slurm_accounts[{i}]")(item)
-        return val_unique_list("slurm_accounts")(value)
+            value[i] = valstr(f"slurm_accounts[{i}]")(cls, item)
+        return val_unique_list("slurm_accounts")(cls, value)

-    @validator("project_dir")
+    @field_validator("project_dir")
+    @classmethod
     def project_dir_validator(cls, value):
         if value is None:
             return None
         validate_cmd(value)
-        return val_absolute_path("project_dir")(value)
+        return val_absolute_path("project_dir")(cls, value)


-class UserSettingsUpdateStrict(BaseModel, extra=Extra.forbid):
+class UserSettingsUpdateStrict(BaseModel):
+    model_config = ConfigDict(extra="forbid")
     slurm_accounts: Optional[list[StrictStr]] = None

-    _slurm_accounts = validator("slurm_accounts", allow_reuse=True)(
-        val_unique_list("slurm_accounts")
+    _slurm_accounts = field_validator("slurm_accounts")(
+        classmethod(val_unique_list("slurm_accounts"))
     )
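
`slurm_accounts_validator` shows the per-item pattern used for list fields in these schemas: pass `None` through (so a partial update can leave the attribute unchanged), validate each entry, then enforce uniqueness. The sketch below reproduces that flow with inline checks standing in for the `valstr` and `val_unique_list` helpers:

```python
from typing import Optional

from pydantic import BaseModel, field_validator


class SettingsUpdateExample(BaseModel):
    slurm_accounts: Optional[list[str]] = None

    @field_validator("slurm_accounts")
    @classmethod
    def slurm_accounts_validator(cls, value):
        # `None` means "do not touch this attribute" in a PATCH-style update
        if value is None:
            return value
        cleaned = []
        for i, item in enumerate(value):
            item = item.strip()
            if not item:
                raise ValueError(f"slurm_accounts[{i}] cannot be empty")
            cleaned.append(item)
        # Stand-in for val_unique_list: reject duplicates
        if len(set(cleaned)) != len(cleaned):
            raise ValueError("slurm_accounts must not contain duplicates")
        return cleaned


if __name__ == "__main__":
    print(SettingsUpdateExample(slurm_accounts=[" a ", "b"]).slurm_accounts)  # ['a', 'b']
    print(SettingsUpdateExample().slurm_accounts)  # None
```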
fractal_server/app/schemas/v2/dataset.py
@@ -3,10 +3,12 @@ from typing import Any
 from typing import Optional

 from pydantic import BaseModel
-from pydantic import Extra
+from pydantic import ConfigDict
 from pydantic import Field
-from pydantic import root_validator
-from pydantic import validator
+from pydantic import field_serializer
+from pydantic import field_validator
+from pydantic import model_validator
+from pydantic.types import AwareDatetime

 from .._filter_validators import validate_attribute_filters
 from .._filter_validators import validate_type_filters
@@ -27,13 +29,14 @@ class _DatasetHistoryItemV2(BaseModel):

     workflowtask: WorkflowTaskDumpV2
     status: WorkflowTaskStatusTypeV2
-    parallelization: Optional[dict]
+    parallelization: Optional[dict] = None


 # CRUD


-class DatasetCreateV2(BaseModel, extra=Extra.forbid):
+class DatasetCreateV2(BaseModel):
+    model_config = ConfigDict(extra="forbid")

     name: str

@@ -44,19 +47,20 @@ class DatasetCreateV2(BaseModel, extra=Extra.forbid):

     # Validators

-    _dict_keys = root_validator(pre=True, allow_reuse=True)(
-        root_validate_dict_keys
+    _dict_keys = model_validator(mode="before")(
+        classmethod(root_validate_dict_keys)
     )
-    _type_filters = validator("type_filters", allow_reuse=True)(
-        validate_type_filters
+    _type_filters = field_validator("type_filters")(
+        classmethod(validate_type_filters)
     )
-    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
-        validate_attribute_filters
+    _attribute_filters = field_validator("attribute_filters")(
+        classmethod(validate_attribute_filters)
     )

-    _name = validator("name", allow_reuse=True)(valstr("name"))
+    _name = field_validator("name")(classmethod(valstr("name")))

-    @validator("zarr_dir")
+    @field_validator("zarr_dir")
+    @classmethod
     def normalize_zarr_dir(cls, v: Optional[str]) -> Optional[str]:
         if v is not None:
             return normalize_url(v)
@@ -64,7 +68,6 @@ class DatasetCreateV2(BaseModel, extra=Extra.forbid):


 class DatasetReadV2(BaseModel):
-
     id: int
     name: str

@@ -73,42 +76,48 @@ class DatasetReadV2(BaseModel):

     history: list[_DatasetHistoryItemV2]

-    timestamp_created: datetime
+    timestamp_created: AwareDatetime

     zarr_dir: str
     type_filters: dict[str, bool]
     attribute_filters: AttributeFiltersType

+    @field_serializer("timestamp_created")
+    def serialize_datetime(v: datetime) -> str:
+        return v.isoformat()
+

-class DatasetUpdateV2(BaseModel, extra=Extra.forbid):
+class DatasetUpdateV2(BaseModel):
+    model_config = ConfigDict(extra="forbid")

-    name: Optional[str]
-    zarr_dir: Optional[str]
-    type_filters: Optional[dict[str, bool]]
-    attribute_filters: Optional[dict[str, list[Any]]]
+    name: Optional[str] = None
+    zarr_dir: Optional[str] = None
+    type_filters: Optional[dict[str, bool]] = None
+    attribute_filters: Optional[dict[str, list[Any]]] = None

     # Validators

-    _dict_keys = root_validator(pre=True, allow_reuse=True)(
-        root_validate_dict_keys
+    _dict_keys = model_validator(mode="before")(
+        classmethod(root_validate_dict_keys)
     )
-    _type_filters = validator("type_filters", allow_reuse=True)(
-        validate_type_filters
+    _type_filters = field_validator("type_filters")(
+        classmethod(validate_type_filters)
     )
-    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
-        validate_attribute_filters
+    _attribute_filters = field_validator("attribute_filters")(
+        classmethod(validate_attribute_filters)
     )

-    _name = validator("name", allow_reuse=True)(valstr("name"))
+    _name = field_validator("name")(classmethod(valstr("name")))

-    @validator("zarr_dir")
+    @field_validator("zarr_dir")
+    @classmethod
     def normalize_zarr_dir(cls, v: Optional[str]) -> Optional[str]:
         if v is not None:
             return normalize_url(v)
         return v


-class DatasetImportV2(BaseModel, extra=Extra.forbid):
+class DatasetImportV2(BaseModel):
     """
     Class for `Dataset` import.

@@ -121,6 +130,8 @@ class DatasetImportV2(BaseModel, extra=Extra.forbid):
         attribute_filters:
     """

+    model_config = ConfigDict(extra="forbid")
+
     name: str
     zarr_dir: str
     images: list[SingleImage] = Field(default_factory=list)
@@ -129,7 +140,8 @@ class DatasetImportV2(BaseModel, extra=Extra.forbid):
     type_filters: dict[str, bool] = Field(default_factory=dict)
     attribute_filters: AttributeFiltersType = Field(default_factory=dict)

-    @root_validator(pre=True)
+    @model_validator(mode="before")
+    @classmethod
     def update_legacy_filters(cls, values: dict):
         """
         Transform legacy filters (created with fractal-server<2.11.0)
@@ -159,14 +171,15 @@ class DatasetImportV2(BaseModel, extra=Extra.forbid):

         return values

-    _type_filters = validator("type_filters", allow_reuse=True)(
-        validate_type_filters
+    _type_filters = field_validator("type_filters")(
+        classmethod(validate_type_filters)
     )
-    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
-        validate_attribute_filters
+    _attribute_filters = field_validator("attribute_filters")(
+        classmethod(validate_attribute_filters)
     )

-    @validator("zarr_dir")
+    @field_validator("zarr_dir")
+    @classmethod
     def normalize_zarr_dir(cls, v: str) -> str:
         return normalize_url(v)
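
`DatasetImportV2.update_legacy_filters` moves from `@root_validator(pre=True)` to `@model_validator(mode="before")`, i.e. a classmethod that rewrites the raw input dict before any field validation runs. The sketch below shows only the mechanism; the assumed legacy payload shape (a single `filters` key holding `types` and `attributes`) is an illustration, not necessarily the exact pre-2.11.0 format:

```python
from typing import Any

from pydantic import BaseModel, Field, model_validator


class DatasetImportExample(BaseModel):
    zarr_dir: str
    type_filters: dict[str, bool] = Field(default_factory=dict)
    attribute_filters: dict[str, list[Any]] = Field(default_factory=dict)

    @model_validator(mode="before")
    @classmethod
    def update_legacy_filters(cls, values: dict) -> dict:
        # Runs on the raw input, before field validation
        legacy = values.pop("filters", None)
        if legacy is not None:
            if "type_filters" in values or "attribute_filters" in values:
                raise ValueError("Cannot mix legacy and new filter fields")
            values["type_filters"] = legacy.get("types", {})
            values["attribute_filters"] = {
                key: [val] for key, val in legacy.get("attributes", {}).items()
            }
        return values


if __name__ == "__main__":
    payload = {
        "zarr_dir": "/data/output.zarr",
        "filters": {"types": {"is_3D": True}, "attributes": {"well": "A01"}},
    }
    print(DatasetImportExample(**payload).model_dump())
```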
fractal_server/app/schemas/v2/dumps.py
@@ -11,13 +11,13 @@ These models are used in at least two situations:
 from typing import Optional

 from pydantic import BaseModel
-from pydantic import Extra
+from pydantic import ConfigDict

 from fractal_server.images.models import AttributeFiltersType


-class ProjectDumpV2(BaseModel, extra=Extra.forbid):
-
+class ProjectDumpV2(BaseModel):
+    model_config = ConfigDict(extra="forbid")
     id: int
     name: str
     timestamp_created: str
@@ -28,10 +28,10 @@ class TaskDumpV2(BaseModel):
     name: str
     type: str

-    command_non_parallel: Optional[str]
-    command_parallel: Optional[str]
+    command_non_parallel: Optional[str] = None
+    command_parallel: Optional[str] = None
     source: Optional[str] = None
-    version: Optional[str]
+    version: Optional[str] = None

     input_types: dict[str, bool]
     output_types: dict[str, bool]
@@ -39,29 +39,31 @@ class WorkflowTaskDumpV2(BaseModel):

 class WorkflowTaskDumpV2(BaseModel):
     """
-    We do not include 'extra=Extra.forbid' because legacy data may include
-    'input_filters' field and we want to avoid response-validation errors
-    for the endpoints that GET datasets.
+    We do not include 'model_config = ConfigDict(extra="forbid")'
+    because legacy data may include 'input_filters' field and we want to avoid
+    response-validation errors for the endpoints that GET datasets.
     """

     id: int
     workflow_id: int
-    order: Optional[int]
+    order: Optional[int] = None

     type_filters: dict[str, bool]

-    task_id: Optional[int]
-    task: Optional[TaskDumpV2]
+    task_id: Optional[int] = None
+    task: Optional[TaskDumpV2] = None


-class WorkflowDumpV2(BaseModel, extra=Extra.forbid):
+class WorkflowDumpV2(BaseModel):
+    model_config = ConfigDict(extra="forbid")
     id: int
     name: str
     project_id: int
     timestamp_created: str


-class DatasetDumpV2(BaseModel, extra=Extra.forbid):
+class DatasetDumpV2(BaseModel):
+    model_config = ConfigDict(extra="forbid")
     id: int
     name: str
     project_id: int
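
The repeated `Optional[...] = None` additions in the dump models are not cosmetic: in Pydantic v2, `Optional` no longer implies a default, so an `Optional[str]` field without `= None` stays required (it may be passed as `None`, but it must be passed). A minimal sketch with hypothetical field names:

```python
from typing import Optional

from pydantic import BaseModel, ValidationError


class TaskDumpExample(BaseModel):
    name: str
    version: Optional[str] = None    # optional, defaults to None
    command_parallel: Optional[str]  # still required: may be None, but must be provided


if __name__ == "__main__":
    print(TaskDumpExample(name="task", command_parallel=None))
    try:
        TaskDumpExample(name="task")  # command_parallel missing
    except ValidationError as err:
        print(err.errors()[0]["loc"])  # ('command_parallel',)
```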
fractal_server/app/schemas/v2/job.py
@@ -3,10 +3,13 @@ from enum import Enum
 from typing import Optional

 from pydantic import BaseModel
-from pydantic import Extra
+from pydantic import ConfigDict
 from pydantic import Field
-from pydantic import root_validator
-from pydantic import validator
+from pydantic import field_serializer
+from pydantic import field_validator
+from pydantic import model_validator
+from pydantic import ValidationInfo
+from pydantic.types import AwareDatetime
 from pydantic.types import StrictStr

 from .._filter_validators import validate_attribute_filters
@@ -39,28 +42,31 @@ class JobStatusTypeV2(str, Enum):
     FAILED = "failed"


-class JobCreateV2(BaseModel, extra=Extra.forbid):
+class JobCreateV2(BaseModel):
+
+    model_config = ConfigDict(extra="forbid")

     first_task_index: Optional[int] = None
     last_task_index: Optional[int] = None
     slurm_account: Optional[StrictStr] = None
-    worker_init: Optional[str]
+    worker_init: Optional[str] = None

     attribute_filters: AttributeFiltersType = Field(default_factory=dict)

     # Validators
-    _worker_init = validator("worker_init", allow_reuse=True)(
-        valstr("worker_init")
+    _worker_init = field_validator("worker_init")(
+        classmethod(valstr("worker_init"))
     )
-    _dict_keys = root_validator(pre=True, allow_reuse=True)(
-        root_validate_dict_keys
+    _dict_keys = model_validator(mode="before")(
+        classmethod(root_validate_dict_keys)
     )
-    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
-        validate_attribute_filters
+    _attribute_filters = field_validator("attribute_filters")(
+        classmethod(validate_attribute_filters)
     )

-    @validator("first_task_index", always=True)
-    def first_task_index_non_negative(cls, v, values):
+    @field_validator("first_task_index")
+    @classmethod
+    def first_task_index_non_negative(cls, v):
         """
         Check that `first_task_index` is non-negative.
         """
@@ -70,8 +76,9 @@ class JobCreateV2(BaseModel, extra=Extra.forbid):
             )
         return v

-    @validator("last_task_index", always=True)
-    def first_last_task_indices(cls, v, values):
+    @field_validator("last_task_index")
+    @classmethod
+    def first_last_task_indices(cls, v, info: ValidationInfo):
         """
         Check that `last_task_index` is non-negative, and that it is not
         smaller than `first_task_index`.
@@ -81,7 +88,7 @@ class JobCreateV2(BaseModel, extra=Extra.forbid):
                 f"last_task_index cannot be negative (given: {v})"
             )

-        first_task_index = values.get("first_task_index")
+        first_task_index = info.data.get("first_task_index")
         last_task_index = v
         if first_task_index is not None and last_task_index is not None:
             if first_task_index > last_task_index:
@@ -95,26 +102,39 @@ class JobCreateV2(BaseModel, extra=Extra.forbid):
 class JobReadV2(BaseModel):

     id: int
-    project_id: Optional[int]
+    project_id: Optional[int] = None
     project_dump: ProjectDumpV2
     user_email: str
-    slurm_account: Optional[str]
-    workflow_id: Optional[int]
+    slurm_account: Optional[str] = None
+    workflow_id: Optional[int] = None
     workflow_dump: WorkflowDumpV2
-    dataset_id: Optional[int]
+    dataset_id: Optional[int] = None
     dataset_dump: DatasetDumpV2
-    start_timestamp: datetime
-    end_timestamp: Optional[datetime]
+    start_timestamp: AwareDatetime
+    end_timestamp: Optional[AwareDatetime] = None
     status: str
-    log: Optional[str]
-    working_dir: Optional[str]
-    working_dir_user: Optional[str]
-    first_task_index: Optional[int]
-    last_task_index: Optional[int]
-    worker_init: Optional[str]
+    log: Optional[str] = None
+    working_dir: Optional[str] = None
+    working_dir_user: Optional[str] = None
+    first_task_index: Optional[int] = None
+    last_task_index: Optional[int] = None
+    worker_init: Optional[str] = None
     attribute_filters: AttributeFiltersType

+    @field_serializer("start_timestamp")
+    def serialize_datetime_start(v: datetime) -> str:
+        return v.isoformat()
+
+    @field_serializer("end_timestamp")
+    def serialize_datetime_end(v: Optional[datetime]) -> Optional[str]:
+        if v is None:
+            return None
+        else:
+            return v.isoformat()
+
+
+class JobUpdateV2(BaseModel):

-class JobUpdateV2(BaseModel, extra=Extra.forbid):
+    model_config = ConfigDict(extra="forbid")

     status: JobStatusTypeV2
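
`first_last_task_indices` illustrates cross-field validation after the migration: the v1 `values` argument becomes `ValidationInfo.data`, which holds the fields validated so far, and the dropped `always=True` flag has no direct v2 equivalent (a validator only runs on provided values unless the field sets `validate_default=True`). A self-contained sketch of the same check:

```python
from typing import Optional

from pydantic import BaseModel, ValidationError, ValidationInfo, field_validator


class JobCreateExample(BaseModel):
    first_task_index: Optional[int] = None
    last_task_index: Optional[int] = None

    @field_validator("last_task_index")
    @classmethod
    def first_last_task_indices(cls, v, info: ValidationInfo):
        if v is not None and v < 0:
            raise ValueError(f"last_task_index cannot be negative (given: {v})")
        # info.data holds previously-validated fields (v1 used the `values` argument)
        first_task_index = info.data.get("first_task_index")
        if first_task_index is not None and v is not None and first_task_index > v:
            raise ValueError(
                f"first_task_index={first_task_index} cannot be larger than "
                f"last_task_index={v}"
            )
        return v


if __name__ == "__main__":
    print(JobCreateExample(first_task_index=0, last_task_index=2))
    try:
        JobCreateExample(first_task_index=3, last_task_index=1)
    except ValidationError as err:
        print(err.errors()[0]["msg"])
```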