fractal-server 2.12.1__py3-none-any.whl → 2.13.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (73)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/security.py +9 -12
  3. fractal_server/app/models/v2/dataset.py +2 -2
  4. fractal_server/app/models/v2/job.py +11 -9
  5. fractal_server/app/models/v2/task.py +2 -3
  6. fractal_server/app/models/v2/task_group.py +6 -2
  7. fractal_server/app/models/v2/workflowtask.py +15 -8
  8. fractal_server/app/routes/admin/v2/task.py +1 -1
  9. fractal_server/app/routes/admin/v2/task_group.py +1 -1
  10. fractal_server/app/routes/api/v2/dataset.py +4 -4
  11. fractal_server/app/routes/api/v2/images.py +11 -11
  12. fractal_server/app/routes/api/v2/project.py +2 -2
  13. fractal_server/app/routes/api/v2/status.py +1 -1
  14. fractal_server/app/routes/api/v2/submit.py +8 -6
  15. fractal_server/app/routes/api/v2/task.py +4 -2
  16. fractal_server/app/routes/api/v2/task_collection.py +3 -2
  17. fractal_server/app/routes/api/v2/task_group.py +2 -2
  18. fractal_server/app/routes/api/v2/workflow.py +3 -3
  19. fractal_server/app/routes/api/v2/workflow_import.py +3 -3
  20. fractal_server/app/routes/api/v2/workflowtask.py +3 -1
  21. fractal_server/app/routes/auth/_aux_auth.py +4 -1
  22. fractal_server/app/routes/auth/current_user.py +3 -5
  23. fractal_server/app/routes/auth/group.py +1 -1
  24. fractal_server/app/routes/auth/users.py +2 -4
  25. fractal_server/app/routes/aux/_runner.py +1 -1
  26. fractal_server/app/routes/aux/validate_user_settings.py +1 -2
  27. fractal_server/app/runner/executors/_job_states.py +13 -0
  28. fractal_server/app/runner/executors/slurm/_slurm_config.py +26 -18
  29. fractal_server/app/runner/executors/slurm/ssh/__init__.py +0 -3
  30. fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py +31 -22
  31. fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py +2 -5
  32. fractal_server/app/runner/executors/slurm/ssh/executor.py +21 -27
  33. fractal_server/app/runner/executors/slurm/sudo/__init__.py +0 -3
  34. fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py +1 -2
  35. fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py +37 -47
  36. fractal_server/app/runner/executors/slurm/sudo/executor.py +25 -24
  37. fractal_server/app/runner/v2/__init__.py +0 -9
  38. fractal_server/app/runner/v2/_local/_local_config.py +5 -4
  39. fractal_server/app/runner/v2/_slurm_common/get_slurm_config.py +4 -4
  40. fractal_server/app/runner/v2/_slurm_sudo/__init__.py +2 -2
  41. fractal_server/app/runner/v2/deduplicate_list.py +1 -1
  42. fractal_server/app/runner/v2/runner.py +9 -4
  43. fractal_server/app/runner/v2/task_interface.py +15 -7
  44. fractal_server/app/schemas/_filter_validators.py +6 -3
  45. fractal_server/app/schemas/_validators.py +7 -5
  46. fractal_server/app/schemas/user.py +23 -18
  47. fractal_server/app/schemas/user_group.py +25 -11
  48. fractal_server/app/schemas/user_settings.py +31 -24
  49. fractal_server/app/schemas/v2/dataset.py +48 -35
  50. fractal_server/app/schemas/v2/dumps.py +16 -14
  51. fractal_server/app/schemas/v2/job.py +49 -29
  52. fractal_server/app/schemas/v2/manifest.py +32 -28
  53. fractal_server/app/schemas/v2/project.py +18 -8
  54. fractal_server/app/schemas/v2/task.py +86 -75
  55. fractal_server/app/schemas/v2/task_collection.py +41 -30
  56. fractal_server/app/schemas/v2/task_group.py +39 -20
  57. fractal_server/app/schemas/v2/workflow.py +24 -12
  58. fractal_server/app/schemas/v2/workflowtask.py +63 -61
  59. fractal_server/app/security/__init__.py +1 -1
  60. fractal_server/config.py +32 -25
  61. fractal_server/images/models.py +18 -12
  62. fractal_server/main.py +1 -1
  63. fractal_server/tasks/v2/utils_background.py +1 -1
  64. fractal_server/tasks/v2/utils_database.py +1 -1
  65. {fractal_server-2.12.1.dist-info → fractal_server-2.13.0.dist-info}/METADATA +9 -10
  66. {fractal_server-2.12.1.dist-info → fractal_server-2.13.0.dist-info}/RECORD +69 -72
  67. fractal_server/app/runner/v2/_local_experimental/__init__.py +0 -121
  68. fractal_server/app/runner/v2/_local_experimental/_local_config.py +0 -108
  69. fractal_server/app/runner/v2/_local_experimental/_submit_setup.py +0 -42
  70. fractal_server/app/runner/v2/_local_experimental/executor.py +0 -157
  71. {fractal_server-2.12.1.dist-info → fractal_server-2.13.0.dist-info}/LICENSE +0 -0
  72. {fractal_server-2.12.1.dist-info → fractal_server-2.13.0.dist-info}/WHEEL +0 -0
  73. {fractal_server-2.12.1.dist-info → fractal_server-2.13.0.dist-info}/entry_points.txt +0 -0

fractal_server/__init__.py
@@ -1 +1 @@
- __VERSION__ = "2.12.1"
+ __VERSION__ = "2.13.0"

fractal_server/app/models/security.py
@@ -12,6 +12,7 @@
  from datetime import datetime
  from typing import Optional

+ from pydantic import ConfigDict
  from pydantic import EmailStr
  from sqlalchemy import Column
  from sqlalchemy.types import DateTime
@@ -50,13 +51,11 @@ class OAuthAccount(SQLModel, table=True):
      user: Optional["UserOAuth"] = Relationship(back_populates="oauth_accounts")
      oauth_name: str = Field(index=True, nullable=False)
      access_token: str = Field(nullable=False)
-     expires_at: Optional[int] = Field(nullable=True)
-     refresh_token: Optional[str] = Field(nullable=True)
+     expires_at: Optional[int] = Field(nullable=True, default=None)
+     refresh_token: Optional[str] = Field(nullable=True, default=None)
      account_id: str = Field(index=True, nullable=False)
      account_email: str = Field(nullable=False)
-
-     class Config:
-         orm_mode = True
+     model_config = ConfigDict(from_attributes=True)


  class UserOAuth(SQLModel, table=True):
@@ -88,11 +87,11 @@ class UserOAuth(SQLModel, table=True):
          sa_column_kwargs={"unique": True, "index": True}, nullable=False
      )
      hashed_password: str
-     is_active: bool = Field(True, nullable=False)
-     is_superuser: bool = Field(False, nullable=False)
-     is_verified: bool = Field(False, nullable=False)
+     is_active: bool = Field(default=True, nullable=False)
+     is_superuser: bool = Field(default=False, nullable=False)
+     is_verified: bool = Field(default=False, nullable=False)

-     username: Optional[str]
+     username: Optional[str] = None

      oauth_accounts: list["OAuthAccount"] = Relationship(
          back_populates="user",
@@ -105,9 +104,7 @@ class UserOAuth(SQLModel, table=True):
      settings: Optional[UserSettings] = Relationship(
          sa_relationship_kwargs=dict(lazy="selectin", cascade="all, delete")
      )
-
-     class Config:
-         orm_mode = True
+     model_config = ConfigDict(from_attributes=True)


  class UserGroup(SQLModel, table=True):

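Note: most of this release is a Pydantic v1 → v2 migration, and the `security.py` hunks above show its config half: the nested `class Config` with `orm_mode = True` becomes `model_config = ConfigDict(from_attributes=True)`. A minimal standalone sketch of the v2 pattern (the `AccountSketch` model and `_OrmObject` class are illustrative, not part of fractal-server):

from typing import Optional
from pydantic import BaseModel, ConfigDict

class AccountSketch(BaseModel):
    # v2 replaces the nested `class Config` with a `model_config` attribute;
    # v1's `orm_mode` was renamed to `from_attributes`.
    model_config = ConfigDict(from_attributes=True)

    account_email: str
    expires_at: Optional[int] = None  # v2 needs the explicit default

class _OrmObject:
    account_email = "user@example.com"
    expires_at = None

# With from_attributes=True, validation can read plain object attributes,
# which is what v1's orm_mode enabled.
print(AccountSketch.model_validate(_OrmObject()))
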
fractal_server/app/models/v2/dataset.py
@@ -2,6 +2,7 @@ from datetime import datetime
  from typing import Any
  from typing import Optional

+ from pydantic import ConfigDict
  from sqlalchemy import Column
  from sqlalchemy.types import DateTime
  from sqlalchemy.types import JSON
@@ -14,8 +15,7 @@ from fractal_server.images.models import AttributeFiltersType


  class DatasetV2(SQLModel, table=True):
-     class Config:
-         arbitrary_types_allowed = True
+     model_config = ConfigDict(arbitrary_types_allowed=True)

      id: Optional[int] = Field(default=None, primary_key=True)
      name: str

fractal_server/app/models/v2/job.py
@@ -2,6 +2,7 @@ from datetime import datetime
  from typing import Any
  from typing import Optional

+ from pydantic import ConfigDict
  from sqlalchemy import Column
  from sqlalchemy.types import DateTime
  from sqlalchemy.types import JSON
@@ -14,16 +15,17 @@ from fractal_server.images.models import AttributeFiltersType


  class JobV2(SQLModel, table=True):
-     class Config:
-         arbitrary_types_allowed = True
+     model_config = ConfigDict(arbitrary_types_allowed=True)

      id: Optional[int] = Field(default=None, primary_key=True)
-     project_id: Optional[int] = Field(foreign_key="projectv2.id")
-     workflow_id: Optional[int] = Field(foreign_key="workflowv2.id")
-     dataset_id: Optional[int] = Field(foreign_key="datasetv2.id")
+     project_id: Optional[int] = Field(foreign_key="projectv2.id", default=None)
+     workflow_id: Optional[int] = Field(
+         foreign_key="workflowv2.id", default=None
+     )
+     dataset_id: Optional[int] = Field(foreign_key="datasetv2.id", default=None)

      user_email: str = Field(nullable=False)
-     slurm_account: Optional[str]
+     slurm_account: Optional[str] = None

      dataset_dump: dict[str, Any] = Field(
          sa_column=Column(JSON, nullable=False)
@@ -35,9 +37,9 @@ class JobV2(SQLModel, table=True):
          sa_column=Column(JSON, nullable=False)
      )

-     worker_init: Optional[str]
-     working_dir: Optional[str]
-     working_dir_user: Optional[str]
+     worker_init: Optional[str] = None
+     working_dir: Optional[str] = None
+     working_dir_user: Optional[str] = None
      first_task_index: int
      last_task_index: int

fractal_server/app/models/v2/task.py
@@ -1,7 +1,6 @@
  from typing import Any
  from typing import Optional

- from pydantic import HttpUrl
  from sqlalchemy import Column
  from sqlalchemy.types import JSON
  from sqlmodel import Field
@@ -31,9 +30,9 @@ class TaskV2(SQLModel, table=True):
      args_schema_parallel: Optional[dict[str, Any]] = Field(
          sa_column=Column(JSON), default=None
      )
-     args_schema_version: Optional[str]
+     args_schema_version: Optional[str] = None
      docs_info: Optional[str] = None
-     docs_link: Optional[HttpUrl] = None
+     docs_link: Optional[str] = None

      input_types: dict[str, bool] = Field(sa_column=Column(JSON), default={})
      output_types: dict[str, bool] = Field(sa_column=Column(JSON), default={})

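Note: the many `= None` additions in these model hunks follow from a Pydantic v2 behavior change: an `Optional[X]` annotation alone no longer implies `default=None`, so fields like `slurm_account: Optional[str]` or `args_schema_version: Optional[str]` would otherwise become required. A short sketch of the difference (illustrative model names):

from typing import Optional
from pydantic import BaseModel, ValidationError

class V2Style(BaseModel):
    slurm_account: Optional[str] = None  # explicit default: truly optional

class V1Style(BaseModel):
    slurm_account: Optional[str]  # in v2: required, it merely *allows* None

V2Style()  # ok
try:
    V1Style()
except ValidationError as err:
    print(err)  # v2 reports "Field required"

The `docs_link` column also changes from `Optional[HttpUrl]` to `Optional[str]`, consistent with v2's `HttpUrl` no longer being a `str` subclass (which makes it awkward to persist directly in a string/JSON column).
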
fractal_server/app/models/v2/task_group.py
@@ -22,7 +22,9 @@ class TaskGroupV2(SQLModel, table=True):
      )

      user_id: int = Field(foreign_key="user_oauth.id")
-     user_group_id: Optional[int] = Field(foreign_key="usergroup.id")
+     user_group_id: Optional[int] = Field(
+         foreign_key="usergroup.id", default=None
+     )

      origin: str
      pkg_name: str
@@ -97,7 +99,9 @@ class TaskGroupActivityV2(SQLModel, table=True):

      id: Optional[int] = Field(default=None, primary_key=True)
      user_id: int = Field(foreign_key="user_oauth.id")
-     taskgroupv2_id: Optional[int] = Field(foreign_key="taskgroupv2.id")
+     taskgroupv2_id: Optional[int] = Field(
+         default=None, foreign_key="taskgroupv2.id"
+     )
      timestamp_started: datetime = Field(
          default_factory=get_timestamp,
          sa_column=Column(DateTime(timezone=True), nullable=False),

fractal_server/app/models/v2/workflowtask.py
@@ -1,6 +1,7 @@
  from typing import Any
  from typing import Optional

+ from pydantic import ConfigDict
  from sqlalchemy import Column
  from sqlalchemy.types import JSON
  from sqlmodel import Field
@@ -11,18 +12,24 @@ from .task import TaskV2


  class WorkflowTaskV2(SQLModel, table=True):
-     class Config:
-         arbitrary_types_allowed = True
-         fields = {"parent": {"exclude": True}}
+     model_config = ConfigDict(arbitrary_types_allowed=True)

      id: Optional[int] = Field(default=None, primary_key=True)

      workflow_id: int = Field(foreign_key="workflowv2.id")
-     order: Optional[int]
-     meta_parallel: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
-     meta_non_parallel: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
-     args_parallel: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
-     args_non_parallel: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
+     order: Optional[int] = None
+     meta_parallel: Optional[dict[str, Any]] = Field(
+         sa_column=Column(JSON), default=None
+     )
+     meta_non_parallel: Optional[dict[str, Any]] = Field(
+         sa_column=Column(JSON), default=None
+     )
+     args_parallel: Optional[dict[str, Any]] = Field(
+         sa_column=Column(JSON), default=None
+     )
+     args_non_parallel: Optional[dict[str, Any]] = Field(
+         sa_column=Column(JSON), default=None
+     )

      type_filters: dict[str, bool] = Field(
          sa_column=Column(JSON, nullable=False, server_default="{}")

fractal_server/app/routes/admin/v2/task.py
@@ -28,7 +28,7 @@ class TaskV2Minimal(BaseModel):
      type: str
      taskgroupv2_id: int
      command_non_parallel: Optional[str] = None
-     command_parallel: Optional[str]
+     command_parallel: Optional[str] = None
      source: Optional[str] = None
      version: Optional[str] = None

fractal_server/app/routes/admin/v2/task_group.py
@@ -163,7 +163,7 @@ async def patch_task_group(
              detail=f"TaskGroupV2 {task_group_id} not found",
          )

-     for key, value in task_group_update.dict(exclude_unset=True).items():
+     for key, value in task_group_update.model_dump(exclude_unset=True).items():
          if (key == "user_group_id") and (value is not None):
              await _verify_user_belongs_to_group(
                  user_id=user.id, user_group_id=value, db=db

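Note: the `.dict()` → `.model_dump()` change in this hunk recurs through nearly every route below, alongside `.json()` → `.model_dump_json()`; Pydantic v2 renamed the main model methods while keeping keyword arguments such as `exclude_unset` and `exclude`. A quick reference sketch (illustrative model):

from pydantic import BaseModel

class UpdateSketch(BaseModel):
    name: str = "unnamed"
    version: str = "0"

u = UpdateSketch(name="my-task")
# v1 -> v2 renames used throughout this diff:
data = u.model_dump(exclude_unset=True)        # was u.dict(exclude_unset=True)
text = u.model_dump_json(exclude={"version"})  # was u.json(exclude={"version"})
u2 = UpdateSketch.model_validate(data)         # was UpdateSketch.parse_obj(data)
print(data, text, u2)
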
fractal_server/app/routes/api/v2/dataset.py
@@ -60,7 +60,7 @@ async def create_dataset(
          db_dataset = DatasetV2(
              project_id=project_id,
              zarr_dir="__PLACEHOLDER__",
-             **dataset.dict(exclude={"zarr_dir"}),
+             **dataset.model_dump(exclude={"zarr_dir"}),
          )
          db.add(db_dataset)
          await db.commit()
@@ -77,7 +77,7 @@ async def create_dataset(
          await db.commit()
          await db.refresh(db_dataset)
      else:
-         db_dataset = DatasetV2(project_id=project_id, **dataset.dict())
+         db_dataset = DatasetV2(project_id=project_id, **dataset.model_dump())
          db.add(db_dataset)
          await db.commit()
          await db.refresh(db_dataset)
@@ -172,7 +172,7 @@ async def update_dataset(
          ),
      )

-     for key, value in dataset_update.dict(exclude_unset=True).items():
+     for key, value in dataset_update.model_dump(exclude_unset=True).items():
          setattr(db_dataset, key, value)

      await db.commit()
@@ -316,7 +316,7 @@ async def import_dataset(
      # Create new Dataset
      db_dataset = DatasetV2(
          project_id=project_id,
-         **dataset.dict(exclude_none=True),
+         **dataset.model_dump(exclude_none=True),
      )
      db.add(db_dataset)
      await db.commit()

fractal_server/app/routes/api/v2/images.py
@@ -8,8 +8,8 @@ from fastapi import Response
  from fastapi import status
  from pydantic import BaseModel
  from pydantic import Field
- from pydantic import root_validator
- from pydantic import validator
+ from pydantic import field_validator
+ from pydantic import model_validator
  from sqlalchemy.orm.attributes import flag_modified

  from ._aux_functions import _get_dataset_check_owner
@@ -44,18 +44,18 @@ class ImagePage(BaseModel):


  class ImageQuery(BaseModel):
-     zarr_url: Optional[str]
+     zarr_url: Optional[str] = None
      type_filters: dict[str, bool] = Field(default_factory=dict)
      attribute_filters: AttributeFiltersType = Field(default_factory=dict)

-     _dict_keys = root_validator(pre=True, allow_reuse=True)(
-         root_validate_dict_keys
+     _dict_keys = model_validator(mode="before")(
+         classmethod(root_validate_dict_keys)
      )
-     _type_filters = validator("type_filters", allow_reuse=True)(
-         validate_type_filters
+     _type_filters = field_validator("type_filters")(
+         classmethod(validate_type_filters)
      )
-     _attribute_filters = validator("attribute_filters", allow_reuse=True)(
-         validate_attribute_filters
+     _attribute_filters = field_validator("attribute_filters")(
+         classmethod(validate_attribute_filters)
      )


@@ -102,7 +102,7 @@ async def post_new_image(
          ),
      )

-     dataset.images.append(new_image.dict())
+     dataset.images.append(new_image.model_dump())
      flag_modified(dataset, "images")

      await db.commit()
@@ -278,7 +278,7 @@ async def patch_dataset_image(
      )
      index = ret["index"]

-     for key, value in image_update.dict(
+     for key, value in image_update.model_dump(
          exclude_none=True, exclude={"zarr_url"}
      ).items():
          db_dataset.images[index][key] = value

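Note: the `ImageQuery` hunk above shows how reusable validators migrate: v1's `validator(..., allow_reuse=True)` becomes `field_validator(...)`, and the shared function is wrapped in `classmethod(...)` because v2 validators must be class methods. A minimal sketch under those assumptions (the model and validator names are illustrative):

from pydantic import BaseModel, field_validator

def validate_filters(cls, value: dict) -> dict:
    # A shared validator function, reusable across several models.
    if any(key == "" for key in value):
        raise ValueError("Filter keys must be non-empty strings.")
    return value

class QuerySketch(BaseModel):
    type_filters: dict[str, bool] = {}

    # v1: validator("type_filters", allow_reuse=True)(validate_filters)
    # v2: field_validator plus an explicit classmethod() wrapper.
    _type_filters = field_validator("type_filters")(classmethod(validate_filters))

print(QuerySketch(type_filters={"illumination_correction": True}))
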
fractal_server/app/routes/api/v2/project.py
@@ -62,7 +62,7 @@ async def create_project(
          project_name=project.name, user_id=user.id, db=db
      )

-     db_project = ProjectV2(**project.dict())
+     db_project = ProjectV2(**project.model_dump())
      db_project.user_list.append(user)

      db.add(db_project)
@@ -106,7 +106,7 @@ async def update_project(
          project_name=project_update.name, user_id=user.id, db=db
      )

-     for key, value in project_update.dict(exclude_unset=True).items():
+     for key, value in project_update.model_dump(exclude_unset=True).items():
          setattr(project, key, value)

      await db.commit()

fractal_server/app/routes/api/v2/status.py
@@ -154,7 +154,7 @@ async def get_workflowtask_status(
          if wf_task_status is None:
              # If a wftask ID was not found, ignore it and continue
              continue
-         clean_workflow_tasks_status_dict[wf_task.id] = wf_task_status
+         clean_workflow_tasks_status_dict[str(wf_task.id)] = wf_task_status
          if wf_task_status == WorkflowTaskStatusTypeV2.FAILED:
              # Starting from the beginning of `workflow.task_list`, stop the
              # first time that you hit a failed job

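Note: the `str(wf_task.id)` cast above is presumably about serialization: the endpoint returns this dict as JSON, where object keys are strings, and Pydantic v2 is stricter than v1 about dict key types matching a declared `dict[str, ...]` response model. The round-trip asymmetry it avoids:

import json

status = {1: "done", 2: "failed"}      # int keys on the way in
print(json.loads(json.dumps(status)))  # {'1': 'done', '2': 'failed'}
# Keys come back as strings after a JSON round-trip, so building the dict
# with str(...) keys up front keeps reads and writes consistent.
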
fractal_server/app/routes/api/v2/submit.py
@@ -165,11 +165,15 @@ async def apply_workflow(
          # The 'filters' field is not supported any more but still exists as a
          # database column, therefore we manually exclude it from dumps.
          dataset_dump=json.loads(
-             dataset.json(exclude={"images", "history", "filters"})
+             dataset.model_dump_json(exclude={"images", "history", "filters"})
          ),
-         workflow_dump=json.loads(workflow.json(exclude={"task_list"})),
-         project_dump=json.loads(project.json(exclude={"user_list"})),
-         **job_create.dict(),
+         workflow_dump=json.loads(
+             workflow.model_dump_json(exclude={"task_list"})
+         ),
+         project_dump=json.loads(
+             project.model_dump_json(exclude={"user_list"})
+         ),
+         **job_create.model_dump(),
      )

      db.add(job)
@@ -202,8 +206,6 @@ async def apply_workflow(
      # Define user-side job directory
      if FRACTAL_RUNNER_BACKEND == "local":
          WORKFLOW_DIR_REMOTE = WORKFLOW_DIR_LOCAL
-     elif FRACTAL_RUNNER_BACKEND == "local_experimental":
-         WORKFLOW_DIR_REMOTE = WORKFLOW_DIR_LOCAL
      elif FRACTAL_RUNNER_BACKEND == "slurm":
          WORKFLOW_DIR_REMOTE = cache_dir / WORKFLOW_DIR_LOCAL.name
      elif FRACTAL_RUNNER_BACKEND == "slurm_ssh":

fractal_server/app/routes/api/v2/task.py
@@ -107,7 +107,7 @@ async def patch_task(
      db_task = await _get_task_full_access(
          task_id=task_id, user_id=user.id, db=db
      )
-     update = task_update.dict(exclude_unset=True)
+     update = task_update.model_dump(exclude_unset=True)

      # Forbid changes that set a previously unset command
      if db_task.type == "non_parallel" and "command_parallel" in update:
@@ -182,7 +182,8 @@ async def create_task(
      )

      # Add task
-     db_task = TaskV2(**task.dict(), type=task_type)
+
+     db_task = TaskV2(**task.model_dump(exclude_unset=True), type=task_type)
      pkg_name = db_task.name
      await _verify_non_duplication_user_constraint(
          db=db, pkg_name=pkg_name, user_id=user.id, version=db_task.version
@@ -206,6 +207,7 @@ async def create_task(
      await db.commit()
      await db.refresh(db_task)
      await db.close()
+
      return db_task


fractal_server/app/routes/api/v2/task_collection.py
@@ -13,7 +13,7 @@ from fastapi import Response
  from fastapi import status
  from fastapi import UploadFile
  from pydantic import BaseModel
- from pydantic import root_validator
+ from pydantic import model_validator
  from pydantic import ValidationError
  from sqlmodel import select

@@ -68,7 +68,8 @@ class CollectionRequestData(BaseModel):
      file: Optional[UploadFile] = None
      origin: TaskGroupV2OriginEnum

-     @root_validator(pre=True)
+     @model_validator(mode="before")
+     @classmethod
      def validate_data(cls, values):
          file = values.get("file")
          package = values.get("task_collect").package

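Note: where a validator is defined inline (rather than reused across models, as in `images.py`), the v2 idiom is to stack `@classmethod` directly under `@model_validator(mode="before")`, as the `CollectionRequestData` hunk above does. A minimal sketch (illustrative model and rule):

from typing import Optional
from pydantic import BaseModel, model_validator

class CollectionSketch(BaseModel):
    package: Optional[str] = None
    file_name: Optional[str] = None

    @model_validator(mode="before")
    @classmethod  # v2 before-validators are classmethods, stacked explicitly
    def exactly_one_source(cls, values: dict) -> dict:
        if (values.get("package") is None) == (values.get("file_name") is None):
            raise ValueError("Provide exactly one of 'package' or 'file_name'.")
        return values

print(CollectionSketch(package="fractal-tasks-core"))
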
fractal_server/app/routes/api/v2/task_group.py
@@ -223,7 +223,7 @@ async def patch_task_group(
          db=db,
      )
      if (
-         "user_group_id" in task_group_update.dict(exclude_unset=True)
+         "user_group_id" in task_group_update.model_dump(exclude_unset=True)
          and task_group_update.user_group_id != task_group.user_group_id
      ):
          await _verify_non_duplication_group_constraint(
@@ -232,7 +232,7 @@ async def patch_task_group(
              version=task_group.version,
              user_group_id=task_group_update.user_group_id,
          )
-     for key, value in task_group_update.dict(exclude_unset=True).items():
+     for key, value in task_group_update.model_dump(exclude_unset=True).items():
          if (key == "user_group_id") and (value is not None):
              await _verify_user_belongs_to_group(
                  user_id=user.id, user_group_id=value, db=db

fractal_server/app/routes/api/v2/workflow.py
@@ -82,7 +82,7 @@ async def create_workflow(
          name=workflow.name, project_id=project_id, db=db
      )

-     db_workflow = WorkflowV2(project_id=project_id, **workflow.dict())
+     db_workflow = WorkflowV2(project_id=project_id, **workflow.model_dump())
      db.add(db_workflow)
      await db.commit()
      await db.refresh(db_workflow)
@@ -149,7 +149,7 @@ async def update_workflow(
          name=patch.name, project_id=project_id, db=db
      )

-     for key, value in patch.dict(exclude_unset=True).items():
+     for key, value in patch.model_dump(exclude_unset=True).items():
          if key == "reordered_workflowtask_ids":
              current_workflowtask_ids = [
                  wftask.id for wftask in workflow.task_list
@@ -262,7 +262,7 @@ async def export_worfklow(
      wf_task_list = []
      for wftask in workflow.task_list:
          task_group = await db.get(TaskGroupV2, wftask.task.taskgroupv2_id)
-         wf_task_list.append(wftask.dict())
+         wf_task_list.append(wftask.model_dump())
          wf_task_list[-1]["task"] = dict(
              pkg_name=task_group.pkg_name,
              version=task_group.version,

fractal_server/app/routes/api/v2/workflow_import.py
@@ -321,7 +321,7 @@ async def import_workflow(
                  detail=f"Could not find a task matching with {wf_task.task}.",
              )
          new_wf_task = WorkflowTaskCreateV2(
-             **wf_task.dict(exclude_none=True, exclude={"task"})
+             **wf_task.model_dump(exclude_none=True, exclude={"task"})
          )
          list_wf_tasks.append(new_wf_task)
          list_task_ids.append(task_id)
@@ -336,7 +336,7 @@ async def import_workflow(
      # Create new Workflow
      db_workflow = WorkflowV2(
          project_id=project_id,
-         **workflow_import.dict(exclude_none=True, exclude={"task_list"}),
+         **workflow_import.model_dump(exclude_none=True, exclude={"task_list"}),
      )
      db.add(db_workflow)
      await db.commit()
@@ -345,7 +345,7 @@ async def import_workflow(
      # Insert task into the workflow
      for ind, new_wf_task in enumerate(list_wf_tasks):
          await _workflow_insert_task(
-             **new_wf_task.dict(),
+             **new_wf_task.model_dump(),
              workflow_id=db_workflow.id,
              task_id=list_task_ids[ind],
              db=db,

fractal_server/app/routes/api/v2/workflowtask.py
@@ -281,7 +281,9 @@ async def update_workflowtask(
          ),
      )

-     for key, value in workflow_task_update.dict(exclude_unset=True).items():
+     for key, value in workflow_task_update.model_dump(
+         exclude_unset=True
+     ).items():
          if key == "args_parallel":
              # Get default arguments via a Task property method
              actual_args = deepcopy(value)

fractal_server/app/routes/auth/_aux_auth.py
@@ -58,11 +58,14 @@ async def _get_single_user_with_groups(
          group_ids_names.insert(0, default_group)
      else:
          pass
+     oauth_accounts = [
+         oauth_account.model_dump() for oauth_account in user.oauth_accounts
+     ]

      return UserRead(
          **user.model_dump(),
          group_ids_names=group_ids_names,
-         oauth_accounts=user.oauth_accounts,
+         oauth_accounts=oauth_accounts,
      )


fractal_server/app/routes/auth/current_user.py
@@ -57,14 +57,14 @@ async def patch_current_user(
      Note: a user cannot patch their own password (as enforced within the
      `UserUpdateStrict` schema).
      """
-     update = UserUpdate(**user_update.dict(exclude_unset=True))
+     update = UserUpdate(**user_update.model_dump(exclude_unset=True))

      # NOTE: here it would be relevant to catch an `InvalidPasswordException`
      # (from `fastapi_users.exceptions`), if we were to allow users change
      # their own password

      user = await user_manager.update(update, current_user, safe=True)
-     validated_user = schemas.model_validate(UserOAuth, user)
+     validated_user = schemas.model_validate(UserOAuth, user.model_dump())

      patched_user = await db.get(
          UserOAuth, validated_user.id, populate_existing=True
@@ -82,7 +82,6 @@ async def get_current_user_settings(
      current_user: UserOAuth = Depends(current_active_user),
      db: AsyncSession = Depends(get_async_db),
  ) -> UserSettingsReadStrict:
-
      verify_user_has_settings(current_user)
      user_settings = await db.get(UserSettings, current_user.user_settings_id)
      return user_settings
@@ -96,13 +95,12 @@ async def patch_current_user_settings(
      current_user: UserOAuth = Depends(current_active_user),
      db: AsyncSession = Depends(get_async_db),
  ) -> UserSettingsReadStrict:
-
      verify_user_has_settings(current_user)
      current_user_settings = await db.get(
          UserSettings, current_user.user_settings_id
      )

-     for k, v in settings_update.dict(exclude_unset=True).items():
+     for k, v in settings_update.model_dump(exclude_unset=True).items():
          setattr(current_user_settings, k, v)

      db.add(current_user_settings)

fractal_server/app/routes/auth/group.py
@@ -194,7 +194,7 @@ async def patch_user_settings_bulk(
          .where(LinkUserGroup.group_id == group_id)
      )
      settings_list = res.scalars().all()
-     update = settings_update.dict(exclude_unset=True)
+     update = settings_update.model_dump(exclude_unset=True)
      for settings in settings_list:
          for k, v in update.items():
              setattr(settings, k, v)

fractal_server/app/routes/auth/users.py
@@ -75,7 +75,7 @@ async def patch_user(
          safe=False,
          request=None,
      )
-     validated_user = schemas.model_validate(UserOAuth, user)
+     validated_user = schemas.model_validate(UserOAuth, user.model_dump())
      patched_user = await db.get(
          UserOAuth, validated_user.id, populate_existing=True
      )
@@ -139,7 +139,6 @@ async def set_user_groups(
      superuser: UserOAuth = Depends(current_active_superuser),
      db: AsyncSession = Depends(get_async_db),
  ) -> UserRead:
-
      # Preliminary check that all objects exist in the db
      user = await _user_or_404(user_id=user_id, db=db)
      target_group_ids = user_update.group_ids
@@ -209,7 +208,6 @@ async def get_user_settings(
      superuser: UserOAuth = Depends(current_active_superuser),
      db: AsyncSession = Depends(get_async_db),
  ) -> UserSettingsRead:
-
      user = await _user_or_404(user_id=user_id, db=db)
      verify_user_has_settings(user)
      user_settings = await db.get(UserSettings, user.user_settings_id)
@@ -229,7 +227,7 @@ async def patch_user_settings(
      verify_user_has_settings(user)
      user_settings = await db.get(UserSettings, user.user_settings_id)

-     for k, v in settings_update.dict(exclude_unset=True).items():
+     for k, v in settings_update.model_dump(exclude_unset=True).items():
          setattr(user_settings, k, v)

      db.add(user_settings)

fractal_server/app/routes/aux/_runner.py
@@ -6,7 +6,7 @@ from ....syringe import Inject


  def _backend_supports_shutdown(backend: str) -> bool:
-     if backend in ["slurm", "slurm_ssh", "local_experimental"]:
+     if backend in ["slurm", "slurm_ssh"]:
          return True
      else:
          return False

fractal_server/app/routes/aux/validate_user_settings.py
@@ -1,6 +1,5 @@
  from fastapi import HTTPException
  from fastapi import status
- from pydantic import BaseModel
  from pydantic import ValidationError

  from fractal_server.app.db import AsyncSession
@@ -55,7 +54,7 @@ async def validate_user_settings(
          UserSettingsValidationModel = SlurmSudoUserSettings
      else:
          # For other backends, we don't validate anything
-         UserSettingsValidationModel = BaseModel
+         return user_settings

      try:
          UserSettingsValidationModel(**user_settings.model_dump())

fractal_server/app/runner/executors/_job_states.py (new file)
@@ -0,0 +1,13 @@
+ # https://slurm.schedmd.com/squeue.html#lbAG
+ STATES_FINISHED = {
+     "BOOT_FAIL",
+     "CANCELLED",
+     "COMPLETED",
+     "DEADLINE",
+     "FAILED",
+     "NODE_FAIL",
+     "OUT_OF_MEMORY",
+     "PREEMPTED",
+     "SPECIAL_EXIT",
+     "TIMEOUT",
+ }

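Note: the new `_job_states.py` module gives the SLURM executors (see the `_executor_wait_thread.py` and `executor.py` changes listed above) one shared definition of terminal job states. A hedged usage sketch, in which `get_job_state` is a hypothetical stand-in for however a caller parses `squeue`/`sacct` output:

from fractal_server.app.runner.executors._job_states import STATES_FINISHED

def get_job_state(job_id: str) -> str:
    # Hypothetical helper: real callers would read the state column of
    # `squeue`/`sacct` output for this job ID.
    return "COMPLETED"

def job_is_finished(job_id: str) -> bool:
    # `sacct` may report e.g. "CANCELLED by 1000", so a prefix match is a
    # slightly more robust comparison than strict equality.
    state = get_job_state(job_id)
    return any(state.startswith(finished) for finished in STATES_FINISHED)

print(job_is_finished("12345"))  # True with the stubbed state above
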