fractal-server 2.15.2__py3-none-any.whl → 2.15.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/v2/project.py +0 -1
  3. fractal_server/app/models/v2/task_group.py +0 -1
  4. fractal_server/app/models/v2/workflow.py +0 -1
  5. fractal_server/app/routes/admin/v2/accounting.py +0 -1
  6. fractal_server/app/routes/admin/v2/job.py +0 -1
  7. fractal_server/app/routes/admin/v2/task.py +0 -4
  8. fractal_server/app/routes/admin/v2/task_group.py +0 -3
  9. fractal_server/app/routes/api/v2/_aux_functions.py +40 -1
  10. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +0 -1
  11. fractal_server/app/routes/api/v2/history.py +61 -24
  12. fractal_server/app/routes/api/v2/images.py +0 -5
  13. fractal_server/app/routes/api/v2/pre_submission_checks.py +0 -1
  14. fractal_server/app/routes/api/v2/submit.py +0 -1
  15. fractal_server/app/routes/api/v2/task_collection_custom.py +0 -2
  16. fractal_server/app/routes/api/v2/task_collection_pixi.py +0 -1
  17. fractal_server/app/routes/api/v2/task_group.py +0 -2
  18. fractal_server/app/routes/api/v2/task_group_lifecycle.py +0 -2
  19. fractal_server/app/routes/api/v2/task_version_update.py +0 -2
  20. fractal_server/app/routes/api/v2/workflow.py +0 -2
  21. fractal_server/app/routes/auth/group.py +0 -5
  22. fractal_server/app/routes/pagination.py +0 -2
  23. fractal_server/app/runner/executors/local/runner.py +0 -3
  24. fractal_server/app/runner/v2/db_tools.py +0 -1
  25. fractal_server/app/runner/v2/merge_outputs.py +0 -2
  26. fractal_server/app/runner/v2/runner.py +0 -1
  27. fractal_server/app/runner/v2/task_interface.py +1 -4
  28. fractal_server/app/schemas/v2/accounting.py +0 -1
  29. fractal_server/app/schemas/v2/project.py +0 -3
  30. fractal_server/app/schemas/v2/workflow.py +0 -3
  31. fractal_server/config.py +2 -2
  32. fractal_server/data_migrations/2_14_10.py +0 -1
  33. fractal_server/main.py +0 -1
  34. fractal_server/migrations/versions/1eac13a26c83_drop_v1_tables.py +0 -1
  35. fractal_server/migrations/versions/501961cfcd85_remove_link_between_v1_and_v2_tasks_.py +0 -1
  36. fractal_server/tasks/v2/local/_utils.py +32 -0
  37. fractal_server/tasks/v2/local/collect.py +0 -1
  38. fractal_server/tasks/v2/local/collect_pixi.py +7 -6
  39. fractal_server/tasks/v2/local/deactivate.py +0 -2
  40. fractal_server/tasks/v2/local/deactivate_pixi.py +0 -1
  41. fractal_server/tasks/v2/local/reactivate_pixi.py +5 -0
  42. fractal_server/tasks/v2/ssh/_utils.py +38 -0
  43. fractal_server/tasks/v2/ssh/collect.py +0 -1
  44. fractal_server/tasks/v2/ssh/collect_pixi.py +11 -4
  45. fractal_server/tasks/v2/ssh/deactivate.py +0 -1
  46. fractal_server/tasks/v2/ssh/reactivate_pixi.py +10 -0
  47. fractal_server/tasks/v2/utils_pixi.py +67 -0
  48. fractal_server/tasks/v2/utils_python_interpreter.py +1 -1
  49. fractal_server/types/validators/_filter_validators.py +1 -1
  50. fractal_server/zip_tools.py +0 -1
  51. {fractal_server-2.15.2.dist-info → fractal_server-2.15.4.dist-info}/METADATA +2 -1
  52. {fractal_server-2.15.2.dist-info → fractal_server-2.15.4.dist-info}/RECORD +55 -55
  53. {fractal_server-2.15.2.dist-info → fractal_server-2.15.4.dist-info}/LICENSE +0 -0
  54. {fractal_server-2.15.2.dist-info → fractal_server-2.15.4.dist-info}/WHEEL +0 -0
  55. {fractal_server-2.15.2.dist-info → fractal_server-2.15.4.dist-info}/entry_points.txt +0 -0
--- a/fractal_server/__init__.py
+++ b/fractal_server/__init__.py
@@ -1 +1 @@
-__VERSION__ = "2.15.2"
+__VERSION__ = "2.15.4"
--- a/fractal_server/app/models/v2/project.py
+++ b/fractal_server/app/models/v2/project.py
@@ -12,7 +12,6 @@ from fractal_server.utils import get_timestamp
 
 
 class ProjectV2(SQLModel, table=True):
-
     id: int | None = Field(default=None, primary_key=True)
     name: str
     timestamp_created: datetime = Field(
--- a/fractal_server/app/models/v2/task_group.py
+++ b/fractal_server/app/models/v2/task_group.py
@@ -107,7 +107,6 @@ class TaskGroupV2(SQLModel, table=True):
 
 
 class TaskGroupActivityV2(SQLModel, table=True):
-
     id: int | None = Field(default=None, primary_key=True)
     user_id: int = Field(foreign_key="user_oauth.id")
     taskgroupv2_id: int | None = Field(
--- a/fractal_server/app/models/v2/workflow.py
+++ b/fractal_server/app/models/v2/workflow.py
@@ -12,7 +12,6 @@ from .workflowtask import WorkflowTaskV2
 
 
 class WorkflowV2(SQLModel, table=True):
-
     id: int | None = Field(default=None, primary_key=True)
     name: str
     project_id: int = Field(foreign_key="projectv2.id", ondelete="CASCADE")
--- a/fractal_server/app/routes/admin/v2/accounting.py
+++ b/fractal_server/app/routes/admin/v2/accounting.py
@@ -82,7 +82,6 @@ async def query_accounting_slurm(
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
-
     stm = select(AccountingRecordSlurm.slurm_job_ids)
     if query.user_id is not None:
         stm = stm.where(AccountingRecordSlurm.user_id == query.user_id)
--- a/fractal_server/app/routes/admin/v2/job.py
+++ b/fractal_server/app/routes/admin/v2/job.py
@@ -111,7 +111,6 @@ async def view_single_job(
     user: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> JobReadV2:
-
     job = await db.get(JobV2, job_id)
     if not job:
         raise HTTPException(
--- a/fractal_server/app/routes/admin/v2/task.py
+++ b/fractal_server/app/routes/admin/v2/task.py
@@ -20,7 +20,6 @@ router = APIRouter()
 
 
 class TaskV2Minimal(BaseModel):
-
     id: int
     name: str
     type: str
@@ -32,13 +31,11 @@ class TaskV2Minimal(BaseModel):
 
 
 class ProjectUser(BaseModel):
-
     id: int
     email: EmailStr
 
 
 class TaskV2Relationship(BaseModel):
-
     workflow_id: int
     workflow_name: str
     project_id: int
@@ -47,7 +44,6 @@ class TaskV2Relationship(BaseModel):
 
 
 class TaskV2Info(BaseModel):
-
     task: TaskV2Minimal
     relationships: list[TaskV2Relationship]
 
--- a/fractal_server/app/routes/admin/v2/task_group.py
+++ b/fractal_server/app/routes/admin/v2/task_group.py
@@ -43,7 +43,6 @@ async def get_task_group_activity_list(
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[TaskGroupActivityV2Read]:
-
     stm = select(TaskGroupActivityV2)
     if task_group_activity_id is not None:
         stm = stm.where(TaskGroupActivityV2.id == task_group_activity_id)
@@ -73,7 +72,6 @@ async def query_task_group(
     user: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupReadV2:
-
     task_group = await db.get(TaskGroupV2, task_group_id)
     if task_group is None:
         raise HTTPException(
@@ -96,7 +94,6 @@ async def query_task_group_list(
     user: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[TaskGroupReadV2]:
-
     stm = select(TaskGroupV2)
 
     if user_group_id is not None and private is True:
--- a/fractal_server/app/routes/api/v2/_aux_functions.py
+++ b/fractal_server/app/routes/api/v2/_aux_functions.py
@@ -6,6 +6,7 @@ from typing import Literal
 
 from fastapi import HTTPException
 from fastapi import status
+from sqlalchemy.exc import MultipleResultsFound
 from sqlalchemy.orm.attributes import flag_modified
 from sqlmodel import select
 from sqlmodel.sql.expression import SelectOfScalar
@@ -19,6 +20,9 @@ from ....models.v2 import TaskV2
 from ....models.v2 import WorkflowTaskV2
 from ....models.v2 import WorkflowV2
 from ....schemas.v2 import JobStatusTypeV2
+from fractal_server.logger import set_logger
+
+logger = set_logger(__name__)
 
 
 async def _get_project_check_owner(
@@ -329,7 +333,6 @@ async def _workflow_has_submitted_job(
     workflow_id: int,
     db: AsyncSession,
 ) -> bool:
-
     res = await db.execute(
         select(JobV2.id)
         .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
@@ -499,3 +502,39 @@ async def _get_workflowtask_or_404(
         )
     else:
         return wftask
+
+
+async def _get_submitted_job_or_none(
+    *,
+    dataset_id: int,
+    workflow_id: int,
+    db: AsyncSession,
+) -> JobV2 | None:
+    """
+    Get the submitted job for given dataset/workflow, if any.
+
+    This function also handles the invalid branch where more than one job
+    is found.
+
+    Args:
+        dataset_id:
+        workflow_id:
+        db:
+    """
+    res = await db.execute(
+        _get_submitted_jobs_statement()
+        .where(JobV2.dataset_id == dataset_id)
+        .where(JobV2.workflow_id == workflow_id)
+    )
+    try:
+        return res.scalars().one_or_none()
+    except MultipleResultsFound as e:
+        error_msg = (
+            "Multiple running jobs found for "
+            f"{dataset_id=} and {workflow_id=}."
+        )
+        logger.error(f"{error_msg} Original error: {str(e)}.")
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=error_msg,
+        )
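Note: the new `_get_submitted_job_or_none` helper leans on SQLAlchemy's `Result.one_or_none()` contract: zero rows yield `None`, one row yields the job, and two or more raise `MultipleResultsFound`, which the helper converts into an HTTP 422. A minimal sketch of those three branches, using a hypothetical `one_or_none` stand-in (not part of fractal-server) instead of a real database query:

```python
from sqlalchemy.exc import MultipleResultsFound


def one_or_none(rows: list):
    # Hypothetical stand-in for Result.scalars().one_or_none(),
    # reimplemented here only to illustrate the three branches.
    if len(rows) == 0:
        return None
    if len(rows) > 1:
        raise MultipleResultsFound(
            "Multiple rows were found when one or none was required"
        )
    return rows[0]


# Zero submitted jobs -> None; exactly one -> the job itself;
# more than one -> MultipleResultsFound, mapped to a 422 response.
assert one_or_none([]) is None
assert one_or_none(["job-1"]) == "job-1"
try:
    one_or_none(["job-1", "job-2"])
except MultipleResultsFound:
    pass
```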
--- a/fractal_server/app/routes/api/v2/_aux_functions_tasks.py
+++ b/fractal_server/app/routes/api/v2/_aux_functions_tasks.py
@@ -224,7 +224,6 @@ async def _get_collection_task_group_activity_status_message(
     task_group_id: int,
     db: AsyncSession,
 ) -> str:
-
     res = await db.execute(
         select(TaskGroupActivityV2)
         .where(TaskGroupActivityV2.taskgroupv2_id == task_group_id)
--- a/fractal_server/app/routes/api/v2/history.py
+++ b/fractal_server/app/routes/api/v2/history.py
@@ -1,5 +1,3 @@
-from copy import deepcopy
-
 from fastapi import APIRouter
 from fastapi import Depends
 from fastapi import HTTPException
@@ -9,6 +7,7 @@ from sqlmodel import func
 from sqlmodel import select
 
 from ._aux_functions import _get_dataset_check_owner
+from ._aux_functions import _get_submitted_job_or_none
 from ._aux_functions import _get_workflow_check_owner
 from ._aux_functions_history import _verify_workflow_and_dataset_access
 from ._aux_functions_history import get_history_run_or_404
@@ -86,6 +85,19 @@ async def get_workflow_tasks_statuses(
         db=db,
     )
 
+    running_job = await _get_submitted_job_or_none(
+        db=db,
+        dataset_id=dataset_id,
+        workflow_id=workflow_id,
+    )
+    if running_job is not None:
+        running_wftasks = workflow.task_list[
+            running_job.first_task_index : running_job.last_task_index + 1
+        ]
+        running_wftask_ids = [wft.id for wft in running_wftasks]
+    else:
+        running_wftask_ids = []
+
     response: dict[int, dict[str, int | str] | None] = {}
     for wftask in workflow.task_list:
         res = await db.execute(
@@ -95,17 +107,37 @@ async def get_workflow_tasks_statuses(
             .order_by(HistoryRun.timestamp_started.desc())
             .limit(1)
         )
-        latest_history_run = res.scalar_one_or_none()
-        if latest_history_run is None:
-            logger.debug(
-                f"No HistoryRun found for {dataset_id=} and {wftask.id=}."
-            )
-            response[wftask.id] = None
+        latest_run = res.scalar_one_or_none()
+
+        if latest_run is None:
+            if wftask.id in running_wftask_ids:
+                logger.debug(f"A1: No HistoryRun for {wftask.id=}.")
+                response[wftask.id] = dict(status=HistoryUnitStatus.SUBMITTED)
+            else:
+                logger.debug(f"A2: No HistoryRun for {wftask.id=}.")
+                response[wftask.id] = None
             continue
-        response[wftask.id] = dict(
-            status=latest_history_run.status,
-            num_available_images=latest_history_run.num_available_images,
-        )
+        else:
+            if wftask.id in running_wftask_ids:
+                if latest_run.job_id == running_job.id:
+                    logger.debug(
+                        f"B1 for {wftask.id} and {latest_run.job_id=}."
+                    )
+                    response[wftask.id] = dict(status=latest_run.status)
+                else:
+                    logger.debug(
+                        f"B2 for {wftask.id} and {latest_run.job_id=}."
+                    )
+                    response[wftask.id] = dict(
+                        status=HistoryUnitStatus.SUBMITTED
+                    )
+            else:
+                logger.debug(f"C1: {wftask.id=} not in {running_wftask_ids=}.")
+                response[wftask.id] = dict(status=latest_run.status)
+
+        response[wftask.id][
+            "num_available_images"
+        ] = latest_run.num_available_images
 
         for target_status in HistoryUnitStatus:
             stm = (
@@ -122,18 +154,24 @@ async def get_workflow_tasks_statuses(
             num_images = res.scalar()
             response[wftask.id][f"num_{target_status}_images"] = num_images
 
-    new_response = deepcopy(response)
-    for key, value in response.items():
-        if value is not None:
-            num_total_images = sum(
-                value[f"num_{target_status}_images"]
-                for target_status in HistoryUnitStatus
-            )
-            if num_total_images > value["num_available_images"]:
-                value["num_available_images"] = None
-            new_response[key] = value
+    # Set `num_available_images=None` for cases where it would be
+    # smaller than `num_total_images`
+    values_to_skip = (None, {"status": HistoryUnitStatus.SUBMITTED})
+    response_update = {}
+    for wftask_id, status_value in response.items():
+        if status_value in values_to_skip:
+            # Skip cases where status has no image counters
+            continue
+        num_total_images = sum(
+            status_value[f"num_{target_status}_images"]
+            for target_status in HistoryUnitStatus
+        )
+        if num_total_images > status_value["num_available_images"]:
+            status_value["num_available_images"] = None
+        response_update[wftask_id] = status_value
+    response.update(response_update)
 
-    return JSONResponse(content=new_response, status_code=200)
+    return JSONResponse(content=response, status_code=200)
 
 
 @router.get("/project/{project_id}/status/run/")
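Note: the rewritten status endpoint now cross-references each workflow task against the currently submitted job (branches A1/A2/B1/B2/C1 in the debug logs above). A condensed sketch of that decision table, using a hypothetical `resolve_status` helper that is not part of the package:

```python
from types import SimpleNamespace

SUBMITTED = "submitted"  # stands in for HistoryUnitStatus.SUBMITTED


def resolve_status(latest_run, in_running_job: bool, running_job_id):
    # Hypothetical condensation of the branches in the diff above.
    if latest_run is None:
        # A1: no HistoryRun yet, but the running job includes this task.
        # A2: no HistoryRun and the task is not scheduled -> no status.
        return {"status": SUBMITTED} if in_running_job else None
    if in_running_job and latest_run.job_id != running_job_id:
        # B2: the latest HistoryRun belongs to an older job; the running
        # job has not reached this task yet.
        return {"status": SUBMITTED}
    # B1 (run written by the running job) and C1 (task outside the
    # running job): report the latest recorded status.
    return {"status": latest_run.status}


run = SimpleNamespace(job_id=7, status="done")
assert resolve_status(None, True, 7) == {"status": SUBMITTED}
assert resolve_status(run, True, 8) == {"status": SUBMITTED}
assert resolve_status(run, False, None) == {"status": "done"}
```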
@@ -296,7 +334,6 @@ async def get_history_images(
     db: AsyncSession = Depends(get_async_db),
     pagination: PaginationRequest = Depends(get_pagination_params),
 ) -> ImagePage:
-
     # Access control and object retrieval
     wftask = await get_wftask_check_owner(
         project_id=project_id,
--- a/fractal_server/app/routes/api/v2/images.py
+++ b/fractal_server/app/routes/api/v2/images.py
@@ -31,7 +31,6 @@ router = APIRouter()
 
 
 class ImagePage(PaginationResponse[SingleImage]):
-
     attributes: dict[str, list[ImageAttributeValue]]
     types: list[str]
 
@@ -64,7 +63,6 @@ async def post_new_image(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> Response:
-
     output = await _get_dataset_check_owner(
         project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
     )
@@ -117,7 +115,6 @@ async def query_dataset_images(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> ImagePage:
-
     page = pagination.page
     page_size = pagination.page_size
 
@@ -131,7 +128,6 @@ async def query_dataset_images(
     types = aggregate_types(images)
 
     if query is not None:
-
         if query.zarr_url is not None:
             image = next(
                 (
@@ -190,7 +186,6 @@ async def delete_dataset_images(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> Response:
-
     output = await _get_dataset_check_owner(
         project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
     )
--- a/fractal_server/app/routes/api/v2/pre_submission_checks.py
+++ b/fractal_server/app/routes/api/v2/pre_submission_checks.py
@@ -96,7 +96,6 @@ async def check_non_processed_images(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
-
     db_workflow_task, db_workflow = await _get_workflow_task_check_owner(
         project_id=project_id,
         workflow_task_id=workflowtask_id,
--- a/fractal_server/app/routes/api/v2/submit.py
+++ b/fractal_server/app/routes/api/v2/submit.py
@@ -57,7 +57,6 @@ async def apply_workflow(
     user: UserOAuth = Depends(current_active_verified_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> JobReadV2 | None:
-
     # Remove non-submitted V2 jobs from the app state when the list grows
     # beyond a threshold
     settings = Inject(get_settings)
--- a/fractal_server/app/routes/api/v2/task_collection_custom.py
+++ b/fractal_server/app/routes/api/v2/task_collection_custom.py
@@ -47,7 +47,6 @@ async def collect_task_custom(
     user: UserOAuth = Depends(current_active_verified_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[TaskReadV2]:
-
     settings = Inject(get_settings)
 
     # Validate query parameters related to user-group ownership
@@ -97,7 +96,6 @@ async def collect_task_custom(
     )
 
     if task_collect.package_root is None:
-
         package_name_underscore = task_collect.package_name.replace("-", "_")
         # Note that python_command is then used as part of a subprocess.run
         # statement: be careful with mixing `'` and `"`.
--- a/fractal_server/app/routes/api/v2/task_collection_pixi.py
+++ b/fractal_server/app/routes/api/v2/task_collection_pixi.py
@@ -89,7 +89,6 @@ async def collect_task_pixi(
     user: UserOAuth = Depends(current_active_verified_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupActivityV2Read:
-
     settings = Inject(get_settings)
     # Check if Pixi is available
     if settings.pixi is None:
--- a/fractal_server/app/routes/api/v2/task_group.py
+++ b/fractal_server/app/routes/api/v2/task_group.py
@@ -72,7 +72,6 @@ async def get_task_group_activity_list(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[TaskGroupActivityV2Read]:
-
     stm = select(TaskGroupActivityV2).where(
         TaskGroupActivityV2.user_id == user.id
     )
@@ -105,7 +104,6 @@ async def get_task_group_activity(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupActivityV2Read:
-
     activity = await db.get(TaskGroupActivityV2, task_group_activity_id)
 
     if activity is None:
--- a/fractal_server/app/routes/api/v2/task_group_lifecycle.py
+++ b/fractal_server/app/routes/api/v2/task_group_lifecycle.py
@@ -115,7 +115,6 @@ async def deactivate_task_group(
     # Submit background task
     settings = Inject(get_settings)
     if settings.FRACTAL_RUNNER_BACKEND == "slurm_ssh":
-
         # Validate user settings (backend-specific)
         user_settings = await validate_user_settings(
             user=user, backend=settings.FRACTAL_RUNNER_BACKEND, db=db
@@ -242,7 +241,6 @@ async def reactivate_task_group(
     # Submit background task
     settings = Inject(get_settings)
     if settings.FRACTAL_RUNNER_BACKEND == "slurm_ssh":
-
         # Validate user settings (backend-specific)
         user_settings = await validate_user_settings(
             user=user, backend=settings.FRACTAL_RUNNER_BACKEND, db=db
--- a/fractal_server/app/routes/api/v2/task_version_update.py
+++ b/fractal_server/app/routes/api/v2/task_version_update.py
@@ -78,7 +78,6 @@ async def get_workflow_version_update_candidates(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[list[TaskVersionRead]]:
-
     workflow = await _get_workflow_check_owner(
         project_id=project_id,
         workflow_id=workflow_id,
@@ -181,7 +180,6 @@ async def replace_workflowtask(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> WorkflowTaskReadV2:
-
     # Get objects from database
     workflow_task, workflow = await _get_workflow_task_check_owner(
         project_id=project_id,
--- a/fractal_server/app/routes/api/v2/workflow.py
+++ b/fractal_server/app/routes/api/v2/workflow.py
@@ -147,7 +147,6 @@ async def update_workflow(
 
     for key, value in patch.model_dump(exclude_unset=True).items():
         if key == "reordered_workflowtask_ids":
-
             if await _workflow_has_submitted_job(
                 workflow_id=workflow_id, db=db
             ):
@@ -329,7 +328,6 @@ async def get_workflow_type_filters(
 
     response_items = []
     for wftask in workflow.task_list:
-
         # Compute input_type_filters, based on wftask and task manifest
         input_type_filters = merge_type_filters(
             wftask_type_filters=wftask.type_filters,
--- a/fractal_server/app/routes/auth/group.py
+++ b/fractal_server/app/routes/auth/group.py
@@ -39,7 +39,6 @@ async def get_list_user_groups(
     user: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> list[UserGroupRead]:
-
     # Get all groups
     stm_all_groups = select(UserGroup)
     res = await db.execute(stm_all_groups)
@@ -88,7 +87,6 @@ async def create_single_group(
     user: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> UserGroupRead:
-
     # Check that name is not already in use
     existing_name_str = select(UserGroup).where(
         UserGroup.name == group_create.name
@@ -121,7 +119,6 @@ async def update_single_group(
     user: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> UserGroupRead:
-
     group = await _usergroup_or_404(group_id, db)
 
     # Patch `viewer_paths`
@@ -143,7 +140,6 @@ async def delete_single_group(
     user: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> Response:
-
     group = await _usergroup_or_404(group_id, db)
 
     if group.name == FRACTAL_DEFAULT_GROUP_NAME:
@@ -219,7 +215,6 @@ async def remove_user_from_group(
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> UserGroupRead:
-
     # Check that user and group exist
     await _usergroup_or_404(group_id, db)
     user = await _user_or_404(user_id, db)
--- a/fractal_server/app/routes/pagination.py
+++ b/fractal_server/app/routes/pagination.py
@@ -11,7 +11,6 @@ T = TypeVar("T")
 
 
 class PaginationRequest(BaseModel):
-
     page: int = Field(ge=1)
     page_size: int | None = Field(ge=1)
 
@@ -38,7 +37,6 @@ def get_pagination_params(
 
 
 class PaginationResponse(BaseModel, Generic[T]):
-
     current_page: int = Field(ge=1)
     page_size: int = Field(ge=0)
     total_count: int = Field(ge=0)
--- a/fractal_server/app/runner/executors/local/runner.py
+++ b/fractal_server/app/runner/executors/local/runner.py
@@ -28,7 +28,6 @@ def run_single_task(
     parameters: dict[str, Any],
     task_files: TaskFiles,
 ):
-
     # Write args.json file
     with open(task_files.args_file_local, "w") as f:
         json.dump(parameters, f)
@@ -169,7 +168,6 @@ class LocalRunner(BaseRunner):
         exceptions: dict[int, BaseException] = {}
 
         try:
-
             self.validate_multisubmit_parameters(
                 list_parameters=list_parameters,
                 task_type=task_type,
@@ -208,7 +206,6 @@
 
         # Execute tasks, in chunks of size `parallel_tasks_per_job`
         for ind_chunk in range(0, n_elements, parallel_tasks_per_job):
-
             list_parameters_chunk = list_parameters[
                 ind_chunk : ind_chunk + parallel_tasks_per_job
             ]
--- a/fractal_server/app/runner/v2/db_tools.py
+++ b/fractal_server/app/runner/v2/db_tools.py
@@ -50,7 +50,6 @@ def bulk_update_status_of_history_unit(
     status: HistoryUnitStatus,
     db_sync: Session,
 ) -> None:
-
     len_history_unit_ids = len(history_unit_ids)
     logger.debug(
         f"[bulk_update_status_of_history_unit] {len_history_unit_ids=}."
--- a/fractal_server/app/runner/v2/merge_outputs.py
+++ b/fractal_server/app/runner/v2/merge_outputs.py
@@ -3,7 +3,6 @@ from fractal_server.app.runner.v2.task_interface import TaskOutput
 
 
 def merge_outputs(task_outputs: list[TaskOutput]) -> TaskOutput:
-
     if len(task_outputs) == 0:
         return TaskOutput()
 
@@ -11,7 +10,6 @@ def merge_outputs(task_outputs: list[TaskOutput]) -> TaskOutput:
     final_image_list_removals = []
 
     for task_output in task_outputs:
-
         final_image_list_updates.extend(task_output.image_list_updates)
         final_image_list_removals.extend(task_output.image_list_removals)
 
--- a/fractal_server/app/runner/v2/runner.py
+++ b/fractal_server/app/runner/v2/runner.py
@@ -415,7 +415,6 @@ def execute_tasks_v2(
     # Write current dataset images into the database.
     db_dataset = db.get(DatasetV2, dataset.id)
     if ENRICH_IMAGES_WITH_STATUS:
-
         db_dataset.images = _remove_status_from_attributes(tmp_images)
     else:
         db_dataset.images = tmp_images
--- a/fractal_server/app/runner/v2/task_interface.py
+++ b/fractal_server/app/runner/v2/task_interface.py
@@ -11,7 +11,6 @@ from fractal_server.types import ZarrUrlStr
 
 
 class TaskOutput(BaseModel):
-
     model_config = ConfigDict(extra="forbid")
 
     image_list_updates: list[SingleImageTaskOutput] = Field(
@@ -40,7 +39,6 @@
 
 
 class InitArgsModel(BaseModel):
-
     model_config = ConfigDict(extra="forbid")
 
     zarr_url: ZarrUrlStr
@@ -48,14 +46,13 @@
 
 
 class InitTaskOutput(BaseModel):
-
     model_config = ConfigDict(extra="forbid")
 
     parallelization_list: list[InitArgsModel] = Field(default_factory=list)
 
 
 def _cast_and_validate_TaskOutput(
-    task_output: dict[str, Any]
+    task_output: dict[str, Any],
 ) -> TaskOutput | None:
     try:
         validated_task_output = TaskOutput(**task_output)
--- a/fractal_server/app/schemas/v2/accounting.py
+++ b/fractal_server/app/schemas/v2/accounting.py
@@ -6,7 +6,6 @@ from pydantic.types import AwareDatetime
 
 
 class AccountingRecordRead(BaseModel):
-
     id: int
     user_id: int
     timestamp: AwareDatetime
--- a/fractal_server/app/schemas/v2/project.py
+++ b/fractal_server/app/schemas/v2/project.py
@@ -9,14 +9,12 @@ from fractal_server.types import NonEmptyStr
 
 
 class ProjectCreateV2(BaseModel):
-
     model_config = ConfigDict(extra="forbid")
 
     name: NonEmptyStr
 
 
 class ProjectReadV2(BaseModel):
-
     id: int
     name: str
     timestamp_created: AwareDatetime
@@ -27,7 +25,6 @@ class ProjectReadV2(BaseModel):
 
 
 class ProjectUpdateV2(BaseModel):
-
     model_config = ConfigDict(extra="forbid")
 
     name: NonEmptyStr = None
--- a/fractal_server/app/schemas/v2/workflow.py
+++ b/fractal_server/app/schemas/v2/workflow.py
@@ -17,14 +17,12 @@ from fractal_server.types import NonEmptyStr
 
 
 class WorkflowCreateV2(BaseModel):
-
     model_config = ConfigDict(extra="forbid")
 
     name: NonEmptyStr
 
 
 class WorkflowReadV2(BaseModel):
-
     id: int
     name: str
     project_id: int
@@ -42,7 +40,6 @@ class WorkflowReadV2WithWarnings(WorkflowReadV2):
 
 
 class WorkflowUpdateV2(BaseModel):
-
     model_config = ConfigDict(extra="forbid")
 
     name: NonEmptyStr = None