fractal-server 2.13.1__py3-none-any.whl → 2.14.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (119)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/__main__.py +3 -1
  3. fractal_server/app/models/linkusergroup.py +6 -2
  4. fractal_server/app/models/v2/__init__.py +7 -1
  5. fractal_server/app/models/v2/dataset.py +1 -11
  6. fractal_server/app/models/v2/history.py +78 -0
  7. fractal_server/app/models/v2/job.py +10 -3
  8. fractal_server/app/models/v2/task_group.py +2 -2
  9. fractal_server/app/models/v2/workflow.py +1 -1
  10. fractal_server/app/models/v2/workflowtask.py +1 -1
  11. fractal_server/app/routes/admin/v2/accounting.py +18 -28
  12. fractal_server/app/routes/admin/v2/task.py +1 -1
  13. fractal_server/app/routes/admin/v2/task_group.py +0 -17
  14. fractal_server/app/routes/api/__init__.py +1 -1
  15. fractal_server/app/routes/api/v2/__init__.py +8 -2
  16. fractal_server/app/routes/api/v2/_aux_functions.py +66 -0
  17. fractal_server/app/routes/api/v2/_aux_functions_history.py +166 -0
  18. fractal_server/app/routes/api/v2/dataset.py +0 -17
  19. fractal_server/app/routes/api/v2/history.py +544 -0
  20. fractal_server/app/routes/api/v2/images.py +31 -43
  21. fractal_server/app/routes/api/v2/job.py +30 -0
  22. fractal_server/app/routes/api/v2/project.py +1 -53
  23. fractal_server/app/routes/api/v2/{status.py → status_legacy.py} +6 -6
  24. fractal_server/app/routes/api/v2/submit.py +16 -14
  25. fractal_server/app/routes/api/v2/task.py +3 -10
  26. fractal_server/app/routes/api/v2/task_collection_custom.py +4 -9
  27. fractal_server/app/routes/api/v2/task_group.py +0 -17
  28. fractal_server/app/routes/api/v2/verify_image_types.py +61 -0
  29. fractal_server/app/routes/api/v2/workflow.py +28 -69
  30. fractal_server/app/routes/api/v2/workflowtask.py +53 -50
  31. fractal_server/app/routes/auth/group.py +0 -16
  32. fractal_server/app/routes/auth/oauth.py +5 -3
  33. fractal_server/app/routes/pagination.py +47 -0
  34. fractal_server/app/runner/components.py +0 -3
  35. fractal_server/app/runner/compress_folder.py +57 -29
  36. fractal_server/app/runner/exceptions.py +4 -0
  37. fractal_server/app/runner/executors/base_runner.py +157 -0
  38. fractal_server/app/runner/{v2/_local/_local_config.py → executors/local/get_local_config.py} +7 -9
  39. fractal_server/app/runner/executors/local/runner.py +248 -0
  40. fractal_server/app/runner/executors/{slurm → slurm_common}/_batching.py +1 -1
  41. fractal_server/app/runner/executors/{slurm → slurm_common}/_slurm_config.py +9 -7
  42. fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +868 -0
  43. fractal_server/app/runner/{v2/_slurm_common → executors/slurm_common}/get_slurm_config.py +48 -17
  44. fractal_server/app/runner/executors/{slurm → slurm_common}/remote.py +36 -47
  45. fractal_server/app/runner/executors/slurm_common/slurm_job_task_models.py +134 -0
  46. fractal_server/app/runner/executors/slurm_ssh/runner.py +268 -0
  47. fractal_server/app/runner/executors/slurm_sudo/__init__.py +0 -0
  48. fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_subprocess_run_as_user.py +2 -83
  49. fractal_server/app/runner/executors/slurm_sudo/runner.py +193 -0
  50. fractal_server/app/runner/extract_archive.py +1 -3
  51. fractal_server/app/runner/task_files.py +134 -87
  52. fractal_server/app/runner/v2/__init__.py +0 -399
  53. fractal_server/app/runner/v2/_local.py +88 -0
  54. fractal_server/app/runner/v2/{_slurm_ssh/__init__.py → _slurm_ssh.py} +20 -19
  55. fractal_server/app/runner/v2/{_slurm_sudo/__init__.py → _slurm_sudo.py} +17 -15
  56. fractal_server/app/runner/v2/db_tools.py +119 -0
  57. fractal_server/app/runner/v2/runner.py +206 -95
  58. fractal_server/app/runner/v2/runner_functions.py +488 -187
  59. fractal_server/app/runner/v2/runner_functions_low_level.py +40 -43
  60. fractal_server/app/runner/v2/submit_workflow.py +358 -0
  61. fractal_server/app/runner/v2/task_interface.py +31 -0
  62. fractal_server/app/schemas/_validators.py +13 -24
  63. fractal_server/app/schemas/user.py +10 -7
  64. fractal_server/app/schemas/user_settings.py +9 -21
  65. fractal_server/app/schemas/v2/__init__.py +9 -1
  66. fractal_server/app/schemas/v2/dataset.py +12 -94
  67. fractal_server/app/schemas/v2/dumps.py +26 -9
  68. fractal_server/app/schemas/v2/history.py +80 -0
  69. fractal_server/app/schemas/v2/job.py +15 -8
  70. fractal_server/app/schemas/v2/manifest.py +14 -7
  71. fractal_server/app/schemas/v2/project.py +9 -7
  72. fractal_server/app/schemas/v2/status_legacy.py +35 -0
  73. fractal_server/app/schemas/v2/task.py +72 -77
  74. fractal_server/app/schemas/v2/task_collection.py +14 -32
  75. fractal_server/app/schemas/v2/task_group.py +10 -9
  76. fractal_server/app/schemas/v2/workflow.py +10 -11
  77. fractal_server/app/schemas/v2/workflowtask.py +2 -21
  78. fractal_server/app/security/__init__.py +3 -3
  79. fractal_server/app/security/signup_email.py +2 -2
  80. fractal_server/config.py +41 -46
  81. fractal_server/images/tools.py +23 -0
  82. fractal_server/migrations/versions/47351f8c7ebc_drop_dataset_filters.py +50 -0
  83. fractal_server/migrations/versions/9db60297b8b2_set_ondelete.py +250 -0
  84. fractal_server/migrations/versions/c90a7c76e996_job_id_in_history_run.py +41 -0
  85. fractal_server/migrations/versions/e81103413827_add_job_type_filters.py +36 -0
  86. fractal_server/migrations/versions/f37aceb45062_make_historyunit_logfile_required.py +39 -0
  87. fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py +120 -0
  88. fractal_server/ssh/_fabric.py +28 -14
  89. fractal_server/tasks/v2/local/collect.py +2 -2
  90. fractal_server/tasks/v2/ssh/collect.py +2 -2
  91. fractal_server/tasks/v2/templates/2_pip_install.sh +1 -1
  92. fractal_server/tasks/v2/templates/4_pip_show.sh +1 -1
  93. fractal_server/tasks/v2/utils_background.py +0 -19
  94. fractal_server/tasks/v2/utils_database.py +30 -17
  95. fractal_server/tasks/v2/utils_templates.py +6 -0
  96. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/METADATA +4 -4
  97. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/RECORD +106 -96
  98. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/WHEEL +1 -1
  99. fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py +0 -126
  100. fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py +0 -116
  101. fractal_server/app/runner/executors/slurm/ssh/executor.py +0 -1386
  102. fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py +0 -71
  103. fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py +0 -130
  104. fractal_server/app/runner/executors/slurm/sudo/executor.py +0 -1281
  105. fractal_server/app/runner/v2/_local/__init__.py +0 -132
  106. fractal_server/app/runner/v2/_local/_submit_setup.py +0 -52
  107. fractal_server/app/runner/v2/_local/executor.py +0 -100
  108. fractal_server/app/runner/v2/_slurm_ssh/_submit_setup.py +0 -83
  109. fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py +0 -83
  110. fractal_server/app/runner/v2/handle_failed_job.py +0 -59
  111. fractal_server/app/schemas/v2/status.py +0 -16
  112. /fractal_server/app/{runner/executors/slurm → history}/__init__.py +0 -0
  113. /fractal_server/app/runner/executors/{slurm/ssh → local}/__init__.py +0 -0
  114. /fractal_server/app/runner/executors/{slurm/sudo → slurm_common}/__init__.py +0 -0
  115. /fractal_server/app/runner/executors/{_job_states.py → slurm_common/_job_states.py} +0 -0
  116. /fractal_server/app/runner/executors/{slurm → slurm_common}/utils_executors.py +0 -0
  117. /fractal_server/app/runner/{v2/_slurm_common → executors/slurm_ssh}/__init__.py +0 -0
  118. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/LICENSE +0 -0
  119. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0.dist-info}/entry_points.txt +0 -0

fractal_server/app/routes/api/v2/images.py

@@ -11,12 +11,17 @@ from pydantic import Field
  from pydantic import field_validator
  from pydantic import model_validator
  from sqlalchemy.orm.attributes import flag_modified
+ from sqlmodel import delete

  from ._aux_functions import _get_dataset_check_owner
  from fractal_server.app.db import AsyncSession
  from fractal_server.app.db import get_async_db
+ from fractal_server.app.models import HistoryImageCache
  from fractal_server.app.models import UserOAuth
  from fractal_server.app.routes.auth import current_active_user
+ from fractal_server.app.routes.pagination import get_pagination_params
+ from fractal_server.app.routes.pagination import PaginationRequest
+ from fractal_server.app.routes.pagination import PaginationResponse
  from fractal_server.app.schemas._filter_validators import (
      validate_attribute_filters,
  )

@@ -25,26 +30,21 @@ from fractal_server.app.schemas._validators import root_validate_dict_keys
  from fractal_server.images import SingleImage
  from fractal_server.images import SingleImageUpdate
  from fractal_server.images.models import AttributeFiltersType
+ from fractal_server.images.tools import aggregate_attributes
+ from fractal_server.images.tools import aggregate_types
  from fractal_server.images.tools import find_image_by_zarr_url
  from fractal_server.images.tools import match_filter

  router = APIRouter()


- class ImagePage(BaseModel):
-
-     total_count: int
-     page_size: int
-     current_page: int
+ class ImagePage(PaginationResponse[SingleImage]):

      attributes: dict[str, list[Any]]
      types: list[str]

-     images: list[SingleImage]
-

  class ImageQuery(BaseModel):
-     zarr_url: Optional[str] = None
      type_filters: dict[str, bool] = Field(default_factory=dict)
      attribute_filters: AttributeFiltersType = Field(default_factory=dict)


@@ -59,6 +59,10 @@ class ImageQuery(BaseModel):
      )


+ class ImageQueryWithZarrUrl(ImageQuery):
+     zarr_url: Optional[str] = None
+
+
  @router.post(
      "/project/{project_id}/dataset/{dataset_id}/images/",
      status_code=status.HTTP_201_CREATED,

@@ -118,18 +122,14 @@ async def post_new_image(
  async def query_dataset_images(
      project_id: int,
      dataset_id: int,
-     page: int = 1,  # query param
-     page_size: Optional[int] = None,  # query param
-     query: Optional[ImageQuery] = None,  # body
+     query: Optional[ImageQueryWithZarrUrl] = None,
+     pagination: PaginationRequest = Depends(get_pagination_params),
      user: UserOAuth = Depends(current_active_user),
      db: AsyncSession = Depends(get_async_db),
  ) -> ImagePage:

-     if page < 1:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=f"Invalid pagination parameter: page={page} < 1",
-         )
+     page = pagination.page
+     page_size = pagination.page_size

      output = await _get_dataset_check_owner(
          project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
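
Note: the pagination helpers used above come from the new fractal_server/app/routes/pagination.py (+47 lines in the file list), whose body this diff does not show. A minimal sketch consistent with their usage here follows; everything beyond page, page_size, total_count, current_page, and items is an assumption, not the actual implementation.

# Hypothetical sketch of fractal_server/app/routes/pagination.py,
# inferred only from how it is used in images.py above.
from typing import Generic, Optional, TypeVar

from fastapi import HTTPException, status
from pydantic import BaseModel

T = TypeVar("T")


class PaginationRequest(BaseModel):
    page: int
    page_size: Optional[int] = None


def get_pagination_params(
    page: int = 1, page_size: Optional[int] = None
) -> PaginationRequest:
    # Replicates the validation the old inline checks performed per endpoint.
    if page < 1:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=f"Invalid pagination parameter: page={page} < 1",
        )
    if page_size is not None and page_size <= 0:
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=f"Invalid pagination parameter: page_size={page_size} <= 0",
        )
    return PaginationRequest(page=page, page_size=page_size)


class PaginationResponse(BaseModel, Generic[T]):
    total_count: int
    page_size: int
    current_page: int
    items: list[T]

Moving validation into a reusable FastAPI dependency is why the inline page and page_size checks disappear from the endpoint bodies in the hunks below.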

@@ -137,16 +137,8 @@ async def query_dataset_images(
      dataset = output["dataset"]
      images = dataset.images

-     attributes = {}
-     for image in images:
-         for k, v in image["attributes"].items():
-             attributes.setdefault(k, []).append(v)
-     for k, v in attributes.items():
-         attributes[k] = list(set(v))
-
-     types = list(
-         set(type for image in images for type in image["types"].keys())
-     )
+     attributes = aggregate_attributes(images)
+     types = aggregate_types(images)

      if query is not None:


@@ -177,20 +169,10 @@

      total_count = len(images)

-     if page_size is not None:
-         if page_size <= 0:
-             raise HTTPException(
-                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                 detail=(
-                     f"Invalid pagination parameter: page_size={page_size} <= 0"
-                 ),
-             )
-     else:
+     if page_size is None:
          page_size = total_count

-     if total_count == 0:
-         page = 1
-     else:
+     if total_count > 0:
          last_page = (total_count // page_size) + (total_count % page_size > 0)
          if page > last_page:
              page = last_page
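
For reference, the clamping arithmetic retained above behaves as follows: with ten images and a page size of three there are four pages, and an out-of-range request is clamped to the last page rather than rejected.

# Worked example of the page-clamping arithmetic kept in the hunk above.
total_count, page_size = 10, 3
last_page = (total_count // page_size) + (total_count % page_size > 0)
assert last_page == 4
page = 7
if page > last_page:
    page = last_page  # an out-of-range request is served page 4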

@@ -201,9 +183,9 @@
          total_count=total_count,
          current_page=page,
          page_size=page_size,
+         items=images,
          attributes=attributes,
          types=types,
-         images=images,
      )


@@ -224,10 +206,10 @@ async def delete_dataset_images(
      )
      dataset = output["dataset"]

-     image_to_remove = next(
-         (image for image in dataset.images if image["zarr_url"] == zarr_url),
-         None,
+     image_to_remove = find_image_by_zarr_url(
+         images=dataset.images, zarr_url=zarr_url
      )
+
      if image_to_remove is None:
          raise HTTPException(
              status_code=status.HTTP_404_NOT_FOUND,

@@ -237,9 +219,15 @@
          ),
      )

-     dataset.images.remove(image_to_remove)
+     dataset.images.remove(image_to_remove["image"])
      flag_modified(dataset, "images")

+     await db.execute(
+         delete(HistoryImageCache)
+         .where(HistoryImageCache.dataset_id == dataset_id)
+         .where(HistoryImageCache.zarr_url == zarr_url)
+     )
+
      await db.commit()

      return Response(status_code=status.HTTP_204_NO_CONTENT)
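
HistoryImageCache is defined in the new app/models/v2/history.py (+78 lines), which this diff does not show. From the two .where() clauses above it must at least carry dataset_id and zarr_url columns; a hypothetical sketch, not the real model:

# Hypothetical sketch, inferred only from the .where() clauses above;
# the actual model lives in fractal_server/app/models/v2/history.py.
from sqlmodel import Field, SQLModel


class HistoryImageCache(SQLModel, table=True):
    # One cache row per (image, dataset) pair, hence a composite key.
    zarr_url: str = Field(primary_key=True)
    dataset_id: int = Field(primary_key=True, foreign_key="datasetv2.id")
    # ...presumably also a pointer to the latest history unit for the image.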

fractal_server/app/routes/api/v2/job.py

@@ -5,6 +5,7 @@ from typing import Optional

  from fastapi import APIRouter
  from fastapi import Depends
+ from fastapi import HTTPException
  from fastapi import Response
  from fastapi import status
  from fastapi.responses import StreamingResponse

@@ -83,6 +84,35 @@ async def get_workflow_jobs(
      return job_list


+ @router.get("/project/{project_id}/latest-job/")
+ async def get_latest_job(
+     project_id: int,
+     workflow_id: int,
+     dataset_id: int,
+     user: UserOAuth = Depends(current_active_user),
+     db: AsyncSession = Depends(get_async_db),
+ ) -> JobReadV2:
+     await _get_workflow_check_owner(
+         project_id=project_id, workflow_id=workflow_id, user_id=user.id, db=db
+     )
+     stm = (
+         select(JobV2)
+         .where(JobV2.project_id == project_id)
+         .where(JobV2.workflow_id == workflow_id)
+         .where(JobV2.dataset_id == dataset_id)
+         .order_by(JobV2.start_timestamp.desc())
+         .limit(1)
+     )
+     res = await db.execute(stm)
+     latest_job = res.scalar_one_or_none()
+     if latest_job is None:
+         raise HTTPException(
+             status_code=status.HTTP_404_NOT_FOUND,
+             detail=f"Job with {workflow_id=} and {dataset_id=} not found.",
+         )
+     return latest_job
+
+
  @router.get(
      "/project/{project_id}/job/{job_id}/",
      response_model=JobReadV2,
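
A client-side sketch of the new latest-job endpoint; the base URL, /api/v2 prefix, and auth header are placeholders, assuming the usual fractal-server deployment:

# Hypothetical client call for the new latest-job endpoint.
import httpx

resp = httpx.get(
    "http://localhost:8000/api/v2/project/1/latest-job/",
    params={"workflow_id": 2, "dataset_id": 3},
    headers={"Authorization": "Bearer <token>"},
)
resp.raise_for_status()  # raises on the 404 returned when no job matches
latest_job = resp.json()
print(latest_job["status"], latest_job["start_timestamp"])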

fractal_server/app/routes/api/v2/project.py

@@ -11,11 +11,9 @@ from .....logger import reset_logger_handlers
  from .....logger import set_logger
  from ....db import AsyncSession
  from ....db import get_async_db
- from ....models.v2 import DatasetV2
  from ....models.v2 import JobV2
  from ....models.v2 import LinkUserProjectV2
  from ....models.v2 import ProjectV2
- from ....models.v2 import WorkflowV2
  from ....schemas.v2 import ProjectCreateV2
  from ....schemas.v2 import ProjectReadV2
  from ....schemas.v2 import ProjectUpdateV2

@@ -54,7 +52,7 @@ async def create_project(
      db: AsyncSession = Depends(get_async_db),
  ) -> Optional[ProjectReadV2]:
      """
-     Create new poject
+     Create new project
      """

      # Check that there is no project with the same user and name

@@ -145,56 +143,6 @@ async def delete_project(
          ),
      )

-     # Cascade operations
-
-     # Workflows
-     stm = select(WorkflowV2).where(WorkflowV2.project_id == project_id)
-     res = await db.execute(stm)
-     workflows = res.scalars().all()
-     logger.info("Start of cascade operations on Workflows.")
-     for wf in workflows:
-         # Cascade operations: set foreign-keys to null for jobs which are in
-         # relationship with the current workflow
-         stm = select(JobV2).where(JobV2.workflow_id == wf.id)
-         res = await db.execute(stm)
-         jobs = res.scalars().all()
-         for job in jobs:
-             logger.info(f"Setting Job[{job.id}].workflow_id to None.")
-             job.workflow_id = None
-         # Delete workflow
-         logger.info(f"Adding Workflow[{wf.id}] to deletion.")
-         await db.delete(wf)
-     logger.info("End of cascade operations on Workflows.")
-
-     # Dataset
-     stm = select(DatasetV2).where(DatasetV2.project_id == project_id)
-     res = await db.execute(stm)
-     datasets = res.scalars().all()
-     logger.info("Start of cascade operations on Datasets.")
-     for ds in datasets:
-         # Cascade operations: set foreign-keys to null for jobs which are in
-         # relationship with the current dataset
-         stm = select(JobV2).where(JobV2.dataset_id == ds.id)
-         res = await db.execute(stm)
-         jobs = res.scalars().all()
-         for job in jobs:
-             logger.info(f"Setting Job[{job.id}].dataset_id to None.")
-             job.dataset_id = None
-         # Delete dataset
-         logger.info(f"Adding Dataset[{ds.id}] to deletion.")
-         await db.delete(ds)
-     logger.info("End of cascade operations on Datasets.")
-
-     # Job
-     logger.info("Start of cascade operations on Jobs.")
-     stm = select(JobV2).where(JobV2.project_id == project_id)
-     res = await db.execute(stm)
-     jobs = res.scalars().all()
-     for job in jobs:
-         logger.info(f"Setting Job[{job.id}].project_id to None.")
-         job.project_id = None
-     logger.info("End of cascade operations on Jobs.")
-
      logger.info(f"Adding Project[{project.id}] to deletion.")
      await db.delete(project)

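The fifty-odd lines of manual cascade handling removed here are presumably superseded by database-level rules; the file list includes the migration 9db60297b8b2_set_ondelete.py (+250 lines). Schematically, the pattern looks like this (illustrative only, not the actual column definitions):

# Illustrative only: a SET NULL rule at the FK level replaces the Python
# loops above that nulled job.workflow_id / job.dataset_id / job.project_id.
from typing import Optional

import sqlalchemy as sa
from sqlmodel import Field, SQLModel


class JobV2(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    project_id: Optional[int] = Field(
        default=None,
        sa_column=sa.Column(
            sa.Integer,
            sa.ForeignKey("projectv2.id", ondelete="SET NULL"),
        ),
    )
    # ...workflow_id and dataset_id would follow the same pattern.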

fractal_server/app/routes/api/v2/{status.py → status_legacy.py}

@@ -9,8 +9,8 @@ from .....logger import set_logger
  from ....db import AsyncSession
  from ....db import get_async_db
  from ....models.v2 import JobV2
- from ....schemas.v2.dataset import WorkflowTaskStatusTypeV2
- from ....schemas.v2.status import StatusReadV2
+ from ....schemas.v2.status_legacy import LegacyStatusReadV2
+ from ....schemas.v2.status_legacy import WorkflowTaskStatusTypeV2
  from ._aux_functions import _get_dataset_check_owner
  from ._aux_functions import _get_submitted_jobs_statement
  from ._aux_functions import _get_workflow_check_owner

@@ -23,8 +23,8 @@ logger = set_logger(__name__)


  @router.get(
-     "/project/{project_id}/status/",
-     response_model=StatusReadV2,
+     "/project/{project_id}/status-legacy/",
+     response_model=LegacyStatusReadV2,
  )
  async def get_workflowtask_status(
      project_id: int,

@@ -32,7 +32,7 @@ async def get_workflowtask_status(
      workflow_id: int,
      user: UserOAuth = Depends(current_active_user),
      db: AsyncSession = Depends(get_async_db),
- ) -> Optional[StatusReadV2]:
+ ) -> Optional[LegacyStatusReadV2]:
      """
      Extract the status of all `WorkflowTaskV2` of a given `WorkflowV2` that ran
      on a given `DatasetV2`.

@@ -164,5 +164,5 @@
          # first time that you hit `last_valid_wftask_id``
          break

-     response_body = StatusReadV2(status=clean_workflow_tasks_status_dict)
+     response_body = LegacyStatusReadV2(status=clean_workflow_tasks_status_dict)
      return response_body

fractal_server/app/routes/api/v2/submit.py

@@ -11,30 +11,32 @@ from fastapi import Request
  from fastapi import status
  from sqlmodel import select

- from .....config import get_settings
- from .....logger import set_logger
- from .....syringe import Inject
- from ....db import AsyncSession
- from ....db import get_async_db
- from ....models.v2 import JobV2
- from ....runner.set_start_and_last_task_index import (
-     set_start_and_last_task_index,
- )
- from ....runner.v2 import submit_workflow
- from ....schemas.v2 import JobCreateV2
- from ....schemas.v2 import JobReadV2
- from ....schemas.v2 import JobStatusTypeV2
- from ...aux.validate_user_settings import validate_user_settings
  from ._aux_functions import _get_dataset_check_owner
  from ._aux_functions import _get_workflow_check_owner
  from ._aux_functions import clean_app_job_list_v2
  from ._aux_functions_tasks import _check_type_filters_compatibility
+ from fractal_server.app.db import AsyncSession
+ from fractal_server.app.db import get_async_db
  from fractal_server.app.models import TaskGroupV2
  from fractal_server.app.models import UserOAuth
+ from fractal_server.app.models.v2 import JobV2
  from fractal_server.app.routes.api.v2._aux_functions_tasks import (
      _get_task_read_access,
  )
  from fractal_server.app.routes.auth import current_active_verified_user
+ from fractal_server.app.routes.aux.validate_user_settings import (
+     validate_user_settings,
+ )
+ from fractal_server.app.runner.set_start_and_last_task_index import (
+     set_start_and_last_task_index,
+ )
+ from fractal_server.app.runner.v2.submit_workflow import submit_workflow
+ from fractal_server.app.schemas.v2 import JobCreateV2
+ from fractal_server.app.schemas.v2 import JobReadV2
+ from fractal_server.app.schemas.v2 import JobStatusTypeV2
+ from fractal_server.config import get_settings
+ from fractal_server.logger import set_logger
+ from fractal_server.syringe import Inject


  router = APIRouter()

fractal_server/app/routes/api/v2/task.py

@@ -152,14 +152,7 @@ async def create_task(
          db=db,
      )

-     if task.command_non_parallel is None:
-         task_type = "parallel"
-     elif task.command_parallel is None:
-         task_type = "non_parallel"
-     else:
-         task_type = "compound"
-
-     if task_type == "parallel" and (
+     if task.type == "parallel" and (
          task.args_schema_non_parallel is not None
          or task.meta_non_parallel is not None
      ):

@@ -170,7 +163,7 @@
              "`TaskV2.args_schema_non_parallel` if TaskV2 is parallel"
          ),
      )
-     elif task_type == "non_parallel" and (
+     elif task.type == "non_parallel" and (
          task.args_schema_parallel is not None or task.meta_parallel is not None
      ):
          raise HTTPException(

@@ -183,7 +176,7 @@

      # Add task

-     db_task = TaskV2(**task.model_dump(exclude_unset=True), type=task_type)
+     db_task = TaskV2(**task.model_dump(exclude_unset=True))
      pkg_name = db_task.name
      await _verify_non_duplication_user_constraint(
          db=db, pkg_name=pkg_name, user_id=user.id, version=db_task.version
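
The parallel/non_parallel/compound classification has not disappeared: the route now reads task.type, so the default is presumably computed in the schema layer (schemas/v2/task.py changed by +72 -77 in the file list). A hypothetical sketch of such a schema-side default, not the actual TaskCreateV2:

# Hypothetical sketch of deriving the task type in the schema layer.
from typing import Optional

from pydantic import BaseModel, model_validator


class TaskCreateV2(BaseModel):
    command_non_parallel: Optional[str] = None
    command_parallel: Optional[str] = None
    type: Optional[str] = None

    @model_validator(mode="after")
    def set_default_type(self):
        # Same decision table the route used to apply inline.
        if self.type is None:
            if self.command_non_parallel is None:
                self.type = "parallel"
            elif self.command_parallel is None:
                self.type = "non_parallel"
            else:
                self.type = "compound"
        return self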

fractal_server/app/routes/api/v2/task_collection_custom.py

@@ -12,9 +12,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
  from ._aux_functions_tasks import _get_valid_user_group_id
  from ._aux_functions_tasks import _verify_non_duplication_group_constraint
  from ._aux_functions_tasks import _verify_non_duplication_user_constraint
- from fractal_server.app.db import DBSyncSession
  from fractal_server.app.db import get_async_db
- from fractal_server.app.db import get_sync_db
  from fractal_server.app.models import UserOAuth
  from fractal_server.app.models.v2 import TaskGroupV2
  from fractal_server.app.routes.auth import current_active_verified_user

@@ -31,7 +29,7 @@ from fractal_server.tasks.v2.utils_background import (
      _prepare_tasks_metadata,
  )
  from fractal_server.tasks.v2.utils_database import (
-     create_db_tasks_and_update_task_group,
+     create_db_tasks_and_update_task_group_async,
  )

  router = APIRouter()

@@ -47,10 +45,7 @@ async def collect_task_custom(
      private: bool = False,
      user_group_id: Optional[int] = None,
      user: UserOAuth = Depends(current_active_verified_user),
-     db: AsyncSession = Depends(get_async_db),  # FIXME: using both sync/async
-     db_sync: DBSyncSession = Depends(
-         get_sync_db
-     ),  # FIXME: using both sync/async
+     db: AsyncSession = Depends(get_async_db),
  ) -> list[TaskReadV2]:

      settings = Inject(get_settings)

@@ -168,10 +163,10 @@
      await db.refresh(task_group)
      db.expunge(task_group)

-     task_group = create_db_tasks_and_update_task_group(
+     task_group = await create_db_tasks_and_update_task_group_async(
          task_list=task_list,
          task_group_id=task_group.id,
-         db=db_sync,
+         db=db,
      )

      logger.debug(
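
The helper's async variant lives in tasks/v2/utils_database.py (+30 -17), which this diff does not show. A hypothetical sketch of its shape, with field names assumed:

# Hypothetical sketch of the new async helper; field and relationship
# names are assumptions, not the real utils_database.py code.
from fractal_server.app.db import AsyncSession
from fractal_server.app.models.v2 import TaskGroupV2, TaskV2
from fractal_server.app.schemas.v2 import TaskCreateV2


async def create_db_tasks_and_update_task_group_async(
    *,
    task_group_id: int,
    task_list: list[TaskCreateV2],
    db: AsyncSession,
) -> TaskGroupV2:
    # Same job as the old sync helper, but awaiting an AsyncSession:
    # persist the tasks, attach them to the task group, return the group.
    db.add_all(
        TaskV2(**task.model_dump(), taskgroupv2_id=task_group_id)
        for task in task_list
    )
    await db.commit()
    task_group = await db.get(TaskGroupV2, task_group_id)
    return task_group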

fractal_server/app/routes/api/v2/task_group.py

@@ -181,23 +181,6 @@ async def delete_task_group(
          detail=f"TaskV2 {workflow_tasks[0].task_id} is still in use",
      )

-     # Cascade operations: set foreign-keys to null for TaskGroupActivityV2
-     # which are in relationship with the current TaskGroupV2
-     logger.debug("Start of cascade operations on TaskGroupActivityV2.")
-     stm = select(TaskGroupActivityV2).where(
-         TaskGroupActivityV2.taskgroupv2_id == task_group_id
-     )
-     res = await db.execute(stm)
-     task_group_activity_list = res.scalars().all()
-     for task_group_activity in task_group_activity_list:
-         logger.debug(
-             f"Setting TaskGroupActivityV2[{task_group_activity.id}]"
-             ".taskgroupv2_id to None."
-         )
-         task_group_activity.taskgroupv2_id = None
-         db.add(task_group_activity)
-     logger.debug("End of cascade operations on TaskGroupActivityV2.")
-
      await db.delete(task_group)
      await db.commit()


fractal_server/app/routes/api/v2/verify_image_types.py (new file)

@@ -0,0 +1,61 @@
+ from typing import Optional
+
+ from fastapi import APIRouter
+ from fastapi import Depends
+ from fastapi import status
+
+ from ._aux_functions import _get_dataset_check_owner
+ from .images import ImageQuery
+ from fractal_server.app.db import AsyncSession
+ from fractal_server.app.db import get_async_db
+ from fractal_server.app.models import UserOAuth
+ from fractal_server.app.routes.auth import current_active_user
+ from fractal_server.images.tools import aggregate_types
+ from fractal_server.images.tools import filter_image_list
+
+ router = APIRouter()
+
+
+ @router.post(
+     "/project/{project_id}/dataset/{dataset_id}/images/verify-unique-types/",
+     status_code=status.HTTP_200_OK,
+ )
+ async def verify_unique_types(
+     project_id: int,
+     dataset_id: int,
+     query: Optional[ImageQuery] = None,
+     user: UserOAuth = Depends(current_active_user),
+     db: AsyncSession = Depends(get_async_db),
+ ) -> list[str]:
+     # Get dataset
+     output = await _get_dataset_check_owner(
+         project_id=project_id, dataset_id=dataset_id, user_id=user.id, db=db
+     )
+     dataset = output["dataset"]
+
+     # Filter images
+     if query is None:
+         filtered_images = dataset.images
+     else:
+         filtered_images = filter_image_list(
+             images=dataset.images,
+             attribute_filters=query.attribute_filters,
+             type_filters=query.type_filters,
+         )
+
+     # Get actual values for each available type
+     available_types = aggregate_types(filtered_images)
+     values_per_type: dict[str, set] = {
+         _type: set() for _type in available_types
+     }
+     for _img in filtered_images:
+         for _type in available_types:
+             values_per_type[_type].add(_img["types"].get(_type, False))
+
+     # Find types with non-unique value
+     non_unique_types = [
+         key for key, value in values_per_type.items() if len(value) > 1
+     ]
+     non_unique_types = sorted(non_unique_types)
+
+     return non_unique_types
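
The aggregation above treats a type that is missing from an image as False, so a type is reported whenever the filtered images disagree on it. A worked example:

# Worked example of the non-unique-types check, with two images.
images = [
    {"types": {"is_3D": True, "illumination_corrected": True}},
    {"types": {"is_3D": False}},
]
# is_3D                  -> {True, False}  -> non-unique
# illumination_corrected -> {True, False}  -> non-unique (missing == False)
values_per_type: dict[str, set] = {}
for img in images:
    for t in ("is_3D", "illumination_corrected"):
        values_per_type.setdefault(t, set()).add(img["types"].get(t, False))
non_unique = sorted(k for k, v in values_per_type.items() if len(v) > 1)
print(non_unique)  # ['illumination_corrected', 'is_3D']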

fractal_server/app/routes/api/v2/workflow.py

@@ -14,16 +14,12 @@ from ....db import get_async_db
  from ....models.v2 import JobV2
  from ....models.v2 import ProjectV2
  from ....models.v2 import WorkflowV2
- from ....runner.set_start_and_last_task_index import (
-     set_start_and_last_task_index,
- )
  from ....schemas.v2 import WorkflowCreateV2
  from ....schemas.v2 import WorkflowExportV2
  from ....schemas.v2 import WorkflowReadV2
  from ....schemas.v2 import WorkflowReadV2WithWarnings
  from ....schemas.v2 import WorkflowUpdateV2
  from ._aux_functions import _check_workflow_exists
- from ._aux_functions import _get_dataset_check_owner
  from ._aux_functions import _get_project_check_owner
  from ._aux_functions import _get_submitted_jobs_statement
  from ._aux_functions import _get_workflow_check_owner

@@ -225,14 +221,6 @@ async def delete_workflow(
          ),
      )

-     # Cascade operations: set foreign-keys to null for jobs which are in
-     # relationship with the current workflow
-     stm = select(JobV2).where(JobV2.workflow_id == workflow_id)
-     res = await db.execute(stm)
-     jobs = res.scalars().all()
-     for job in jobs:
-         job.workflow_id = None
-
      # Delete workflow
      await db.delete(workflow)
      await db.commit()

@@ -244,7 +232,7 @@ async def delete_workflow(
      "/project/{project_id}/workflow/{workflow_id}/export/",
      response_model=WorkflowExportV2,
  )
- async def export_worfklow(
+ async def export_workflow(
      project_id: int,
      workflow_id: int,
      user: UserOAuth = Depends(current_active_user),

@@ -293,27 +281,22 @@ async def get_user_workflows(
      return workflow_list


- class TypeFiltersFlow(BaseModel):
-     dataset_filters: list[dict[str, bool]]
-     input_filters: list[dict[str, bool]]
-     output_filters: list[dict[str, bool]]
+ class WorkflowTaskTypeFiltersInfo(BaseModel):
+     workflowtask_id: int
+     current_type_filters: dict[str, bool]
+     input_type_filters: dict[str, bool]
+     output_type_filters: dict[str, bool]


- @router.get(
-     "/project/{project_id}/workflow/{workflow_id}/type-filters-flow/",
-     response_model=TypeFiltersFlow,
- )
+ @router.get("/project/{project_id}/workflow/{workflow_id}/type-filters-flow/")
  async def get_workflow_type_filters(
      project_id: int,
      workflow_id: int,
-     dataset_id: Optional[int] = None,
-     first_task_index: Optional[int] = None,
-     last_task_index: Optional[int] = None,
      user: UserOAuth = Depends(current_active_user),
      db: AsyncSession = Depends(get_async_db),
- ) -> Optional[WorkflowReadV2WithWarnings]:
+ ) -> list[WorkflowTaskTypeFiltersInfo]:
      """
-     Get info on an existing workflow
+     Get info on type/type-filters flow for a workflow.
      """

      workflow = await _get_workflow_check_owner(

@@ -323,59 +306,35 @@
          db=db,
      )

-     if len(workflow.task_list) == 0:
+     num_tasks = len(workflow.task_list)
+     if num_tasks == 0:
          raise HTTPException(
              status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
              detail="Workflow has no tasks.",
          )

-     if dataset_id is None:
-         dataset_type_filters = {}
-     else:
-         res = await _get_dataset_check_owner(
-             project_id=project_id,
-             dataset_id=dataset_id,
-             user_id=user.id,
-             db=db,
-         )
-         dataset = res["dataset"]
-         dataset_type_filters = dataset.type_filters
+     current_type_filters = {}

-     num_tasks = len(workflow.task_list)
-     try:
-         first_task_index, last_task_index = set_start_and_last_task_index(
-             num_tasks,
-             first_task_index=first_task_index,
-             last_task_index=last_task_index,
-         )
-     except ValueError as e:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=f"Invalid first/last task index.\nOriginal error: {str(e)}",
-         )
-
-     list_dataset_filters = [copy(dataset_type_filters)]
-     list_filters_in = []
-     list_filters_out = []
-     for wftask in workflow.task_list[first_task_index : last_task_index + 1]:
+     response_items = []
+     for wftask in workflow.task_list:

-         input_type_filters = copy(dataset_type_filters)
-         patch = merge_type_filters(
+         # Compute input_type_filters, based on wftask and task manifest
+         input_type_filters = merge_type_filters(
              wftask_type_filters=wftask.type_filters,
              task_input_types=wftask.task.input_types,
          )
-         input_type_filters.update(patch)
-         list_filters_in.append(copy(input_type_filters))

-         output_type_filters = wftask.task.output_types
-         list_filters_out.append(output_type_filters)
+         # Append current item to response list
+         response_items.append(
+             dict(
+                 workflowtask_id=wftask.id,
+                 current_type_filters=copy(current_type_filters),
+                 input_type_filters=copy(input_type_filters),
+                 output_type_filters=copy(wftask.task.output_types),
+             )
+         )

-         dataset_type_filters.update(wftask.task.output_types)
-         list_dataset_filters.append(copy(dataset_type_filters))
+         # Update `current_type_filters`
+         current_type_filters.update(wftask.task.output_types)

-     response_body = dict(
-         dataset_filters=list_dataset_filters,
-         input_filters=list_filters_in,
-         output_filters=list_filters_out,
-     )
-     return response_body
+     return response_items
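
To make the new response concrete, here is the bookkeeping traced on a hypothetical two-task workflow where task 1 outputs {"is_3D": False} and task 2 consumes only 2D images:

# Worked example of the current/input/output type-filters bookkeeping.
tasks = [
    # (input_type_filters after merge_type_filters, task.output_types)
    ({}, {"is_3D": False}),  # task 1: e.g. a projection task
    ({"is_3D": False}, {}),  # task 2: consumes 2D images only
]
current: dict[str, bool] = {}
for i, (input_filters, output_types) in enumerate(tasks, start=1):
    print(i, dict(current), input_filters, output_types)
    current.update(output_types)
# 1 {} {} {'is_3D': False}
# 2 {'is_3D': False} {'is_3D': False} {}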