fractal-server 2.10.5__py3-none-any.whl → 2.11.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/v2/dataset.py +9 -6
  3. fractal_server/app/models/v2/job.py +5 -0
  4. fractal_server/app/models/v2/workflowtask.py +5 -8
  5. fractal_server/app/routes/api/v1/dataset.py +2 -2
  6. fractal_server/app/routes/api/v2/_aux_functions.py +3 -10
  7. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +21 -0
  8. fractal_server/app/routes/api/v2/images.py +30 -7
  9. fractal_server/app/routes/api/v2/job.py +14 -1
  10. fractal_server/app/routes/api/v2/status.py +20 -20
  11. fractal_server/app/routes/api/v2/submit.py +11 -4
  12. fractal_server/app/routes/api/v2/workflow.py +95 -0
  13. fractal_server/app/routes/api/v2/workflow_import.py +8 -0
  14. fractal_server/app/routes/api/v2/workflowtask.py +45 -26
  15. fractal_server/app/runner/{async_wrap.py → async_wrap_v1.py} +1 -1
  16. fractal_server/app/runner/executors/slurm/_slurm_config.py +1 -1
  17. fractal_server/app/runner/executors/slurm/ssh/executor.py +2 -2
  18. fractal_server/app/runner/filenames.py +2 -4
  19. fractal_server/app/runner/v1/_common.py +4 -4
  20. fractal_server/app/runner/v1/_local/__init__.py +2 -2
  21. fractal_server/app/runner/v1/_slurm/__init__.py +2 -2
  22. fractal_server/app/runner/v1/handle_failed_job.py +4 -4
  23. fractal_server/app/runner/v2/__init__.py +12 -66
  24. fractal_server/app/runner/v2/_local/__init__.py +17 -47
  25. fractal_server/app/runner/v2/_local_experimental/__init__.py +27 -61
  26. fractal_server/app/runner/v2/_slurm_ssh/__init__.py +26 -65
  27. fractal_server/app/runner/v2/_slurm_sudo/__init__.py +24 -66
  28. fractal_server/app/runner/v2/handle_failed_job.py +31 -130
  29. fractal_server/app/runner/v2/merge_outputs.py +6 -17
  30. fractal_server/app/runner/v2/runner.py +51 -89
  31. fractal_server/app/runner/v2/task_interface.py +0 -2
  32. fractal_server/app/schemas/_filter_validators.py +43 -0
  33. fractal_server/app/schemas/_validators.py +13 -2
  34. fractal_server/app/schemas/v2/dataset.py +85 -12
  35. fractal_server/app/schemas/v2/dumps.py +6 -8
  36. fractal_server/app/schemas/v2/job.py +14 -0
  37. fractal_server/app/schemas/v2/task.py +9 -9
  38. fractal_server/app/schemas/v2/task_group.py +2 -2
  39. fractal_server/app/schemas/v2/workflowtask.py +69 -20
  40. fractal_server/data_migrations/2_11_0.py +168 -0
  41. fractal_server/images/__init__.py +0 -1
  42. fractal_server/images/models.py +12 -35
  43. fractal_server/images/tools.py +53 -14
  44. fractal_server/logger.py +4 -1
  45. fractal_server/migrations/versions/db09233ad13a_split_filters_and_keep_old_columns.py +96 -0
  46. fractal_server/tasks/v2/local/collect.py +2 -2
  47. fractal_server/tasks/v2/local/deactivate.py +2 -2
  48. fractal_server/tasks/v2/local/reactivate.py +2 -3
  49. fractal_server/tasks/v2/ssh/collect.py +2 -2
  50. fractal_server/tasks/v2/ssh/deactivate.py +2 -2
  51. fractal_server/tasks/v2/ssh/reactivate.py +2 -2
  52. fractal_server/utils.py +9 -7
  53. {fractal_server-2.10.5.dist-info → fractal_server-2.11.0.dist-info}/METADATA +1 -1
  54. {fractal_server-2.10.5.dist-info → fractal_server-2.11.0.dist-info}/RECORD +57 -54
  55. {fractal_server-2.10.5.dist-info → fractal_server-2.11.0.dist-info}/LICENSE +0 -0
  56. {fractal_server-2.10.5.dist-info → fractal_server-2.11.0.dist-info}/WHEEL +0 -0
  57. {fractal_server-2.10.5.dist-info → fractal_server-2.11.0.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
@@ -1 +1 @@
-__VERSION__ = "2.10.5"
+__VERSION__ = "2.11.0"
fractal_server/app/models/v2/dataset.py
@@ -11,6 +11,7 @@ from sqlmodel import Relationship
 from sqlmodel import SQLModel

 from ....utils import get_timestamp
+from fractal_server.images.models import AttributeFiltersType


 class DatasetV2(SQLModel, table=True):
@@ -41,12 +42,14 @@ class DatasetV2(SQLModel, table=True):
         sa_column=Column(JSON, server_default="[]", nullable=False)
     )

-    filters: dict[Literal["attributes", "types"], dict[str, Any]] = Field(
-        sa_column=Column(
-            JSON,
-            nullable=False,
-            server_default='{"attributes": {}, "types": {}}',
-        )
+    filters: Optional[
+        dict[Literal["attributes", "types"], dict[str, Any]]
+    ] = Field(sa_column=Column(JSON, nullable=True, server_default="null"))
+    type_filters: dict[str, bool] = Field(
+        sa_column=Column(JSON, nullable=False, server_default="{}")
+    )
+    attribute_filters: AttributeFiltersType = Field(
+        sa_column=Column(JSON, nullable=False, server_default="{}")
     )

     @property
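Note: the single `filters` JSON column (one dict holding both `attributes` and `types`) is split into dedicated `type_filters` and `attribute_filters` columns; per the migration name (`split_filters_and_keep_old_columns`), the legacy column is kept, now nullable with a `null` default. A minimal sketch of how one legacy value could map onto the new columns, assuming `AttributeFiltersType` maps each attribute key to a list of allowed values (the shipped logic lives in `fractal_server/data_migrations/2_11_0.py` and may differ):

# Illustrative only, not the shipped data migration.
legacy_filters = {"attributes": {"plate": "plate1"}, "types": {"3D": True}}

# Type filters carry over unchanged.
type_filters: dict[str, bool] = dict(legacy_filters["types"])

# Assumption: scalar attribute values are wrapped into one-element lists
# of allowed values.
attribute_filters = {
    key: [value] for key, value in legacy_filters["attributes"].items()
}

assert type_filters == {"3D": True}
assert attribute_filters == {"plate": ["plate1"]}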
fractal_server/app/models/v2/job.py
@@ -10,6 +10,7 @@ from sqlmodel import SQLModel

 from ....utils import get_timestamp
 from ...schemas.v2 import JobStatusTypeV2
+from fractal_server.images.models import AttributeFiltersType


 class JobV2(SQLModel, table=True):
@@ -49,3 +50,7 @@ class JobV2(SQLModel, table=True):
     )
     status: str = JobStatusTypeV2.SUBMITTED
     log: Optional[str] = None
+
+    attribute_filters: AttributeFiltersType = Field(
+        sa_column=Column(JSON, nullable=False, server_default="{}")
+    )
fractal_server/app/models/v2/workflowtask.py
@@ -25,14 +25,11 @@ class WorkflowTaskV2(SQLModel, table=True):
     args_parallel: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
     args_non_parallel: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))

-    input_filters: dict[
-        Literal["attributes", "types"], dict[str, Any]
-    ] = Field(
-        sa_column=Column(
-            JSON,
-            nullable=False,
-            server_default='{"attributes": {}, "types": {}}',
-        )
+    input_filters: Optional[
+        dict[Literal["attributes", "types"], dict[str, Any]]
+    ] = Field(sa_column=Column(JSON, nullable=True, server_default="null"))
+    type_filters: dict[str, bool] = Field(
+        sa_column=Column(JSON, nullable=False, server_default="{}")
     )

     # Task
fractal_server/app/routes/api/v1/dataset.py
@@ -17,7 +17,7 @@ from ....models.v1 import ApplyWorkflow
 from ....models.v1 import Dataset
 from ....models.v1 import Project
 from ....models.v1 import Resource
-from ....runner.filenames import HISTORY_FILENAME
+from ....runner.filenames import HISTORY_FILENAME_V1
 from ....schemas.v1 import DatasetCreateV1
 from ....schemas.v1 import DatasetReadV1
 from ....schemas.v1 import DatasetStatusReadV1
@@ -511,7 +511,7 @@ async def get_workflowtask_status(
     # Highest priority: Read status updates coming from the running-job
     # temporary file. Note: this file only contains information on
     # WorkflowTask's that ran through successfully
-    tmp_file = Path(running_job.working_dir) / HISTORY_FILENAME
+    tmp_file = Path(running_job.working_dir) / HISTORY_FILENAME_V1
     try:
         with tmp_file.open("r") as f:
             history = json.load(f)
fractal_server/app/routes/api/v2/_aux_functions.py
@@ -21,7 +21,6 @@ from ....models.v2 import TaskV2
 from ....models.v2 import WorkflowTaskV2
 from ....models.v2 import WorkflowV2
 from ....schemas.v2 import JobStatusTypeV2
-from fractal_server.images import Filters


 async def _get_project_check_owner(
@@ -336,7 +335,7 @@ async def _workflow_insert_task(
     meta_non_parallel: Optional[dict[str, Any]] = None,
     args_non_parallel: Optional[dict[str, Any]] = None,
     args_parallel: Optional[dict[str, Any]] = None,
-    input_filters: Optional[Filters] = None,
+    type_filters: Optional[dict[str, bool]] = None,
     db: AsyncSession,
 ) -> WorkflowTaskV2:
     """
@@ -350,7 +349,7 @@ async def _workflow_insert_task(
         meta_non_parallel:
         args_non_parallel:
         args_parallel:
-        input_filters:
+        type_filters:
         db:
     """
     db_workflow = await db.get(WorkflowV2, workflow_id)
@@ -376,12 +375,6 @@ async def _workflow_insert_task(
     if final_meta_non_parallel == {}:
         final_meta_non_parallel = None

-    # Prepare input_filters attribute
-    if input_filters is None:
-        input_filters_kwarg = {}
-    else:
-        input_filters_kwarg = dict(input_filters=input_filters)
-
     # Create DB entry
     wf_task = WorkflowTaskV2(
         task_type=task_type,
@@ -390,7 +383,7 @@ async def _workflow_insert_task(
         args_parallel=args_parallel,
         meta_parallel=final_meta_parallel,
         meta_non_parallel=final_meta_non_parallel,
-        **input_filters_kwarg,
+        type_filters=(type_filters or dict()),
     )
     db_workflow.task_list.append(wf_task)
     flag_modified(db_workflow, "task_list")
fractal_server/app/routes/api/v2/_aux_functions_tasks.py
@@ -22,6 +22,7 @@ from fractal_server.app.routes.auth._aux_auth import (
     _verify_user_belongs_to_group,
 )
 from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
+from fractal_server.images.tools import merge_type_filters
 from fractal_server.logger import set_logger

 logger = set_logger(__name__)
@@ -351,3 +352,23 @@ async def _add_warnings_to_workflow_tasks(
             wftask_data["warning"] = "Current user has no access to this task."
         wftask_list_with_warnings.append(wftask_data)
     return wftask_list_with_warnings
+
+
+def _check_type_filters_compatibility(
+    *,
+    task_input_types: dict[str, bool],
+    wftask_type_filters: dict[str, bool],
+) -> None:
+    """
+    Wrap `merge_type_filters` and raise `HTTPException` if needed.
+    """
+    try:
+        merge_type_filters(
+            task_input_types=task_input_types,
+            wftask_type_filters=wftask_type_filters,
+        )
+    except ValueError as e:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=f"Incompatible type filters.\nOriginal error: {str(e)}",
+        )
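Note: `_check_type_filters_compatibility` converts a `ValueError` from `merge_type_filters` into a 422 response. A rough sketch of the semantics this appears to enforce, namely that a workflow task's type filters must not contradict the task's declared input types (the shipped `merge_type_filters` lives in `fractal_server/images/tools.py` and may differ in detail):

# Hypothetical stand-in for merge_type_filters; illustration only.
def merge_type_filters_sketch(
    *,
    task_input_types: dict[str, bool],
    wftask_type_filters: dict[str, bool],
) -> dict[str, bool]:
    for key, value in wftask_type_filters.items():
        if key in task_input_types and task_input_types[key] != value:
            raise ValueError(f"Conflicting filter for type '{key}'.")
    return {**task_input_types, **wftask_type_filters}


# Compatible filters merge fine...
merge_type_filters_sketch(
    task_input_types={"3D": True}, wftask_type_filters={}
)  # -> {"3D": True}

# ...while a contradiction raises, which the endpoint maps to HTTP 422.
try:
    merge_type_filters_sketch(
        task_input_types={"3D": True}, wftask_type_filters={"3D": False}
    )
except ValueError as e:
    print(f"Incompatible type filters.\nOriginal error: {str(e)}")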
fractal_server/app/routes/api/v2/images.py
@@ -8,6 +8,8 @@ from fastapi import Response
 from fastapi import status
 from pydantic import BaseModel
 from pydantic import Field
+from pydantic import root_validator
+from pydantic import validator
 from sqlalchemy.orm.attributes import flag_modified

 from ._aux_functions import _get_dataset_check_owner
@@ -15,9 +17,14 @@ from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.routes.auth import current_active_user
-from fractal_server.images import Filters
+from fractal_server.app.schemas._filter_validators import (
+    validate_attribute_filters,
+)
+from fractal_server.app.schemas._filter_validators import validate_type_filters
+from fractal_server.app.schemas._validators import root_validate_dict_keys
 from fractal_server.images import SingleImage
 from fractal_server.images import SingleImageUpdate
+from fractal_server.images.models import AttributeFiltersType
 from fractal_server.images.tools import find_image_by_zarr_url
 from fractal_server.images.tools import match_filter

@@ -38,7 +45,18 @@ class ImagePage(BaseModel):

 class ImageQuery(BaseModel):
     zarr_url: Optional[str]
-    filters: Filters = Field(default_factory=Filters)
+    type_filters: dict[str, bool] = Field(default_factory=dict)
+    attribute_filters: AttributeFiltersType = Field(default_factory=dict)
+
+    _dict_keys = root_validator(pre=True, allow_reuse=True)(
+        root_validate_dict_keys
+    )
+    _type_filters = validator("type_filters", allow_reuse=True)(
+        validate_type_filters
+    )
+    _attribute_filters = validator("attribute_filters", allow_reuse=True)(
+        validate_attribute_filters
+    )


 @router.post(
@@ -124,7 +142,11 @@ async def query_dataset_images(
         images = [
             image
             for image in images
-            if match_filter(image, Filters(**dataset.filters))
+            if match_filter(
+                image=image,
+                type_filters=dataset.type_filters,
+                attribute_filters=dataset.attribute_filters,
+            )
         ]

     attributes = {}
@@ -154,13 +176,14 @@ async def query_dataset_images(
         else:
             images = [image]

-        if query.filters.attributes or query.filters.types:
+        if query.attribute_filters or query.type_filters:
             images = [
                 image
                 for image in images
                 if match_filter(
-                    image,
-                    Filters(**query.filters.dict()),
+                    image=image,
+                    type_filters=query.type_filters,
+                    attribute_filters=query.attribute_filters,
                 )
             ]

@@ -184,7 +207,7 @@ async def query_dataset_images(
     if page > last_page:
         page = last_page
     offset = (page - 1) * page_size
-    images = images[offset : offset + page_size]  # noqa E203
+    images = images[offset : offset + page_size]

     return ImagePage(
         total_count=total_count,
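Note: `match_filter` is now called with keyword-only `type_filters` and `attribute_filters` instead of a single `Filters` object. A hedged usage sketch, assuming the image dict follows the `SingleImage` shape (`zarr_url`, `types`, `attributes`) and that attribute filters list the accepted values per key:

from fractal_server.images.tools import match_filter

image = dict(
    zarr_url="/somewhere/plate.zarr/A/01/0",
    types={"3D": True},
    attributes={"plate": "plate1", "well": "A01"},
)

# True if the image passes both filter sets; empty filters match everything.
keep = match_filter(
    image=image,
    type_filters={"3D": True},
    attribute_filters={"plate": ["plate1", "plate2"]},
)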
fractal_server/app/routes/api/v2/job.py
@@ -1,4 +1,6 @@
+import asyncio
 from pathlib import Path
+from typing import Iterator
 from typing import Optional

 from fastapi import APIRouter
@@ -24,6 +26,14 @@ from ._aux_functions import _get_workflow_check_owner
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.routes.auth import current_active_user

+
+# https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread
+# This moves the function execution to a separate thread,
+# preventing it from blocking the event loop.
+async def zip_folder_threaded(folder: str) -> Iterator[bytes]:
+    return await asyncio.to_thread(_zip_folder_to_byte_stream_iterator, folder)
+
+
 router = APIRouter()


@@ -128,8 +138,11 @@ async def download_job_logs(
     )
     job = output["job"]
     zip_name = f"{Path(job.working_dir).name}_archive.zip"
+
+    zip_bytes_iterator = await zip_folder_threaded(job.working_dir)
+
     return StreamingResponse(
-        _zip_folder_to_byte_stream_iterator(folder=job.working_dir),
+        zip_bytes_iterator,
         media_type="application/x-zip-compressed",
         headers={"Content-Disposition": f"attachment;filename={zip_name}"},
     )
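Note: building the zip archive is blocking work, so the endpoint now runs it through `asyncio.to_thread` (Python >= 3.9) before streaming the result. A self-contained illustration of the pattern (generic code, not fractal-server's):

import asyncio
import time


def build_archive(folder: str) -> bytes:
    # Stand-in for CPU/IO-heavy zipping.
    time.sleep(1)
    return b"PK\x03\x04"


async def handler() -> bytes:
    # Runs build_archive in a worker thread; the event loop stays free
    # to serve other requests while the thread works.
    return await asyncio.to_thread(build_archive, "/tmp/job_dir")


asyncio.run(handler())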
fractal_server/app/routes/api/v2/status.py
@@ -1,5 +1,3 @@
-import json
-from pathlib import Path
 from typing import Optional

 from fastapi import APIRouter
@@ -18,7 +16,6 @@ from ._aux_functions import _get_submitted_jobs_statement
 from ._aux_functions import _get_workflow_check_owner
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.routes.auth import current_active_user
-from fractal_server.app.runner.filenames import HISTORY_FILENAME

 router = APIRouter()

@@ -98,8 +95,8 @@ async def get_workflowtask_status(
     if running_job is None:
         # If no job is running, the chronological-last history item is also the
         # positional-last workflow task to be included in the response.
-        if len(dataset.history) > 0:
-            last_valid_wftask_id = dataset.history[-1]["workflowtask"]["id"]
+        if len(history) > 0:
+            last_valid_wftask_id = history[-1]["workflowtask"]["id"]
        else:
             last_valid_wftask_id = None
     else:
@@ -109,7 +106,24 @@ async def get_workflowtask_status(
         # as "submitted"
         start = running_job.first_task_index
         end = running_job.last_task_index + 1
-        for wftask in workflow.task_list[start:end]:
+
+        running_job_wftasks = workflow.task_list[start:end]
+        running_job_statuses = [
+            workflow_tasks_status_dict.get(wft.id, None)
+            for wft in running_job_wftasks
+        ]
+        try:
+            first_submitted_index = running_job_statuses.index(
+                WorkflowTaskStatusTypeV2.SUBMITTED
+            )
+        except ValueError:
+            logger.warning(
+                f"Job {running_job.id} is submitted but its task list does "
+                f"not contain a {WorkflowTaskStatusTypeV2.SUBMITTED} task."
+            )
+            first_submitted_index = 0
+
+        for wftask in running_job_wftasks[first_submitted_index:]:
             workflow_tasks_status_dict[
                 wftask.id
             ] = WorkflowTaskStatusTypeV2.SUBMITTED
@@ -133,20 +147,6 @@ async def get_workflowtask_status(
             last_valid_wftask_id = None
             logger.warning(f"Now setting {last_valid_wftask_id=}.")

-        # Highest priority: Read status updates coming from the running-job
-        # temporary file. Note: this file only contains information on
-        # WorkflowTask's that ran through successfully.
-        tmp_file = Path(running_job.working_dir) / HISTORY_FILENAME
-        try:
-            with tmp_file.open("r") as f:
-                history = json.load(f)
-        except FileNotFoundError:
-            history = []
-        for history_item in history:
-            wftask_id = history_item["workflowtask"]["id"]
-            wftask_status = history_item["status"]
-            workflow_tasks_status_dict[wftask_id] = wftask_status
-
     # Based on previously-gathered information, clean up the response body
     clean_workflow_tasks_status_dict = {}
     for wf_task in workflow.task_list:
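Note: instead of replaying a temporary history file, the endpoint now locates the first task in the running job's range whose recorded status is `submitted` and marks that task and everything after it as submitted; `list.index` raises `ValueError` when no such entry exists, which is logged and mapped to index 0. The idiom in isolation:

statuses = ["done", "done", "submitted", None]
try:
    first_submitted_index = statuses.index("submitted")
except ValueError:
    # No 'submitted' entry: conservatively mark the whole range.
    first_submitted_index = 0
assert first_submitted_index == 2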
fractal_server/app/routes/api/v2/submit.py
@@ -28,6 +28,7 @@ from ...aux.validate_user_settings import validate_user_settings
 from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_workflow_check_owner
 from ._aux_functions import clean_app_job_list_v2
+from ._aux_functions_tasks import _check_type_filters_compatibility
 from fractal_server.app.models import TaskGroupV2
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.routes.api.v2._aux_functions_tasks import (
@@ -108,15 +109,17 @@ async def apply_workflow(

     # Check that tasks have read-access and are `active`
     used_task_group_ids = set()
-    for wftask in workflow.task_list[
-        first_task_index : last_task_index + 1  # noqa: E203
-    ]:
+    for wftask in workflow.task_list[first_task_index : last_task_index + 1]:
         task = await _get_task_read_access(
             user_id=user.id,
             task_id=wftask.task_id,
             require_active=True,
             db=db,
         )
+        _check_type_filters_compatibility(
+            task_input_types=task.input_types,
+            wftask_type_filters=wftask.type_filters,
+        )
         used_task_group_ids.add(task.taskgroupv2_id)

     # Validate user settings
@@ -159,7 +162,11 @@ async def apply_workflow(
         dataset_id=dataset_id,
         workflow_id=workflow_id,
         user_email=user.email,
-        dataset_dump=json.loads(dataset.json(exclude={"images", "history"})),
+        # The 'filters' field is not supported any more but still exists as a
+        # database column, therefore we manually exclude it from dumps.
+        dataset_dump=json.loads(
+            dataset.json(exclude={"images", "history", "filters"})
+        ),
         workflow_dump=json.loads(workflow.json(exclude={"task_list"})),
         project_dump=json.loads(project.json(exclude={"user_list"})),
         **job_create.dict(),
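Note: the deprecated `filters` attribute still exists as a database column on `DatasetV2`, so it must be excluded from `dataset_dump` by name. With pydantic-v1-style models, `.json(exclude=...)` drops the named fields from the serialized output; a toy example (illustrative model, not the real `DatasetV2`):

from pydantic import BaseModel


class DatasetToy(BaseModel):
    id: int = 1
    filters: dict = {}
    images: list = []
    history: list = []


dump = DatasetToy().json(exclude={"images", "history", "filters"})
assert dump == '{"id": 1}'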
fractal_server/app/routes/api/v2/workflow.py
@@ -1,3 +1,4 @@
+from copy import copy
 from typing import Optional

 from fastapi import APIRouter
@@ -5,6 +6,7 @@ from fastapi import Depends
 from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
+from pydantic import BaseModel
 from sqlmodel import select

 from ....db import AsyncSession
@@ -12,12 +14,16 @@ from ....db import get_async_db
 from ....models.v2 import JobV2
 from ....models.v2 import ProjectV2
 from ....models.v2 import WorkflowV2
+from ....runner.set_start_and_last_task_index import (
+    set_start_and_last_task_index,
+)
 from ....schemas.v2 import WorkflowCreateV2
 from ....schemas.v2 import WorkflowExportV2
 from ....schemas.v2 import WorkflowReadV2
 from ....schemas.v2 import WorkflowReadV2WithWarnings
 from ....schemas.v2 import WorkflowUpdateV2
 from ._aux_functions import _check_workflow_exists
+from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_project_check_owner
 from ._aux_functions import _get_submitted_jobs_statement
 from ._aux_functions import _get_workflow_check_owner
@@ -25,6 +31,7 @@ from ._aux_functions_tasks import _add_warnings_to_workflow_tasks
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.routes.auth import current_active_user
+from fractal_server.images.tools import merge_type_filters

 router = APIRouter()

@@ -284,3 +291,91 @@ async def get_user_workflows(
     res = await db.execute(stm)
     workflow_list = res.scalars().all()
     return workflow_list
+
+
+class TypeFiltersFlow(BaseModel):
+    dataset_filters: list[dict[str, bool]]
+    input_filters: list[dict[str, bool]]
+    output_filters: list[dict[str, bool]]
+
+
+@router.get(
+    "/project/{project_id}/workflow/{workflow_id}/type-filters-flow/",
+    response_model=TypeFiltersFlow,
+)
+async def get_workflow_type_filters(
+    project_id: int,
+    workflow_id: int,
+    dataset_id: Optional[int] = None,
+    first_task_index: Optional[int] = None,
+    last_task_index: Optional[int] = None,
+    user: UserOAuth = Depends(current_active_user),
+    db: AsyncSession = Depends(get_async_db),
+) -> Optional[WorkflowReadV2WithWarnings]:
+    """
+    Get info on an existing workflow
+    """
+
+    workflow = await _get_workflow_check_owner(
+        project_id=project_id,
+        workflow_id=workflow_id,
+        user_id=user.id,
+        db=db,
+    )
+
+    if len(workflow.task_list) == 0:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail="Workflow has no tasks.",
+        )
+
+    if dataset_id is None:
+        dataset_type_filters = {}
+    else:
+        res = await _get_dataset_check_owner(
+            project_id=project_id,
+            dataset_id=dataset_id,
+            user_id=user.id,
+            db=db,
+        )
+        dataset = res["dataset"]
+        dataset_type_filters = dataset.type_filters
+
+    num_tasks = len(workflow.task_list)
+    try:
+        first_task_index, last_task_index = set_start_and_last_task_index(
+            num_tasks,
+            first_task_index=first_task_index,
+            last_task_index=last_task_index,
+        )
+    except ValueError as e:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=f"Invalid first/last task index.\nOriginal error: {str(e)}",
+        )
+
+    list_dataset_filters = [copy(dataset_type_filters)]
+    list_filters_in = []
+    list_filters_out = []
+    for wftask in workflow.task_list[first_task_index : last_task_index + 1]:
+
+        input_type_filters = copy(dataset_type_filters)
+        patch = merge_type_filters(
+            wftask_type_filters=wftask.type_filters,
+            task_input_types=wftask.task.input_types,
+        )
+        input_type_filters.update(patch)
+        list_filters_in.append(copy(input_type_filters))
+
+        output_type_filters = wftask.task.output_types
+        list_filters_out.append(output_type_filters)
+
+        dataset_type_filters.update(wftask.task.output_types)
+        list_dataset_filters.append(copy(dataset_type_filters))
+
+    response_body = dict(
+        dataset_filters=list_dataset_filters,
+        input_filters=list_filters_in,
+        output_filters=list_filters_out,
+    )
+    return response_body
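Note: the new `type-filters-flow` endpoint replays how dataset type filters evolve across the selected task range: for each task it records the effective input filters (current dataset filters patched with the merged wftask/task type filters) and the task's declared `output_types`, then folds those output types back into the dataset filters. A worked toy example of the accumulation, assuming merge semantics as sketched earlier and purely illustrative task data:

dataset_type_filters: dict[str, bool] = {}  # as when no dataset_id is given

# Per task: (wftask.type_filters, task.input_types, task.output_types).
tasks = [
    ({}, {}, {"3D": True}),             # e.g. a converter emitting 3D images
    ({}, {"3D": True}, {"3D": False}),  # e.g. a projection: 3D in, 2D out
]

list_dataset_filters = [dict(dataset_type_filters)]
list_filters_in, list_filters_out = [], []
for wftask_filters, input_types, output_types in tasks:
    input_filters = {**dataset_type_filters, **input_types, **wftask_filters}
    list_filters_in.append(input_filters)
    list_filters_out.append(output_types)
    dataset_type_filters.update(output_types)
    list_dataset_filters.append(dict(dataset_type_filters))

assert list_dataset_filters == [{}, {"3D": True}, {"3D": False}]
assert list_filters_in == [{}, {"3D": True}]
assert list_filters_out == [{"3D": True}, {"3D": False}]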
fractal_server/app/routes/api/v2/workflow_import.py
@@ -19,6 +19,7 @@ from ._aux_functions import _check_workflow_exists
 from ._aux_functions import _get_project_check_owner
 from ._aux_functions import _workflow_insert_task
 from ._aux_functions_tasks import _add_warnings_to_workflow_tasks
+from ._aux_functions_tasks import _check_type_filters_compatibility
 from fractal_server.app.models import LinkUserGroup
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import TaskGroupV2
@@ -325,6 +326,13 @@ async def import_workflow(
         list_wf_tasks.append(new_wf_task)
         list_task_ids.append(task_id)

+    for wftask, task_id in zip(list_wf_tasks, list_task_ids):
+        task = await db.get(TaskV2, task_id)
+        _check_type_filters_compatibility(
+            task_input_types=task.input_types,
+            wftask_type_filters=wftask.type_filters,
+        )
+
     # Create new Workflow
     db_workflow = WorkflowV2(
         project_id=project_id,