fractal-server 2.0.0a8__py3-none-any.whl → 2.0.0a10__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the public registry.
@@ -1 +1 @@
- __VERSION__ = "2.0.0a8"
+ __VERSION__ = "2.0.0a10"
@@ -33,11 +33,3 @@ class WorkflowV2(SQLModel, table=True):
          default_factory=get_timestamp,
          sa_column=Column(DateTime(timezone=True), nullable=False),
      )
-
-     @property
-     def input_types(self):
-         return self.task_list[0].task.input_types
-
-     @property
-     def output_types(self):
-         return self.task_list[-1].task.output_types
@@ -51,14 +51,13 @@ class WorkflowTaskV2(SQLModel, table=True):

      @validator("args_non_parallel")
      def validate_args_non_parallel(cls, value):
-         """
-         FIXME V2 this requires an update
-         """
          if value is None:
              return
          forbidden_args_keys = {
-             "metadata",
-             "component",
+             "zarr_dir",
+             "zarr_url",
+             "zarr_urls",
+             "init_args",
          }
          args_keys = set(value.keys())
          intersect_keys = forbidden_args_keys.intersection(args_keys)
@@ -71,14 +70,13 @@ class WorkflowTaskV2(SQLModel, table=True):

      @validator("args_parallel")
      def validate_args_parallel(cls, value):
-         """
-         FIXME V2 this requires an update
-         """
          if value is None:
              return
          forbidden_args_keys = {
-             "metadata",
-             "component",
+             "zarr_dir",
+             "zarr_url",
+             "zarr_urls",
+             "init_args",
          }
          args_keys = set(value.keys())
          intersect_keys = forbidden_args_keys.intersection(args_keys)
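The two validators above replace the V1-era reserved names ("metadata", "component") with the argument names that the V2 runner injects itself. A standalone sketch of the same check, applied to a hypothetical args dict (not taken from this diff):

    # Sketch of the reserved-key check performed by the validators above.
    forbidden_args_keys = {"zarr_dir", "zarr_url", "zarr_urls", "init_args"}

    def check_args(value: dict) -> dict:
        intersect_keys = forbidden_args_keys.intersection(value.keys())
        if intersect_keys:
            raise ValueError(f"`args` contains the forbidden keys {intersect_keys}")
        return value

    check_args({"threshold": 0.5})             # accepted
    check_args({"zarr_url": "/tmp/x.zarr"})    # raises ValueError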
@@ -7,6 +7,7 @@ from .dataset import router as dataset_router_v2
  from .images import router as images_routes_v2
  from .job import router as job_router_v2
  from .project import router as project_router_v2
+ from .status import router as status_router_v2
  from .submit import router as submit_job_router_v2
  from .task import router as task_router_v2
  from .task_collection import router as task_collection_router_v2
@@ -30,3 +31,4 @@ router_api_v2.include_router(
  )
  router_api_v2.include_router(workflow_router_v2, tags=["V2 Workflow"])
  router_api_v2.include_router(workflowtask_router_v2, tags=["V2 WorkflowTask"])
+ router_api_v2.include_router(status_router_v2, tags=["V2 Status"])
@@ -1,5 +1,3 @@
- import json
- from pathlib import Path
  from typing import Optional

  from fastapi import APIRouter
@@ -17,15 +15,13 @@ from ....models.v2 import ProjectV2
  from ....schemas.v2 import DatasetCreateV2
  from ....schemas.v2 import DatasetReadV2
  from ....schemas.v2 import DatasetUpdateV2
- from ....schemas.v2.dataset import DatasetStatusReadV2
- from ....schemas.v2.dataset import WorkflowTaskStatusTypeV2
+ from ....schemas.v2.dataset import DatasetExportV2
+ from ....schemas.v2.dataset import DatasetImportV2
  from ....security import current_active_user
  from ....security import User
  from ._aux_functions import _get_dataset_check_owner
  from ._aux_functions import _get_project_check_owner
  from ._aux_functions import _get_submitted_jobs_statement
- from ._aux_functions import _get_workflow_check_owner
- from fractal_server.app.runner.filenames import HISTORY_FILENAME

  router = APIRouter()

@@ -227,91 +223,61 @@ async def get_user_datasets(


  @router.get(
-     "/project/{project_id}/dataset/{dataset_id}/status/",
-     response_model=DatasetStatusReadV2,
+     "/project/{project_id}/dataset/{dataset_id}/export/",
+     response_model=DatasetExportV2,
  )
- async def get_workflowtask_status(
+ async def export_dataset(
      project_id: int,
      dataset_id: int,
      user: User = Depends(current_active_user),
      db: AsyncSession = Depends(get_async_db),
- ) -> Optional[DatasetStatusReadV2]:
+ ) -> Optional[DatasetExportV2]:
      """
-     Extract the status of all `WorkflowTask`s that ran on a given `DatasetV2`.
+     Export an existing dataset
      """
-     # Get the dataset DB entry
-     output = await _get_dataset_check_owner(
+     dict_dataset_project = await _get_dataset_check_owner(
          project_id=project_id,
          dataset_id=dataset_id,
          user_id=user.id,
          db=db,
      )
-     dataset = output["dataset"]
+     await db.close()

-     # Check whether there exists a job such that
-     # 1. `job.dataset_id == dataset_id`, and
-     # 2. `job.status` is submitted
-     # If one such job exists, it will be used later. If there are multiple
-     # jobs, raise an error.
-     stm = _get_submitted_jobs_statement().where(JobV2.dataset_id == dataset_id)
-     res = await db.execute(stm)
-     running_jobs = res.scalars().all()
-     if len(running_jobs) == 0:
-         running_job = None
-     elif len(running_jobs) == 1:
-         running_job = running_jobs[0]
-     else:
-         string_ids = str([job.id for job in running_jobs])[1:-1]
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=(
-                 f"Cannot get WorkflowTaskV2 statuses as DatasetV2 {dataset.id}"
-                 f" is linked to multiple active jobs: {string_ids}."
-             ),
-         )
+     dataset = dict_dataset_project["dataset"]

-     # Initialize empty dictionary for WorkflowTaskV2 status
-     workflow_tasks_status_dict: dict = {}
-
-     # Lowest priority: read status from DB, which corresponds to jobs that are
-     # not running
-     history = dataset.history
-     for history_item in history:
-         wftask_id = history_item["workflowtask"]["id"]
-         wftask_status = history_item["status"]
-         workflow_tasks_status_dict[wftask_id] = wftask_status
-
-     # If a job is running, then gather more up-to-date information
-     if running_job is not None:
-         # Get the workflow DB entry
-         running_workflow = await _get_workflow_check_owner(
-             project_id=project_id,
-             workflow_id=running_job.workflow_id,
-             user_id=user.id,
-             db=db,
-         )
-         # Mid priority: Set all WorkflowTask's that are part of the running job
-         # as "submitted"
-         start = running_job.first_task_index
-         end = running_job.last_task_index + 1
-         for wftask in running_workflow.task_list[start:end]:
-             workflow_tasks_status_dict[
-                 wftask.id
-             ] = WorkflowTaskStatusTypeV2.SUBMITTED
-
-         # Highest priority: Read status updates coming from the running-job
-         # temporary file. Note: this file only contains information on
-         # WorkflowTask's that ran through successfully.
-         tmp_file = Path(running_job.working_dir) / HISTORY_FILENAME
-         try:
-             with tmp_file.open("r") as f:
-                 history = json.load(f)
-         except FileNotFoundError:
-             history = []
-         for history_item in history:
-             wftask_id = history_item["workflowtask"]["id"]
-             wftask_status = history_item["status"]
-             workflow_tasks_status_dict[wftask_id] = wftask_status
-
-     response_body = DatasetStatusReadV2(status=workflow_tasks_status_dict)
-     return response_body
+     return dataset
+
+
+ @router.post(
+     "/project/{project_id}/dataset/import/",
+     response_model=DatasetReadV2,
+     status_code=status.HTTP_201_CREATED,
+ )
+ async def import_dataset(
+     project_id: int,
+     dataset: DatasetImportV2,
+     user: User = Depends(current_active_user),
+     db: AsyncSession = Depends(get_async_db),
+ ) -> Optional[DatasetReadV2]:
+     """
+     Import an existing dataset into a project
+     """
+
+     # Preliminary checks
+     await _get_project_check_owner(
+         project_id=project_id,
+         user_id=user.id,
+         db=db,
+     )
+
+     # Create new Dataset
+     db_dataset = DatasetV2(
+         project_id=project_id,
+         **dataset.dict(exclude_none=True),
+     )
+     db.add(db_dataset)
+     await db.commit()
+     await db.refresh(db_dataset)
+     await db.close()
+
+     return db_dataset
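A minimal client-side sketch of the new export/import round trip. The base URL, the /api/v2 prefix, the IDs, and the token are assumptions for illustration, not taken from this diff:

    # Hypothetical round trip over the new endpoints (requires httpx).
    import httpx

    BASE = "http://localhost:8000/api/v2"          # assumed server URL and v2 prefix
    HEADERS = {"Authorization": "Bearer <token>"}  # placeholder credentials

    # GET /project/{project_id}/dataset/{dataset_id}/export/
    exported = httpx.get(f"{BASE}/project/1/dataset/1/export/", headers=HEADERS).json()

    # POST /project/{project_id}/dataset/import/ with a DatasetImportV2-shaped payload
    payload = {
        "name": exported["name"],
        "zarr_dir": exported["zarr_dir"],
        "images": exported["images"],
        "filters": exported["filters"],
    }
    new_dataset = httpx.post(
        f"{BASE}/project/2/dataset/import/", headers=HEADERS, json=payload
    ).json()
    print(new_dataset["id"])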
@@ -66,6 +66,14 @@ async def post_new_image(
                  f"{dataset.zarr_dir}."
              ),
          )
+     elif new_image.zarr_url == dataset.zarr_dir:
+         raise HTTPException(
+             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+             detail=(
+                 "`SingleImage.zarr_url` cannot be equal to `Dataset.zarr_dir`:"
+                 f" {dataset.zarr_dir}"
+             ),
+         )

      if new_image.zarr_url in dataset.image_zarr_urls:
          raise HTTPException(
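Together with the surrounding checks in this route (the error text above suggests a prefix check against zarr_dir, and the following lines reject duplicate URLs), the new branch rules out zarr_url values that coincide with the dataset's zarr_dir. A standalone sketch of the combined constraint, with hypothetical values:

    # Sketch of the zarr_url constraints enforced around the new branch.
    def validate_zarr_url(zarr_url: str, zarr_dir: str, existing: set[str]) -> None:
        if not zarr_url.startswith(zarr_dir):
            raise ValueError(f"{zarr_url} is not relative to {zarr_dir}.")
        if zarr_url == zarr_dir:
            raise ValueError("zarr_url cannot be equal to zarr_dir.")
        if zarr_url in existing:
            raise ValueError(f"{zarr_url} is already present in the dataset.")

    validate_zarr_url("/data/plate.zarr/B/03/0", "/data", set())   # accepted
    validate_zarr_url("/data", "/data", set())                     # raises ValueError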
@@ -0,0 +1,150 @@
+ import json
+ from pathlib import Path
+ from typing import Optional
+
+ from fastapi import APIRouter
+ from fastapi import Depends
+ from fastapi import HTTPException
+ from fastapi import status
+
+ from ....db import AsyncSession
+ from ....db import get_async_db
+ from ....models.v2 import JobV2
+ from ....schemas.v2.dataset import WorkflowTaskStatusTypeV2
+ from ....schemas.v2.status import StatusReadV2
+ from ....security import current_active_user
+ from ....security import User
+ from ._aux_functions import _get_dataset_check_owner
+ from ._aux_functions import _get_submitted_jobs_statement
+ from ._aux_functions import _get_workflow_check_owner
+ from fractal_server.app.runner.filenames import HISTORY_FILENAME
+
+ router = APIRouter()
+
+
+ @router.get(
+     "/project/{project_id}/status/",
+     response_model=StatusReadV2,
+ )
+ async def get_workflowtask_status(
+     project_id: int,
+     dataset_id: int,
+     workflow_id: int,
+     user: User = Depends(current_active_user),
+     db: AsyncSession = Depends(get_async_db),
+ ) -> Optional[StatusReadV2]:
+     """
+     Extract the status of all `WorkflowTaskV2` of a given `WorkflowV2` that ran
+     on a given `DatasetV2`.
+
+     *NOTE*: the current endpoint is not guaranteed to provide consistent
+     results if the workflow task list is modified in a non-trivial way
+     (that is, by adding intermediate tasks, removing tasks, or changing their
+     order). See fractal-server GitHub issues: 793, 1083.
+     """
+     # Get the dataset DB entry
+     output = await _get_dataset_check_owner(
+         project_id=project_id,
+         dataset_id=dataset_id,
+         user_id=user.id,
+         db=db,
+     )
+     dataset = output["dataset"]
+
+     # Get the workflow DB entry
+     workflow = await _get_workflow_check_owner(
+         project_id=project_id,
+         workflow_id=workflow_id,
+         user_id=user.id,
+         db=db,
+     )
+
+     # Check whether there exists a submitted job associated to this
+     # workflow/dataset pair. If it does exist, it will be used later.
+     # If there are multiple jobs, raise an error.
+     stm = _get_submitted_jobs_statement()
+     stm = stm.where(JobV2.dataset_id == dataset_id)
+     stm = stm.where(JobV2.workflow_id == workflow_id)
+     res = await db.execute(stm)
+     running_jobs = res.scalars().all()
+     if len(running_jobs) == 0:
+         running_job = None
+     elif len(running_jobs) == 1:
+         running_job = running_jobs[0]
+     else:
+         string_ids = str([job.id for job in running_jobs])[1:-1]
+         raise HTTPException(
+             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+             detail=(
+                 f"Cannot get WorkflowTaskV2 statuses as DatasetV2 {dataset.id}"
+                 f" is linked to multiple active jobs: {string_ids}."
+             ),
+         )
+
+     # Initialize empty dictionary for WorkflowTaskV2 status
+     workflow_tasks_status_dict: dict = {}
+
+     # Lowest priority: read status from DB, which corresponds to jobs that are
+     # not running
+     history = dataset.history
+     for history_item in history:
+         wftask_id = history_item["workflowtask"]["id"]
+         wftask_status = history_item["status"]
+         workflow_tasks_status_dict[wftask_id] = wftask_status
+
+     if running_job is None:
+         # If no job is running, the chronological-last history item is also the
+         # positional-last workflow task to be included in the response.
+         if len(dataset.history) > 0:
+             last_valid_wftask_id = dataset.history[-1]["workflowtask"]["id"]
+         else:
+             last_valid_wftask_id = None
+     else:
+         # If a job is running, then gather more up-to-date information
+
+         # Mid priority: Set all WorkflowTask's that are part of the running job
+         # as "submitted"
+         start = running_job.first_task_index
+         end = running_job.last_task_index + 1
+         for wftask in workflow.task_list[start:end]:
+             workflow_tasks_status_dict[
+                 wftask.id
+             ] = WorkflowTaskStatusTypeV2.SUBMITTED
+
+         # The last workflow task that is included in the submitted job is also
+         # the positional-last workflow task to be included in the response.
+         last_valid_wftask_id = workflow.task_list[end - 1]
+
+         # Highest priority: Read status updates coming from the running-job
+         # temporary file. Note: this file only contains information on
+         # WorkflowTask's that ran through successfully.
+         tmp_file = Path(running_job.working_dir) / HISTORY_FILENAME
+         try:
+             with tmp_file.open("r") as f:
+                 history = json.load(f)
+         except FileNotFoundError:
+             history = []
+         for history_item in history:
+             wftask_id = history_item["workflowtask"]["id"]
+             wftask_status = history_item["status"]
+             workflow_tasks_status_dict[wftask_id] = wftask_status
+
+     # Based on previously-gathered information, clean up the response body
+     clean_workflow_tasks_status_dict = {}
+     for wf_task in workflow.task_list:
+         wf_task_status = workflow_tasks_status_dict.get(wf_task.id)
+         if wf_task_status is None:
+             # If a wftask ID was not found, ignore it and continue
+             continue
+         clean_workflow_tasks_status_dict[wf_task.id] = wf_task_status
+         if wf_task_status == WorkflowTaskStatusTypeV2.FAILED:
+             # Starting from the beginning of `workflow.task_list`, stop the
+             # first time that you hit a failed job
+             break
+         if wf_task.id == last_valid_wftask_id:
+             # Starting from the beginning of `workflow.task_list`, stop the
+             # first time that you hit `last_valid_wftask_id`
+             break
+
+     response_body = StatusReadV2(status=clean_workflow_tasks_status_dict)
+     return response_body
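A minimal sketch of querying the new status endpoint; dataset_id and workflow_id travel as query parameters. The base URL, the /api/v2 prefix, the IDs, and the token are assumptions:

    # Hypothetical client call for the new GET /project/{project_id}/status/ route.
    import httpx

    BASE = "http://localhost:8000/api/v2"          # assumed server URL and v2 prefix
    HEADERS = {"Authorization": "Bearer <token>"}  # placeholder credentials

    resp = httpx.get(
        f"{BASE}/project/1/status/",
        params={"dataset_id": 1, "workflow_id": 1},
        headers=HEADERS,
    )
    # StatusReadV2 body: {"status": {"<wftask_id>": "<WorkflowTaskStatusTypeV2 value>", ...}}
    print(resp.json()["status"])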
@@ -200,9 +200,15 @@ async def update_workflowtask(
              setattr(db_wf_task, key, actual_args)
          elif key == "args_non_parallel":
              # Get default arguments via a Task property method
-             default_args = deepcopy(
-                 db_wf_task.task.default_args_non_parallel_from_args_schema
-             )
+             if db_wf_task.is_legacy_task:
+                 # This is only needed so that we don't have to modify the rest
+                 # of this block, but legacy task cannot take any non-parallel
+                 # args (see checks above).
+                 default_args = {}
+             else:
+                 default_args = deepcopy(
+                     db_wf_task.task.default_args_non_parallel_from_args_schema
+                 )
              # Override default_args with args value items
              actual_args = default_args.copy()
              if value is not None:
@@ -125,7 +125,10 @@ def get_slurm_config(
      slurm_dict["mem_per_task_MB"] = mem_per_task_MB

      # Job name
-     job_name = wftask.task.name.replace(" ", "_")
+     if wftask.is_legacy_task:
+         job_name = wftask.task_legacy.name.replace(" ", "_")
+     else:
+         job_name = wftask.task.name.replace(" ", "_")
      slurm_dict["job_name"] = job_name

      # Optional SLURM arguments and extra lines
@@ -96,8 +96,15 @@ def assemble_history_failed_job(

      # Part 3/B: Append failed task to history
      if failed_wftask is not None:
-         failed_wftask_dump = failed_wftask.model_dump(exclude={"task"})
-         failed_wftask_dump["task"] = failed_wftask.task.model_dump()
+         failed_wftask_dump = failed_wftask.model_dump(
+             exclude={"task", "task_legacy"}
+         )
+         if failed_wftask.is_legacy_task:
+             failed_wftask_dump[
+                 "task_legacy"
+             ] = failed_wftask.task_legacy.model_dump()
+         else:
+             failed_wftask_dump["task"] = failed_wftask.task.model_dump()
          new_history_item = dict(
              workflowtask=failed_wftask_dump,
              status=WorkflowTaskStatusTypeV2.FAILED,
@@ -1,4 +1,5 @@
  import json
+ import logging
  from concurrent.futures import ThreadPoolExecutor
  from copy import copy
  from copy import deepcopy
@@ -37,6 +38,8 @@ def execute_tasks_v2(
      submit_setup_call: Callable = no_op_submit_setup_call,
  ) -> DatasetV2:

+     logger = logging.getLogger(logger_name)
+
      if not workflow_dir.exists():  # FIXME: this should have already happened
          workflow_dir.mkdir()

@@ -48,6 +51,9 @@

      for wftask in wf_task_list:
          task = wftask.task
+         task_legacy = wftask.task_legacy
+         task_name = task_legacy.name if wftask.is_legacy_task else task.name
+         logger.debug(f'SUBMIT {wftask.order}-th task (name="{task_name}")')

          # PRE TASK EXECUTION

@@ -63,12 +69,13 @@
              filters=Filters(**pre_filters),
          )
          # Verify that filtered images comply with task input_types
-         for image in filtered_images:
-             if not match_filter(image, Filters(types=task.input_types)):
-                 raise ValueError(
-                     f"Filtered images include {image}, which does "
-                     f"not comply with {task.input_types=}."
-                 )
+         if not wftask.is_legacy_task:
+             for image in filtered_images:
+                 if not match_filter(image, Filters(types=task.input_types)):
+                     raise ValueError(
+                         f"Filtered images include {image}, which does "
+                         f"not comply with {task.input_types=}."
+                     )

          # TASK EXECUTION (V2)
          if not wftask.is_legacy_task:
@@ -77,7 +84,7 @@
                  images=filtered_images,
                  zarr_dir=zarr_dir,
                  wftask=wftask,
-                 task=wftask.task,
+                 task=task,
                  workflow_dir=workflow_dir,
                  workflow_dir_user=workflow_dir_user,
                  executor=executor,
@@ -88,7 +95,7 @@
              current_task_output = run_v2_task_parallel(
                  images=filtered_images,
                  wftask=wftask,
-                 task=wftask.task,
+                 task=task,
                  workflow_dir=workflow_dir,
                  workflow_dir_user=workflow_dir_user,
                  executor=executor,
@@ -100,7 +107,7 @@
                  images=filtered_images,
                  zarr_dir=zarr_dir,
                  wftask=wftask,
-                 task=wftask.task,
+                 task=task,
                  workflow_dir=workflow_dir,
                  workflow_dir_user=workflow_dir_user,
                  executor=executor,
@@ -114,9 +121,11 @@
              current_task_output = run_v1_task_parallel(
                  images=filtered_images,
                  wftask=wftask,
-                 task_legacy=wftask.task_legacy,
+                 task_legacy=task_legacy,
                  executor=executor,
                  logger_name=logger_name,
+                 workflow_dir=workflow_dir,
+                 workflow_dir_user=workflow_dir_user,
                  submit_setup_call=submit_setup_call,
              )

@@ -155,7 +164,8 @@
              # Update image attributes/types with task output and manifest
              updated_attributes.update(image["attributes"])
              updated_types.update(image["types"])
-             updated_types.update(task.output_types)
+             if not wftask.is_legacy_task:
+                 updated_types.update(task.output_types)

              # Unset attributes with None value
              updated_attributes = {
@@ -182,6 +192,11 @@
                      f"{zarr_dir} is not a parent directory of "
                      f"{image['zarr_url']}"
                  )
+             # Check that image['zarr_url'] is not equal to zarr_dir
+             if image["zarr_url"] == zarr_dir:
+                 raise ValueError(
+                     "image['zarr_url'] cannot be equal to zarr_dir"
+                 )
              # Propagate attributes and types from `origin` (if any)
              updated_attributes = {}
              updated_types = {}
@@ -202,7 +217,8 @@
                  if value is not None
              }
              updated_types.update(image["types"])
-             updated_types.update(task.output_types)
+             if not wftask.is_legacy_task:
+                 updated_types.update(task.output_types)
              new_image = dict(
                  zarr_url=image["zarr_url"],
                  origin=image["origin"],
@@ -277,6 +293,8 @@
          with open(workflow_dir / IMAGES_FILENAME, "w") as f:
              json.dump(tmp_images, f, indent=2)

+         logger.debug(f'END {wftask.order}-th task (name="{task_name}")')
+
      # NOTE: tmp_history only contains the newly-added history items (to be
      # appended to the original history), while tmp_filters and tmp_images
      # represent the new attributes (to replace the original ones)
@@ -146,6 +146,9 @@
      submit_setup_call: Callable = no_op_submit_setup_call,
  ) -> TaskOutput:

+     if len(images) == 0:
+         return TaskOutput()
+
      _check_parallelization_list_size(images)

      executor_options = _get_executor_options(
@@ -249,6 +252,9 @@
      # 3/B: parallel part of a compound task
      _check_parallelization_list_size(parallelization_list)

+     if len(parallelization_list) == 0:
+         return TaskOutput()
+
      list_function_kwargs = []
      for ind, parallelization_item in enumerate(parallelization_list):
          list_function_kwargs.append(
@@ -313,10 +319,11 @@
      for ind, image in enumerate(images):
          list_function_kwargs.append(
              convert_v2_args_into_v1(
-                 dict(
+                 kwargs_v2=dict(
                      zarr_url=image["zarr_url"],
                      **(wftask.args_parallel or {}),
-                 )
+                 ),
+                 parallelization_level=task_legacy.parallelization_level,
              ),
          )
          list_function_kwargs[-1][_COMPONENT_KEY_] = _index_to_component(ind)
@@ -116,7 +116,10 @@
      except TaskExecutionError as e:
          e.workflow_task_order = wftask.order
          e.workflow_task_id = wftask.id
-         e.task_name = wftask.task.name
+         if wftask.is_legacy_task:
+             e.task_name = wftask.task_legacy.name
+         else:
+             e.task_name = wftask.task.name
          raise e

      try:
@@ -3,13 +3,23 @@ from pathlib import Path
  from typing import Any


- def convert_v2_args_into_v1(kwargs_v2: dict[str, Any]) -> dict[str, Any]:
-
+ def convert_v2_args_into_v1(
+     kwargs_v2: dict[str, Any],
+     parallelization_level: str = "image",
+ ) -> dict[str, Any]:
      kwargs_v1 = deepcopy(kwargs_v2)

      zarr_url = kwargs_v1.pop("zarr_url")
      input_path = Path(zarr_url).parents[3].as_posix()
-     component = zarr_url.replace(input_path, "").lstrip("/")
+     image_component = zarr_url.replace(input_path, "").lstrip("/")
+     if parallelization_level == "image":
+         component = image_component
+     elif parallelization_level == "well":
+         component = str(Path(image_component).parent)
+     elif parallelization_level == "plate":
+         component = str(Path(image_component).parents[2])
+     else:
+         raise ValueError(f"Invalid {parallelization_level=}.")

      kwargs_v1.update(
          input_paths=[input_path],
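A small standalone illustration of the path arithmetic above, using a hypothetical OME-Zarr layout (the example path is not taken from this diff):

    # Mirrors the component derivation in convert_v2_args_into_v1.
    from pathlib import Path

    zarr_url = "/data/out/my_plate.zarr/B/03/0"                # hypothetical image path
    input_path = Path(zarr_url).parents[3].as_posix()          # "/data/out"
    image_component = zarr_url.replace(input_path, "").lstrip("/")

    print(image_component)                        # my_plate.zarr/B/03/0  ("image" level)
    print(str(Path(image_component).parent))      # my_plate.zarr/B/03    ("well" level)
    print(str(Path(image_component).parents[2]))  # my_plate.zarr         ("plate" level)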
@@ -12,6 +12,7 @@ from .dumps import WorkflowTaskDumpV2
  from .project import ProjectReadV2
  from .workflowtask import WorkflowTaskStatusTypeV2
  from fractal_server.images import Filters
+ from fractal_server.images import SingleImage
  from fractal_server.urls import normalize_url


@@ -25,20 +26,6 @@ class _DatasetHistoryItemV2(BaseModel):
      parallelization: Optional[dict]


- class DatasetStatusReadV2(BaseModel):
-     """
-     Response type for the
-     `/project/{project_id}/dataset/{dataset_id}/status/` endpoint
-     """
-
-     status: Optional[
-         dict[
-             str,
-             WorkflowTaskStatusTypeV2,
-         ]
-     ] = None
-
-
  # CRUD


@@ -95,3 +82,40 @@ class DatasetUpdateV2(BaseModel):
          return v

      _name = validator("name", allow_reuse=True)(valstr("name"))
+
+
+ class DatasetImportV2(BaseModel):
+     """
+     Class for `Dataset` import.
+
+     Attributes:
+         name:
+         zarr_dir:
+         images:
+         filters:
+     """
+
+     class Config:
+         extra = "forbid"
+
+     name: str
+     zarr_dir: str
+     images: list[SingleImage] = Field(default_factory=[])
+     filters: Filters = Field(default_factory=Filters)
+
+
+ class DatasetExportV2(BaseModel):
+     """
+     Class for `Dataset` export.
+
+     Attributes:
+         name:
+         zarr_dir:
+         images:
+         filters:
+     """
+
+     name: str
+     zarr_dir: str
+     images: list[SingleImage]
+     filters: Filters
@@ -45,6 +45,8 @@ class WorkflowTaskDumpV2(BaseModel):
      workflow_id: int
      order: Optional[int]

+     is_legacy_task: bool
+
      input_filters: Filters

      task_id: Optional[int]
@@ -0,0 +1,16 @@
+ from pydantic import BaseModel
+ from pydantic import Field
+
+ from .workflowtask import WorkflowTaskStatusTypeV2
+
+
+ class StatusReadV2(BaseModel):
+     """
+     Response type for the
+     `/project/{project_id}/status/` endpoint
+     """
+
+     status: dict[
+         str,
+         WorkflowTaskStatusTypeV2,
+     ] = Field(default_factory=dict)
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: fractal-server
- Version: 2.0.0a8
+ Version: 2.0.0a10
  Summary: Server component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause
@@ -23,7 +23,7 @@ Requires-Dist: cloudpickle (>=3.0.0,<3.1.0)
  Requires-Dist: clusterfutures (>=0.5,<0.6)
  Requires-Dist: fastapi (>=0.110.0,<0.111.0)
  Requires-Dist: fastapi-users[oauth] (>=12.1.0,<13.0.0)
- Requires-Dist: gunicorn (>=21.2.0,<22.0.0) ; extra == "gunicorn"
+ Requires-Dist: gunicorn (>=21.2,<23.0) ; extra == "gunicorn"
  Requires-Dist: packaging (>=23.2,<24.0)
  Requires-Dist: psycopg2 (>=2.9.5,<3.0.0) ; extra == "postgres"
  Requires-Dist: pydantic (>=1.10.8,<2)
@@ -1,4 +1,4 @@
- fractal_server/__init__.py,sha256=K1rg40LWC0HcWGPng5LXrwtqh2Z-3XITlzdSqTIPpeQ,24
+ fractal_server/__init__.py,sha256=Ndyws1HQt0rbkLhMYPxRDuKeooalmEoA6oesT2NmbbU,25
  fractal_server/__main__.py,sha256=CocbzZooX1UtGqPi55GcHGNxnrJXFg5tUU5b3wyFCyo,4958
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -18,8 +18,8 @@ fractal_server/app/models/v2/dataset.py,sha256=-7sxHEw4IIAvF_uSan7tA3o8hvoakBkQ0
  fractal_server/app/models/v2/job.py,sha256=PCJf0_NYIc5boXL6e6P72BvYJGydCZOGKnW2DT4Sw9g,1535
  fractal_server/app/models/v2/project.py,sha256=CqDEKzdVxmFDMee6DnVOyX7WGmdn-dQSLSekzw_OLUc,817
  fractal_server/app/models/v2/task.py,sha256=9ZPhug3VWyeqgT8wQ9_8ZXQ2crSiiicRipxrxTslOso,3257
- fractal_server/app/models/v2/workflow.py,sha256=4pSTeZC78OQbgHHC5S0ge6pK1AP6ak7Qew_0ZNM9xuw,1256
- fractal_server/app/models/v2/workflowtask.py,sha256=f2a85MSAyBAdC7oG6SR8mViMNqlomQWaIB08n3ZhT-0,2727
+ fractal_server/app/models/v2/workflow.py,sha256=YBgFGCziUgU0aJ5EM3Svu9W2c46AewZO9VBlFCHiSps,1069
+ fractal_server/app/models/v2/workflowtask.py,sha256=kEm2k1LI0KK9vlTH7DL1NddaEUpIvMkFi42vahwDpd8,2695
  fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/v1.py,sha256=uY6H1znlAlrM9e1MG2EThTqwciCl87Twew34JM5W6IU,13981
@@ -34,18 +34,19 @@ fractal_server/app/routes/api/v1/task.py,sha256=4zUXMtq5M95XjaZs1t9oibYHiDIwxpM-
  fractal_server/app/routes/api/v1/task_collection.py,sha256=_cY3pPRGchdWPuJ1XudMZMVJ0IC0_XVH0XwLTiAbRGg,8873
  fractal_server/app/routes/api/v1/workflow.py,sha256=ZObifWTPi100oRQ1wEER8Sgsr3Neo8QVdCCFQnWMNZ0,10930
  fractal_server/app/routes/api/v1/workflowtask.py,sha256=ox-DIIqYV4K35hCu86eGa2SHnR5IQml-I00UHEwnmHQ,5579
- fractal_server/app/routes/api/v2/__init__.py,sha256=x56HcY1uBNCgq4BRVj-0j6bAj6OsTN97RNDqY8NefJ8,1373
+ fractal_server/app/routes/api/v2/__init__.py,sha256=UNgODxoEXfQpQDjvsnMvHaUWbZOrcHhEXNisLcU-0tE,1487
  fractal_server/app/routes/api/v2/_aux_functions.py,sha256=TCHf3aM-KQxaNJen10CGX1Da5IIra00xRF39FUTU698,14301
- fractal_server/app/routes/api/v2/dataset.py,sha256=BmNKrhHUVoy0EuEYZpdocR_aG7U2tNuOpoAVimaGIBY,10008
- fractal_server/app/routes/api/v2/images.py,sha256=LX9-EYOGN4NHSARNXYGgSK4_vvAVFwzTiwVM5KuVukA,7589
+ fractal_server/app/routes/api/v2/dataset.py,sha256=0JGRnK1DRQKgVA3FDhK8VdoRglLYFxgkMQOaoWI-tiQ,7853
+ fractal_server/app/routes/api/v2/images.py,sha256=4r_HblPWyuKSZSJZfn8mbDaLv1ncwZU0gWdKneZcNG4,7894
  fractal_server/app/routes/api/v2/job.py,sha256=9mXaKCX_N3FXM0GIxdE49nWl_hJZ8CBLBIaMMhaCKOM,5334
  fractal_server/app/routes/api/v2/project.py,sha256=i9a19HAqE36N92G60ZYgObIP9nv-hR7Jt5nd9Dkhz1g,6024
+ fractal_server/app/routes/api/v2/status.py,sha256=3bqQejJ3TnIMan5wK6jr9sv4ypsQr9WWU8xqlvTgDCE,5739
  fractal_server/app/routes/api/v2/submit.py,sha256=iszII5CvWDEjGPTphBgH9FVS1pNb5m11Xc8xozGgjgI,6901
  fractal_server/app/routes/api/v2/task.py,sha256=gJ0LruSk-Q1iMw8ZOX8C0wrZ4S4DGlQTr_5SdJJud0Q,7130
  fractal_server/app/routes/api/v2/task_collection.py,sha256=iw74UF8qdQa9pJf0DvSjihng6ri2k2HtW2UhMS_a8Zc,8904
  fractal_server/app/routes/api/v2/task_legacy.py,sha256=P_VJv9v0yzFUBuS-DQHhMVSOe20ecGJJcFBqiiFciOM,1628
  fractal_server/app/routes/api/v2/workflow.py,sha256=sw-1phO_rrmDAcWX9Zqb9M8SfrWF78-02AuLB1-D1PU,11845
- fractal_server/app/routes/api/v2/workflowtask.py,sha256=L4hYpb-ihKNfPxM5AnZqPhCdiojI9Eq5TR0wf-0vP_s,8414
+ fractal_server/app/routes/api/v2/workflowtask.py,sha256=I1nrIV5J_DW1IeBq0q9VmUeBDo7P6x7qYO_Ocls2Pno,8720
  fractal_server/app/routes/auth.py,sha256=Xv80iqdyfY3lyicYs2Y8B6zEDEnyUu_H6_6psYtv3R4,4885
  fractal_server/app/routes/aux/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/aux/_job.py,sha256=5gKgvArAruSkMQuPN34Vvzi89WJbwWPsx0oDAa_iXu4,1248
@@ -85,15 +86,15 @@ fractal_server/app/runner/v2/_local/_submit_setup.py,sha256=deagsLSy6A3ZHKaSDcQq
  fractal_server/app/runner/v2/_local/executor.py,sha256=QrJlD77G6q4WohoJQO7XXbvi2RlCUsNvMnPDEZIoAqA,3620
  fractal_server/app/runner/v2/_slurm/__init__.py,sha256=srxn5-KdQxqD8cWJmOJlSoctbXYlyCMM249xWGY9bhI,4409
  fractal_server/app/runner/v2/_slurm/_submit_setup.py,sha256=tsZHQdVy3VxENMdsBzHltrVWzugBppq0cFrHtaVzoUA,2793
- fractal_server/app/runner/v2/_slurm/get_slurm_config.py,sha256=sqP-hs58TPt849rx10VRFKWX_DgLDPQcKZJcE0zKBXs,6621
+ fractal_server/app/runner/v2/_slurm/get_slurm_config.py,sha256=I_lOS75iGYyJ74-gNwwcPadvZ9vI9HYe04WMln5GJ5Q,6726
  fractal_server/app/runner/v2/deduplicate_list.py,sha256=-imwO7OB7ATADEnqVbTElUwoY0YIJCTf_SbWJNN9OZg,639
- fractal_server/app/runner/v2/handle_failed_job.py,sha256=fipRJT5Y8UY0US4bXUX-4ORTAQ1AetZcCAOVCjDO3_c,5202
+ fractal_server/app/runner/v2/handle_failed_job.py,sha256=M1r3dnrbUMo_AI2qjaVuGhieMAyLh5gcvB10YOBpjvI,5415
  fractal_server/app/runner/v2/merge_outputs.py,sha256=IHuHqbKmk97K35BFvTrKVBs60z3e_--OzXTnsvmA02c,1281
- fractal_server/app/runner/v2/runner.py,sha256=rBRehRDduGU0TUOkgQN6WbIGhDWZ6GOat4bv7IVB8cA,11784
- fractal_server/app/runner/v2/runner_functions.py,sha256=LfO1-FJF70_Qh78NQTCHJWyzyr011wvvtnzB6nTj5ZM,10087
- fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=Pp3hsj1i1t4ExDMcUBkQ27yEi7kjlvymY6q6eDiC8DM,3845
+ fractal_server/app/runner/v2/runner.py,sha256=K6bmWbQRSZwbO6ZI2Bp7wNxYdkHcXxhWwBObMxJ0iSU,12599
+ fractal_server/app/runner/v2/runner_functions.py,sha256=qVGG9KlH8ObX4Y0kr0q6qE8OpWFwf4RnOHhgPRRdj5M,10293
+ fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=djNKD1y_EE0Q9Jkzh1QdKpjM66JVsLQgX2_zJT0xQlA,3947
  fractal_server/app/runner/v2/task_interface.py,sha256=TZLVJs6CNFo2lFhr-lsDxe585cEhRv48eA490LS9aqc,1746
- fractal_server/app/runner/v2/v1_compat.py,sha256=6UijuRYbB2ry2mM073u1fW4CSTeelB11lmoj_TOGtm4,511
+ fractal_server/app/runner/v2/v1_compat.py,sha256=t0ficzAHUFaaeI56nqTb4YEKxfARF7L9Y6ijtJCwjP8,912
  fractal_server/app/schemas/__init__.py,sha256=VL55f3CTFngXHYkOsFaLBEEkEEewEWI5ODlcGTI7cqA,157
  fractal_server/app/schemas/_validators.py,sha256=1dTOYr1IZykrxuQSV2-zuEMZbKe_nGwrfS7iUrsh-sE,3461
  fractal_server/app/schemas/state.py,sha256=t4XM04aqxeluh8MfvD7LfEc-8-dOmUVluZHhLsfxxkc,692
@@ -108,11 +109,12 @@ fractal_server/app/schemas/v1/task.py,sha256=7BxOZ_qoRQ8n3YbQpDvB7VMcxB5fSYQmR5R
  fractal_server/app/schemas/v1/task_collection.py,sha256=uvq9bcMaGD_qHsh7YtcpoSAkVAbw12eY4DocIO3MKOg,3057
  fractal_server/app/schemas/v1/workflow.py,sha256=tuOs5E5Q_ozA8if7YPZ07cQjzqB_QMkBS4u92qo4Ro0,4618
  fractal_server/app/schemas/v2/__init__.py,sha256=zlCYrplCWwnCL9-BYsExRMfVzhBy21IMBfdHPMgJZYk,1752
- fractal_server/app/schemas/v2/dataset.py,sha256=V04_2bXgD12L53aW4_Ls8rnGLLgCaiVAFoAedx5of8Q,2086
- fractal_server/app/schemas/v2/dumps.py,sha256=Xen0OPf1Ax9i_7ItrAPvCk1OCNcUsnhlLRiyny89aLM,1997
+ fractal_server/app/schemas/v2/dataset.py,sha256=MGv0bdzEIQFNy8ARqiDn_neC1mJJTMXFzbb9M5l4xxg,2474
+ fractal_server/app/schemas/v2/dumps.py,sha256=IpIT_2KxJd7qTgW2NllDknGeP7vBAJDfyz1I5p3TytU,2023
  fractal_server/app/schemas/v2/job.py,sha256=zfF9K3v4jWUJ7M482ta2CkqUJ4tVT4XfVt60p9IRhP0,3250
  fractal_server/app/schemas/v2/manifest.py,sha256=N37IWohcfO3_y2l8rVM0h_1nZq7m4Izxk9iL1vtwBJw,6243
  fractal_server/app/schemas/v2/project.py,sha256=u7S4B-bote1oGjzAGiZ-DuQIyeRAGqJsI71Tc1EtYE0,736
+ fractal_server/app/schemas/v2/status.py,sha256=SQaUpQkjFq5c5k5J4rOjNhuQaDOEg8lksPhkKmPU5VU,332
  fractal_server/app/schemas/v2/task.py,sha256=7IfxiZkaVqlARy7WYE_H8m7j_IEcuQaZORUrs6b5YuY,4672
  fractal_server/app/schemas/v2/task_collection.py,sha256=sY29NQfJrbjiidmVkVjSIH-20wIsmh7G1QOdr05KoDQ,3171
  fractal_server/app/schemas/v2/workflow.py,sha256=Zzx3e-qgkH8le0FUmAx9UrV5PWd7bj14PPXUh_zgZXM,1827
@@ -159,8 +161,8 @@ fractal_server/tasks/v2/background_operations.py,sha256=zr6j3uoWmCeW2EA9auxWNZ0s
  fractal_server/tasks/v2/get_collection_data.py,sha256=Qhf2T_aaqAfqu9_KpUSlXsS7EJoZQbEPEreHHa2jco8,502
  fractal_server/urls.py,sha256=5o_qq7PzKKbwq12NHSQZDmDitn5RAOeQ4xufu-2v9Zk,448
  fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
- fractal_server-2.0.0a8.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.0.0a8.dist-info/METADATA,sha256=sKsPtIdIowQ-avbzi1OajV7Qet2IIuHXM9duIYpxpyA,4204
- fractal_server-2.0.0a8.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
- fractal_server-2.0.0a8.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.0.0a8.dist-info/RECORD,,
+ fractal_server-2.0.0a10.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.0.0a10.dist-info/METADATA,sha256=rZurMguM3pZbihOJYXyhi3zM3aRVhRrzDc_kuoTh910,4201
+ fractal_server-2.0.0a10.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+ fractal_server-2.0.0a10.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.0.0a10.dist-info/RECORD,,