fractal-server 2.0.0a1__py3-none-any.whl → 2.0.0a3__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in the public registry. It is provided for informational purposes only.
@@ -1 +1 @@
-__VERSION__ = "2.0.0a1"
+__VERSION__ = "2.0.0a3"
@@ -51,5 +51,5 @@ class DatasetV2(SQLModel, table=True):
     )

     @property
-    def image_paths(self) -> list[str]:
-        return [image["path"] for image in self.images]
+    def image_zarr_urls(self) -> list[str]:
+        return [image["zarr_url"] for image in self.images]
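The rename propagates from the model layer outward: every image dict stored in `DatasetV2.images` is now keyed by `zarr_url` rather than `path`. A minimal hedged sketch of the renamed property (the image dict below is illustrative, not part of the diff):

    # Hypothetical illustration of DatasetV2.image_zarr_urls:
    dataset.images = [{"zarr_url": "/data/plate.zarr/A/01/0", "attributes": {}, "types": {}}]
    assert dataset.image_zarr_urls == ["/data/plate.zarr/A/01/0"]
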
@@ -27,6 +27,7 @@ from ...models.v2 import ProjectV2
 from ...runner.filenames import WORKFLOW_LOG_FILENAME
 from ...schemas.v2 import JobReadV2
 from ...schemas.v2 import JobUpdateV2
+from ...schemas.v2 import ProjectReadV2
 from ...security import current_active_superuser
 from ..aux._job import _write_shutdown_file
 from ..aux._job import _zip_folder_to_byte_stream
@@ -52,6 +53,35 @@ def _convert_to_db_timestamp(dt: datetime) -> datetime:
     return _dt


+@router_admin_v2.get("/project/", response_model=list[ProjectReadV2])
+async def view_project(
+    id: Optional[int] = None,
+    user_id: Optional[int] = None,
+    user: User = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+) -> list[ProjectReadV2]:
+    """
+    Query `ProjectV2` table.
+
+    Args:
+        id: If not `None`, select a given `project.id`.
+        user_id: If not `None`, select a given `project.user_id`.
+    """
+
+    stm = select(ProjectV2)
+
+    if id is not None:
+        stm = stm.where(ProjectV2.id == id)
+    if user_id is not None:
+        stm = stm.where(ProjectV2.user_list.any(User.id == user_id))
+
+    res = await db.execute(stm)
+    project_list = res.scalars().all()
+    await db.close()
+
+    return project_list
+
+
 @router_admin_v2.get("/job/", response_model=list[JobReadV2])
 async def view_job(
     id: Optional[int] = None,
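The new superuser-only query endpoint can be exercised as in the following hedged sketch; the `/admin/v2` prefix and the bearer-token auth flow are assumptions based on how the other admin routes are exposed and are not shown in this diff:

    import httpx

    # List all ProjectV2 entries owned by user 1 (host and token are illustrative).
    resp = httpx.get(
        "http://localhost:8000/admin/v2/project/",
        params={"user_id": 1},
        headers={"Authorization": "Bearer <SUPERUSER-TOKEN>"},
    )
    resp.raise_for_status()
    projects = resp.json()  # list of ProjectReadV2 payloads
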
@@ -8,6 +8,7 @@ from typing import Union
 
 from fastapi import HTTPException
 from fastapi import status
+from sqlalchemy.orm.attributes import flag_modified
 from sqlmodel import select
 from sqlmodel.sql.expression import SelectOfScalar
 
@@ -488,9 +489,9 @@ async def _workflow_insert_task(
         meta_non_parallel=final_meta_non_parallel,
         **input_filters_kwarg,
     )
-    db.add(wf_task)
     db_workflow.task_list.insert(order, wf_task)
     db_workflow.task_list.reorder()  # type: ignore
+    flag_modified(db_workflow, "task_list")
     await db.commit()
     await db.refresh(wf_task)
 
@@ -186,7 +186,6 @@ async def delete_dataset(
     jobs = res.scalars().all()
     for job in jobs:
         job.dataset_id = None
-    await db.commit()
 
     # Delete dataset
     await db.delete(dataset)
@@ -35,7 +35,7 @@ class ImagePage(BaseModel):
 
 
 class ImageQuery(BaseModel):
-    path: Optional[str]
+    zarr_url: Optional[str]
     filters: Filters = Field(default_factory=Filters)
 
 
@@ -56,11 +56,11 @@ async def post_new_image(
     )
     dataset = output["dataset"]
 
-    if new_image.path in dataset.image_paths:
+    if new_image.zarr_url in dataset.image_zarr_urls:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
             detail=(
-                f"Image with path '{new_image.path}' "
+                f"Image with zarr_url '{new_image.zarr_url}' "
                 f"already in DatasetV2 {dataset_id}",
             ),
         )
@@ -121,9 +121,13 @@ async def query_dataset_images(
 
     if query is not None:
 
-        if query.path is not None:
+        if query.zarr_url is not None:
             image = next(
-                (image for image in images if image["path"] == query.path),
+                (
+                    image
+                    for image in images
+                    if image["zarr_url"] == query.zarr_url
+                ),
                 None,
             )
             if image is None:
@@ -180,7 +184,7 @@ async def query_dataset_images(
 async def delete_dataset_images(
     project_id: int,
     dataset_id: int,
-    path: str,
+    zarr_url: str,
     user: User = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> Response:
@@ -191,12 +195,16 @@ async def delete_dataset_images(
     dataset = output["dataset"]
 
     image_to_remove = next(
-        (image for image in dataset.images if image["path"] == path), None
+        (image for image in dataset.images if image["zarr_url"] == zarr_url),
+        None,
     )
     if image_to_remove is None:
         raise HTTPException(
             status_code=status.HTTP_404_NOT_FOUND,
-            detail=f"No image with path '{path}' in DatasetV2 {dataset_id}.",
+            detail=(
+                f"No image with zarr_url '{zarr_url}' in "
+                f"DatasetV2 {dataset_id}."
+            ),
         )
 
     dataset.images.remove(image_to_remove)
@@ -169,7 +169,6 @@ async def delete_project(
             job.workflow_id = None
         # Delete workflow
         await db.delete(wf)
-        await db.commit()
 
     # Dataset
     stm = select(DatasetV2).where(DatasetV2.project_id == project_id)
@@ -185,7 +184,6 @@ async def delete_project(
             job.dataset_id = None
         # Delete dataset
         await db.delete(ds)
-        await db.commit()
 
     # Job
     stm = select(JobV2).where(JobV2.project_id == project_id)
@@ -194,8 +192,6 @@ async def delete_project(
     for job in jobs:
         job.project_id = None
 
-    await db.commit()
-
     await db.delete(project)
     await db.commit()
 
@@ -16,7 +16,6 @@ from ....models.v2 import JobV2
 from ....models.v2 import ProjectV2
 from ....models.v2 import TaskV2
 from ....models.v2 import WorkflowV2
-from ....schemas.v1 import WorkflowTaskCreateV1
 from ....schemas.v2 import WorkflowCreateV2
 from ....schemas.v2 import WorkflowExportV2
 from ....schemas.v2 import WorkflowImportV2
@@ -213,7 +212,6 @@ async def delete_workflow(
     jobs = res.scalars().all()
     for job in jobs:
         job.workflow_id = None
-    await db.commit()
 
     # Delete workflow
     await db.delete(workflow)
@@ -341,40 +339,25 @@ async def import_workflow(
     await db.refresh(db_workflow)
 
     # Insert tasks
-    async with db:  # FIXME why?
-
-        for wf_task in workflow.task_list:
-            if wf_task.is_legacy_task is True:
-                # Identify task_id
-                source = wf_task.task_legacy.source
-                task_id = source_to_id_legacy[source]
-                # Prepare new_wf_task
-                new_wf_task = WorkflowTaskCreateV1(
-                    **wf_task.dict(exclude_none=True)
-                )
-                # Insert task
-                await _workflow_insert_task(
-                    **new_wf_task.dict(),
-                    is_legacy_task=True,
-                    workflow_id=db_workflow.id,
-                    task_id=task_id,
-                    db=db,
-                )
-            else:
-                # Identify task_id
-                source = wf_task.task.source
-                task_id = source_to_id[source]
-                # Prepare new_wf_task
-                new_wf_task = WorkflowTaskCreateV2(
-                    **wf_task.dict(exclude_none=True)
-                )
-                # Insert task
-                await _workflow_insert_task(
-                    **new_wf_task.dict(),
-                    workflow_id=db_workflow.id,
-                    task_id=task_id,
-                    db=db,
-                )
+
+    for wf_task in workflow.task_list:
+        if wf_task.is_legacy_task is True:
+            source = wf_task.task_legacy.source
+            task_id = source_to_id_legacy[source]
+        else:
+            source = wf_task.task.source
+            task_id = source_to_id[source]
+
+        new_wf_task = WorkflowTaskCreateV2(
+            **wf_task.dict(exclude_none=True, exclude={"task", "task_legacy"})
+        )
+        # Insert task
+        await _workflow_insert_task(
+            **new_wf_task.dict(),
+            workflow_id=db_workflow.id,
+            task_id=task_id,
+            db=db,
+        )
 
     await db.close()
     return db_workflow
@@ -91,19 +91,18 @@ async def create_workflowtask(
         ),
     )
 
-    async with db:
-        workflow_task = await _workflow_insert_task(
-            workflow_id=workflow.id,
-            is_legacy_task=new_task.is_legacy_task,
-            task_id=task_id,
-            order=new_task.order,
-            meta_non_parallel=new_task.meta_non_parallel,
-            meta_parallel=new_task.meta_parallel,
-            args_non_parallel=new_task.args_non_parallel,
-            args_parallel=new_task.args_parallel,
-            input_filters=new_task.input_filters,
-            db=db,
-        )
+    workflow_task = await _workflow_insert_task(
+        workflow_id=workflow.id,
+        is_legacy_task=new_task.is_legacy_task,
+        task_id=task_id,
+        order=new_task.order,
+        meta_non_parallel=new_task.meta_non_parallel,
+        meta_parallel=new_task.meta_parallel,
+        args_non_parallel=new_task.args_non_parallel,
+        args_parallel=new_task.args_parallel,
+        input_filters=new_task.input_filters,
+        db=db,
+    )
 
     await db.close()
 
@@ -212,17 +211,8 @@ async def update_workflowtask(
             if not actual_args:
                 actual_args = None
             setattr(db_wf_task, key, actual_args)
-        elif key == "meta_parallel":
-            current_meta_parallel = deepcopy(db_wf_task.meta_parallel) or {}
-            current_meta_parallel.update(value)
-            setattr(db_wf_task, key, current_meta_parallel)
-        elif key == "meta_non_parallel":
-            current_meta_non_parallel = (
-                deepcopy(db_wf_task.meta_non_parallel) or {}
-            )
-            current_meta_non_parallel.update(value)
-            setattr(db_wf_task, key, current_meta_non_parallel)
-        # FIXME handle `input_filters`
+        elif key in ["meta_parallel", "meta_non_parallel", "input_filters"]:
+            setattr(db_wf_task, key, value)
         else:
             raise HTTPException(
@@ -9,7 +9,7 @@ from typing import Optional
 from ....images import Filters
 from ....images import SingleImage
 from ....images.tools import filter_image_list
-from ....images.tools import find_image_by_path
+from ....images.tools import find_image_by_zarr_url
 from ....images.tools import match_filter
 from ..filenames import FILTERS_FILENAME
 from ..filenames import HISTORY_FILENAME
@@ -66,7 +66,7 @@ def execute_tasks_v2(
         for image in filtered_images:
             if not match_filter(image, Filters(types=task.input_types)):
                 raise ValueError(
-                    f"Filtered images include {image.dict()}, which does "
+                    f"Filtered images include {image}, which does "
                     f"not comply with {task.input_types=}."
                 )
 
@@ -123,30 +123,32 @@ def execute_tasks_v2(
         # POST TASK EXECUTION
 
         # Update image list
-        current_task_output.check_paths_are_unique()
+        current_task_output.check_zarr_urls_are_unique()
         for image_obj in current_task_output.image_list_updates:
             image = image_obj.dict()
             # Edit existing image
-            if image["path"] in [_image["path"] for _image in tmp_images]:
+            if image["zarr_url"] in [
+                _image["zarr_url"] for _image in tmp_images
+            ]:
                 if (
                     image["origin"] is not None
-                    and image["origin"] != image["path"]
+                    and image["origin"] != image["zarr_url"]
                 ):
                     raise ValueError(
-                        f"Trying to edit an image with {image['path']=} "
+                        f"Trying to edit an image with {image['zarr_url']=} "
                         f"and {image['origin']=}."
                     )
-                image_search = find_image_by_path(
+                img_search = find_image_by_zarr_url(
                     images=tmp_images,
-                    path=image["path"],
+                    zarr_url=image["zarr_url"],
                 )
-                if image_search is None:
+                if img_search is None:
                     raise ValueError(
-                        f"Image with path {image['path']} not found, while "
-                        "updating image list."
+                        f"Image with zarr_url {image['zarr_url']} not found, "
+                        "while updating image list."
                     )
-                original_img = image_search["image"]
-                original_index = image_search["index"]
+                original_img = img_search["image"]
+                original_index = img_search["index"]
                 updated_attributes = copy(original_img["attributes"])
                 updated_types = copy(original_img["types"])
 
@@ -160,22 +162,22 @@ def execute_tasks_v2(
                 tmp_images[original_index]["types"] = updated_types
             # Add new image
             else:
-                # Check that image['path'] is relative to zarr_dir
-                if not image["path"].startswith(zarr_dir):
+                # Check that image['zarr_url'] is relative to zarr_dir
+                if not image["zarr_url"].startswith(zarr_dir):
                     raise ValueError(
                         f"{zarr_dir} is not a parent directory of "
-                        f"{image['path']}"
+                        f"{image['zarr_url']}"
                     )
                 # Propagate attributes and types from `origin` (if any)
                 updated_attributes = {}
                 updated_types = {}
                 if image["origin"] is not None:
-                    image_search = find_image_by_path(
+                    img_search = find_image_by_zarr_url(
                         images=tmp_images,
-                        path=image["origin"],
+                        zarr_url=image["origin"],
                     )
-                    if image_search is not None:
-                        original_img = image_search["image"]
+                    if img_search is not None:
+                        original_img = img_search["image"]
                         updated_attributes = copy(original_img["attributes"])
                         updated_types = copy(original_img["types"])
                 # Update image attributes/types with task output and manifest
@@ -183,7 +185,7 @@ def execute_tasks_v2(
                 updated_types.update(image["types"])
                 updated_types.update(task.output_types)
                 new_image = dict(
-                    path=image["path"],
+                    zarr_url=image["zarr_url"],
                     origin=image["origin"],
                     attributes=updated_attributes,
                     types=updated_types,
@@ -194,16 +196,16 @@ def execute_tasks_v2(
                 tmp_images.append(new_image)
 
         # Remove images from tmp_images
-        for image_path in current_task_output.image_list_removals:
-            image_search = find_image_by_path(
-                images=tmp_images, path=image_path
+        for img_zarr_url in current_task_output.image_list_removals:
+            img_search = find_image_by_zarr_url(
+                images=tmp_images, zarr_url=img_zarr_url
             )
-            if image_search is None:
+            if img_search is None:
                 raise ValueError(
-                    f"Cannot remove missing image with path {image_path=}"
+                    f"Cannot remove missing image with zarr_url {img_zarr_url}"
                 )
             else:
-                tmp_images.pop(image_search["index"])
+                tmp_images.pop(img_search["index"])
 
         # Update filters.attributes:
         # current + (task_output: not really, in current examples..)
@@ -110,7 +110,7 @@ def run_v2_task_non_parallel(
     )
 
     function_kwargs = dict(
-        paths=[image["path"] for image in images],
+        zarr_urls=[image["zarr_url"] for image in images],
         zarr_dir=zarr_dir,
         **(wftask.args_non_parallel or {}),
     )
@@ -160,7 +160,7 @@ def run_v2_task_parallel(
     for ind, image in enumerate(images):
         list_function_kwargs.append(
             dict(
-                path=image["path"],
+                zarr_url=image["zarr_url"],
                 **(wftask.args_parallel or {}),
             ),
         )
@@ -223,7 +223,7 @@ def run_v2_task_compound(
 
     # 3/A: non-parallel init task
     function_kwargs = dict(
-        paths=[image["path"] for image in images],
+        zarr_urls=[image["zarr_url"] for image in images],
         zarr_dir=zarr_dir,
         **(wftask.args_non_parallel or {}),
     )
@@ -253,7 +253,7 @@ def run_v2_task_compound(
     for ind, parallelization_item in enumerate(parallelization_list):
         list_function_kwargs.append(
             dict(
-                path=parallelization_item.path,
+                zarr_url=parallelization_item.zarr_url,
                 init_args=parallelization_item.init_args,
                 **(wftask.args_parallel or {}),
             ),
@@ -314,7 +314,7 @@ def run_v1_task_parallel(
         list_function_kwargs.append(
             convert_v2_args_into_v1(
                 dict(
-                    path=image["path"],
+                    zarr_url=image["zarr_url"],
                     **(wftask.args_parallel or {}),
                 )
             ),
@@ -15,13 +15,18 @@ class TaskOutput(BaseModel):
     image_list_removals: list[str] = Field(default_factory=list)
     filters: Filters = Field(default_factory=Filters)
 
-    def check_paths_are_unique(self) -> None:
-        paths = [img.path for img in self.image_list_updates]
-        paths.extend(self.image_list_removals)
-        if len(paths) != len(set(paths)):
-            duplicates = [path for path in set(paths) if paths.count(path) > 1]
+    def check_zarr_urls_are_unique(self) -> None:
+        zarr_urls = [img.zarr_url for img in self.image_list_updates]
+        zarr_urls.extend(self.image_list_removals)
+        if len(zarr_urls) != len(set(zarr_urls)):
+            duplicates = [
+                zarr_url
+                for zarr_url in set(zarr_urls)
+                if zarr_urls.count(zarr_url) > 1
+            ]
             msg = (
-                "TaskOutput image-list updates/removals has non-unique paths:"
+                "TaskOutput image-list updates/removals has "
+                "non-unique zarr_urls:"
             )
             for duplicate in duplicates:
                 msg = f"{msg}\n{duplicate}"
@@ -32,7 +37,7 @@ class InitArgsModel(BaseModel):
     class Config:
         extra = "forbid"
 
-    path: str
+    zarr_url: str
     init_args: dict[str, Any] = Field(default_factory=dict)
 
 
@@ -7,11 +7,11 @@ def convert_v2_args_into_v1(kwargs_v2: dict[str, Any]) -> dict[str, Any]:
 
     kwargs_v1 = deepcopy(kwargs_v2)
 
-    path = kwargs_v2.pop("path")
-    input_path = Path(path).parents[3].as_posix()
-    component = path.replace(input_path, "").lstrip("/")
+    zarr_url = kwargs_v1.pop("zarr_url")
+    input_path = Path(zarr_url).parents[3].as_posix()
+    component = zarr_url.replace(input_path, "").lstrip("/")
 
-    kwargs_v1 = dict(
+    kwargs_v1.update(
         input_paths=[input_path],
         output_path=input_path,
         metadata={},
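For intuition, the path arithmetic in `convert_v2_args_into_v1` maps a single V2 image URL onto the V1 `input_paths`/`component` pair. A worked example with an illustrative value:

    from pathlib import Path

    zarr_url = "/data/my_plate.zarr/B/03/0"                    # illustrative value
    input_path = Path(zarr_url).parents[3].as_posix()          # "/data"
    component = zarr_url.replace(input_path, "").lstrip("/")   # "my_plate.zarr/B/03/0"
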
@@ -5,6 +5,7 @@ from typing import Optional
 from pydantic import BaseModel
 from pydantic import Extra
 from pydantic import Field
+from pydantic import root_validator
 from pydantic import validator
 
 from .._validators import valint
@@ -38,8 +39,8 @@ class WorkflowTaskStatusTypeV2(str, Enum):
 
 class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
 
-    meta_parallel: Optional[dict[str, Any]]
     meta_non_parallel: Optional[dict[str, Any]]
+    meta_parallel: Optional[dict[str, Any]]
     args_non_parallel: Optional[dict[str, Any]]
     args_parallel: Optional[dict[str, Any]]
     order: Optional[int]
@@ -50,7 +51,18 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
     # Validators
 
     _order = validator("order", allow_reuse=True)(valint("order", min_val=0))
-    # FIXME validate: if `is_legacy_task`, `args_non_parallel` must be None
+
+    @root_validator
+    def validate_legacy_task(cls, values):
+        if values["is_legacy_task"] and (
+            values.get("meta_non_parallel") is not None
+            or values.get("args_non_parallel") is not None
+        ):
+            raise ValueError(
+                "If Task is legacy, 'args_non_parallel' and 'meta_non_parallel"
+                "must be None"
+            )
+        return values
 
 
 class WorkflowTaskReadV2(BaseModel):
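A hedged illustration of what the new validator rejects. It assumes `WorkflowTaskCreateV2` also defines `is_legacy_task` (not visible in this hunk) and that the class is re-exported from the package's v2 schemas module:

    from pydantic import ValidationError
    from fractal_server.app.schemas.v2 import WorkflowTaskCreateV2  # assumed import path

    try:
        WorkflowTaskCreateV2(is_legacy_task=True, args_non_parallel={"level": 0})
    except ValidationError as err:
        print(err)  # legacy tasks must not set args_non_parallel / meta_non_parallel
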
@@ -59,8 +71,8 @@ class WorkflowTaskReadV2(BaseModel):
 
     workflow_id: int
     order: Optional[int]
-    meta_parallel: Optional[dict[str, Any]]
     meta_non_parallel: Optional[dict[str, Any]]
+    meta_parallel: Optional[dict[str, Any]]
 
     args_non_parallel: Optional[dict[str, Any]]
     args_parallel: Optional[dict[str, Any]]
@@ -77,8 +89,8 @@ class WorkflowTaskReadV2(BaseModel):
 
 class WorkflowTaskUpdateV2(BaseModel):
 
-    meta_parallel: Optional[dict[str, Any]]
     meta_non_parallel: Optional[dict[str, Any]]
+    meta_parallel: Optional[dict[str, Any]]
     args_non_parallel: Optional[dict[str, Any]]
     args_parallel: Optional[dict[str, Any]]
     input_filters: Optional[Filters]
@@ -96,9 +108,10 @@ class WorkflowTaskUpdateV2(BaseModel):
 
 class WorkflowTaskImportV2(BaseModel):
 
-    meta_parallel: Optional[dict[str, Any]] = None
     meta_non_parallel: Optional[dict[str, Any]] = None
-    args: Optional[dict[str, Any]] = None  # FIXME
+    meta_parallel: Optional[dict[str, Any]] = None
+    args_non_parallel: Optional[dict[str, Any]] = None
+    args_parallel: Optional[dict[str, Any]] = None
 
     input_filters: Optional[Filters] = None
 
@@ -109,9 +122,10 @@ class WorkflowTaskImportV2(BaseModel):
 
 class WorkflowTaskExportV2(BaseModel):
 
-    meta_parallel: Optional[dict[str, Any]] = None
     meta_non_parallel: Optional[dict[str, Any]] = None
-    args: Optional[dict[str, Any]] = None  # FIXME
+    meta_parallel: Optional[dict[str, Any]] = None
+    args_non_parallel: Optional[dict[str, Any]] = None
+    args_parallel: Optional[dict[str, Any]] = None
     input_filters: Filters = Field(default_factory=Filters)
 
     is_legacy_task: bool = False
@@ -9,7 +9,7 @@ from pydantic import validator
 
 class SingleImage(BaseModel):
 
-    path: str
+    zarr_url: str
     origin: Optional[str] = None
 
     attributes: dict[str, Any] = Field(default_factory=dict)
@@ -10,24 +10,24 @@ from fractal_server.images import Filters
 ImageSearch = dict[Literal["image", "index"], Union[int, dict[str, Any]]]
 
 
-def find_image_by_path(
+def find_image_by_zarr_url(
     *,
     images: list[dict[str, Any]],
-    path: str,
+    zarr_url: str,
 ) -> Optional[ImageSearch]:
     """
-    Return a copy of the image with a given path, and its positional index.
+    Return a copy of the image with a given zarr_url, and its positional index.
 
     Arguments:
         images: List of images.
-        path: Path that the returned image must have.
+        zarr_url: Path that the returned image must have.
 
     Returns:
-        The first image from `images` which has path equal to `path`.
+        The first image from `images` which has zarr_url equal to `zarr_url`.
     """
-    image_paths = [img["path"] for img in images]
+    image_urls = [img["zarr_url"] for img in images]
     try:
-        ind = image_paths.index(path)
+        ind = image_urls.index(zarr_url)
     except ValueError:
         return None
     return dict(image=copy(images[ind]), index=ind)
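A hedged usage sketch of the renamed helper, with illustrative image dicts (only the `zarr_url` key matters for the lookup):

    from fractal_server.images.tools import find_image_by_zarr_url

    images = [
        {"zarr_url": "/data/plate.zarr/A/01/0"},
        {"zarr_url": "/data/plate.zarr/A/02/0"},
    ]
    hit = find_image_by_zarr_url(images=images, zarr_url="/data/plate.zarr/A/02/0")
    # hit == {"image": {"zarr_url": "/data/plate.zarr/A/02/0"}, "index": 1}
    assert find_image_by_zarr_url(images=images, zarr_url="/missing") is None
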
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fractal-server
-Version: 2.0.0a1
+Version: 2.0.0a3
 Summary: Server component of the Fractal analytics platform
 Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause
@@ -1,4 +1,4 @@
-fractal_server/__init__.py,sha256=nTWdIbpub_qGvHveAOSoZcQPCAXn60jq4tWgPbd3jQk,24
+fractal_server/__init__.py,sha256=HWERJ6DN1N5nAIka5TF2b6cTQfjSmZGWOqkcnNTIxWs,24
 fractal_server/__main__.py,sha256=CocbzZooX1UtGqPi55GcHGNxnrJXFg5tUU5b3wyFCyo,4958
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -14,7 +14,7 @@ fractal_server/app/models/v1/project.py,sha256=sDmAFLOBK5o4dLrwsIN681JcT5J1rzoUN
 fractal_server/app/models/v1/task.py,sha256=3xZqNeFYUqslh8ddMSXF2nO4nIiOD8T5Ij37wY20kss,2782
 fractal_server/app/models/v1/workflow.py,sha256=dnY5eMaOe3oZv8arn00RNX9qVkBtTLG-vYdWXcQuyo4,3950
 fractal_server/app/models/v2/__init__.py,sha256=2T_ZXpP9n5IktoX3bkQUKUKzGAN5tJiR1LKWOtOCclM,400
-fractal_server/app/models/v2/dataset.py,sha256=3G0PO9tY3FTOlEj4we06V3QbWGfvggXpq00xQKNp8EA,1475
+fractal_server/app/models/v2/dataset.py,sha256=rTDSNeB7jCNa9QwgRPK_vB5a8oM1EhO7KKuV63EIMjE,1483
 fractal_server/app/models/v2/job.py,sha256=PCJf0_NYIc5boXL6e6P72BvYJGydCZOGKnW2DT4Sw9g,1535
 fractal_server/app/models/v2/project.py,sha256=VJvkQexFSS150KxDQxOWBAZYDlV3Ve4l3h2oabzE-tM,845
 fractal_server/app/models/v2/task.py,sha256=9ZPhug3VWyeqgT8wQ9_8ZXQ2crSiiicRipxrxTslOso,3257
@@ -22,7 +22,7 @@ fractal_server/app/models/v2/workflow.py,sha256=4pSTeZC78OQbgHHC5S0ge6pK1AP6ak7Q
 fractal_server/app/models/v2/workflowtask.py,sha256=f2a85MSAyBAdC7oG6SR8mViMNqlomQWaIB08n3ZhT-0,2727
 fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/routes/admin/v1.py,sha256=uY6H1znlAlrM9e1MG2EThTqwciCl87Twew34JM5W6IU,13981
-fractal_server/app/routes/admin/v2.py,sha256=TFG6oshQXY5QlW_SIxVlQw5rSJm0tqDEnijmHE2ea-4,8891
+fractal_server/app/routes/admin/v2.py,sha256=RswcZ2DxRr_OPo3JJSLDFG4j2Ac6z1g5H-uiBiWXF2w,9706
 fractal_server/app/routes/api/__init__.py,sha256=EVyZrEq3I_1643QGTPCC5lgCp4xH_auYbrFfogTm4pc,315
 fractal_server/app/routes/api/v1/__init__.py,sha256=Y2HQdG197J0a7DyQEE2jn53IfxD0EHGhzK1I2JZuEck,958
 fractal_server/app/routes/api/v1/_aux_functions.py,sha256=eC5exnGj9jnJqx0ccecoNaipxDeK2ZsR1ev0syH5x-Y,11955
@@ -34,16 +34,16 @@ fractal_server/app/routes/api/v1/task_collection.py,sha256=LtOakYF30XiKo4ei7i09W
 fractal_server/app/routes/api/v1/workflow.py,sha256=ZObifWTPi100oRQ1wEER8Sgsr3Neo8QVdCCFQnWMNZ0,10930
 fractal_server/app/routes/api/v1/workflowtask.py,sha256=ox-DIIqYV4K35hCu86eGa2SHnR5IQml-I00UHEwnmHQ,5579
 fractal_server/app/routes/api/v2/__init__.py,sha256=wCd4eBUnZlP43uoFDKtrFMZBwDQz6pX8owGs3pdtixk,1217
-fractal_server/app/routes/api/v2/_aux_functions.py,sha256=AA_DHK5OfLs9AEm_uWlf3O6zV0_A4j23dp1PDBbxsyc,14142
-fractal_server/app/routes/api/v2/dataset.py,sha256=Djq-4IwAKehgBRvSCtQba4ctgKu5P113HyL7Nat7JHM,9702
-fractal_server/app/routes/api/v2/images.py,sha256=5voGxRbx5qTnrRbnku-Q6neE1KcW8unOaaLuOFNFGyA,5765
+fractal_server/app/routes/api/v2/_aux_functions.py,sha256=reX1N0_jP1P86FVxkAuuDFrl0zBQRT8ozS-BuCeLv4Q,14218
+fractal_server/app/routes/api/v2/dataset.py,sha256=qQi9jfT9YLu6DrRCPh280J3MoFWs9yMiejkCNaauCyQ,9680
+fractal_server/app/routes/api/v2/images.py,sha256=b1NM9Y0ocuRYRec-3UcVAizB0vFkmzPEHfObaoCnIMY,5956
 fractal_server/app/routes/api/v2/job.py,sha256=9mXaKCX_N3FXM0GIxdE49nWl_hJZ8CBLBIaMMhaCKOM,5334
-fractal_server/app/routes/api/v2/project.py,sha256=YYche5eKdQu-Wnd2ob_zwxz-cVz3x3-zC-pFS0GWD6M,6091
+fractal_server/app/routes/api/v2/project.py,sha256=i9a19HAqE36N92G60ZYgObIP9nv-hR7Jt5nd9Dkhz1g,6024
 fractal_server/app/routes/api/v2/submit.py,sha256=I8asPxY3KUogLbeDi0uPNbVLQBunOwMHCp1fbTYmdyg,7219
 fractal_server/app/routes/api/v2/task.py,sha256=gJ0LruSk-Q1iMw8ZOX8C0wrZ4S4DGlQTr_5SdJJud0Q,7130
 fractal_server/app/routes/api/v2/task_collection.py,sha256=kxSOOSsTFq2w1SeDwMeX6mSDPYbH5Uds18xpdLU5kTo,8466
-fractal_server/app/routes/api/v2/workflow.py,sha256=MPfwyrksZ4tA5eh1ZULoKT7L2P8w_M-rgmcyaUNvQTE,12580
-fractal_server/app/routes/api/v2/workflowtask.py,sha256=8Ibu71nSJg-v5d5hJVOz9YRGomj7dVOaiIhQK50_O6s,8895
+fractal_server/app/routes/api/v2/workflow.py,sha256=sw-1phO_rrmDAcWX9Zqb9M8SfrWF78-02AuLB1-D1PU,11845
+fractal_server/app/routes/api/v2/workflowtask.py,sha256=L4hYpb-ihKNfPxM5AnZqPhCdiojI9Eq5TR0wf-0vP_s,8414
 fractal_server/app/routes/auth.py,sha256=Xv80iqdyfY3lyicYs2Y8B6zEDEnyUu_H6_6psYtv3R4,4885
 fractal_server/app/routes/aux/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/routes/aux/_job.py,sha256=5gKgvArAruSkMQuPN34Vvzi89WJbwWPsx0oDAa_iXu4,1248
@@ -85,11 +85,11 @@ fractal_server/app/runner/v2/_slurm/get_slurm_config.py,sha256=sqP-hs58TPt849rx1
 fractal_server/app/runner/v2/deduplicate_list.py,sha256=UShgbFy8d8elUE5sa1_jLDqQWip4Bi21VDhcFFM0fpU,571
 fractal_server/app/runner/v2/handle_failed_job.py,sha256=t4MjRH_7OhDMqZHP5UeZJ9_RlIJVj-F5VYtl34JBXO8,5149
 fractal_server/app/runner/v2/merge_outputs.py,sha256=IHuHqbKmk97K35BFvTrKVBs60z3e_--OzXTnsvmA02c,1281
-fractal_server/app/runner/v2/runner.py,sha256=wvfaaVnqzap0AJwihBFntLBEprh1gV88l3wQVSAPPo8,11027
-fractal_server/app/runner/v2/runner_functions.py,sha256=pHGBbah0UlmN7fukmorU-AZq4WW8Aj8oSWxJwr8Woh4,10047
+fractal_server/app/runner/v2/runner.py,sha256=hLLGE6wD8nVSFWui0LlNTqn63WYYCPFeRvIAn4sBLlU,11119
+fractal_server/app/runner/v2/runner_functions.py,sha256=LfO1-FJF70_Qh78NQTCHJWyzyr011wvvtnzB6nTj5ZM,10087
 fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=Pp3hsj1i1t4ExDMcUBkQ27yEi7kjlvymY6q6eDiC8DM,3845
-fractal_server/app/runner/v2/task_interface.py,sha256=QwoTQnsBzzPzfsK6LIy-FfKRU_vItlrlJG0ViYN3D64,1243
-fractal_server/app/runner/v2/v1_compat.py,sha256=dR_ukOppfc1XmInvOyKiVLdh15OSEK7ZX1l_DxYI8Sg,495
+fractal_server/app/runner/v2/task_interface.py,sha256=3M0xDaARCVCD3yX2-N6YaFsYwZwsPpCKqAFMp0YqRDA,1376
+fractal_server/app/runner/v2/v1_compat.py,sha256=6UijuRYbB2ry2mM073u1fW4CSTeelB11lmoj_TOGtm4,511
 fractal_server/app/schemas/__init__.py,sha256=VL55f3CTFngXHYkOsFaLBEEkEEewEWI5ODlcGTI7cqA,157
 fractal_server/app/schemas/_validators.py,sha256=s9a6AX4-3Vfoy1Y_HMQA3lXm4FLdmnODYUD4lfsJr6w,2549
 fractal_server/app/schemas/json_schemas/manifest.json,sha256=yXYKHbYXPYSkSXMTLfTpfCUGBtmQuPTk1xuSXscdba4,1787
@@ -113,13 +113,13 @@ fractal_server/app/schemas/v2/project.py,sha256=Okm9n4KqUUs8oxFo6yIV3Y_4mJznLeKC
 fractal_server/app/schemas/v2/task.py,sha256=vZPIsqBVM9RJDkk81EvJQQhQa-LNSh5YGdP-KM9AKgs,3607
 fractal_server/app/schemas/v2/task_collection.py,sha256=Jk-r3f2RIHRAXbej9xnz_WsPrIrod1P_FIWK1iEVkes,2993
 fractal_server/app/schemas/v2/workflow.py,sha256=KnzsuTQZ8S1wwoRDY3poWTnO3GbogFTLqCoBJNYzIFU,1831
-fractal_server/app/schemas/v2/workflowtask.py,sha256=ZGtsQyefk5Z52vWrTudF0OAJ2_LTbmfY7dE1nO2v0EU,3424
+fractal_server/app/schemas/v2/workflowtask.py,sha256=Cg9h2RlkMGISeHXNtW0X_SUJFO0azLBX3_NNrm5WDOQ,3903
 fractal_server/app/security/__init__.py,sha256=wxosoHc3mJYPCdPMyWnRD8w_2OgnKYp2aDkdmwrZh5k,11203
 fractal_server/config.py,sha256=CA8ASObADaME5chDiBXawAJZ3MvjTRpCKP0jvdYtSh8,15080
 fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
 fractal_server/images/__init__.py,sha256=JnTf7TflCdTbhcMHi12s3CJhEtuAXNulwauUU1wDpp0,88
-fractal_server/images/models.py,sha256=FId-e_lQNqp6rMoj2C4yO-fF0mCpjl5n-NJwum5TWns,1536
-fractal_server/images/tools.py,sha256=e4sajbw9OF1JSje_UiTX6xOe-Cnx8tkQxLLcl0x3Rts,2204
+fractal_server/images/models.py,sha256=Aj_U-IGjVFSbubMxo7pwlwa5IuHyn_HdtBiEEvddCss,1540
+fractal_server/images/tools.py,sha256=Q7jM60r_jq5bttrt1b4bU29n717RSUMMPbAbAkzWjgw,2234
 fractal_server/logger.py,sha256=95duXY8eSxf1HWg0CVn8SUGNzgJw9ZR0FlapDDF6WAY,3924
 fractal_server/main.py,sha256=7CpwPfCsHxBAo5fWuXPCsYOFCpbBI0F7Z0jsgCQdou8,3001
 fractal_server/migrations/README,sha256=4rQvyDfqodGhpJw74VYijRmgFP49ji5chyEemWGHsuw,59
@@ -153,8 +153,8 @@ fractal_server/tasks/v2/_TaskCollectPip.py,sha256=QeCqXDgOnMjk3diVlC5bgGEywyQjYF
 fractal_server/tasks/v2/background_operations.py,sha256=zr6j3uoWmCeW2EA9auxWNZ0sG3SHgSxUVTC1OpQXE3Y,12803
 fractal_server/tasks/v2/get_collection_data.py,sha256=Qhf2T_aaqAfqu9_KpUSlXsS7EJoZQbEPEreHHa2jco8,502
 fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
-fractal_server-2.0.0a1.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
-fractal_server-2.0.0a1.dist-info/METADATA,sha256=BQTLOQphWBZRtiU6OQSrVF3esVMqyLQkkju3mDMM19U,4205
-fractal_server-2.0.0a1.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-fractal_server-2.0.0a1.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
-fractal_server-2.0.0a1.dist-info/RECORD,,
+fractal_server-2.0.0a3.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.0.0a3.dist-info/METADATA,sha256=5eETQf0NY1w6uZqbwgmgaLfqUzKyP74Y9vTI9VwUgOY,4205
+fractal_server-2.0.0a3.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+fractal_server-2.0.0a3.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.0.0a3.dist-info/RECORD,,