fractal-server 2.0.0a10__py3-none-any.whl → 2.0.0a12__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (47)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +5 -3
  3. fractal_server/app/models/v1/__init__.py +1 -0
  4. fractal_server/app/models/{state.py → v1/state.py} +2 -2
  5. fractal_server/app/models/v2/__init__.py +2 -0
  6. fractal_server/app/models/v2/collection_state.py +21 -0
  7. fractal_server/app/models/v2/job.py +2 -2
  8. fractal_server/app/models/v2/workflowtask.py +0 -39
  9. fractal_server/app/routes/admin/v1.py +5 -5
  10. fractal_server/app/routes/admin/v2.py +4 -4
  11. fractal_server/app/routes/api/v1/_aux_functions.py +6 -6
  12. fractal_server/app/routes/api/v1/dataset.py +4 -4
  13. fractal_server/app/routes/api/v1/project.py +4 -4
  14. fractal_server/app/routes/api/v1/task.py +2 -2
  15. fractal_server/app/routes/api/v1/task_collection.py +3 -3
  16. fractal_server/app/routes/api/v1/workflow.py +4 -4
  17. fractal_server/app/routes/api/v1/workflowtask.py +1 -1
  18. fractal_server/app/routes/api/v2/_aux_functions.py +2 -2
  19. fractal_server/app/routes/api/v2/dataset.py +10 -0
  20. fractal_server/app/routes/api/v2/job.py +1 -1
  21. fractal_server/app/routes/api/v2/project.py +5 -17
  22. fractal_server/app/routes/api/v2/submit.py +1 -1
  23. fractal_server/app/routes/api/v2/task_collection.py +5 -5
  24. fractal_server/app/routes/api/v2/workflowtask.py +10 -10
  25. fractal_server/app/routes/aux/_job.py +1 -1
  26. fractal_server/app/runner/task_files.py +1 -1
  27. fractal_server/app/runner/v1/__init__.py +4 -4
  28. fractal_server/app/runner/v1/_common.py +2 -2
  29. fractal_server/app/runner/v1/_local/__init__.py +3 -3
  30. fractal_server/app/runner/v2/runner.py +62 -25
  31. fractal_server/app/runner/v2/runner_functions.py +36 -10
  32. fractal_server/app/runner/v2/runner_functions_low_level.py +6 -13
  33. fractal_server/app/runner/v2/task_interface.py +4 -2
  34. fractal_server/app/schemas/__init__.py +0 -3
  35. fractal_server/app/schemas/v1/__init__.py +0 -6
  36. fractal_server/app/schemas/v2/__init__.py +2 -0
  37. fractal_server/app/schemas/v2/dataset.py +5 -0
  38. fractal_server/app/schemas/v2/workflowtask.py +60 -14
  39. fractal_server/images/tools.py +0 -1
  40. fractal_server/migrations/versions/{80e12e1bc4fd_v2.py → 5bf02391cfef_v2.py} +12 -4
  41. fractal_server/tasks/v1/background_operations.py +2 -2
  42. fractal_server/tasks/v2/background_operations.py +2 -2
  43. {fractal_server-2.0.0a10.dist-info → fractal_server-2.0.0a12.dist-info}/METADATA +1 -1
  44. {fractal_server-2.0.0a10.dist-info → fractal_server-2.0.0a12.dist-info}/RECORD +47 -46
  45. {fractal_server-2.0.0a10.dist-info → fractal_server-2.0.0a12.dist-info}/LICENSE +0 -0
  46. {fractal_server-2.0.0a10.dist-info → fractal_server-2.0.0a12.dist-info}/WHEEL +0 -0
  47. {fractal_server-2.0.0a10.dist-info → fractal_server-2.0.0a12.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/v2/runner.py
@@ -12,6 +12,7 @@ from ....images import SingleImage
 from ....images.tools import filter_image_list
 from ....images.tools import find_image_by_zarr_url
 from ....images.tools import match_filter
+from ..exceptions import JobExecutionError
 from ..filenames import FILTERS_FILENAME
 from ..filenames import HISTORY_FILENAME
 from ..filenames import IMAGES_FILENAME
@@ -20,13 +21,12 @@ from .runner_functions import run_v1_task_parallel
 from .runner_functions import run_v2_task_compound
 from .runner_functions import run_v2_task_non_parallel
 from .runner_functions import run_v2_task_parallel
+from .task_interface import TaskOutput
 from fractal_server.app.models.v2 import DatasetV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.schemas.v2.dataset import _DatasetHistoryItemV2
 from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskStatusTypeV2
 
-# FIXME: define RESERVED_ARGUMENTS = [", ...]
-
 
 def execute_tasks_v2(
     wf_task_list: list[WorkflowTaskV2],
@@ -52,8 +52,15 @@ def execute_tasks_v2(
     for wftask in wf_task_list:
         task = wftask.task
         task_legacy = wftask.task_legacy
-        task_name = task_legacy.name if wftask.is_legacy_task else task.name
-        logger.debug(f'SUBMIT {wftask.order}-th task (name="{task_name}")')
+        if wftask.is_legacy_task:
+            task_name = task_legacy.name
+            logger.debug(
+                f"SUBMIT {wftask.order}-th task "
+                f'(legacy, name="{task_name}")'
+            )
+        else:
+            task_name = task.name
+            logger.debug(f'SUBMIT {wftask.order}-th task (name="{task_name}")')
 
         # PRE TASK EXECUTION
 
@@ -72,9 +79,11 @@ def execute_tasks_v2(
         if not wftask.is_legacy_task:
             for image in filtered_images:
                 if not match_filter(image, Filters(types=task.input_types)):
-                    raise ValueError(
-                        f"Filtered images include {image}, which does "
-                        f"not comply with {task.input_types=}."
+                    raise JobExecutionError(
+                        "Invalid filtered image list\n"
+                        f"Task input types: {task.input_types=}\n"
+                        f'Image zarr_url: {image["zarr_url"]}\n'
+                        f'Image types: {image["types"]}\n'
                     )
 
         # TASK EXECUTION (V2)
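Note: the check above relies on the type-matching rule implemented by `fractal_server.images.tools.match_filter`. A minimal standalone sketch of that rule (a simplified stand-in, not the library function itself):

    from typing import Any

    def match_type_filters(
        image: dict[str, Any], type_filters: dict[str, bool]
    ) -> bool:
        # An image passes if, for every filter key, its `types` entry
        # (defaulting to False when missing) equals the filter value.
        return all(
            image["types"].get(key, False) == value
            for key, value in type_filters.items()
        )

    image = {"zarr_url": "/tmp/plate.zarr/A/01/0", "types": {"3D": True}}
    assert match_type_filters(image, {"3D": True})
    assert not match_type_filters(image, {"registered": True})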
@@ -115,7 +124,7 @@ def execute_tasks_v2(
                     submit_setup_call=submit_setup_call,
                 )
             else:
-                raise ValueError(f"Invalid {task.type=}.")
+                raise ValueError(f"Unexpected error: Invalid {task.type=}.")
         # TASK EXECUTION (V1)
         else:
             current_task_output = run_v1_task_parallel(
@@ -131,21 +140,36 @@ def execute_tasks_v2(
 
         # POST TASK EXECUTION
 
+        # If `current_task_output` includes no images (to be created, edited or
+        # removed), then flag all the input images as modified. See
+        # fractal-server issue #1374.
+        if (
+            current_task_output.image_list_updates == []
+            and current_task_output.image_list_removals == []
+        ):
+            current_task_output = TaskOutput(
+                **current_task_output.dict(exclude={"image_list_updates"}),
+                image_list_updates=[
+                    dict(zarr_url=img["zarr_url"]) for img in filtered_images
+                ],
+            )
+
         # Update image list
         current_task_output.check_zarr_urls_are_unique()
         for image_obj in current_task_output.image_list_updates:
             image = image_obj.dict()
             # Edit existing image
-            if image["zarr_url"] in [
-                _image["zarr_url"] for _image in tmp_images
-            ]:
+            tmp_image_paths = [img["zarr_url"] for img in tmp_images]
+            if image["zarr_url"] in tmp_image_paths:
                 if (
                     image["origin"] is not None
                     and image["origin"] != image["zarr_url"]
                 ):
-                    raise ValueError(
-                        f"Trying to edit an image with {image['zarr_url']=} "
-                        f"and {image['origin']=}."
+                    raise JobExecutionError(
+                        "Cannot edit an image with zarr_url different from "
+                        "origin.\n"
+                        f"zarr_url={image['zarr_url']}\n"
+                        f"origin={image['origin']}"
                     )
                 img_search = find_image_by_zarr_url(
                     images=tmp_images,
@@ -153,6 +177,7 @@ def execute_tasks_v2(
                 )
                 if img_search is None:
                     raise ValueError(
+                        "Unexpected error: "
                         f"Image with zarr_url {image['zarr_url']} not found, "
                         "while updating image list."
                     )
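Note: the issue-#1374 block in the hunks above rewrites an "empty" task output so that every filtered input image is flagged as modified. A toy illustration of the same rewrite, using plain dicts instead of the pydantic `TaskOutput` model:

    filtered_images = [
        {"zarr_url": "/zarr/A/01/0"},
        {"zarr_url": "/zarr/A/02/0"},
    ]
    task_output = {"image_list_updates": [], "image_list_removals": []}

    if (
        task_output["image_list_updates"] == []
        and task_output["image_list_removals"] == []
    ):
        task_output["image_list_updates"] = [
            dict(zarr_url=img["zarr_url"]) for img in filtered_images
        ]

    # Both input images are now marked as modified.
    assert len(task_output["image_list_updates"]) == 2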
@@ -188,14 +213,19 @@ def execute_tasks_v2(
             else:
                 # Check that image['zarr_url'] is relative to zarr_dir
                 if not image["zarr_url"].startswith(zarr_dir):
-                    raise ValueError(
-                        f"{zarr_dir} is not a parent directory of "
-                        f"{image['zarr_url']}"
+                    raise JobExecutionError(
+                        "Cannot create image if zarr_dir is not a parent "
+                        "directory of zarr_url.\n"
+                        f"zarr_dir: {zarr_dir}\n"
+                        f"zarr_url: {image['zarr_url']}"
                     )
                 # Check that image['zarr_url'] is not equal to zarr_dir
                 if image["zarr_url"] == zarr_dir:
-                    raise ValueError(
-                        "image['zarr_url'] cannot be equal to zarr_dir"
+                    raise JobExecutionError(
+                        "Cannot create image if zarr_url is equal to "
+                        "zarr_dir.\n"
+                        f"zarr_dir: {zarr_dir}\n"
+                        f"zarr_url: {image['zarr_url']}"
                     )
                 # Propagate attributes and types from `origin` (if any)
                 updated_attributes = {}
@@ -236,8 +266,8 @@ def execute_tasks_v2(
                 images=tmp_images, zarr_url=img_zarr_url
             )
             if img_search is None:
-                raise ValueError(
-                    f"Cannot remove missing image with zarr_url {img_zarr_url}"
+                raise JobExecutionError(
+                    f"Cannot remove missing image (zarr_url={img_zarr_url})."
                 )
             else:
                 tmp_images.pop(img_search["index"])
@@ -249,24 +279,31 @@ def execute_tasks_v2(
                 current_task_output.filters.attributes
             )
 
-        # Update filters.types: current + (task_output + task_manifest)
+        # Find manifest ouptut types
         if wftask.is_legacy_task:
            types_from_manifest = {}
         else:
             types_from_manifest = task.output_types
+
+        # Find task-output types
         if current_task_output.filters is not None:
             types_from_task = current_task_output.filters.types
         else:
             types_from_task = {}
+
         # Check that key sets are disjoint
         set_types_from_manifest = set(types_from_manifest.keys())
         set_types_from_task = set(types_from_task.keys())
         if not set_types_from_manifest.isdisjoint(set_types_from_task):
             overlap = set_types_from_manifest.intersection(set_types_from_task)
-            raise ValueError(
-                "Both task and task manifest did set the same"
-                f"output type. Overlapping keys: {overlap}."
+            raise JobExecutionError(
+                "Some type filters are being set twice, "
+                f"for task '{task_name}'.\n"
+                f"Types from task output: {types_from_task}\n"
+                f"Types from task maniest: {types_from_manifest}\n"
+                f"Overlapping keys: {overlap}"
             )
+
         # Update filters.types
         tmp_filters["types"].update(types_from_manifest)
         tmp_filters["types"].update(types_from_task)
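Note: the disjointness check above is a plain set operation; a task may not set an output type that its manifest already declares. For example:

    types_from_manifest = {"3D": False}
    types_from_task = {"3D": True, "registered": True}

    overlap = set(types_from_manifest) & set(types_from_task)
    assert overlap == {"3D"}  # non-empty, so a JobExecutionError is raised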
fractal_server/app/runner/v2/runner_functions.py
@@ -8,6 +8,9 @@ from typing import Callable
 from typing import Literal
 from typing import Optional
 
+from pydantic import ValidationError
+
+from ..exceptions import JobExecutionError
 from .deduplicate_list import deduplicate_list
 from .merge_outputs import merge_outputs
 from .runner_functions_low_level import run_single_task
@@ -31,6 +34,34 @@ __all__ = [
 MAX_PARALLELIZATION_LIST_SIZE = 20_000
 
 
+def _cast_and_validate_TaskOutput(
+    task_output: dict[str, Any]
+) -> Optional[TaskOutput]:
+    try:
+        validated_task_output = TaskOutput(**task_output)
+        return validated_task_output
+    except ValidationError as e:
+        raise JobExecutionError(
+            "Validation of task output failed.\n"
+            f"Original error: {str(e)}\n"
+            f"Original data: {task_output}."
+        )
+
+
+def _cast_and_validate_InitTaskOutput(
+    init_task_output: dict[str, Any],
+) -> Optional[InitTaskOutput]:
+    try:
+        validated_init_task_output = InitTaskOutput(**init_task_output)
+        return validated_init_task_output
+    except ValidationError as e:
+        raise JobExecutionError(
+            "Validation of init-task output failed.\n"
+            f"Original error: {str(e)}\n"
+            f"Original data: {init_task_output}."
+        )
+
+
 def no_op_submit_setup_call(
     *,
     wftask: WorkflowTaskV2,
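Note: the two helpers above turn pydantic `ValidationError`s into `JobExecutionError`s, so malformed task output surfaces as a job failure instead of an unhandled server traceback. A self-contained sketch of the same pattern, with a toy model and exception in place of the fractal-server classes:

    from pydantic import BaseModel, ValidationError

    class JobError(RuntimeError):
        pass

    class Output(BaseModel):
        image_list_updates: list = []

    def cast_and_validate(data: dict) -> Output:
        try:
            return Output(**data)
        except ValidationError as e:
            raise JobError(
                f"Validation of task output failed.\nOriginal error: {e}"
            )

    cast_and_validate({"image_list_updates": []})    # ok
    # cast_and_validate({"image_list_updates": 1})   # raises JobError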
@@ -71,7 +102,7 @@ def _get_executor_options(
 
 def _check_parallelization_list_size(my_list):
     if len(my_list) > MAX_PARALLELIZATION_LIST_SIZE:
-        raise ValueError(
+        raise JobExecutionError(
             "Too many parallelization items.\n"
             f"   {len(my_list)}\n"
             f"   {MAX_PARALLELIZATION_LIST_SIZE=}\n"
@@ -126,12 +157,10 @@ def run_v2_task_non_parallel(
         **executor_options,
     )
     output = future.result()
-    # FIXME V2: handle validation errors
     if output is None:
         return TaskOutput()
     else:
-        validated_output = TaskOutput(**output)
-        return validated_output
+        return _cast_and_validate_TaskOutput(output)
 
 
 def run_v2_task_parallel(
  def run_v2_task_parallel(
@@ -188,9 +217,7 @@ def run_v2_task_parallel(
188
217
  if output is None:
189
218
  outputs[ind] = TaskOutput()
190
219
  else:
191
- # FIXME: improve handling of validation errors
192
- validated_output = TaskOutput(**output)
193
- outputs[ind] = validated_output
220
+ outputs[ind] = _cast_and_validate_TaskOutput(output)
194
221
 
195
222
  merged_output = merge_outputs(outputs)
196
223
  return merged_output
@@ -245,7 +272,7 @@ def run_v2_task_compound(
     if output is None:
         init_task_output = InitTaskOutput()
     else:
-        init_task_output = InitTaskOutput(**output)
+        init_task_output = _cast_and_validate_InitTaskOutput(output)
     parallelization_list = init_task_output.parallelization_list
     parallelization_list = deduplicate_list(parallelization_list)
 
@@ -285,8 +312,7 @@ def run_v2_task_compound(
         if output is None:
             outputs[ind] = TaskOutput()
         else:
-            # FIXME: improve handling of validation errors
-            validated_output = TaskOutput(**output)
+            validated_output = _cast_and_validate_TaskOutput(output)
             outputs[ind] = validated_output
 
     merged_output = merge_outputs(outputs)
fractal_server/app/runner/v2/runner_functions_low_level.py
@@ -76,19 +76,12 @@ def run_single_task(
         workflow_dir_user = workflow_dir
 
     component = args.pop(_COMPONENT_KEY_, None)
-    if component is None:
-        task_files = get_task_file_paths(
-            workflow_dir=workflow_dir,
-            workflow_dir_user=workflow_dir_user,
-            task_order=wftask.order,
-        )
-    else:
-        task_files = get_task_file_paths(
-            workflow_dir=workflow_dir,
-            workflow_dir_user=workflow_dir_user,
-            task_order=wftask.order,
-            component=component,
-        )
+    task_files = get_task_file_paths(
+        workflow_dir=workflow_dir,
+        workflow_dir_user=workflow_dir_user,
+        task_order=wftask.order,
+        component=component,
+    )
 
     # Write arguments to args.json file
     with task_files.args.open("w") as f:
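Note: collapsing the if/else above is only equivalent if `get_task_file_paths` itself defaults `component` to `None`; plausibly that is the one-line change to `fractal_server/app/runner/task_files.py` listed in this diff. The pattern, with a hypothetical signature:

    from typing import Optional

    def get_task_file_paths(
        *,
        workflow_dir,
        workflow_dir_user,
        task_order,
        component: Optional[str] = None,  # assumed default
    ):
        # With a None default, callers can always pass component=component
        # instead of branching on whether it was provided.
        ...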
fractal_server/app/runner/v2/task_interface.py
@@ -29,8 +29,10 @@ class TaskOutput(BaseModel):
             if zarr_urls.count(zarr_url) > 1
         ]
         msg = (
-            "TaskOutput image-list updates/removals has "
-            "non-unique zarr_urls:"
+            "TaskOutput "
+            f"({len(self.image_list_updates)} image_list_updates and "
+            f"{len(self.image_list_removals)} image_list_removals) "
+            "has non-unique zarr_urls:"
         )
         for duplicate in duplicates:
             msg = f"{msg}\n{duplicate}"
fractal_server/app/schemas/__init__.py
@@ -1,4 +1 @@
-from .state import *  # noqa: F401, F403
 from .user import *  # noqa: F401, F403
-from .v1 import *  # noqa: F401, F403
-from .v2 import *  # noqa: F401, F403
fractal_server/app/schemas/v1/__init__.py
@@ -1,12 +1,6 @@
 """
 Schemas for API request/response bodies
 """
-from ..state import _StateBase  # noqa: F401
-from ..state import StateRead  # noqa: F401
-from ..user import UserCreate  # noqa: F401
-from ..user import UserRead  # noqa: F401
-from ..user import UserUpdate  # noqa: F401
-from ..user import UserUpdateStrict  # noqa: F401
 from .applyworkflow import ApplyWorkflowCreateV1  # noqa: F401
 from .applyworkflow import ApplyWorkflowReadV1  # noqa: F401
 from .applyworkflow import ApplyWorkflowUpdateV1  # noqa: F401
fractal_server/app/schemas/v2/__init__.py
@@ -1,4 +1,6 @@
 from .dataset import DatasetCreateV2  # noqa F401
+from .dataset import DatasetExportV2  # noqa F401
+from .dataset import DatasetImportV2  # noqa F401
 from .dataset import DatasetReadV2  # noqa F401
 from .dataset import DatasetUpdateV2  # noqa F401
 from .dumps import DatasetDumpV2  # noqa F401
fractal_server/app/schemas/v2/dataset.py
@@ -103,6 +103,11 @@ class DatasetImportV2(BaseModel):
     images: list[SingleImage] = Field(default_factory=[])
     filters: Filters = Field(default_factory=Filters)
 
+    # Validators
+    @validator("zarr_dir")
+    def normalize_zarr_dir(cls, v: str) -> str:
+        return normalize_url(v)
+
 
 class DatasetExportV2(BaseModel):
     """
fractal_server/app/schemas/v2/workflowtask.py
@@ -12,12 +12,14 @@ from .._validators import valdictkeys
 from .._validators import valint
 from ..v1.task import TaskExportV1
 from ..v1.task import TaskImportV1
-from ..v1.task import TaskReadV1
 from .task import TaskExportV2
 from .task import TaskImportV2
+from .task import TaskLegacyReadV2
 from .task import TaskReadV2
 from fractal_server.images import Filters
 
+RESERVED_ARGUMENTS = {"zarr_dir", "zarr_url", "zarr_urls", "init_args"}
+
 
 class WorkflowTaskStatusTypeV2(str, Enum):
     """
@@ -56,14 +58,36 @@ class WorkflowTaskCreateV2(BaseModel, extra=Extra.forbid):
     _meta_parallel = validator("meta_parallel", allow_reuse=True)(
         valdictkeys("meta_parallel")
     )
-    _args_non_parallel = validator("args_non_parallel", allow_reuse=True)(
-        valdictkeys("args_non_parallel")
-    )
-    _args_parallel = validator("args_parallel", allow_reuse=True)(
-        valdictkeys("args_parallel")
-    )
     _order = validator("order", allow_reuse=True)(valint("order", min_val=0))
 
+    @validator("args_non_parallel")
+    def validate_args_non_parallel(cls, value):
+        if value is None:
+            return
+        valdictkeys("args_non_parallel")(value)
+        args_keys = set(value.keys())
+        intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
+        if intersect_keys:
+            raise ValueError(
+                "`args` contains the following forbidden keys: "
+                f"{intersect_keys}"
+            )
+        return value
+
+    @validator("args_parallel")
+    def validate_args_parallel(cls, value):
+        if value is None:
+            return
+        valdictkeys("args_parallel")(value)
+        args_keys = set(value.keys())
+        intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
+        if intersect_keys:
+            raise ValueError(
+                "`args` contains the following forbidden keys: "
+                f"{intersect_keys}"
+            )
+        return value
+
     @root_validator
     def validate_legacy_task(cls, values):
         if values["is_legacy_task"] and (
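Note: the two validators above implement the reserved-arguments rule: these keys are supplied by the V2 runner itself, so user-provided task args may not set them. The core check, as a standalone sketch (the `check_args` helper is illustrative):

    RESERVED_ARGUMENTS = {"zarr_dir", "zarr_url", "zarr_urls", "init_args"}

    def check_args(args: dict) -> dict:
        forbidden = RESERVED_ARGUMENTS & set(args)
        if forbidden:
            raise ValueError(
                f"`args` contains the following forbidden keys: {forbidden}"
            )
        return args

    check_args({"level": 2})           # ok
    # check_args({"zarr_url": "/x"})   # raises ValueError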
@@ -96,7 +120,7 @@ class WorkflowTaskReadV2(BaseModel):
     task_id: Optional[int]
     task: Optional[TaskReadV2]
     task_legacy_id: Optional[int]
-    task_legacy: Optional[TaskReadV1]
+    task_legacy: Optional[TaskLegacyReadV2]
 
 
 class WorkflowTaskUpdateV2(BaseModel):
@@ -114,12 +138,34 @@ class WorkflowTaskUpdateV2(BaseModel):
     _meta_parallel = validator("meta_parallel", allow_reuse=True)(
         valdictkeys("meta_parallel")
     )
-    _args_non_parallel = validator("args_non_parallel", allow_reuse=True)(
-        valdictkeys("args_non_parallel")
-    )
-    _args_parallel = validator("args_parallel", allow_reuse=True)(
-        valdictkeys("args_parallel")
-    )
+
+    @validator("args_non_parallel")
+    def validate_args_non_parallel(cls, value):
+        if value is None:
+            return
+        valdictkeys("args_non_parallel")(value)
+        args_keys = set(value.keys())
+        intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
+        if intersect_keys:
+            raise ValueError(
+                "`args` contains the following forbidden keys: "
+                f"{intersect_keys}"
+            )
+        return value
+
+    @validator("args_parallel")
+    def validate_args_parallel(cls, value):
+        if value is None:
+            return
+        valdictkeys("args_parallel")(value)
+        args_keys = set(value.keys())
+        intersect_keys = RESERVED_ARGUMENTS.intersection(args_keys)
+        if intersect_keys:
+            raise ValueError(
+                "`args` contains the following forbidden keys: "
+                f"{intersect_keys}"
+            )
+        return value
 
 
 class WorkflowTaskImportV2(BaseModel):
fractal_server/images/tools.py
@@ -33,7 +33,6 @@ def find_image_by_zarr_url(
     return dict(image=copy(images[ind]), index=ind)
 
 
-# FIXME: what is filters
 def match_filter(image: dict[str, Any], filters: Filters) -> bool:
     """
     Find whether an image matches a filter set.
fractal_server/migrations/versions/{80e12e1bc4fd_v2.py → 5bf02391cfef_v2.py}
@@ -1,8 +1,8 @@
-"""V2
+"""v2
 
-Revision ID: 80e12e1bc4fd
+Revision ID: 5bf02391cfef
 Revises: 9fd26a2b0de4
-Create Date: 2024-04-12 10:13:58.085788
+Create Date: 2024-04-18 10:35:19.067833
 
 """
 import sqlalchemy as sa
@@ -11,7 +11,7 @@ from alembic import op
 
 
 # revision identifiers, used by Alembic.
-revision = "80e12e1bc4fd"
+revision = "5bf02391cfef"
 down_revision = "9fd26a2b0de4"
 branch_labels = None
 depends_on = None
@@ -19,6 +19,13 @@ depends_on = None
 
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
+    op.create_table(
+        "collectionstatev2",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("data", sa.JSON(), nullable=True),
+        sa.Column("timestamp", sa.DateTime(timezone=True), nullable=True),
+        sa.PrimaryKeyConstraint("id"),
+    )
     op.create_table(
         "projectv2",
         sa.Column("id", sa.Integer(), nullable=False),
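Note: this new table backs the `CollectionStateV2` model added in `fractal_server/app/models/v2/collection_state.py` (file 6 above). A plausible SQLModel declaration matching the migration's columns (the released module may differ in its details):

    from datetime import datetime
    from typing import Any, Optional

    from sqlalchemy import JSON, Column, DateTime
    from sqlmodel import Field, SQLModel

    class CollectionStateV2(SQLModel, table=True):
        id: Optional[int] = Field(default=None, primary_key=True)
        data: dict[str, Any] = Field(sa_column=Column(JSON), default={})
        timestamp: Optional[datetime] = Field(
            sa_column=Column(DateTime(timezone=True)), default=None
        )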
@@ -234,4 +241,5 @@ def downgrade() -> None:
     op.drop_table("datasetv2")
     op.drop_table("taskv2")
     op.drop_table("projectv2")
+    op.drop_table("collectionstatev2")
     # ### end Alembic commands ###
fractal_server/tasks/v1/background_operations.py
@@ -15,8 +15,8 @@ from ..utils import slugify_task_name
 from ._TaskCollectPip import _TaskCollectPip
 from fractal_server.app.db import DBSyncSession
 from fractal_server.app.db import get_sync_db
-from fractal_server.app.models import State
-from fractal_server.app.models import Task
+from fractal_server.app.models.v1 import State
+from fractal_server.app.models.v1 import Task
 from fractal_server.app.schemas.v1 import TaskCollectStatusV1
 from fractal_server.app.schemas.v1 import TaskCreateV1
 from fractal_server.app.schemas.v1 import TaskReadV1
fractal_server/tasks/v2/background_operations.py
@@ -15,7 +15,7 @@ from ..utils import slugify_task_name
 from ._TaskCollectPip import _TaskCollectPip
 from fractal_server.app.db import DBSyncSession
 from fractal_server.app.db import get_sync_db
-from fractal_server.app.models import State
+from fractal_server.app.models.v2 import CollectionStateV2
 from fractal_server.app.models.v2 import TaskV2
 from fractal_server.app.schemas.v2 import TaskCollectStatusV2
 from fractal_server.app.schemas.v2 import TaskCreateV2
@@ -313,7 +313,7 @@ async def background_collect_pip(
         logger.debug(f"{key}: {value}")
 
     with next(get_sync_db()) as db:
-        state: State = db.get(State, state_id)
+        state: CollectionStateV2 = db.get(CollectionStateV2, state_id)
         data = TaskCollectStatusV2(**state.data)
         data.info = None
 
{fractal_server-2.0.0a10.dist-info → fractal_server-2.0.0a12.dist-info}/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: fractal-server
-Version: 2.0.0a10
+Version: 2.0.0a12
 Summary: Server component of the Fractal analytics platform
 Home-page: https://github.com/fractal-analytics-platform/fractal-server
 License: BSD-3-Clause