fractal-server 1.4.10__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (138)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +6 -8
  3. fractal_server/app/models/linkuserproject.py +9 -0
  4. fractal_server/app/models/security.py +6 -0
  5. fractal_server/app/models/v1/__init__.py +12 -0
  6. fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
  7. fractal_server/app/models/{job.py → v1/job.py} +5 -5
  8. fractal_server/app/models/{project.py → v1/project.py} +5 -5
  9. fractal_server/app/models/{state.py → v1/state.py} +2 -2
  10. fractal_server/app/models/{task.py → v1/task.py} +7 -2
  11. fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
  12. fractal_server/app/models/v2/__init__.py +22 -0
  13. fractal_server/app/models/v2/collection_state.py +21 -0
  14. fractal_server/app/models/v2/dataset.py +54 -0
  15. fractal_server/app/models/v2/job.py +51 -0
  16. fractal_server/app/models/v2/project.py +30 -0
  17. fractal_server/app/models/v2/task.py +93 -0
  18. fractal_server/app/models/v2/workflow.py +35 -0
  19. fractal_server/app/models/v2/workflowtask.py +49 -0
  20. fractal_server/app/routes/admin/__init__.py +0 -0
  21. fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
  22. fractal_server/app/routes/admin/v2.py +309 -0
  23. fractal_server/app/routes/api/v1/__init__.py +7 -7
  24. fractal_server/app/routes/api/v1/_aux_functions.py +8 -8
  25. fractal_server/app/routes/api/v1/dataset.py +41 -41
  26. fractal_server/app/routes/api/v1/job.py +14 -14
  27. fractal_server/app/routes/api/v1/project.py +27 -25
  28. fractal_server/app/routes/api/v1/task.py +26 -16
  29. fractal_server/app/routes/api/v1/task_collection.py +28 -16
  30. fractal_server/app/routes/api/v1/workflow.py +28 -28
  31. fractal_server/app/routes/api/v1/workflowtask.py +11 -11
  32. fractal_server/app/routes/api/v2/__init__.py +34 -0
  33. fractal_server/app/routes/api/v2/_aux_functions.py +502 -0
  34. fractal_server/app/routes/api/v2/dataset.py +293 -0
  35. fractal_server/app/routes/api/v2/images.py +279 -0
  36. fractal_server/app/routes/api/v2/job.py +200 -0
  37. fractal_server/app/routes/api/v2/project.py +186 -0
  38. fractal_server/app/routes/api/v2/status.py +150 -0
  39. fractal_server/app/routes/api/v2/submit.py +210 -0
  40. fractal_server/app/routes/api/v2/task.py +222 -0
  41. fractal_server/app/routes/api/v2/task_collection.py +239 -0
  42. fractal_server/app/routes/api/v2/task_legacy.py +59 -0
  43. fractal_server/app/routes/api/v2/workflow.py +380 -0
  44. fractal_server/app/routes/api/v2/workflowtask.py +265 -0
  45. fractal_server/app/routes/aux/_job.py +2 -2
  46. fractal_server/app/runner/__init__.py +0 -364
  47. fractal_server/app/runner/async_wrap.py +27 -0
  48. fractal_server/app/runner/components.py +5 -0
  49. fractal_server/app/runner/exceptions.py +129 -0
  50. fractal_server/app/runner/executors/__init__.py +0 -0
  51. fractal_server/app/runner/executors/slurm/__init__.py +3 -0
  52. fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
  53. fractal_server/app/runner/{_slurm → executors/slurm}/_check_jobs_status.py +1 -1
  54. fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +1 -1
  55. fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
  56. fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
  57. fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +32 -21
  58. fractal_server/app/runner/filenames.py +6 -0
  59. fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
  60. fractal_server/app/runner/task_files.py +103 -0
  61. fractal_server/app/runner/v1/__init__.py +366 -0
  62. fractal_server/app/runner/{_common.py → v1/_common.py} +14 -121
  63. fractal_server/app/runner/{_local → v1/_local}/__init__.py +5 -4
  64. fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
  65. fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
  66. fractal_server/app/runner/v1/_slurm/__init__.py +312 -0
  67. fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +5 -11
  68. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
  69. fractal_server/app/runner/v1/common.py +117 -0
  70. fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
  71. fractal_server/app/runner/v2/__init__.py +336 -0
  72. fractal_server/app/runner/v2/_local/__init__.py +162 -0
  73. fractal_server/app/runner/v2/_local/_local_config.py +118 -0
  74. fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
  75. fractal_server/app/runner/v2/_local/executor.py +100 -0
  76. fractal_server/app/runner/{_slurm → v2/_slurm}/__init__.py +38 -47
  77. fractal_server/app/runner/v2/_slurm/_submit_setup.py +82 -0
  78. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +182 -0
  79. fractal_server/app/runner/v2/deduplicate_list.py +23 -0
  80. fractal_server/app/runner/v2/handle_failed_job.py +165 -0
  81. fractal_server/app/runner/v2/merge_outputs.py +38 -0
  82. fractal_server/app/runner/v2/runner.py +343 -0
  83. fractal_server/app/runner/v2/runner_functions.py +374 -0
  84. fractal_server/app/runner/v2/runner_functions_low_level.py +130 -0
  85. fractal_server/app/runner/v2/task_interface.py +62 -0
  86. fractal_server/app/runner/v2/v1_compat.py +31 -0
  87. fractal_server/app/schemas/__init__.py +1 -42
  88. fractal_server/app/schemas/_validators.py +28 -5
  89. fractal_server/app/schemas/v1/__init__.py +36 -0
  90. fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
  91. fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
  92. fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
  93. fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
  94. fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
  95. fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
  96. fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
  97. fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
  98. fractal_server/app/schemas/v2/__init__.py +37 -0
  99. fractal_server/app/schemas/v2/dataset.py +126 -0
  100. fractal_server/app/schemas/v2/dumps.py +87 -0
  101. fractal_server/app/schemas/v2/job.py +114 -0
  102. fractal_server/app/schemas/v2/manifest.py +159 -0
  103. fractal_server/app/schemas/v2/project.py +34 -0
  104. fractal_server/app/schemas/v2/status.py +16 -0
  105. fractal_server/app/schemas/v2/task.py +151 -0
  106. fractal_server/app/schemas/v2/task_collection.py +109 -0
  107. fractal_server/app/schemas/v2/workflow.py +79 -0
  108. fractal_server/app/schemas/v2/workflowtask.py +208 -0
  109. fractal_server/config.py +5 -4
  110. fractal_server/images/__init__.py +4 -0
  111. fractal_server/images/models.py +136 -0
  112. fractal_server/images/tools.py +84 -0
  113. fractal_server/main.py +11 -3
  114. fractal_server/migrations/env.py +0 -2
  115. fractal_server/migrations/versions/5bf02391cfef_v2.py +245 -0
  116. fractal_server/tasks/__init__.py +0 -5
  117. fractal_server/tasks/endpoint_operations.py +13 -19
  118. fractal_server/tasks/utils.py +35 -0
  119. fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
  120. fractal_server/tasks/v1/__init__.py +0 -0
  121. fractal_server/tasks/{background_operations.py → v1/background_operations.py} +20 -52
  122. fractal_server/tasks/v1/get_collection_data.py +14 -0
  123. fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
  124. fractal_server/tasks/v2/__init__.py +0 -0
  125. fractal_server/tasks/v2/background_operations.py +381 -0
  126. fractal_server/tasks/v2/get_collection_data.py +14 -0
  127. fractal_server/urls.py +13 -0
  128. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0.dist-info}/METADATA +10 -10
  129. fractal_server-2.0.0.dist-info/RECORD +169 -0
  130. fractal_server/app/runner/_slurm/.gitignore +0 -2
  131. fractal_server/app/runner/common.py +0 -311
  132. fractal_server/app/schemas/json_schemas/manifest.json +0 -81
  133. fractal_server-1.4.10.dist-info/RECORD +0 -98
  134. /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
  135. /fractal_server/app/runner/{_local → v1/_local}/executor.py +0 -0
  136. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0.dist-info}/LICENSE +0 -0
  137. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0.dist-info}/WHEEL +0 -0
  138. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,38 @@
1
+ from copy import copy
2
+
3
+ from fractal_server.app.runner.v2.deduplicate_list import deduplicate_list
4
+ from fractal_server.app.runner.v2.task_interface import TaskOutput
5
+
6
+
7
def merge_outputs(task_outputs: list[TaskOutput]) -> TaskOutput:
    """
    Merge a list of `TaskOutput` objects into a single one.

    Image-list updates and removals are concatenated across all task
    outputs (updates are deduplicated at the end), while the `filters`
    attribute is required to be identical across outputs.

    Args:
        task_outputs: The task outputs to be merged.

    Returns:
        A single merged `TaskOutput` object.

    Raises:
        ValueError: If two task outputs carry different `filters`.
    """
    final_image_list_updates = []
    final_image_list_removals = []
    last_new_filters = None

    for ind, task_output in enumerate(task_outputs):

        final_image_list_updates += task_output.image_list_updates
        final_image_list_removals += task_output.image_list_removals

        # Each output's filters must match the previous one's (hence, by
        # transitivity, all filters must be identical).
        current_new_filters = task_output.filters
        if ind == 0:
            last_new_filters = copy(current_new_filters)
        elif current_new_filters != last_new_filters:
            raise ValueError(f"{current_new_filters=} but {last_new_filters=}")
        last_new_filters = copy(current_new_filters)

    final_image_list_updates = deduplicate_list(final_image_list_updates)

    # Only set `filters` when at least one task output provided them
    additional_args = {}
    if last_new_filters is not None:
        additional_args["filters"] = last_new_filters

    return TaskOutput(
        image_list_updates=final_image_list_updates,
        image_list_removals=final_image_list_removals,
        **additional_args,
    )
@@ -0,0 +1,343 @@
1
+ import json
2
+ import logging
3
+ from concurrent.futures import ThreadPoolExecutor
4
+ from copy import copy
5
+ from copy import deepcopy
6
+ from pathlib import Path
7
+ from typing import Callable
8
+ from typing import Optional
9
+
10
+ from ....images import Filters
11
+ from ....images import SingleImage
12
+ from ....images.tools import filter_image_list
13
+ from ....images.tools import find_image_by_zarr_url
14
+ from ....images.tools import match_filter
15
+ from ..exceptions import JobExecutionError
16
+ from ..filenames import FILTERS_FILENAME
17
+ from ..filenames import HISTORY_FILENAME
18
+ from ..filenames import IMAGES_FILENAME
19
+ from .runner_functions import no_op_submit_setup_call
20
+ from .runner_functions import run_v1_task_parallel
21
+ from .runner_functions import run_v2_task_compound
22
+ from .runner_functions import run_v2_task_non_parallel
23
+ from .runner_functions import run_v2_task_parallel
24
+ from .task_interface import TaskOutput
25
+ from fractal_server.app.models.v2 import DatasetV2
26
+ from fractal_server.app.models.v2 import WorkflowTaskV2
27
+ from fractal_server.app.schemas.v2.dataset import _DatasetHistoryItemV2
28
+ from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskStatusTypeV2
29
+
30
+
31
def execute_tasks_v2(
    wf_task_list: list[WorkflowTaskV2],
    dataset: DatasetV2,
    executor: ThreadPoolExecutor,
    workflow_dir: Path,
    workflow_dir_user: Optional[Path] = None,
    logger_name: Optional[str] = None,
    submit_setup_call: Callable = no_op_submit_setup_call,
) -> dict:
    """
    Run a V2 workflow-task list on a dataset and track dataset updates.

    For each workflow task, this function (1) filters the current image
    list, (2) dispatches execution to the appropriate runner function
    (V2 non_parallel/parallel/compound, or legacy V1 parallel), and (3)
    applies the task output back onto the temporary dataset attributes
    (image list, filters, history), dumping them to JSON files after each
    task so that the latest state survives job failures.

    Args:
        wf_task_list: Workflow tasks to execute, in order.
        dataset: Dataset providing `zarr_dir`, `images` and `filters`;
            it is not mutated (attributes are deep-copied).
        executor: Executor used to run task components.
        workflow_dir: Server-side working directory; JSON snapshots of
            history/filters/images are written here after each task.
        workflow_dir_user: User-side working directory (if different
            from `workflow_dir`), forwarded to runner functions.
        logger_name: Name of the logger to use.
        submit_setup_call: Callable used to prepare per-task submission
            settings (defaults to a no-op).

    Returns:
        A dict with keys `history` (only the newly-added items),
        `filters` and `images` (full replacements for the dataset's
        original attributes).

    Raises:
        JobExecutionError: If filtered images violate the task's input
            types, if image updates/removals are inconsistent, or if
            type filters are set twice.
        ValueError: For unexpected task types or internal inconsistencies.
    """

    logger = logging.getLogger(logger_name)

    if not workflow_dir.exists():  # FIXME: this should have already happened
        workflow_dir.mkdir()

    # Initialize local dataset attributes (deep copies: `dataset` itself is
    # never mutated here)
    zarr_dir = dataset.zarr_dir
    tmp_images = deepcopy(dataset.images)
    tmp_filters = deepcopy(dataset.filters)
    tmp_history = []

    for wftask in wf_task_list:
        task = wftask.task
        task_legacy = wftask.task_legacy
        if wftask.is_legacy_task:
            task_name = task_legacy.name
            logger.debug(
                f"SUBMIT {wftask.order}-th task "
                f'(legacy, name="{task_name}")'
            )
        else:
            task_name = task.name
            logger.debug(f'SUBMIT {wftask.order}-th task (name="{task_name}")')

        # PRE TASK EXECUTION

        # Get filtered images: dataset-level filters overridden by the
        # workflow-task's own input filters
        pre_filters = dict(
            types=copy(tmp_filters["types"]),
            attributes=copy(tmp_filters["attributes"]),
        )
        pre_filters["types"].update(wftask.input_filters["types"])
        pre_filters["attributes"].update(wftask.input_filters["attributes"])
        filtered_images = filter_image_list(
            images=tmp_images,
            filters=Filters(**pre_filters),
        )
        # Verify that filtered images comply with task input_types
        # (legacy V1 tasks have no input_types, so they are skipped)
        if not wftask.is_legacy_task:
            for image in filtered_images:
                if not match_filter(image, Filters(types=task.input_types)):
                    raise JobExecutionError(
                        "Invalid filtered image list\n"
                        f"Task input types: {task.input_types=}\n"
                        f'Image zarr_url: {image["zarr_url"]}\n'
                        f'Image types: {image["types"]}\n'
                    )

        # TASK EXECUTION (V2)
        if not wftask.is_legacy_task:
            if task.type == "non_parallel":
                current_task_output = run_v2_task_non_parallel(
                    images=filtered_images,
                    zarr_dir=zarr_dir,
                    wftask=wftask,
                    task=task,
                    workflow_dir=workflow_dir,
                    workflow_dir_user=workflow_dir_user,
                    executor=executor,
                    logger_name=logger_name,
                    submit_setup_call=submit_setup_call,
                )
            elif task.type == "parallel":
                current_task_output = run_v2_task_parallel(
                    images=filtered_images,
                    wftask=wftask,
                    task=task,
                    workflow_dir=workflow_dir,
                    workflow_dir_user=workflow_dir_user,
                    executor=executor,
                    logger_name=logger_name,
                    submit_setup_call=submit_setup_call,
                )
            elif task.type == "compound":
                current_task_output = run_v2_task_compound(
                    images=filtered_images,
                    zarr_dir=zarr_dir,
                    wftask=wftask,
                    task=task,
                    workflow_dir=workflow_dir,
                    workflow_dir_user=workflow_dir_user,
                    executor=executor,
                    logger_name=logger_name,
                    submit_setup_call=submit_setup_call,
                )
            else:
                raise ValueError(f"Unexpected error: Invalid {task.type=}.")
        # TASK EXECUTION (V1)
        else:
            current_task_output = run_v1_task_parallel(
                images=filtered_images,
                wftask=wftask,
                task_legacy=task_legacy,
                executor=executor,
                logger_name=logger_name,
                workflow_dir=workflow_dir,
                workflow_dir_user=workflow_dir_user,
                submit_setup_call=submit_setup_call,
            )

        # POST TASK EXECUTION

        # If `current_task_output` includes no images (to be created, edited or
        # removed), then flag all the input images as modified. See
        # fractal-server issue #1374.
        if (
            current_task_output.image_list_updates == []
            and current_task_output.image_list_removals == []
        ):
            current_task_output = TaskOutput(
                **current_task_output.dict(exclude={"image_list_updates"}),
                image_list_updates=[
                    dict(zarr_url=img["zarr_url"]) for img in filtered_images
                ],
            )

        # Update image list
        current_task_output.check_zarr_urls_are_unique()
        for image_obj in current_task_output.image_list_updates:
            image = image_obj.dict()
            # Edit existing image
            # NOTE: recomputed per iteration on purpose, since tmp_images may
            # grow within this loop (newly-added images must be visible here)
            tmp_image_paths = [img["zarr_url"] for img in tmp_images]
            if image["zarr_url"] in tmp_image_paths:
                if (
                    image["origin"] is not None
                    and image["origin"] != image["zarr_url"]
                ):
                    raise JobExecutionError(
                        "Cannot edit an image with zarr_url different from "
                        "origin.\n"
                        f"zarr_url={image['zarr_url']}\n"
                        f"origin={image['origin']}"
                    )
                img_search = find_image_by_zarr_url(
                    images=tmp_images,
                    zarr_url=image["zarr_url"],
                )
                if img_search is None:
                    raise ValueError(
                        "Unexpected error: "
                        f"Image with zarr_url {image['zarr_url']} not found, "
                        "while updating image list."
                    )
                original_img = img_search["image"]
                original_index = img_search["index"]
                updated_attributes = copy(original_img["attributes"])
                updated_types = copy(original_img["types"])

                # Update image attributes/types with task output and manifest
                updated_attributes.update(image["attributes"])
                updated_types.update(image["types"])
                if not wftask.is_legacy_task:
                    updated_types.update(task.output_types)

                # Unset attributes with None value
                updated_attributes = {
                    key: value
                    for key, value in updated_attributes.items()
                    if value is not None
                }

                # Validate new image
                SingleImage(
                    zarr_url=image["zarr_url"],
                    types=updated_types,
                    attributes=updated_attributes,
                )

                # Update image in the dataset image list
                tmp_images[original_index]["attributes"] = updated_attributes
                tmp_images[original_index]["types"] = updated_types
            # Add new image
            else:
                # Check that image['zarr_url'] is relative to zarr_dir
                if not image["zarr_url"].startswith(zarr_dir):
                    raise JobExecutionError(
                        "Cannot create image if zarr_dir is not a parent "
                        "directory of zarr_url.\n"
                        f"zarr_dir: {zarr_dir}\n"
                        f"zarr_url: {image['zarr_url']}"
                    )
                # Check that image['zarr_url'] is not equal to zarr_dir
                if image["zarr_url"] == zarr_dir:
                    raise JobExecutionError(
                        "Cannot create image if zarr_url is equal to "
                        "zarr_dir.\n"
                        f"zarr_dir: {zarr_dir}\n"
                        f"zarr_url: {image['zarr_url']}"
                    )
                # Propagate attributes and types from `origin` (if any)
                updated_attributes = {}
                updated_types = {}
                if image["origin"] is not None:
                    img_search = find_image_by_zarr_url(
                        images=tmp_images,
                        zarr_url=image["origin"],
                    )
                    if img_search is not None:
                        original_img = img_search["image"]
                        updated_attributes = copy(original_img["attributes"])
                        updated_types = copy(original_img["types"])
                # Update image attributes/types with task output and manifest
                updated_attributes.update(image["attributes"])
                # Unset attributes with None value
                updated_attributes = {
                    key: value
                    for key, value in updated_attributes.items()
                    if value is not None
                }
                updated_types.update(image["types"])
                if not wftask.is_legacy_task:
                    updated_types.update(task.output_types)
                new_image = dict(
                    zarr_url=image["zarr_url"],
                    origin=image["origin"],
                    attributes=updated_attributes,
                    types=updated_types,
                )
                # Validate new image
                SingleImage(**new_image)
                # Add image into the dataset image list
                tmp_images.append(new_image)

        # Remove images from tmp_images
        for img_zarr_url in current_task_output.image_list_removals:
            img_search = find_image_by_zarr_url(
                images=tmp_images, zarr_url=img_zarr_url
            )
            if img_search is None:
                raise JobExecutionError(
                    f"Cannot remove missing image (zarr_url={img_zarr_url})."
                )
            else:
                tmp_images.pop(img_search["index"])

        # Update filters.attributes:
        # current + (task_output: not really, in current examples..)
        if current_task_output.filters is not None:
            tmp_filters["attributes"].update(
                current_task_output.filters.attributes
            )

        # Find manifest output types
        if wftask.is_legacy_task:
            types_from_manifest = {}
        else:
            types_from_manifest = task.output_types

        # Find task-output types
        if current_task_output.filters is not None:
            types_from_task = current_task_output.filters.types
        else:
            types_from_task = {}

        # Check that key sets are disjoint
        set_types_from_manifest = set(types_from_manifest.keys())
        set_types_from_task = set(types_from_task.keys())
        if not set_types_from_manifest.isdisjoint(set_types_from_task):
            overlap = set_types_from_manifest.intersection(set_types_from_task)
            raise JobExecutionError(
                "Some type filters are being set twice, "
                f"for task '{task_name}'.\n"
                f"Types from task output: {types_from_task}\n"
                f"Types from task manifest: {types_from_manifest}\n"
                f"Overlapping keys: {overlap}"
            )

        # Update filters.types
        tmp_filters["types"].update(types_from_manifest)
        tmp_filters["types"].update(types_from_task)

        # Update history (based on _DatasetHistoryItemV2)
        history_item = _DatasetHistoryItemV2(
            workflowtask=wftask,
            status=WorkflowTaskStatusTypeV2.DONE,
            parallelization=dict(
                # task_type=wftask.task.type,  # FIXME: breaks for V1 tasks
                # component_list=fil,  # FIXME
            ),
        ).dict()
        tmp_history.append(history_item)

        # Write current dataset attributes (history, images, filters) into
        # temporary files which can be used (1) to retrieve the latest state
        # when the job fails, (2) from within endpoints that need up-to-date
        # information
        with open(workflow_dir / HISTORY_FILENAME, "w") as f:
            json.dump(tmp_history, f, indent=2)
        with open(workflow_dir / FILTERS_FILENAME, "w") as f:
            json.dump(tmp_filters, f, indent=2)
        with open(workflow_dir / IMAGES_FILENAME, "w") as f:
            json.dump(tmp_images, f, indent=2)

        logger.debug(f'END {wftask.order}-th task (name="{task_name}")')

    # NOTE: tmp_history only contains the newly-added history items (to be
    # appended to the original history), while tmp_filters and tmp_images
    # represent the new attributes (to replace the original ones)
    result = dict(
        history=tmp_history,
        filters=tmp_filters,
        images=tmp_images,
    )
    return result