fractal-server 2.13.1__py3-none-any.whl → 2.14.0a1__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (60)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/history/__init__.py +4 -0
  3. fractal_server/app/history/image_updates.py +142 -0
  4. fractal_server/app/history/status_enum.py +16 -0
  5. fractal_server/app/models/v2/__init__.py +5 -1
  6. fractal_server/app/models/v2/history.py +53 -0
  7. fractal_server/app/routes/api/v2/__init__.py +2 -2
  8. fractal_server/app/routes/api/v2/_aux_functions.py +78 -0
  9. fractal_server/app/routes/api/v2/dataset.py +12 -9
  10. fractal_server/app/routes/api/v2/history.py +247 -0
  11. fractal_server/app/routes/api/v2/project.py +25 -0
  12. fractal_server/app/routes/api/v2/workflow.py +18 -3
  13. fractal_server/app/routes/api/v2/workflowtask.py +22 -0
  14. fractal_server/app/runner/executors/base_runner.py +114 -0
  15. fractal_server/app/runner/{v2/_local → executors/local}/_local_config.py +3 -3
  16. fractal_server/app/runner/executors/local/_submit_setup.py +54 -0
  17. fractal_server/app/runner/executors/local/runner.py +200 -0
  18. fractal_server/app/runner/executors/{slurm → slurm_common}/_batching.py +1 -1
  19. fractal_server/app/runner/executors/{slurm → slurm_common}/_slurm_config.py +3 -3
  20. fractal_server/app/runner/{v2/_slurm_ssh → executors/slurm_common}/_submit_setup.py +13 -12
  21. fractal_server/app/runner/{v2/_slurm_common → executors/slurm_common}/get_slurm_config.py +9 -15
  22. fractal_server/app/runner/executors/{slurm/ssh → slurm_ssh}/_executor_wait_thread.py +1 -1
  23. fractal_server/app/runner/executors/{slurm/ssh → slurm_ssh}/_slurm_job.py +1 -1
  24. fractal_server/app/runner/executors/{slurm/ssh → slurm_ssh}/executor.py +13 -14
  25. fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_check_jobs_status.py +11 -9
  26. fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_executor_wait_thread.py +3 -3
  27. fractal_server/app/runner/executors/{slurm/sudo → slurm_sudo}/_subprocess_run_as_user.py +2 -68
  28. fractal_server/app/runner/executors/slurm_sudo/runner.py +632 -0
  29. fractal_server/app/runner/task_files.py +70 -96
  30. fractal_server/app/runner/v2/__init__.py +5 -19
  31. fractal_server/app/runner/v2/_local.py +84 -0
  32. fractal_server/app/runner/v2/{_slurm_ssh/__init__.py → _slurm_ssh.py} +10 -13
  33. fractal_server/app/runner/v2/{_slurm_sudo/__init__.py → _slurm_sudo.py} +10 -12
  34. fractal_server/app/runner/v2/runner.py +93 -28
  35. fractal_server/app/runner/v2/runner_functions.py +85 -62
  36. fractal_server/app/runner/v2/runner_functions_low_level.py +20 -20
  37. fractal_server/app/schemas/v2/dataset.py +0 -17
  38. fractal_server/app/schemas/v2/history.py +23 -0
  39. fractal_server/config.py +2 -2
  40. fractal_server/migrations/versions/8223fcef886c_image_status.py +63 -0
  41. fractal_server/migrations/versions/87cd72a537a2_add_historyitem_table.py +68 -0
  42. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0a1.dist-info}/METADATA +1 -1
  43. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0a1.dist-info}/RECORD +53 -47
  44. fractal_server/app/routes/api/v2/status.py +0 -168
  45. fractal_server/app/runner/executors/slurm/sudo/executor.py +0 -1281
  46. fractal_server/app/runner/v2/_local/__init__.py +0 -132
  47. fractal_server/app/runner/v2/_local/_submit_setup.py +0 -52
  48. fractal_server/app/runner/v2/_local/executor.py +0 -100
  49. fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py +0 -83
  50. fractal_server/app/runner/v2/handle_failed_job.py +0 -59
  51. /fractal_server/app/runner/executors/{slurm → local}/__init__.py +0 -0
  52. /fractal_server/app/runner/executors/{slurm/ssh → slurm_common}/__init__.py +0 -0
  53. /fractal_server/app/runner/executors/{_job_states.py → slurm_common/_job_states.py} +0 -0
  54. /fractal_server/app/runner/executors/{slurm → slurm_common}/remote.py +0 -0
  55. /fractal_server/app/runner/executors/{slurm → slurm_common}/utils_executors.py +0 -0
  56. /fractal_server/app/runner/executors/{slurm/sudo → slurm_ssh}/__init__.py +0 -0
  57. /fractal_server/app/runner/{v2/_slurm_common → executors/slurm_sudo}/__init__.py +0 -0
  58. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0a1.dist-info}/LICENSE +0 -0
  59. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0a1.dist-info}/WHEEL +0 -0
  60. {fractal_server-2.13.1.dist-info → fractal_server-2.14.0a1.dist-info}/entry_points.txt +0 -0
fractal_server/app/runner/v2/runner_functions.py CHANGED
@@ -1,7 +1,6 @@
  import functools
  import logging
  import traceback
- from concurrent.futures import Executor
  from pathlib import Path
  from typing import Any
  from typing import Callable
@@ -20,6 +19,7 @@ from fractal_server.app.models.v2 import TaskV2
  from fractal_server.app.models.v2 import WorkflowTaskV2
  from fractal_server.app.runner.components import _COMPONENT_KEY_
  from fractal_server.app.runner.components import _index_to_component
+ from fractal_server.app.runner.executors.base_runner import BaseRunner


  __all__ = [
@@ -59,13 +59,7 @@ def _cast_and_validate_InitTaskOutput(
          )


- def no_op_submit_setup_call(
-     *,
-     wftask: WorkflowTaskV2,
-     workflow_dir_local: Path,
-     workflow_dir_remote: Path,
-     which_type: Literal["non_parallel", "parallel"],
- ) -> dict:
+ def no_op_submit_setup_call(*args, **kwargs) -> dict:
      """
      Default (no-operation) interface of submit_setup_call in V2.
      """
@@ -84,8 +78,8 @@ def _get_executor_options(
      try:
          options = submit_setup_call(
              wftask=wftask,
-             workflow_dir_local=workflow_dir_local,
-             workflow_dir_remote=workflow_dir_remote,
+             root_dir_local=workflow_dir_local,
+             root_dir_remote=workflow_dir_remote,
              which_type=which_type,
          )
      except Exception as e:
@@ -114,9 +108,10 @@ def run_v2_task_non_parallel(
      wftask: WorkflowTaskV2,
      workflow_dir_local: Path,
      workflow_dir_remote: Optional[Path] = None,
-     executor: Executor,
+     executor: BaseRunner,
      submit_setup_call: Callable = no_op_submit_setup_call,
- ) -> tuple[TaskOutput, int]:
+     history_item_id: int,
+ ) -> tuple[TaskOutput, int, dict[int, BaseException]]:
      """
      This runs server-side (see `executor` argument)
      """
@@ -141,23 +136,29 @@ def run_v2_task_non_parallel(
          zarr_dir=zarr_dir,
          **(wftask.args_non_parallel or {}),
      )
-     future = executor.submit(
+     function_kwargs[_COMPONENT_KEY_] = _index_to_component(0)
+
+     result, exception = executor.submit(
          functools.partial(
              run_single_task,
              wftask=wftask,
              command=task.command_non_parallel,
-             workflow_dir_local=workflow_dir_local,
-             workflow_dir_remote=workflow_dir_remote,
+             root_dir_local=workflow_dir_local,
+             root_dir_remote=workflow_dir_remote,
          ),
-         function_kwargs,
+         parameters=function_kwargs,
+         history_item_id=history_item_id,
          **executor_options,
      )
-     output = future.result()
+
      num_tasks = 1
-     if output is None:
-         return (TaskOutput(), num_tasks)
+     if exception is None:
+         if result is None:
+             return (TaskOutput(), num_tasks, {})
+         else:
+             return (_cast_and_validate_TaskOutput(result), num_tasks, {})
      else:
-         return (_cast_and_validate_TaskOutput(output), num_tasks)
+         return (TaskOutput(), num_tasks, {0: exception})


  def run_v2_task_parallel(
@@ -165,14 +166,15 @@ def run_v2_task_parallel(
      images: list[dict[str, Any]],
      task: TaskV2,
      wftask: WorkflowTaskV2,
-     executor: Executor,
+     executor: BaseRunner,
      workflow_dir_local: Path,
      workflow_dir_remote: Optional[Path] = None,
      submit_setup_call: Callable = no_op_submit_setup_call,
- ) -> tuple[TaskOutput, int]:
+     history_item_id: int,
+ ) -> tuple[TaskOutput, int, dict[int, BaseException]]:

      if len(images) == 0:
-         return (TaskOutput(), 0)
+         return (TaskOutput(), 0, {})

      _check_parallelization_list_size(images)

@@ -194,30 +196,36 @@ def run_v2_task_parallel(
          )
          list_function_kwargs[-1][_COMPONENT_KEY_] = _index_to_component(ind)

-     results_iterator = executor.map(
+     results, exceptions = executor.multisubmit(
          functools.partial(
              run_single_task,
              wftask=wftask,
              command=task.command_parallel,
-             workflow_dir_local=workflow_dir_local,
-             workflow_dir_remote=workflow_dir_remote,
+             root_dir_local=workflow_dir_local,
+             root_dir_remote=workflow_dir_remote,
          ),
-         list_function_kwargs,
+         list_parameters=list_function_kwargs,
+         history_item_id=history_item_id,
          **executor_options,
      )
-     # Explicitly iterate over the whole list, so that all futures are waited
-     outputs = list(results_iterator)

-     # Validate all non-None outputs
-     for ind, output in enumerate(outputs):
-         if output is None:
-             outputs[ind] = TaskOutput()
+     outputs = []
+     for ind in range(len(list_function_kwargs)):
+         if ind in results.keys():
+             result = results[ind]
+             if result is None:
+                 output = TaskOutput()
+             else:
+                 output = _cast_and_validate_TaskOutput(result)
+             outputs.append(output)
+         elif ind in exceptions.keys():
+             print(f"Bad: {exceptions[ind]}")
          else:
-             outputs[ind] = _cast_and_validate_TaskOutput(output)
+             print("VERY BAD - should have not reached this point")

      num_tasks = len(images)
      merged_output = merge_outputs(outputs)
-     return (merged_output, num_tasks)
+     return (merged_output, num_tasks, exceptions)


  def run_v2_task_compound(
@@ -226,11 +234,12 @@
      zarr_dir: str,
      task: TaskV2,
      wftask: WorkflowTaskV2,
-     executor: Executor,
+     executor: BaseRunner,
      workflow_dir_local: Path,
      workflow_dir_remote: Optional[Path] = None,
      submit_setup_call: Callable = no_op_submit_setup_call,
- ) -> TaskOutput:
+     history_item_id: int,
+ ) -> tuple[TaskOutput, int, dict[int, BaseException]]:

      executor_options_init = _get_executor_options(
          wftask=wftask,
@@ -253,32 +262,40 @@
          zarr_dir=zarr_dir,
          **(wftask.args_non_parallel or {}),
      )
-     future = executor.submit(
+     function_kwargs[_COMPONENT_KEY_] = f"init_{_index_to_component(0)}"
+     result, exception = executor.submit(
          functools.partial(
              run_single_task,
              wftask=wftask,
              command=task.command_non_parallel,
-             workflow_dir_local=workflow_dir_local,
-             workflow_dir_remote=workflow_dir_remote,
+             root_dir_local=workflow_dir_local,
+             root_dir_remote=workflow_dir_remote,
          ),
-         function_kwargs,
+         parameters=function_kwargs,
+         history_item_id=history_item_id,
+         in_compound_task=True,
          **executor_options_init,
      )
-     output = future.result()
-     if output is None:
-         init_task_output = InitTaskOutput()
+
+     num_tasks = 1
+     if exception is None:
+         if result is None:
+             init_task_output = InitTaskOutput()
+         else:
+             init_task_output = _cast_and_validate_InitTaskOutput(result)
      else:
-         init_task_output = _cast_and_validate_InitTaskOutput(output)
+         return (TaskOutput(), num_tasks, {0: exception})
+
      parallelization_list = init_task_output.parallelization_list
      parallelization_list = deduplicate_list(parallelization_list)

-     num_task = 1 + len(parallelization_list)
+     num_tasks = 1 + len(parallelization_list)

      # 3/B: parallel part of a compound task
      _check_parallelization_list_size(parallelization_list)

      if len(parallelization_list) == 0:
-         return (TaskOutput(), 0)
+         return (TaskOutput(), 0, {})

      list_function_kwargs = []
      for ind, parallelization_item in enumerate(parallelization_list):
@@ -289,29 +306,35 @@
                  **(wftask.args_parallel or {}),
              ),
          )
-         list_function_kwargs[-1][_COMPONENT_KEY_] = _index_to_component(ind)
+         list_function_kwargs[-1][
+             _COMPONENT_KEY_
+         ] = f"compute_{_index_to_component(ind)}"

-     results_iterator = executor.map(
+     results, exceptions = executor.multisubmit(
          functools.partial(
              run_single_task,
              wftask=wftask,
              command=task.command_parallel,
-             workflow_dir_local=workflow_dir_local,
-             workflow_dir_remote=workflow_dir_remote,
+             root_dir_local=workflow_dir_local,
+             root_dir_remote=workflow_dir_remote,
          ),
-         list_function_kwargs,
+         list_parameters=list_function_kwargs,
+         history_item_id=history_item_id,
+         in_compound_task=True,
          **executor_options_compute,
      )
-     # Explicitly iterate over the whole list, so that all futures are waited
-     outputs = list(results_iterator)

-     # Validate all non-None outputs
-     for ind, output in enumerate(outputs):
-         if output is None:
-             outputs[ind] = TaskOutput()
-         else:
-             validated_output = _cast_and_validate_TaskOutput(output)
-             outputs[ind] = validated_output
+     outputs = []
+     for ind in range(len(list_function_kwargs)):
+         if ind in results.keys():
+             result = results[ind]
+             if result is None:
+                 output = TaskOutput()
+             else:
+                 output = _cast_and_validate_TaskOutput(result)
+             outputs.append(output)
+         elif ind in exceptions.keys():
+             print(f"Bad: {exceptions[ind]}")

      merged_output = merge_outputs(outputs)
-     return (merged_output, num_task)
+     return (merged_output, num_tasks, exceptions)
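Note on the executor interface: the call sites above no longer use concurrent.futures futures; they call submit() and multisubmit() on the new BaseRunner from fractal_server/app/runner/executors/base_runner.py (added in this release, +114 lines, not shown here). Judging only from these call sites, the interface looks roughly like the sketch below; the exact signatures and docstrings are inferred, not copied from the package.

# Hypothetical sketch of the new runner interface, inferred from the call
# sites in runner_functions.py above (NOT the actual base_runner.py).
from typing import Any, Callable, Optional


class BaseRunner:
    def submit(
        self,
        func: Callable[..., Any],
        parameters: dict[str, Any],
        history_item_id: int,
        in_compound_task: bool = False,
        **kwargs,
    ) -> tuple[Any, Optional[BaseException]]:
        # Run a single (non-parallel or init) task; return (result, exception),
        # where exception is None on success.
        raise NotImplementedError

    def multisubmit(
        self,
        func: Callable[..., Any],
        list_parameters: list[dict[str, Any]],
        history_item_id: int,
        in_compound_task: bool = False,
        **kwargs,
    ) -> tuple[dict[int, Any], dict[int, BaseException]]:
        # Run one task per parameters dict; return per-index results and
        # per-index exceptions, keyed by position in list_parameters.
        raise NotImplementedError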
fractal_server/app/runner/v2/runner_functions_low_level.py CHANGED
@@ -11,11 +11,11 @@ from ..components import _COMPONENT_KEY_
  from ..exceptions import JobExecutionError
  from ..exceptions import TaskExecutionError
  from fractal_server.app.models.v2 import WorkflowTaskV2
- from fractal_server.app.runner.task_files import get_task_file_paths
+ from fractal_server.app.runner.task_files import TaskFiles
  from fractal_server.string_tools import validate_cmd


- def _call_command_wrapper(cmd: str, log_path: Path) -> None:
+ def _call_command_wrapper(cmd: str, log_path: str) -> None:
      """
      Call a command and write its stdout and stderr to files

@@ -50,7 +50,7 @@ def _call_command_wrapper(cmd: str, log_path: Path) -> None:
          raise e

      if result.returncode > 0:
-         with log_path.open("r") as fp_stderr:
+         with open(log_path, "r") as fp_stderr:
              err = fp_stderr.read()
          raise TaskExecutionError(err)
      elif result.returncode < 0:
@@ -60,49 +60,49 @@ def _call_command_wrapper(cmd: str, log_path: Path) -> None:


  def run_single_task(
-     args: dict[str, Any],
+     parameters: dict[str, Any],
      command: str,
      wftask: WorkflowTaskV2,
-     workflow_dir_local: Path,
-     workflow_dir_remote: Optional[Path] = None,
+     root_dir_local: Path,
+     root_dir_remote: Optional[Path] = None,
      logger_name: Optional[str] = None,
  ) -> dict[str, Any]:
      """
-     Runs within an executor.
+     Runs within an executor (AKA on the SLURM cluster).
      """

      logger = logging.getLogger(logger_name)
      logger.debug(f"Now start running {command=}")

-     if not workflow_dir_remote:
-         workflow_dir_remote = workflow_dir_local
+     if not root_dir_remote:
+         root_dir_remote = root_dir_local

      task_name = wftask.task.name

-     component = args.pop(_COMPONENT_KEY_, None)
-     task_files = get_task_file_paths(
-         workflow_dir_local=workflow_dir_local,
-         workflow_dir_remote=workflow_dir_remote,
-         task_order=wftask.order,
+     component = parameters.pop(_COMPONENT_KEY_)
+     task_files = TaskFiles(
+         root_dir_local=root_dir_local,
+         root_dir_remote=root_dir_remote,
          task_name=task_name,
+         task_order=wftask.order,
          component=component,
      )

      # Write arguments to args.json file
-     with task_files.args.open("w") as f:
-         json.dump(args, f, indent=2)
+     with open(task_files.args_file_remote, "w") as f:
+         json.dump(parameters, f, indent=2)

      # Assemble full command
      full_command = (
          f"{command} "
-         f"--args-json {task_files.args.as_posix()} "
-         f"--out-json {task_files.metadiff.as_posix()}"
+         f"--args-json {task_files.args_file_remote} "
+         f"--out-json {task_files.metadiff_file_remote}"
      )

      try:
          _call_command_wrapper(
              full_command,
-             log_path=task_files.log,
+             log_path=task_files.log_file_remote,
          )
      except TaskExecutionError as e:
          e.workflow_task_order = wftask.order
@@ -111,7 +111,7 @@ def run_single_task(
          raise e

      try:
-         with task_files.metadiff.open("r") as f:
+         with open(task_files.metadiff_file_remote, "r") as f:
              out_meta = json.load(f)
      except FileNotFoundError as e:
          logger.debug(
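Note on TaskFiles: get_task_file_paths() used to return Path objects (task_files.args, task_files.metadiff, task_files.log), while the new TaskFiles object exposes string attributes args_file_remote, metadiff_file_remote, and log_file_remote. A minimal stand-in consistent with the call sites above could look as follows; the naming scheme for the generated files is a guess, not the actual implementation in fractal_server/app/runner/task_files.py.

# Illustrative stand-in for TaskFiles, matching only the constructor arguments
# and the attributes referenced in run_single_task above; the real class in
# fractal_server/app/runner/task_files.py may differ.
from pathlib import Path
from typing import Optional


class TaskFiles:
    def __init__(
        self,
        root_dir_local: Path,
        root_dir_remote: Path,
        task_name: str,
        task_order: Optional[int],
        component: str,
    ):
        # Hypothetical naming scheme: one file prefix per (order, name, component).
        prefix = f"{task_order}_{task_name}_{component}"
        self.args_file_remote = str(root_dir_remote / f"{prefix}_args.json")
        self.metadiff_file_remote = str(root_dir_remote / f"{prefix}_metadiff.json")
        self.log_file_remote = str(root_dir_remote / f"{prefix}_log.txt")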
fractal_server/app/schemas/v2/dataset.py CHANGED
@@ -14,27 +14,12 @@ from .._filter_validators import validate_attribute_filters
  from .._filter_validators import validate_type_filters
  from .._validators import root_validate_dict_keys
  from .._validators import valstr
- from .dumps import WorkflowTaskDumpV2
  from .project import ProjectReadV2
- from .workflowtask import WorkflowTaskStatusTypeV2
  from fractal_server.images import SingleImage
  from fractal_server.images.models import AttributeFiltersType
  from fractal_server.urls import normalize_url


- class _DatasetHistoryItemV2(BaseModel):
-     """
-     Class for an item of `Dataset.history`.
-     """
-
-     workflowtask: WorkflowTaskDumpV2
-     status: WorkflowTaskStatusTypeV2
-     parallelization: Optional[dict] = None
-
-
- # CRUD
-
-

  class DatasetCreateV2(BaseModel):
      model_config = ConfigDict(extra="forbid")
@@ -74,8 +59,6 @@ class DatasetReadV2(BaseModel):
      project_id: int
      project: ProjectReadV2

-     history: list[_DatasetHistoryItemV2]
-
      timestamp_created: AwareDatetime

      zarr_dir: str
fractal_server/app/schemas/v2/history.py ADDED
@@ -0,0 +1,23 @@
+ from datetime import datetime
+ from typing import Any
+
+ from pydantic import BaseModel
+ from pydantic import field_serializer
+ from pydantic.types import AwareDatetime
+
+
+ class HistoryItemV2Read(BaseModel):
+     id: int
+     dataset_id: int
+     workflowtask_id: int
+     timestamp_started: AwareDatetime
+     parameters_hash: str
+     num_available_images: int
+     num_current_images: int
+     images: dict[str, str]
+     workflowtask_dump: dict[str, Any]
+     task_group_dump: dict[str, Any]
+
+     @field_serializer("timestamp_started")
+     def serialize_datetime(v: datetime) -> str:
+         return v.isoformat()
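Usage note: HistoryItemV2Read is a plain Pydantic v2 model, and the field_serializer above makes model_dump() return timestamp_started as an ISO-8601 string. A small example with made-up values:

from datetime import datetime, timezone

item = HistoryItemV2Read(
    id=1,
    dataset_id=2,
    workflowtask_id=3,
    timestamp_started=datetime(2025, 2, 24, 11, 0, tzinfo=timezone.utc),
    parameters_hash="abc123",
    num_available_images=10,
    num_current_images=8,
    images={"/some/zarr/url/0": "done"},
    workflowtask_dump={},
    task_group_dump={},
)
# The field serializer turns the aware datetime into an ISO-8601 string.
assert item.model_dump()["timestamp_started"] == "2025-02-24T11:00:00+00:00"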
fractal_server/config.py CHANGED
@@ -705,7 +705,7 @@ class Settings(BaseSettings):

          info = f"FRACTAL_RUNNER_BACKEND={self.FRACTAL_RUNNER_BACKEND}"
          if self.FRACTAL_RUNNER_BACKEND == "slurm":
-             from fractal_server.app.runner.executors.slurm._slurm_config import (  # noqa: E501
+             from fractal_server.app.runner.executors.slurm_common._slurm_config import (  # noqa: E501
                  load_slurm_config_file,
              )

@@ -735,7 +735,7 @@
                  f"Must set FRACTAL_SLURM_WORKER_PYTHON when {info}"
              )

-             from fractal_server.app.runner.executors.slurm._slurm_config import (  # noqa: E501
+             from fractal_server.app.runner.executors.slurm_common._slurm_config import (  # noqa: E501
                  load_slurm_config_file,
              )

fractal_server/migrations/versions/8223fcef886c_image_status.py ADDED
@@ -0,0 +1,63 @@
+ """image status
+
+ Revision ID: 8223fcef886c
+ Revises: 87cd72a537a2
+ Create Date: 2025-02-24 11:32:22.267338
+
+ """
+ import sqlalchemy as sa
+ import sqlmodel
+ from alembic import op
+
+
+ # revision identifiers, used by Alembic.
+ revision = "8223fcef886c"
+ down_revision = "87cd72a537a2"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.create_table(
+         "imagestatus",
+         sa.Column(
+             "zarr_url", sqlmodel.sql.sqltypes.AutoString(), nullable=False
+         ),
+         sa.Column("workflowtask_id", sa.Integer(), nullable=False),
+         sa.Column("dataset_id", sa.Integer(), nullable=False),
+         sa.Column(
+             "parameters_hash",
+             sqlmodel.sql.sqltypes.AutoString(),
+             nullable=False,
+         ),
+         sa.Column(
+             "status", sqlmodel.sql.sqltypes.AutoString(), nullable=False
+         ),
+         sa.Column(
+             "logfile", sqlmodel.sql.sqltypes.AutoString(), nullable=False
+         ),
+         sa.ForeignKeyConstraint(
+             ["dataset_id"],
+             ["datasetv2.id"],
+             name=op.f("fk_imagestatus_dataset_id_datasetv2"),
+         ),
+         sa.ForeignKeyConstraint(
+             ["workflowtask_id"],
+             ["workflowtaskv2.id"],
+             name=op.f("fk_imagestatus_workflowtask_id_workflowtaskv2"),
+         ),
+         sa.PrimaryKeyConstraint(
+             "zarr_url",
+             "workflowtask_id",
+             "dataset_id",
+             name=op.f("pk_imagestatus"),
+         ),
+     )
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.drop_table("imagestatus")
+     # ### end Alembic commands ###
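For reference, the imagestatus table created by this migration presumably backs a SQLModel class in the new fractal_server/app/models/v2/history.py (+53 lines, not shown in this diff). Below is a rough reconstruction from the columns and constraints above; class name and field options are inferred, not taken from the package.

# Hypothetical SQLModel mapping for the "imagestatus" table, inferred from the
# migration above; the actual model in fractal_server/app/models/v2/history.py
# may differ in naming and options.
from sqlmodel import Field, SQLModel


class ImageStatus(SQLModel, table=True):
    # Composite primary key: (zarr_url, workflowtask_id, dataset_id)
    zarr_url: str = Field(primary_key=True)
    workflowtask_id: int = Field(primary_key=True, foreign_key="workflowtaskv2.id")
    dataset_id: int = Field(primary_key=True, foreign_key="datasetv2.id")
    parameters_hash: str
    status: str
    logfile: str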
fractal_server/migrations/versions/87cd72a537a2_add_historyitem_table.py ADDED
@@ -0,0 +1,68 @@
+ """Add HistoryItem table
+
+ Revision ID: 87cd72a537a2
+ Revises: af1ef1c83c9b
+ Create Date: 2025-02-18 10:48:16.401995
+
+ """
+ import sqlalchemy as sa
+ import sqlmodel
+ from alembic import op
+ from sqlalchemy.dialects import postgresql
+
+ # revision identifiers, used by Alembic.
+ revision = "87cd72a537a2"
+ down_revision = "af1ef1c83c9b"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.create_table(
+         "historyitemv2",
+         sa.Column("id", sa.Integer(), nullable=False),
+         sa.Column("dataset_id", sa.Integer(), nullable=False),
+         sa.Column("workflowtask_id", sa.Integer(), nullable=True),
+         sa.Column(
+             "timestamp_started", sa.DateTime(timezone=True), nullable=False
+         ),
+         sa.Column(
+             "workflowtask_dump",
+             postgresql.JSONB(astext_type=sa.Text()),
+             nullable=False,
+         ),
+         sa.Column(
+             "task_group_dump",
+             postgresql.JSONB(astext_type=sa.Text()),
+             nullable=False,
+         ),
+         sa.Column(
+             "parameters_hash",
+             sqlmodel.sql.sqltypes.AutoString(),
+             nullable=False,
+         ),
+         sa.Column("num_available_images", sa.Integer(), nullable=False),
+         sa.Column("num_current_images", sa.Integer(), nullable=False),
+         sa.Column(
+             "images", postgresql.JSONB(astext_type=sa.Text()), nullable=False
+         ),
+         sa.ForeignKeyConstraint(
+             ["dataset_id"],
+             ["datasetv2.id"],
+             name=op.f("fk_historyitemv2_dataset_id_datasetv2"),
+         ),
+         sa.ForeignKeyConstraint(
+             ["workflowtask_id"],
+             ["workflowtaskv2.id"],
+             name=op.f("fk_historyitemv2_workflowtask_id_workflowtaskv2"),
+         ),
+         sa.PrimaryKeyConstraint("id", name=op.f("pk_historyitemv2")),
+     )
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.drop_table("historyitemv2")
+     # ### end Alembic commands ###
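Similarly, the historyitemv2 table presumably maps to a SQLModel class along these lines; again, this is an inferred sketch based only on the migration columns, not the actual model code.

# Hypothetical SQLModel mapping for "historyitemv2", inferred from the columns
# above; field options are guesses.
from datetime import datetime
from typing import Any, Optional

from sqlalchemy import Column, DateTime
from sqlalchemy.dialects.postgresql import JSONB
from sqlmodel import Field, SQLModel


class HistoryItemV2(SQLModel, table=True):
    id: Optional[int] = Field(default=None, primary_key=True)
    dataset_id: int = Field(foreign_key="datasetv2.id")
    workflowtask_id: Optional[int] = Field(
        default=None, foreign_key="workflowtaskv2.id"
    )
    timestamp_started: datetime = Field(
        sa_column=Column(DateTime(timezone=True), nullable=False)
    )
    workflowtask_dump: dict[str, Any] = Field(
        sa_column=Column(JSONB, nullable=False)
    )
    task_group_dump: dict[str, Any] = Field(sa_column=Column(JSONB, nullable=False))
    parameters_hash: str
    num_available_images: int
    num_current_images: int
    images: dict[str, str] = Field(sa_column=Column(JSONB, nullable=False))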
{fractal_server-2.13.1.dist-info → fractal_server-2.14.0a1.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: fractal-server
- Version: 2.13.1
+ Version: 2.14.0a1
  Summary: Backend component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause