fractal-server 2.14.0a6__py3-none-any.whl → 2.14.0a7__py3-none-any.whl

This diff compares two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions exactly as they appear in their public registries.
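At a glance, 2.14.0a7 introduces two converter task types, `converter_non_parallel` and `converter_compound`, alongside the existing `non_parallel`, `parallel`, and `compound` types. The task type becomes an explicit (optional) field on `TaskCreateV2` and `TaskManifestV2`, with the old command-based inference kept as a deprecated fallback; the runner executors take a `task_type` argument in place of the former `in_compound_task` flag; and the SLURM-sudo runner drops a block of commented-out image-status bookkeeping.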
@@ -1 +1 @@
- __VERSION__ = "2.14.0a6"
+ __VERSION__ = "2.14.0a7"
@@ -152,14 +152,7 @@ async def create_task(
      db=db,
  )

- if task.command_non_parallel is None:
-     task_type = "parallel"
- elif task.command_parallel is None:
-     task_type = "non_parallel"
- else:
-     task_type = "compound"
-
- if task_type == "parallel" and (
+ if task.type == "parallel" and (
      task.args_schema_non_parallel is not None
      or task.meta_non_parallel is not None
  ):
@@ -170,7 +163,7 @@ async def create_task(
          "`TaskV2.args_schema_non_parallel` if TaskV2 is parallel"
      ),
  )
- elif task_type == "non_parallel" and (
+ elif task.type == "non_parallel" and (
      task.args_schema_parallel is not None or task.meta_parallel is not None
  ):
      raise HTTPException(
@@ -183,7 +176,7 @@ async def create_task(

  # Add task

- db_task = TaskV2(**task.model_dump(exclude_unset=True), type=task_type)
+ db_task = TaskV2(**task.model_dump(exclude_unset=True))
  pkg_name = db_task.name
  await _verify_non_duplication_user_constraint(
      db=db, pkg_name=pkg_name, user_id=user.id, version=db_task.version
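With type inference moved into the schema (see the `set_task_type` validator further below), the route no longer derives a local `task_type`. A minimal sketch of the new flow, with a hypothetical task name and command:

    # Hypothetical payload; `type` is filled in by a TaskCreateV2 validator.
    task = TaskCreateV2(name="thresholding", command_parallel="python task.py")
    print(task.type)  # "parallel", since command_non_parallel is None
    db_task = TaskV2(**task.model_dump(exclude_unset=True))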
@@ -56,7 +56,14 @@ async def replace_workflowtask(
  )

  # Preliminary checks
- if old_wftask.task_type != new_task.type:
+ EQUIVALENT_TASK_TYPES = [
+     {"non_parallel", "converter_non_parallel"},
+     {"compound", "converter_compound"},
+ ]
+ if (
+     old_wftask.task_type != new_task.type
+     and {old_wftask.task_type, new_task.type} not in EQUIVALENT_TASK_TYPES
+ ):
      raise HTTPException(
          status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
          detail=(
@@ -64,6 +71,7 @@ async def replace_workflowtask(
          f"{old_wftask.task_type} to {new_task.type}."
      ),
  )
+
  if replace.args_non_parallel is not None and new_task.type == "parallel":
      raise HTTPException(
          status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
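Each entry of `EQUIVALENT_TASK_TYPES` is a set, so `{old, new}` membership checks the pair in either direction. A small self-contained illustration of the check:

    EQUIVALENT_TASK_TYPES = [
        {"non_parallel", "converter_non_parallel"},
        {"compound", "converter_compound"},
    ]

    def types_compatible(old: str, new: str) -> bool:
        # Equal types always pass; otherwise the unordered pair must be listed.
        return old == new or {old, new} in EQUIVALENT_TASK_TYPES

    assert types_compatible("converter_compound", "compound")
    assert not types_compatible("parallel", "non_parallel")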
@@ -259,7 +267,10 @@ async def update_workflowtask(
          "parallel."
      ),
  )
- elif db_wf_task.task_type == "non_parallel" and (
+ elif db_wf_task.task_type in [
+     "non_parallel",
+     "converter_non_parallel",
+ ] and (
      workflow_task_update.args_parallel is not None
      or workflow_task_update.meta_parallel is not None
  ):
@@ -1,6 +1,20 @@
  from typing import Any

  from fractal_server.app.runner.components import _COMPONENT_KEY_
+ from fractal_server.app.schemas.v2.task import TaskTypeType
+
+
+ TASK_TYPES_SUBMIT: list[TaskTypeType] = [
+     "compound",
+     "converter_compound",
+     "non_parallel",
+     "converter_non_parallel",
+ ]
+ TASK_TYPES_MULTISUBMIT: list[TaskTypeType] = [
+     "compound",
+     "converter_compound",
+     "parallel",
+ ]


  class BaseRunner(object):
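Read together, the two lists encode which runner entry points each task type may use: every type except `parallel` can call `submit` (its non-parallel or init phase), while only types with a parallel phase can call `multisubmit`; the compound variants appear in both because they run an init `submit` followed by a compute `multisubmit`. As a sketch of the implied mapping:

    # Sketch only; derived from TASK_TYPES_SUBMIT / TASK_TYPES_MULTISUBMIT above.
    PHASES = {
        "non_parallel": ("submit",),
        "converter_non_parallel": ("submit",),
        "parallel": ("multisubmit",),
        "compound": ("submit", "multisubmit"),
        "converter_compound": ("submit", "multisubmit"),
    }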
@@ -16,7 +30,7 @@ class BaseRunner(object):
      func: callable,
      parameters: dict[str, Any],
      history_item_id: int,
-     in_compound_task: bool,
+     task_type: TaskTypeType,
      **kwargs,
  ) -> tuple[Any, BaseException]:
      """
@@ -25,16 +39,13 @@ class BaseRunner(object):
      # FIXME: Describe more in detail

      Args:
-         func:
-             Function to be executed.
+         func: Function to be executed.
          parameters:
              Dictionary of parameters. Must include `zarr_urls` key.
          history_item_id:
              Database ID of the corresponding `HistoryItemV2` entry.
-         in_compound_task:
-             Whether this is the init part of a compound task.
-         kwargs:
-             Runner-specific parameters.
+         task_type: Task type.
+         kwargs: Runner-specific parameters.
      """
      raise NotImplementedError()

@@ -43,7 +54,7 @@ class BaseRunner(object):
      func: callable,
      list_parameters: list[dict[str, Any]],
      history_item_id: int,
-     in_compound_task: bool,
+     task_type: TaskTypeType,
      **kwargs,
  ) -> tuple[dict[int, Any], dict[int, BaseException]]:
      """
@@ -52,33 +63,44 @@ class BaseRunner(object):
      # FIXME: Describe more in detail

      Args:
-         func:
-             Function to be executed.
+         func: Function to be executed.
          list_parameters:
              List of dictionaries of parameters. Each one must include a
              `zarr_url` key.
          history_item_id:
              Database ID of the corresponding `HistoryItemV2` entry.
-         in_compound_task:
-             Whether this is the compute part of a compound task.
-         kwargs:
-             Runner-specific parameters.
+         task_type: Task type.
+         kwargs: Runner-specific parameters.
      """
      raise NotImplementedError()

- def validate_submit_parameters(self, parameters: dict[str, Any]) -> None:
+ def validate_submit_parameters(
+     self,
+     parameters: dict[str, Any],
+     task_type: TaskTypeType,
+ ) -> None:
      """
      Validate parameters for `submit` method

      Args:
          parameters: Parameters dictionary.
+         task_type: Task type.s
      """
+     if task_type not in TASK_TYPES_SUBMIT:
+         raise ValueError(f"Invalid {task_type=} for `submit`.")
      if not isinstance(parameters, dict):
          raise ValueError("`parameters` must be a dictionary.")
-     if "zarr_urls" not in parameters.keys():
-         raise ValueError(
-             f"No 'zarr_urls' key in in {list(parameters.keys())}"
-         )
+     if task_type in ["non_parallel", "compound"]:
+         if "zarr_urls" not in parameters.keys():
+             raise ValueError(
+                 f"No 'zarr_urls' key in in {list(parameters.keys())}"
+             )
+     elif task_type in ["converter_non_parallel", "converter_compound"]:
+         if "zarr_urls" in parameters.keys():
+             raise ValueError(
+                 f"Forbidden 'zarr_urls' key in {list(parameters.keys())}"
+             )
+
      if _COMPONENT_KEY_ not in parameters.keys():
          raise ValueError(
              f"No '{_COMPONENT_KEY_}' key in in {list(parameters.keys())}"
@@ -87,21 +109,26 @@ class BaseRunner(object):
  def validate_multisubmit_parameters(
      self,
      list_parameters: list[dict[str, Any]],
-     in_compound_task: bool,
+     task_type: TaskTypeType,
  ) -> None:
      """
      Validate parameters for `multi_submit` method

      Args:
          list_parameters: List of parameters dictionaries.
-         in_compound_task:
-             Whether this is the compute part of a compound task.
+         task_type: Task type.
      """
+     if task_type not in TASK_TYPES_MULTISUBMIT:
+         raise ValueError(f"Invalid {task_type=} for `multisubmit`.")
+
+     if not isinstance(list_parameters, list):
+         raise ValueError("`parameters` must be a list.")
+
      for single_kwargs in list_parameters:
          if not isinstance(single_kwargs, dict):
-             raise RuntimeError("kwargs itemt must be a dictionary.")
+             raise ValueError("kwargs itemt must be a dictionary.")
          if "zarr_url" not in single_kwargs.keys():
-             raise RuntimeError(
+             raise ValueError(
                  f"No 'zarr_url' key in in {list(single_kwargs.keys())}"
              )
          if _COMPONENT_KEY_ not in single_kwargs.keys():
@@ -109,7 +136,7 @@ class BaseRunner(object):
              f"No '{_COMPONENT_KEY_}' key "
              f"in {list(single_kwargs.keys())}"
          )
-     if not in_compound_task:
+     if task_type == "parallel":
          zarr_urls = [kwargs["zarr_url"] for kwargs in list_parameters]
          if len(zarr_urls) != len(set(zarr_urls)):
-             raise RuntimeError("Non-unique zarr_urls")
+             raise ValueError("Non-unique zarr_urls")
@@ -9,9 +9,9 @@ from ._local_config import LocalBackendConfig
  from fractal_server.app.runner.components import _COMPONENT_KEY_
  from fractal_server.app.runner.executors.base_runner import BaseRunner
  from fractal_server.app.runner.task_files import TaskFiles
+ from fractal_server.app.schemas.v2.task import TaskTypeType
  from fractal_server.logger import set_logger

-
  logger = set_logger(__name__)


@@ -50,6 +50,7 @@ class LocalRunner(BaseRunner):
      func: callable,
      parameters: dict[str, Any],
      task_files: TaskFiles,
+     task_type: TaskTypeType,
      local_backend_config: Optional[LocalBackendConfig] = None,
  ) -> tuple[Any, Exception]:
      logger.debug("[submit] START")
@@ -61,7 +62,7 @@ class LocalRunner(BaseRunner):
          component=parameters[_COMPONENT_KEY_],
      )

-     self.validate_submit_parameters(parameters)
+     self.validate_submit_parameters(parameters, task_type=task_type)
      workdir_local = current_task_files.wftask_subfolder_local
      workdir_local.mkdir()

@@ -83,18 +84,18 @@ class LocalRunner(BaseRunner):
      func: callable,
      list_parameters: list[dict],
      task_files: TaskFiles,
-     in_compound_task: bool = False,
+     task_type: TaskTypeType,
      local_backend_config: Optional[LocalBackendConfig] = None,
  ):
      logger.debug(f"[multisubmit] START, {len(list_parameters)=}")

      self.validate_multisubmit_parameters(
          list_parameters=list_parameters,
-         in_compound_task=in_compound_task,
+         task_type=task_type,
      )

      workdir_local = task_files.wftask_subfolder_local
-     if not in_compound_task:
+     if task_type not in ["compound", "converter_compound"]:
          workdir_local.mkdir()

      # Get local_backend_config
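The mkdir guard keys off the task type instead of a boolean: for the two compound variants the per-task folder was already created by the init-phase `submit`, so `multisubmit` only creates it when it is the sole phase. Sketch of the invariant (not additional package code):

    if task_type not in ["compound", "converter_compound"]:
        workdir_local.mkdir()  # single-phase parallel task: create it here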
@@ -30,15 +30,11 @@ from fractal_server.app.runner.executors.slurm_common._slurm_config import (
  )
  from fractal_server.app.runner.filenames import SHUTDOWN_FILENAME
  from fractal_server.app.runner.task_files import TaskFiles
+ from fractal_server.app.schemas.v2.task import TaskTypeType
  from fractal_server.config import get_settings
  from fractal_server.logger import set_logger
  from fractal_server.syringe import Inject

- # from fractal_server.app.history import ImageStatus
- # from fractal_server.app.history import update_all_images
- # from fractal_server.app.history import update_single_image
- # from fractal_server.app.history import update_single_image_logfile
-

  logger = set_logger(__name__)

@@ -426,7 +422,7 @@ class RunnerSlurmSudo(BaseRunner):
      history_item_id: int,
      task_files: TaskFiles,
      slurm_config: SlurmConfig,
-     in_compound_task: bool = False,
+     task_type: TaskTypeType,
  ) -> tuple[Any, Exception]:
      workdir_local = task_files.wftask_subfolder_local
      workdir_remote = task_files.wftask_subfolder_remote
@@ -439,21 +435,9 @@ class RunnerSlurmSudo(BaseRunner):
  )

  if self.jobs != {}:
-     if not in_compound_task:
-         pass
-         # update_all_images(
-         #     history_item_id=history_item_id,
-         #     status=ImageStatus.FAILED,
-         # )
      raise JobExecutionError("Unexpected branch: jobs should be empty.")

  if self.is_shutdown():
-     if not in_compound_task:
-         pass
-         # update_all_images(
-         #     history_item_id=history_item_id,
-         #     status=ImageStatus.FAILED,
-         # )
      raise JobExecutionError("Cannot continue after shutdown.")

  # Validation phase
@@ -505,22 +489,6 @@ class RunnerSlurmSudo(BaseRunner):
          )
          time.sleep(self.slurm_poll_interval)

-     if not in_compound_task:
-         if exception is None:
-             pass
-             # update_all_images(
-             #     history_item_id=history_item_id,
-             #     status=ImageStatus.DONE,
-             #     logfile=LOGFILE,
-             # )
-         else:
-             pass
-             # update_all_images(
-             #     history_item_id=history_item_id,
-             #     status=ImageStatus.FAILED,
-             #     logfile=LOGFILE,
-             # )
-
      return result, exception

  def multisubmit(
@@ -530,20 +498,19 @@ class RunnerSlurmSudo(BaseRunner):
      history_item_id: int,
      task_files: TaskFiles,
      slurm_config: SlurmConfig,
-     in_compound_task: bool = False,
+     task_type: TaskTypeType,
  ):
      # self.scancel_jobs()

      self.validate_multisubmit_parameters(
-         list_parameters=list_parameters,
-         in_compound_task=in_compound_task,
+         list_parameters=list_parameters, task_type=task_type
      )

      workdir_local = task_files.wftask_subfolder_local
      workdir_remote = task_files.wftask_subfolder_remote

      # Create local&remote task subfolders
-     if not in_compound_task:
+     if task_type not in ["converter_compound", "compound"]:
          original_umask = os.umask(0)
          workdir_local.mkdir(parents=True, mode=0o755)
          os.umask(original_umask)
@@ -640,28 +607,6 @@ class RunnerSlurmSudo(BaseRunner):
          result, exception = self._postprocess_single_task(
              task=task
          )
-         if not in_compound_task:
-             pass
-             # update_single_image_logfile(
-             #     history_item_id=history_item_id,
-             #     zarr_url=task.zarr_url,
-             #     logfile=task.task_files.log_file_local,
-             # )
-         if not in_compound_task:
-             if exception is None:
-                 pass
-                 # update_single_image(
-                 #     zarr_url=task.zarr_url,
-                 #     history_item_id=history_item_id,
-                 #     status=ImageStatus.DONE,
-                 # )
-             else:
-                 pass
-                 # update_single_image(
-                 #     zarr_url=task.zarr_url,
-                 #     history_item_id=history_item_id,
-                 #     status=ImageStatus.FAILED,
-                 # )
          if exception is None:
              results[task.index] = result
          else:
@@ -14,6 +14,8 @@ from ....images.tools import find_image_by_zarr_url
  from ..exceptions import JobExecutionError
  from .runner_functions import no_op_submit_setup_call
  from .runner_functions import run_v2_task_compound
+ from .runner_functions import run_v2_task_converter_compound
+ from .runner_functions import run_v2_task_converter_non_parallel
  from .runner_functions import run_v2_task_non_parallel
  from .runner_functions import run_v2_task_parallel
  from .task_interface import TaskOutput
@@ -63,22 +65,26 @@ def execute_tasks_v2(
  # PRE TASK EXECUTION

  # Filter images by types and attributes (in two steps)
- type_filters = copy(current_dataset_type_filters)
- type_filters_patch = merge_type_filters(
-     task_input_types=task.input_types,
-     wftask_type_filters=wftask.type_filters,
- )
- type_filters.update(type_filters_patch)
- type_filtered_images = filter_image_list(
-     images=tmp_images,
-     type_filters=type_filters,
-     attribute_filters=None,
- )
- filtered_images = filter_image_list(
-     images=type_filtered_images,
-     type_filters=None,
-     attribute_filters=job_attribute_filters,
- )
+ if wftask.task_type in ["compound", "parallel", "non_parallel"]:
+     type_filters = copy(current_dataset_type_filters)
+     type_filters_patch = merge_type_filters(
+         task_input_types=task.input_types,
+         wftask_type_filters=wftask.type_filters,
+     )
+     type_filters.update(type_filters_patch)
+     type_filtered_images = filter_image_list(
+         images=tmp_images,
+         type_filters=type_filters,
+         attribute_filters=None,
+     )
+     num_available_images = len(type_filtered_images)
+     filtered_images = filter_image_list(
+         images=type_filtered_images,
+         type_filters=None,
+         attribute_filters=job_attribute_filters,
+     )
+ else:
+     num_available_images = 0

  # Create history item
  with next(get_sync_db()) as db:
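Converter types skip image filtering entirely; since their `submit` parameters may not contain `zarr_urls`, there are no input images to count, and `num_available_images` is stored as zero. Sketch of the branch outcome:

    # For "converter_non_parallel" and "converter_compound":
    num_available_images = 0  # nothing to filter; the task creates images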
@@ -100,7 +106,7 @@ def execute_tasks_v2(
      workflowtask_id=wftask.id,
      workflowtask_dump=workflowtask_dump,
      task_group_dump=task_group_dump,
-     num_available_images=len(type_filtered_images),
+     num_available_images=num_available_images,
      status=XXXStatus.SUBMITTED,
  )
  db.add(history_run)
@@ -126,6 +132,22 @@ def execute_tasks_v2(
          history_run_id=history_run_id,
          dataset_id=dataset.id,
      )
+ elif task.type == "converter_non_parallel":
+     (
+         current_task_output,
+         num_tasks,
+         exceptions,
+     ) = run_v2_task_converter_non_parallel(
+         zarr_dir=zarr_dir,
+         wftask=wftask,
+         task=task,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
+         executor=runner,
+         submit_setup_call=submit_setup_call,
+         history_run_id=history_run_id,
+         dataset_id=dataset.id,
+     )
  elif task.type == "parallel":
      current_task_output, num_tasks, exceptions = run_v2_task_parallel(
          images=filtered_images,
@@ -151,6 +173,22 @@ def execute_tasks_v2(
          history_run_id=history_run_id,
          dataset_id=dataset.id,
      )
+ elif task.type == "converter_compound":
+     (
+         current_task_output,
+         num_tasks,
+         exceptions,
+     ) = run_v2_task_converter_compound(
+         zarr_dir=zarr_dir,
+         wftask=wftask,
+         task=task,
+         workflow_dir_local=workflow_dir_local,
+         workflow_dir_remote=workflow_dir_remote,
+         executor=runner,
+         submit_setup_call=submit_setup_call,
+         history_run_id=history_run_id,
+         dataset_id=dataset.id,
+     )
  else:
      raise ValueError(f"Unexpected error: Invalid {task.type=}.")

@@ -155,6 +155,92 @@ def run_v2_task_non_parallel(
              root_dir_remote=workflow_dir_remote,
          ),
          parameters=function_kwargs,
+         task_type="non_parallel",
+         **executor_options,
+     )
+
+     num_tasks = 1
+     with next(get_sync_db()) as db:
+         if exception is None:
+             db.execute(
+                 update(HistoryUnit)
+                 .where(HistoryUnit.id == history_unit_id)
+                 .values(status=XXXStatus.DONE)
+             )
+             db.commit()
+             if result is None:
+                 return (TaskOutput(), num_tasks, {})
+             else:
+                 return (_cast_and_validate_TaskOutput(result), num_tasks, {})
+         else:
+             db.execute(
+                 update(HistoryUnit)
+                 .where(HistoryUnit.id == history_unit_id)
+                 .values(status=XXXStatus.FAILED)
+             )
+             db.commit()
+             return (TaskOutput(), num_tasks, {0: exception})
+
+
+ def run_v2_task_converter_non_parallel(
+     *,
+     zarr_dir: str,
+     task: TaskV2,
+     wftask: WorkflowTaskV2,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Optional[Path] = None,
+     executor: BaseRunner,
+     submit_setup_call: callable = no_op_submit_setup_call,
+     dataset_id: int,
+     history_run_id: int,
+ ) -> tuple[TaskOutput, int, dict[int, BaseException]]:
+     """
+     This runs server-side (see `executor` argument)
+     """
+
+     if workflow_dir_remote is None:
+         workflow_dir_remote = workflow_dir_local
+         logging.warning(
+             "In `run_single_task`, workflow_dir_remote=None. Is this right?"
+         )
+         workflow_dir_remote = workflow_dir_local
+
+     executor_options = submit_setup_call(
+         wftask=wftask,
+         root_dir_local=workflow_dir_local,
+         root_dir_remote=workflow_dir_remote,
+         which_type="non_parallel",
+     )
+
+     function_kwargs = {
+         "zarr_dir": zarr_dir,
+         _COMPONENT_KEY_: _index_to_component(0),
+         **(wftask.args_non_parallel or {}),
+     }
+
+     # Database History operations
+     with next(get_sync_db()) as db:
+         history_unit = HistoryUnit(
+             history_run_id=history_run_id,
+             status=XXXStatus.SUBMITTED,
+             logfile=None,  # FIXME
+             zarr_urls=[],
+         )
+         db.add(history_unit)
+         db.commit()
+         db.refresh(history_unit)
+         history_unit_id = history_unit.id
+
+     result, exception = executor.submit(
+         functools.partial(
+             run_single_task,
+             wftask=wftask,
+             command=task.command_non_parallel,
+             root_dir_local=workflow_dir_local,
+             root_dir_remote=workflow_dir_remote,
+         ),
+         task_type="converter_non_parallel",
+         parameters=function_kwargs,
          **executor_options,
      )

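Compared with `run_v2_task_non_parallel`, the converter variant builds its kwargs from `zarr_dir` alone and records `zarr_urls=[]` on its `HistoryUnit`. Sketch of the parameter contract (the path is hypothetical):

    function_kwargs = {
        "zarr_dir": "/data/output.zarr",  # hypothetical target
        _COMPONENT_KEY_: _index_to_component(0),
        # no "zarr_urls": validate_submit_parameters forbids it for converters
    }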
@@ -255,6 +341,7 @@ def run_v2_task_parallel(
              root_dir_remote=workflow_dir_remote,
          ),
          list_parameters=list_function_kwargs,
+         task_type="parallel",
          **executor_options,
      )

@@ -365,6 +452,159 @@ def run_v2_task_compound(
              root_dir_remote=workflow_dir_remote,
          ),
          parameters=function_kwargs,
+         task_type="compound",
+         **executor_options_init,
+     )
+
+     num_tasks = 1
+     if exception is None:
+         if result is None:
+             init_task_output = InitTaskOutput()
+         else:
+             init_task_output = _cast_and_validate_InitTaskOutput(result)
+     else:
+         with next(get_sync_db()) as db:
+             db.execute(
+                 update(HistoryUnit)
+                 .where(HistoryUnit.id == history_unit_id)
+                 .values(status=XXXStatus.FAILED)
+             )
+             db.commit()
+         return (TaskOutput(), num_tasks, {0: exception})
+
+     parallelization_list = init_task_output.parallelization_list
+     parallelization_list = deduplicate_list(parallelization_list)
+
+     num_tasks = 1 + len(parallelization_list)
+
+     # 3/B: parallel part of a compound task
+     _check_parallelization_list_size(parallelization_list)
+
+     if len(parallelization_list) == 0:
+         with next(get_sync_db()) as db:
+             db.execute(
+                 update(HistoryUnit)
+                 .where(HistoryUnit.id == history_unit_id)
+                 .values(status=XXXStatus.DONE)
+             )
+             db.commit()
+         return (TaskOutput(), 0, {})
+
+     list_function_kwargs = [
+         {
+             "zarr_url": parallelization_item.zarr_url,
+             "init_args": parallelization_item.init_args,
+             _COMPONENT_KEY_: f"compute_{_index_to_component(ind)}",
+             **(wftask.args_parallel or {}),
+         }
+         for ind, parallelization_item in enumerate(parallelization_list)
+     ]
+
+     results, exceptions = executor.multisubmit(
+         functools.partial(
+             run_single_task,
+             wftask=wftask,
+             command=task.command_parallel,
+             root_dir_local=workflow_dir_local,
+             root_dir_remote=workflow_dir_remote,
+         ),
+         list_parameters=list_function_kwargs,
+         task_type="compound",
+         **executor_options_compute,
+     )
+
+     outputs = []
+     failure = False
+     for ind in range(len(list_function_kwargs)):
+         if ind in results.keys():
+             result = results[ind]
+             if result is None:
+                 output = TaskOutput()
+             else:
+                 output = _cast_and_validate_TaskOutput(result)
+             outputs.append(output)
+
+         elif ind in exceptions.keys():
+             print(f"Bad: {exceptions[ind]}")
+             failure = True
+         else:
+             print("VERY BAD - should have not reached this point")
+
+     with next(get_sync_db()) as db:
+         if failure:
+             db.execute(
+                 update(HistoryUnit)
+                 .where(HistoryUnit.id == history_unit_id)
+                 .values(status=XXXStatus.FAILED)
+             )
+         else:
+             db.execute(
+                 update(HistoryUnit)
+                 .where(HistoryUnit.id == history_unit_id)
+                 .values(status=XXXStatus.DONE)
+             )
+         db.commit()
+
+     merged_output = merge_outputs(outputs)
+     return (merged_output, num_tasks, exceptions)
+
+
+ def run_v2_task_converter_compound(
+     *,
+     zarr_dir: str,
+     task: TaskV2,
+     wftask: WorkflowTaskV2,
+     executor: BaseRunner,
+     workflow_dir_local: Path,
+     workflow_dir_remote: Optional[Path] = None,
+     submit_setup_call: callable = no_op_submit_setup_call,
+     dataset_id: int,
+     history_run_id: int,
+ ) -> tuple[TaskOutput, int, dict[int, BaseException]]:
+     executor_options_init = submit_setup_call(
+         wftask=wftask,
+         root_dir_local=workflow_dir_local,
+         root_dir_remote=workflow_dir_remote,
+         which_type="non_parallel",
+     )
+     executor_options_compute = submit_setup_call(
+         wftask=wftask,
+         root_dir_local=workflow_dir_local,
+         root_dir_remote=workflow_dir_remote,
+         which_type="parallel",
+     )
+
+     # 3/A: non-parallel init task
+     function_kwargs = {
+         "zarr_dir": zarr_dir,
+         _COMPONENT_KEY_: f"init_{_index_to_component(0)}",
+         **(wftask.args_non_parallel or {}),
+     }
+
+     # Create database History entries
+     with next(get_sync_db()) as db:
+         # Create a single `HistoryUnit` for the whole compound task
+         history_unit = HistoryUnit(
+             history_run_id=history_run_id,
+             status=XXXStatus.SUBMITTED,
+             logfile=None,  # FIXME
+             zarr_urls=[],
+         )
+         db.add(history_unit)
+         db.commit()
+         db.refresh(history_unit)
+         history_unit_id = history_unit.id
+
+     result, exception = executor.submit(
+         functools.partial(
+             run_single_task,
+             wftask=wftask,
+             command=task.command_non_parallel,
+             root_dir_local=workflow_dir_local,
+             root_dir_remote=workflow_dir_remote,
+         ),
+         parameters=function_kwargs,
+         task_type="converter_compound",
          **executor_options_init,
      )

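Both compound variants share the same two-phase shape around a single `HistoryUnit`; schematically (a sketch with hypothetical callables, not the literal package code):

    # Phase 1: non-parallel init, yielding a parallelization list.
    result, exception = executor.submit(
        init_call, parameters=init_kwargs, task_type="converter_compound"
    )
    # Phase 2: one compute call per parallelization item.
    results, exceptions = executor.multisubmit(
        compute_call,
        list_parameters=compute_kwargs_list,
        task_type="converter_compound",  # the same type tags both phases
    )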
@@ -421,7 +661,7 @@ def run_v2_task_compound(
              root_dir_remote=workflow_dir_remote,
          ),
          list_parameters=list_function_kwargs,
-         in_compound_task=True,
+         task_type="converter_compound",
          **executor_options_compute,
      )

@@ -1,4 +1,5 @@
  from typing import Any
+ from typing import Literal
  from typing import Optional

  from pydantic import BaseModel
@@ -56,6 +57,16 @@ class TaskManifestV2(BaseModel):
  modality: Optional[str] = None
  tags: list[str] = Field(default_factory=list)

+ type: Optional[
+     Literal[
+         "compound",
+         "converter_compound",
+         "non_parallel",
+         "converter_non_parallel",
+         "parallel",
+     ]
+ ] = None
+
  @model_validator(mode="after")
  def validate_executable_args_meta(self):
      executable_non_parallel = self.executable_non_parallel
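Task packages can now declare the type explicitly in their manifest instead of relying on inference. A hypothetical manifest fragment:

    task_entry = {
        "name": "Create OME-Zarr structure",
        "executable_non_parallel": "create_zarr.py",
        "type": "converter_non_parallel",  # one of the five literals above
    }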
@@ -13,8 +13,20 @@ from .._validators import cant_set_none
  from fractal_server.app.schemas._validators import NonEmptyString
  from fractal_server.app.schemas._validators import val_unique_list
  from fractal_server.app.schemas._validators import valdict_keys
+ from fractal_server.logger import set_logger
  from fractal_server.string_tools import validate_cmd

+ TaskTypeType = Literal[
+     "compound",
+     "converter_compound",
+     "non_parallel",
+     "converter_non_parallel",
+     "parallel",
+ ]
+
+
+ logger = set_logger(__name__)
+

  class TaskCreateV2(BaseModel):
      model_config = ConfigDict(extra="forbid")
@@ -41,6 +53,8 @@ class TaskCreateV2(BaseModel):
      tags: list[NonEmptyString] = Field(default_factory=list)
      authors: Optional[NonEmptyString] = None

+     type: Optional[TaskTypeType] = None
+
      # Validators

      @field_validator(
@@ -69,6 +83,23 @@ class TaskCreateV2(BaseModel):

      return self

+ @model_validator(mode="after")
+ def set_task_type(self):
+     if self.type is None:
+         logger.warning(
+             f"Task type is not set for task '{self.name}', "
+             "which will be deprecated in a future version. "
+             "Please move to `fractal-task-tools`."
+         )
+         if self.command_non_parallel is None:
+             self.type = "parallel"
+         elif self.command_parallel is None:
+             self.type = "non_parallel"
+         else:
+             self.type = "compound"
+
+     return self
+
  _meta_non_parallel = field_validator("meta_non_parallel")(
      classmethod(valdict_keys("meta_non_parallel"))
  )
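The fallback keeps existing clients working: when `type` is omitted it is inferred from which commands are present, with a deprecation warning. Note that the two converter types can never be inferred and must be declared explicitly. For example (hypothetical commands, other fields omitted):

    TaskCreateV2(name="a", command_parallel="cmd").type      # "parallel"
    TaskCreateV2(name="b", command_non_parallel="cmd").type  # "non_parallel"
    TaskCreateV2(
        name="c", command_parallel="cmd", command_non_parallel="cmd"
    ).type                                                   # "compound"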
@@ -127,6 +158,8 @@ class TaskReadV2(BaseModel):
      authors: Optional[str] = None
      tags: list[str]

+     type: Optional[TaskTypeType] = None
+

  class TaskUpdateV2(BaseModel):
      model_config = ConfigDict(extra="forbid")
@@ -16,6 +16,7 @@ from .task import TaskExportV2
  from .task import TaskImportV2
  from .task import TaskImportV2Legacy
  from .task import TaskReadV2
+ from .task import TaskTypeType

  RESERVED_ARGUMENTS = {"zarr_dir", "zarr_url", "zarr_urls", "init_args"}

@@ -113,7 +114,7 @@ class WorkflowTaskReadV2(BaseModel):

      type_filters: dict[str, bool]

-     task_type: str
+     task_type: TaskTypeType
      task_id: int
      task: TaskReadV2

@@ -5,15 +5,6 @@ from fractal_server.app.models.v2 import TaskV2
  from fractal_server.app.schemas.v2 import TaskCreateV2


- def _get_task_type(task: TaskCreateV2) -> str:
-     if task.command_non_parallel is None:
-         return "parallel"
-     elif task.command_parallel is None:
-         return "non_parallel"
-     else:
-         return "compound"
-
-
  def create_db_tasks_and_update_task_group(
      *,
      task_group_id: int,
@@ -31,13 +22,7 @@ def create_db_tasks_and_update_task_group(
      Returns:
          Updated `TaskGroupV2` object.
      """
-     actual_task_list = [
-         TaskV2(
-             **task.model_dump(),
-             type=_get_task_type(task),
-         )
-         for task in task_list
-     ]
+     actual_task_list = [TaskV2(**task.model_dump()) for task in task_list]
      task_group = db.get(TaskGroupV2, task_group_id)
      task_group.task_list = actual_task_list
      db.add(task_group)
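With `type` a first-class field on `TaskCreateV2`, it travels through `model_dump()` like any other field, which is what makes the `_get_task_type` helper removable:

    task = TaskCreateV2(name="x", command_parallel="cmd")  # hypothetical
    assert task.model_dump()["type"] == "parallel"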
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: fractal-server
- Version: 2.14.0a6
+ Version: 2.14.0a7
  Summary: Backend component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause
@@ -1,4 +1,4 @@
- fractal_server/__init__.py,sha256=AnugSNpe3C8nki8yFEbYV-Vn26Um07z3ucouM8Qt1BE,25
+ fractal_server/__init__.py,sha256=1JU9zoVeJhUgQtGc2wlEGMoG-HAWT8I6UZKOOCDkSGA,25
  fractal_server/__main__.py,sha256=rkM8xjY1KeS3l63irB8yCrlVobR-73uDapC4wvrIlxI,6957
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -44,14 +44,14 @@ fractal_server/app/routes/api/v2/job.py,sha256=m89FTh9Px25oXCeWj2k2NdGWQaO2oxMh-
  fractal_server/app/routes/api/v2/project.py,sha256=hMvL9QLPUcAAiPGy6ta2LBLTVRozJsfvBPl5D06_MHg,6666
  fractal_server/app/routes/api/v2/status_legacy.py,sha256=sJLHGGHI9so_Sa4-8JuhMGBPeE6n4K2DmDuiw6IB4XY,6317
  fractal_server/app/routes/api/v2/submit.py,sha256=K4OjcSg476JXIeeMUaYdTDk8Qpj5IO5UULvfErI7Y5Y,8624
- fractal_server/app/routes/api/v2/task.py,sha256=z3_SxsXoKsbM9GGNJUdIiZisQwAJSBqvCc7thaJIOTU,7191
+ fractal_server/app/routes/api/v2/task.py,sha256=O7pquZhXIS4lRs5XqHvstiwe8BiCuS-B3ZKJI1g6EJU,6985
  fractal_server/app/routes/api/v2/task_collection.py,sha256=IDNF6sjDuU37HIQ0TuQA-TZIuf7nfHAQXUUNmkrlhLM,12706
  fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=cctW61-C2QYF2KXluS15lLhZJS_kt30Ca6UGLFO32z0,6207
  fractal_server/app/routes/api/v2/task_group.py,sha256=j3zDvVZizB7NWEgVgZU42JCXETkaVkk2ImJPr0jS7BQ,8164
  fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=3o9bCC8ubMwffQPPaxQZy-CjH9IB2RkIReIecI6L2_w,9300
  fractal_server/app/routes/api/v2/workflow.py,sha256=U3iZX5IiFAJ20-R8IjlYToOdm9gXsmtr1lW7ASEH9P8,11689
  fractal_server/app/routes/api/v2/workflow_import.py,sha256=INmnhlMEBJp-vHPR0f940DANPmIidts3OfcooeM_aNA,11205
- fractal_server/app/routes/api/v2/workflowtask.py,sha256=pi4oxWszNgWkDm6oQavanmq4_P5kpcgN3Dfjz9XAuV8,11063
+ fractal_server/app/routes/api/v2/workflowtask.py,sha256=7_syX2EO7ibF6Xkm7HBPhsUYq6aYnKNeC5iSaafQhG4,11342
  fractal_server/app/routes/auth/__init__.py,sha256=fao6CS0WiAjHDTvBzgBVV_bSXFpEAeDBF6Z6q7rRkPc,1658
  fractal_server/app/routes/auth/_aux_auth.py,sha256=UZgauY0V6mSqjte_sYI1cBl2h8bcbLaeWzgpl1jdJlk,4883
  fractal_server/app/routes/auth/current_user.py,sha256=FUegTahlxT3BdPVCQYir0-ogg2YAaZ1DYuLcE_5NC9Y,5906
@@ -71,11 +71,11 @@ fractal_server/app/runner/components.py,sha256=ZF8ct_Ky5k8IAcrmpYOZ-bc6OBgdELEig
  fractal_server/app/runner/compress_folder.py,sha256=HSc1tv7x2DBjBoXwugZlC79rm9GNBIWtQKK9yWn5ZBI,3991
  fractal_server/app/runner/exceptions.py,sha256=_qZ_t8O4umAdJ1ikockiF5rDJuxnEskrGrLjZcnQl7A,4159
  fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- fractal_server/app/runner/executors/base_runner.py,sha256=0E3gbSndXdEAxZwFCiZXrUd8tjEmvLa_ztPBGMJXtUw,3742
+ fractal_server/app/runner/executors/base_runner.py,sha256=v-uUHEvJgRnipTEvU9AmIMkYbkkazqkjm4iAF1GXHEM,4562
  fractal_server/app/runner/executors/local/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/runner/executors/local/_local_config.py,sha256=8dyg2Gh8L2FlG_jJRYLMkcMgVHGEY2w7DME9aaKXFFo,3688
  fractal_server/app/runner/executors/local/_submit_setup.py,sha256=pDc9Q6axXL8_5JAV0byXzGOLOB0bZF88_L9LZykOgwM,1220
- fractal_server/app/runner/executors/local/runner.py,sha256=Iy5Pc8rdkj7IGcwUbkO_UgZNDytXtfMysYtMSu_dlY0,5498
+ fractal_server/app/runner/executors/local/runner.py,sha256=LNql8q6M-Cn_hEV4IMkNP57XFPQJ6eaVd0YIDKJLk60,5621
  fractal_server/app/runner/executors/slurm_common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/runner/executors/slurm_common/_batching.py,sha256=ZY020JZlDS5mfpgpWTChQkyHU7iLE5kx2HVd57_C6XA,8850
  fractal_server/app/runner/executors/slurm_common/_job_states.py,sha256=nuV-Zba38kDrRESOVB3gaGbrSPZc4q7YGichQaeqTW0,238
@@ -91,7 +91,7 @@ fractal_server/app/runner/executors/slurm_ssh/executor.py,sha256=JW6zguEy9XsHebS
  fractal_server/app/runner/executors/slurm_sudo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/runner/executors/slurm_sudo/_check_jobs_status.py,sha256=eZd9lxbObsqc1M3B96IGMJ-1oC0jo8lBOX4Nto97VvE,2036
  fractal_server/app/runner/executors/slurm_sudo/_subprocess_run_as_user.py,sha256=O1bNg1DiSDJmQE0RmOk2Ii47DagiXp5ryd0R6KxO2OM,3177
- fractal_server/app/runner/executors/slurm_sudo/runner.py,sha256=VOWd8bYzCU8C6TziZN7VUurX52lbvNDWLZ2Ht3uvnGc,24057
+ fractal_server/app/runner/executors/slurm_sudo/runner.py,sha256=niDcezFNQ7awfv_7nqABmVhG3oLIf8Z7BwlLxM-chZo,21833
  fractal_server/app/runner/extract_archive.py,sha256=tLpjDrX47OjTNhhoWvm6iNukg8KoieWyTb7ZfvE9eWU,2483
  fractal_server/app/runner/filenames.py,sha256=lPnxKHtdRizr6FqG3zOdjDPyWA7GoaJGTtiuJV0gA8E,70
  fractal_server/app/runner/run_subprocess.py,sha256=c3JbYXq3hX2aaflQU19qJ5Xs6J6oXGNvnTEoAfv2bxc,959
@@ -105,8 +105,8 @@ fractal_server/app/runner/v2/_slurm_ssh.py,sha256=5w_lwQzySx-R3kVg2Bf-21n5JpWjJA
  fractal_server/app/runner/v2/_slurm_sudo.py,sha256=CzWUeC6at_Sj-wU1myjA68ZRKMiLZYBTLv9I9odUxBU,2914
  fractal_server/app/runner/v2/deduplicate_list.py,sha256=IVTE4abBU1bUprFTkxrTfYKnvkNTanWQ-KWh_etiT08,645
  fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
- fractal_server/app/runner/v2/runner.py,sha256=25kOKm4B7_BhUlg7Dh0yzIcJ2izugA_WFNhVf06O0Y4,13970
- fractal_server/app/runner/v2/runner_functions.py,sha256=aH3N_7U_-pqiykvieEVk-W9Y0ir2YohJ9DY8As9zUc8,14445
+ fractal_server/app/runner/v2/runner.py,sha256=3M6xGeccY_AZKua305MgWLuCuhkACoqwKgTV4vuNguk,15534
+ fractal_server/app/runner/v2/runner_functions.py,sha256=tztuEF7m0iE-ca5n6Vb-l1dDUldQXAGxZkD1OTL6I6w,21889
  fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=dvvRK7od8iQ8vdPf80uGUxs3i5i0buGjCodBxSjZ7PQ,3671
  fractal_server/app/runner/v2/task_interface.py,sha256=e1GGQSYd0MyBj1EZvEVzqv-HpVE4YffXOq82WLrCaOc,1866
  fractal_server/app/runner/versions.py,sha256=dSaPRWqmFPHjg20kTCHmi_dmGNcCETflDtDLronNanU,852
@@ -121,14 +121,14 @@ fractal_server/app/schemas/v2/accounting.py,sha256=Wylt7uWTiDIFlHJOh4XEtYitk2FjF
  fractal_server/app/schemas/v2/dataset.py,sha256=9yc-tte70yPPk4CSfy2imykYVbCW8-23K499pi9z2e0,5206
  fractal_server/app/schemas/v2/dumps.py,sha256=2GUjoqeblUvrSoojBz5odoUUf53IABtbY_5GvFZoMVc,1782
  fractal_server/app/schemas/v2/job.py,sha256=KhxQOfncpE_SAu7Wed8CXS2G6onh0v875GkotBvKBTY,4304
- fractal_server/app/schemas/v2/manifest.py,sha256=tMNKtwVUBhVyAmBHK6f2agzvdKuYhuTP-e2cG9t66y0,7045
+ fractal_server/app/schemas/v2/manifest.py,sha256=8mmB0QwxEgAeGgwKD_fT-o-wFy7lb6HxNXbp17IJqNY,7281
  fractal_server/app/schemas/v2/project.py,sha256=ulgCmUnX0w-0jrSjVYIT7sxeK95CSNGh2msXydhsgYI,885
  fractal_server/app/schemas/v2/status.py,sha256=SQaUpQkjFq5c5k5J4rOjNhuQaDOEg8lksPhkKmPU5VU,332
- fractal_server/app/schemas/v2/task.py,sha256=lPXxhwvPLjXuoZGdVSLZq3wTChfBbP7V-uTEpQa1HoU,5753
+ fractal_server/app/schemas/v2/task.py,sha256=-oBE5-2tSUd6u75duAcaFyEFGk3MsQzrdBr_mFldOBc,6627
  fractal_server/app/schemas/v2/task_collection.py,sha256=dLu4sy-su5k5vDJqCZdJMW8mLT5TX2SV60l_RAvKhwY,5930
  fractal_server/app/schemas/v2/task_group.py,sha256=A3SFHNHLKPJyrnDz-wbnQvycetafKltp6UsH1u-euwA,3850
  fractal_server/app/schemas/v2/workflow.py,sha256=ZpM43zTMyLRnEUtkbr_J5DYP00NwjItaC8gweB7GGAA,2172
- fractal_server/app/schemas/v2/workflowtask.py,sha256=qMvwlnFCsnyD8uv8HJ4cFy2-QMm2ETUFlTIbxIFUWxk,8056
+ fractal_server/app/schemas/v2/workflowtask.py,sha256=qOzpsWQ7QUNey-rK9hPB3PKFrRxXrMvcRzqZxRuEYAI,8096
  fractal_server/app/security/__init__.py,sha256=e2cveg5hQpieGD3bSPd5GTOMthvJ-HXH3buSb9WVfEU,14096
  fractal_server/app/security/signup_email.py,sha256=Xd6QYxcdmg0PHpDwmUE8XQmPcOj3Xjy5oROcIMhmltM,1472
  fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
@@ -199,15 +199,15 @@ fractal_server/tasks/v2/templates/4_pip_show.sh,sha256=qm1vPy6AkKhWDjCJGXS8LqCLY
  fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh,sha256=q-6ZUvA6w6FDVEoSd9O63LaJ9tKZc7qAFH72SGPrd_k,284
  fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh,sha256=A2y8RngEjAcRhG-_owA6P7tAdrS_AszFuGXnaeMV8u0,1122
  fractal_server/tasks/v2/utils_background.py,sha256=W_RvihI1aiYPJNsPo8z4wKuA_bPs0UT2huzLihRpjU4,4248
- fractal_server/tasks/v2/utils_database.py,sha256=iLbwkxMxTCgpyKe1JQzdfIR3zBfxohgmLwSdGps1AUo,1274
+ fractal_server/tasks/v2/utils_database.py,sha256=h4Pa-JxcVk7WA4_Pz8CxFT9sX3sA43p2rVRg7FVSgvY,967
  fractal_server/tasks/v2/utils_package_names.py,sha256=RDg__xrvQs4ieeVzmVdMcEh95vGQYrv9Hfal-5EDBM8,2393
  fractal_server/tasks/v2/utils_python_interpreter.py,sha256=5_wrlrTqXyo1YuLZvAW9hrSoh5MyLOzdPVUlUwM7uDQ,955
  fractal_server/tasks/v2/utils_templates.py,sha256=Kc_nSzdlV6KIsO0CQSPs1w70zLyENPqJeTQEFiz4bOg,3124
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
  fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
  fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
- fractal_server-2.14.0a6.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.14.0a6.dist-info/METADATA,sha256=uuUN7vjLvxCYZIqs3uvcubWLPb_PLnaQNrtg4U1DHck,4550
- fractal_server-2.14.0a6.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
- fractal_server-2.14.0a6.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.14.0a6.dist-info/RECORD,,
+ fractal_server-2.14.0a7.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.14.0a7.dist-info/METADATA,sha256=-H-lBPwP4S9jTTot-k3weBihbHTA2utPTxzC1z7h8so,4550
+ fractal_server-2.14.0a7.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
+ fractal_server-2.14.0a7.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.14.0a7.dist-info/RECORD,,