fractal-server 2.11.0a3__py3-none-any.whl → 2.11.0a5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +21 -0
  3. fractal_server/app/routes/api/v2/images.py +1 -1
  4. fractal_server/app/routes/api/v2/submit.py +6 -3
  5. fractal_server/app/routes/api/v2/workflow_import.py +8 -0
  6. fractal_server/app/routes/api/v2/workflowtask.py +43 -24
  7. fractal_server/app/runner/executors/slurm/_slurm_config.py +1 -1
  8. fractal_server/app/runner/executors/slurm/ssh/executor.py +2 -2
  9. fractal_server/app/runner/v2/merge_outputs.py +0 -8
  10. fractal_server/app/runner/v2/runner.py +13 -42
  11. fractal_server/app/runner/v2/task_interface.py +0 -41
  12. fractal_server/app/schemas/_filter_validators.py +1 -1
  13. fractal_server/data_migrations/2_11_0.py +2 -0
  14. fractal_server/images/tools.py +25 -0
  15. fractal_server/logger.py +4 -1
  16. fractal_server/tasks/v2/local/collect.py +2 -2
  17. fractal_server/tasks/v2/local/deactivate.py +2 -2
  18. fractal_server/tasks/v2/local/reactivate.py +2 -3
  19. fractal_server/tasks/v2/ssh/collect.py +2 -2
  20. fractal_server/tasks/v2/ssh/deactivate.py +2 -2
  21. fractal_server/tasks/v2/ssh/reactivate.py +2 -2
  22. fractal_server/utils.py +9 -7
  23. {fractal_server-2.11.0a3.dist-info → fractal_server-2.11.0a5.dist-info}/METADATA +1 -1
  24. {fractal_server-2.11.0a3.dist-info → fractal_server-2.11.0a5.dist-info}/RECORD +27 -27
  25. {fractal_server-2.11.0a3.dist-info → fractal_server-2.11.0a5.dist-info}/LICENSE +0 -0
  26. {fractal_server-2.11.0a3.dist-info → fractal_server-2.11.0a5.dist-info}/WHEEL +0 -0
  27. {fractal_server-2.11.0a3.dist-info → fractal_server-2.11.0a5.dist-info}/entry_points.txt +0 -0
@@ -1 +1 @@
1
- __VERSION__ = "2.11.0a3"
1
+ __VERSION__ = "2.11.0a5"
@@ -22,6 +22,7 @@ from fractal_server.app.routes.auth._aux_auth import (
22
22
  _verify_user_belongs_to_group,
23
23
  )
24
24
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
25
+ from fractal_server.images.tools import merge_type_filters
25
26
  from fractal_server.logger import set_logger
26
27
 
27
28
  logger = set_logger(__name__)
@@ -351,3 +352,23 @@ async def _add_warnings_to_workflow_tasks(
351
352
  wftask_data["warning"] = "Current user has no access to this task."
352
353
  wftask_list_with_warnings.append(wftask_data)
353
354
  return wftask_list_with_warnings
355
+
356
+
357
+ def _check_type_filters_compatibility(
358
+ *,
359
+ task_input_types: dict[str, bool],
360
+ wftask_type_filters: dict[str, bool],
361
+ ) -> None:
362
+ """
363
+ Wrap `merge_type_filters` and raise `HTTPException` if needed.
364
+ """
365
+ try:
366
+ merge_type_filters(
367
+ task_input_types=task_input_types,
368
+ wftask_type_filters=wftask_type_filters,
369
+ )
370
+ except ValueError as e:
371
+ raise HTTPException(
372
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
373
+ detail=f"Incompatible type filters.\nOriginal error: {str(e)}",
374
+ )
@@ -207,7 +207,7 @@ async def query_dataset_images(
207
207
  if page > last_page:
208
208
  page = last_page
209
209
  offset = (page - 1) * page_size
210
- images = images[offset : offset + page_size] # noqa E203
210
+ images = images[offset : offset + page_size]
211
211
 
212
212
  return ImagePage(
213
213
  total_count=total_count,
@@ -28,6 +28,7 @@ from ...aux.validate_user_settings import validate_user_settings
28
28
  from ._aux_functions import _get_dataset_check_owner
29
29
  from ._aux_functions import _get_workflow_check_owner
30
30
  from ._aux_functions import clean_app_job_list_v2
31
+ from ._aux_functions_tasks import _check_type_filters_compatibility
31
32
  from fractal_server.app.models import TaskGroupV2
32
33
  from fractal_server.app.models import UserOAuth
33
34
  from fractal_server.app.routes.api.v2._aux_functions_tasks import (
@@ -108,15 +109,17 @@ async def apply_workflow(
108
109
 
109
110
  # Check that tasks have read-access and are `active`
110
111
  used_task_group_ids = set()
111
- for wftask in workflow.task_list[
112
- first_task_index : last_task_index + 1 # noqa: E203
113
- ]:
112
+ for wftask in workflow.task_list[first_task_index : last_task_index + 1]:
114
113
  task = await _get_task_read_access(
115
114
  user_id=user.id,
116
115
  task_id=wftask.task_id,
117
116
  require_active=True,
118
117
  db=db,
119
118
  )
119
+ _check_type_filters_compatibility(
120
+ task_input_types=task.input_types,
121
+ wftask_type_filters=wftask.type_filters,
122
+ )
120
123
  used_task_group_ids.add(task.taskgroupv2_id)
121
124
 
122
125
  # Validate user settings
@@ -19,6 +19,7 @@ from ._aux_functions import _check_workflow_exists
19
19
  from ._aux_functions import _get_project_check_owner
20
20
  from ._aux_functions import _workflow_insert_task
21
21
  from ._aux_functions_tasks import _add_warnings_to_workflow_tasks
22
+ from ._aux_functions_tasks import _check_type_filters_compatibility
22
23
  from fractal_server.app.models import LinkUserGroup
23
24
  from fractal_server.app.models import UserOAuth
24
25
  from fractal_server.app.models.v2 import TaskGroupV2
@@ -325,6 +326,13 @@ async def import_workflow(
325
326
  list_wf_tasks.append(new_wf_task)
326
327
  list_task_ids.append(task_id)
327
328
 
329
+ for wftask, task_id in zip(list_wf_tasks, list_task_ids):
330
+ task = await db.get(TaskV2, task_id)
331
+ _check_type_filters_compatibility(
332
+ task_input_types=task.input_types,
333
+ wftask_type_filters=wftask.type_filters,
334
+ )
335
+
328
336
  # Create new Workflow
329
337
  db_workflow = WorkflowV2(
330
338
  project_id=project_id,
@@ -12,6 +12,7 @@ from ....db import get_async_db
12
12
  from ._aux_functions import _get_workflow_check_owner
13
13
  from ._aux_functions import _get_workflow_task_check_owner
14
14
  from ._aux_functions import _workflow_insert_task
15
+ from ._aux_functions_tasks import _check_type_filters_compatibility
15
16
  from ._aux_functions_tasks import _get_task_read_access
16
17
  from fractal_server.app.models import UserOAuth
17
18
  from fractal_server.app.models.v2 import WorkflowTaskV2
@@ -47,24 +48,29 @@ async def replace_workflowtask(
47
48
  db=db,
48
49
  )
49
50
 
50
- task = await _get_task_read_access(
51
+ new_task = await _get_task_read_access(
51
52
  task_id=task_id, user_id=user.id, db=db, require_active=True
52
53
  )
53
54
 
54
- if task.type != old_workflow_task.task.type:
55
+ if new_task.type != old_workflow_task.task.type:
55
56
  raise HTTPException(
56
57
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
57
58
  detail=(
58
59
  f"Cannot replace a Task '{old_workflow_task.task.type}' with a"
59
- f" Task '{task.type}'."
60
+ f" Task '{new_task.type}'."
60
61
  ),
61
62
  )
62
63
 
64
+ _check_type_filters_compatibility(
65
+ task_input_types=new_task.input_types,
66
+ wftask_type_filters=old_workflow_task.type_filters,
67
+ )
68
+
63
69
  _args_non_parallel = old_workflow_task.args_non_parallel
64
70
  _args_parallel = old_workflow_task.args_parallel
65
71
  if replace is not None:
66
72
  if replace.args_non_parallel is not None:
67
- if task.type == "parallel":
73
+ if new_task.type == "parallel":
68
74
  raise HTTPException(
69
75
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
70
76
  detail=(
@@ -76,7 +82,7 @@ async def replace_workflowtask(
76
82
  _args_non_parallel = replace.args_non_parallel
77
83
 
78
84
  if replace.args_parallel is not None:
79
- if task.type == "non_parallel":
85
+ if new_task.type == "non_parallel":
80
86
  raise HTTPException(
81
87
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
82
88
  detail=(
@@ -92,22 +98,24 @@ async def replace_workflowtask(
92
98
  if (
93
99
  old_workflow_task.meta_non_parallel
94
100
  != old_workflow_task.task.meta_non_parallel
95
- ) and (old_workflow_task.task.meta_non_parallel == task.meta_non_parallel):
101
+ ) and (
102
+ old_workflow_task.task.meta_non_parallel == new_task.meta_non_parallel
103
+ ):
96
104
  _meta_non_parallel = old_workflow_task.meta_non_parallel
97
105
  else:
98
- _meta_non_parallel = task.meta_non_parallel
106
+ _meta_non_parallel = new_task.meta_non_parallel
99
107
  # Same for `meta_parallel`
100
108
  if (
101
109
  old_workflow_task.meta_parallel != old_workflow_task.task.meta_parallel
102
- ) and (old_workflow_task.task.meta_parallel == task.meta_parallel):
110
+ ) and (old_workflow_task.task.meta_parallel == new_task.meta_parallel):
103
111
  _meta_parallel = old_workflow_task.meta_parallel
104
112
  else:
105
- _meta_parallel = task.meta_parallel
113
+ _meta_parallel = new_task.meta_parallel
106
114
 
107
115
  new_workflow_task = WorkflowTaskV2(
108
- task_id=task.id,
109
- task_type=task.type,
110
- task=task,
116
+ task_id=new_task.id,
117
+ task_type=new_task.type,
118
+ task=new_task,
111
119
  # old-task values
112
120
  type_filters=old_workflow_task.type_filters,
113
121
  # possibly new values
@@ -134,7 +142,7 @@ async def create_workflowtask(
134
142
  project_id: int,
135
143
  workflow_id: int,
136
144
  task_id: int,
137
- new_task: WorkflowTaskCreateV2,
145
+ wftask: WorkflowTaskCreateV2,
138
146
  user: UserOAuth = Depends(current_active_user),
139
147
  db: AsyncSession = Depends(get_async_db),
140
148
  ) -> Optional[WorkflowTaskReadV2]:
@@ -152,8 +160,8 @@ async def create_workflowtask(
152
160
 
153
161
  if task.type == "parallel":
154
162
  if (
155
- new_task.meta_non_parallel is not None
156
- or new_task.args_non_parallel is not None
163
+ wftask.meta_non_parallel is not None
164
+ or wftask.args_non_parallel is not None
157
165
  ):
158
166
  raise HTTPException(
159
167
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -165,8 +173,8 @@ async def create_workflowtask(
165
173
  )
166
174
  elif task.type == "non_parallel":
167
175
  if (
168
- new_task.meta_parallel is not None
169
- or new_task.args_parallel is not None
176
+ wftask.meta_parallel is not None
177
+ or wftask.args_parallel is not None
170
178
  ):
171
179
  raise HTTPException(
172
180
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -176,20 +184,26 @@ async def create_workflowtask(
176
184
  "is `non_parallel`."
177
185
  ),
178
186
  )
179
- workflow_task = await _workflow_insert_task(
187
+
188
+ _check_type_filters_compatibility(
189
+ task_input_types=task.input_types,
190
+ wftask_type_filters=wftask.type_filters,
191
+ )
192
+
193
+ wftask_db = await _workflow_insert_task(
180
194
  workflow_id=workflow.id,
181
195
  task_id=task_id,
182
- meta_non_parallel=new_task.meta_non_parallel,
183
- meta_parallel=new_task.meta_parallel,
184
- args_non_parallel=new_task.args_non_parallel,
185
- args_parallel=new_task.args_parallel,
186
- type_filters=new_task.type_filters,
196
+ meta_non_parallel=wftask.meta_non_parallel,
197
+ meta_parallel=wftask.meta_parallel,
198
+ args_non_parallel=wftask.args_non_parallel,
199
+ args_parallel=wftask.args_parallel,
200
+ type_filters=wftask.type_filters,
187
201
  db=db,
188
202
  )
189
203
 
190
204
  await db.close()
191
205
 
192
- return workflow_task
206
+ return wftask_db
193
207
 
194
208
 
195
209
  @router.get(
@@ -236,6 +250,11 @@ async def update_workflowtask(
236
250
  user_id=user.id,
237
251
  db=db,
238
252
  )
253
+ if workflow_task_update.type_filters is not None:
254
+ _check_type_filters_compatibility(
255
+ task_input_types=db_wf_task.task.input_types,
256
+ wftask_type_filters=workflow_task_update.type_filters,
257
+ )
239
258
 
240
259
  if db_wf_task.task_type == "parallel" and (
241
260
  workflow_task_update.args_non_parallel is not None
@@ -423,7 +423,7 @@ def _parse_mem_value(raw_mem: Union[str, int]) -> int:
423
423
  )
424
424
 
425
425
  # Handle integer argument
426
- if isinstance(raw_mem, int):
426
+ if type(raw_mem) is int:
427
427
  return raw_mem
428
428
 
429
429
  # Handle string argument
@@ -966,8 +966,8 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
966
966
  # Retrieve job and future objects
967
967
  job = jobs[ind_job]
968
968
  future = futures[ind_job]
969
- remaining_job_ids = job_ids[ind_job + 1 :] # noqa: E203
970
- remaining_futures = futures[ind_job + 1 :] # noqa: E203
969
+ remaining_job_ids = job_ids[ind_job + 1 :]
970
+ remaining_futures = futures[ind_job + 1 :]
971
971
 
972
972
  outputs = []
973
973
 
@@ -15,13 +15,6 @@ def merge_outputs(task_outputs: list[TaskOutput]) -> TaskOutput:
15
15
  final_image_list_updates.extend(task_output.image_list_updates)
16
16
  final_image_list_removals.extend(task_output.image_list_removals)
17
17
 
18
- # Check that all type_filters are the same
19
- if task_output.type_filters != task_outputs[0].type_filters:
20
- raise ValueError(
21
- f"{task_output.type_filters=} "
22
- f"but {task_outputs[0].type_filters=}"
23
- )
24
-
25
18
  # Note: the ordering of `image_list_removals` is not guaranteed
26
19
  final_image_list_updates = deduplicate_list(final_image_list_updates)
27
20
  final_image_list_removals = list(set(final_image_list_removals))
@@ -29,7 +22,6 @@ def merge_outputs(task_outputs: list[TaskOutput]) -> TaskOutput:
29
22
  final_output = TaskOutput(
30
23
  image_list_updates=final_image_list_updates,
31
24
  image_list_removals=final_image_list_removals,
32
- type_filters=task_outputs[0].type_filters,
33
25
  )
34
26
 
35
27
  return final_output
@@ -11,7 +11,6 @@ from sqlalchemy.orm.attributes import flag_modified
11
11
  from ....images import SingleImage
12
12
  from ....images.tools import filter_image_list
13
13
  from ....images.tools import find_image_by_zarr_url
14
- from ....images.tools import match_filter
15
14
  from ..exceptions import JobExecutionError
16
15
  from .runner_functions import no_op_submit_setup_call
17
16
  from .runner_functions import run_v2_task_compound
@@ -24,6 +23,7 @@ from fractal_server.app.models.v2 import WorkflowTaskV2
24
23
  from fractal_server.app.schemas.v2.dataset import _DatasetHistoryItemV2
25
24
  from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskStatusTypeV2
26
25
  from fractal_server.images.models import AttributeFiltersType
26
+ from fractal_server.images.tools import merge_type_filters
27
27
 
28
28
 
29
29
  def execute_tasks_v2(
@@ -49,7 +49,7 @@ def execute_tasks_v2(
49
49
  # Initialize local dataset attributes
50
50
  zarr_dir = dataset.zarr_dir
51
51
  tmp_images = deepcopy(dataset.images)
52
- tmp_type_filters = deepcopy(dataset.type_filters)
52
+ current_dataset_type_filters = deepcopy(dataset.type_filters)
53
53
 
54
54
  for wftask in wf_task_list:
55
55
  task = wftask.task
@@ -59,26 +59,18 @@ def execute_tasks_v2(
59
59
  # PRE TASK EXECUTION
60
60
 
61
61
  # Get filtered images
62
- pre_type_filters = copy(tmp_type_filters)
63
- pre_type_filters.update(wftask.type_filters)
62
+ type_filters = copy(current_dataset_type_filters)
63
+ type_filters_patch = merge_type_filters(
64
+ task_input_types=task.input_types,
65
+ wftask_type_filters=wftask.type_filters,
66
+ )
67
+ type_filters.update(type_filters_patch)
64
68
  filtered_images = filter_image_list(
65
69
  images=tmp_images,
66
- type_filters=pre_type_filters,
70
+ type_filters=type_filters,
67
71
  attribute_filters=job_attribute_filters,
68
72
  )
69
- # Verify that filtered images comply with task input_types
70
- for image in filtered_images:
71
- if not match_filter(
72
- image=image,
73
- type_filters=task.input_types,
74
- attribute_filters={},
75
- ):
76
- raise JobExecutionError(
77
- "Invalid filtered image list\n"
78
- f"Task input types: {task.input_types=}\n"
79
- f'Image zarr_url: {image["zarr_url"]}\n'
80
- f'Image types: {image["types"]}\n'
81
- )
73
+
82
74
  # First, set status SUBMITTED in dataset.history for each wftask
83
75
  with next(get_sync_db()) as db:
84
76
  db_dataset = db.get(DatasetV2, dataset.id)
@@ -262,30 +254,9 @@ def execute_tasks_v2(
262
254
  else:
263
255
  tmp_images.pop(img_search["index"])
264
256
 
265
- # Update type_filters
266
-
267
- # Assign the type filters based on different sources
268
- # (task manifest and post-execution task output)
257
+ # Update type_filters based on task-manifest output_types
269
258
  type_filters_from_task_manifest = task.output_types
270
- type_filters_from_task_output = current_task_output.type_filters
271
-
272
- # Check that key sets are disjoint
273
- keys_from_manifest = set(type_filters_from_task_manifest.keys())
274
- keys_from_task_output = set(type_filters_from_task_output.keys())
275
- if not keys_from_manifest.isdisjoint(keys_from_task_output):
276
- overlap = keys_from_manifest.intersection(keys_from_task_output)
277
- raise JobExecutionError(
278
- "Some type filters are being set twice, "
279
- f"for task '{task_name}'.\n"
280
- f"Types from task output: {type_filters_from_task_output}\n"
281
- "Types from task manifest: "
282
- f"{type_filters_from_task_manifest}\n"
283
- f"Overlapping keys: {overlap}"
284
- )
285
-
286
- # Update filters.types
287
- tmp_type_filters.update(type_filters_from_task_manifest)
288
- tmp_type_filters.update(type_filters_from_task_output)
259
+ current_dataset_type_filters.update(type_filters_from_task_manifest)
289
260
 
290
261
  # Write current dataset attributes (history, images, filters) into the
291
262
  # database. They can be used (1) to retrieve the latest state
@@ -294,7 +265,7 @@ def execute_tasks_v2(
294
265
  with next(get_sync_db()) as db:
295
266
  db_dataset = db.get(DatasetV2, dataset.id)
296
267
  db_dataset.history[-1]["status"] = WorkflowTaskStatusTypeV2.DONE
297
- db_dataset.type_filters = tmp_type_filters
268
+ db_dataset.type_filters = current_dataset_type_filters
298
269
  db_dataset.images = tmp_images
299
270
  for attribute_name in [
300
271
  "type_filters",
@@ -1,31 +1,14 @@
1
1
  from typing import Any
2
- from typing import Optional
3
2
 
4
3
  from pydantic import BaseModel
5
4
  from pydantic import Extra
6
5
  from pydantic import Field
7
- from pydantic import root_validator
8
6
  from pydantic import validator
9
7
 
10
8
  from ....images import SingleImageTaskOutput
11
- from fractal_server.app.schemas._filter_validators import validate_type_filters
12
- from fractal_server.app.schemas._validators import root_validate_dict_keys
13
9
  from fractal_server.urls import normalize_url
14
10
 
15
11
 
16
- class LegacyFilters(BaseModel, extra=Extra.forbid):
17
- """
18
- For fractal-server<2.11, task output could include both
19
- `filters["attributes"]` and `filters["types"]`. In the new version
20
- there is a single field, named `type_filters`.
21
- The current schema is only used to convert old type filters into the
22
- new form, but it will reject any attribute filters.
23
- """
24
-
25
- types: dict[str, bool] = Field(default_factory=dict)
26
- _types = validator("types", allow_reuse=True)(validate_type_filters)
27
-
28
-
29
12
  class TaskOutput(BaseModel, extra=Extra.forbid):
30
13
 
31
14
  image_list_updates: list[SingleImageTaskOutput] = Field(
@@ -33,16 +16,6 @@ class TaskOutput(BaseModel, extra=Extra.forbid):
33
16
  )
34
17
  image_list_removals: list[str] = Field(default_factory=list)
35
18
 
36
- filters: Optional[LegacyFilters] = None
37
- type_filters: dict[str, bool] = Field(default_factory=dict)
38
-
39
- _dict_keys = root_validator(pre=True, allow_reuse=True)(
40
- root_validate_dict_keys
41
- )
42
- _type_filters = validator("type_filters", allow_reuse=True)(
43
- validate_type_filters
44
- )
45
-
46
19
  def check_zarr_urls_are_unique(self) -> None:
47
20
  zarr_urls = [img.zarr_url for img in self.image_list_updates]
48
21
  zarr_urls.extend(self.image_list_removals)
@@ -62,20 +35,6 @@ class TaskOutput(BaseModel, extra=Extra.forbid):
62
35
  msg = f"{msg}\n{duplicate}"
63
36
  raise ValueError(msg)
64
37
 
65
- @root_validator()
66
- def update_legacy_filters(cls, values):
67
- if values["filters"] is not None:
68
- if values["type_filters"] != {}:
69
- raise ValueError(
70
- "Cannot set both (legacy) 'filters' and 'type_filters'."
71
- )
72
- else:
73
- # Convert legacy filters.types into new type_filters
74
- values["type_filters"] = values["filters"].types
75
- values["filters"] = None
76
-
77
- return values
78
-
79
38
  @validator("image_list_removals")
80
39
  def normalize_paths(cls, v: list[str]) -> list[str]:
81
40
  return [normalize_url(zarr_url) for zarr_url in v]
@@ -34,7 +34,7 @@ def validate_attribute_filters(
34
34
  # values is a non-empty list, and its items must all be of the
35
35
  # same scalar non-None type
36
36
  _type = type(values[0])
37
- if not all(isinstance(value, _type) for value in values):
37
+ if not all(type(value) is _type for value in values):
38
38
  raise ValueError(
39
39
  f"attribute_filters[{key}] has values with "
40
40
  f"non-homogeneous types: {values}."
@@ -65,3 +65,5 @@ def fix_db():
65
65
 
66
66
  db.commit()
67
67
  logger.info("Changes committed.")
68
+
69
+ logger.info("END execution of fix_db function")
@@ -98,3 +98,28 @@ def filter_image_list(
98
98
  )
99
99
  ]
100
100
  return filtered_images
101
+
102
+
103
+ def merge_type_filters(
104
+ *,
105
+ task_input_types: dict[str, bool],
106
+ wftask_type_filters: dict[str, bool],
107
+ ) -> dict[str, bool]:
108
+ """
109
+ Merge two type-filters sets, if they are compatible.
110
+ """
111
+ all_keys = set(task_input_types.keys()) | set(wftask_type_filters.keys())
112
+ for key in all_keys:
113
+ if (
114
+ key in task_input_types.keys()
115
+ and key in wftask_type_filters.keys()
116
+ and task_input_types[key] != wftask_type_filters[key]
117
+ ):
118
+ raise ValueError(
119
+ "Cannot merge type filters "
120
+ f"`{task_input_types}` (from task) "
121
+ f"and `{wftask_type_filters}` (from workflowtask)."
122
+ )
123
+ merged_dict = task_input_types
124
+ merged_dict.update(wftask_type_filters)
125
+ return merged_dict
fractal_server/logger.py CHANGED
@@ -109,7 +109,10 @@ def set_logger(
109
109
  if isinstance(handler, logging.FileHandler)
110
110
  ]
111
111
  if len(current_file_handlers) > 1:
112
- logger.warning(f"Logger {logger_name} has multiple file handlers.")
112
+ logger.warning(
113
+ f"Logger {logger_name} has multiple file handlers: "
114
+ f"{current_file_handlers}"
115
+ )
113
116
 
114
117
  return logger
115
118
 
@@ -33,8 +33,6 @@ from fractal_server.tasks.v2.utils_templates import (
33
33
  from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
34
34
  from fractal_server.utils import get_timestamp
35
35
 
36
- LOGGER_NAME = __name__
37
-
38
36
 
39
37
  def collect_local(
40
38
  *,
@@ -59,6 +57,8 @@ def collect_local(
59
57
  wheel_file:
60
58
  """
61
59
 
60
+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
61
+
62
62
  with TemporaryDirectory() as tmpdir:
63
63
  log_file_path = get_log_path(Path(tmpdir))
64
64
  logger = set_logger(
@@ -21,8 +21,6 @@ from fractal_server.tasks.v2.utils_background import get_current_log
21
21
  from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
22
22
  from fractal_server.utils import get_timestamp
23
23
 
24
- LOGGER_NAME = __name__
25
-
26
24
 
27
25
  def deactivate_local(
28
26
  *,
@@ -40,6 +38,8 @@ def deactivate_local(
40
38
  task_group_activity_id:
41
39
  """
42
40
 
41
+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
42
+
43
43
  with TemporaryDirectory() as tmpdir:
44
44
  log_file_path = get_log_path(Path(tmpdir))
45
45
  logger = set_logger(
@@ -23,9 +23,6 @@ from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
23
23
  from fractal_server.utils import get_timestamp
24
24
 
25
25
 
26
- LOGGER_NAME = __name__
27
-
28
-
29
26
  def reactivate_local(
30
27
  *,
31
28
  task_group_activity_id: int,
@@ -42,6 +39,8 @@ def reactivate_local(
42
39
  task_group_activity_id:
43
40
  """
44
41
 
42
+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
43
+
45
44
  with TemporaryDirectory() as tmpdir:
46
45
  log_file_path = get_log_path(Path(tmpdir))
47
46
  logger = set_logger(
@@ -30,8 +30,6 @@ from fractal_server.tasks.v2.utils_templates import (
30
30
  from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
31
31
  from fractal_server.utils import get_timestamp
32
32
 
33
- LOGGER_NAME = __name__
34
-
35
33
 
36
34
  def collect_ssh(
37
35
  *,
@@ -62,6 +60,8 @@ def collect_ssh(
62
60
  wheel_file:
63
61
  """
64
62
 
63
+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
64
+
65
65
  # Work within a temporary folder, where also logs will be placed
66
66
  with TemporaryDirectory() as tmpdir:
67
67
  LOGGER_NAME = "task_collection_ssh"
@@ -22,8 +22,6 @@ from fractal_server.tasks.v2.utils_background import get_current_log
22
22
  from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
23
23
  from fractal_server.utils import get_timestamp
24
24
 
25
- LOGGER_NAME = __name__
26
-
27
25
 
28
26
  def deactivate_ssh(
29
27
  *,
@@ -47,6 +45,8 @@ def deactivate_ssh(
47
45
  `user_settings.ssh_tasks_dir`.
48
46
  """
49
47
 
48
+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
49
+
50
50
  with TemporaryDirectory() as tmpdir:
51
51
  log_file_path = get_log_path(Path(tmpdir))
52
52
  logger = set_logger(
@@ -22,8 +22,6 @@ from fractal_server.tasks.v2.utils_python_interpreter import (
22
22
  from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
23
23
  from fractal_server.utils import get_timestamp
24
24
 
25
- LOGGER_NAME = __name__
26
-
27
25
 
28
26
  def reactivate_ssh(
29
27
  *,
@@ -47,6 +45,8 @@ def reactivate_ssh(
47
45
  `user_settings.ssh_tasks_dir`.
48
46
  """
49
47
 
48
+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
49
+
50
50
  with TemporaryDirectory() as tmpdir:
51
51
  log_file_path = get_log_path(Path(tmpdir))
52
52
  logger = set_logger(
fractal_server/utils.py CHANGED
@@ -107,18 +107,20 @@ def execute_command_sync(
107
107
  returncode = res.returncode
108
108
  stdout = res.stdout
109
109
  stderr = res.stderr
110
- logger.debug(f"{returncode=}")
111
- logger.debug("STDOUT:")
112
- logger.debug(stdout)
113
- logger.debug("STDERR:")
114
- logger.debug(stderr)
115
110
  if res.returncode != 0:
116
111
  logger.debug(f"ERROR in subprocess call to '{command}'")
117
112
  raise RuntimeError(
118
113
  f"Command {command} failed.\n"
119
114
  f"returncode={res.returncode}\n"
120
- f"{stdout=}\n"
121
- f"{stderr=}\n"
115
+ "STDOUT:\n"
116
+ f"{stdout}\n"
117
+ "STDERR:\n"
118
+ f"{stderr}\n"
122
119
  )
120
+ logger.debug(f"{returncode=}")
121
+ logger.debug("STDOUT:")
122
+ logger.debug(stdout)
123
+ logger.debug("STDERR:")
124
+ logger.debug(stderr)
123
125
  logger.debug(f"END subprocess call to '{command}'")
124
126
  return stdout
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: fractal-server
3
- Version: 2.11.0a3
3
+ Version: 2.11.0a5
4
4
  Summary: Backend component of the Fractal analytics platform
5
5
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
6
6
  License: BSD-3-Clause
@@ -1,4 +1,4 @@
1
- fractal_server/__init__.py,sha256=o_3QCs2FdlBclhmQ7GWAIcInUjrL307BrDoHMarxuSE,25
1
+ fractal_server/__init__.py,sha256=P2nIVsQnIcZXk3AZLCDwdPsHsGyWJzqhBzjjXpYldU0,25
2
2
  fractal_server/__main__.py,sha256=D2YTmSowmXNyvqOjW_HeItCZT2UliWlySl_owicaZg0,8026
3
3
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
4
4
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -45,21 +45,21 @@ fractal_server/app/routes/api/v1/workflowtask.py,sha256=OYYConwJbmNULDw5I3T-UbSJ
45
45
  fractal_server/app/routes/api/v2/__init__.py,sha256=w4c9WzagaVV5d4TWBX5buu5ENk8jf3YftMQYmhavz9Q,2172
46
46
  fractal_server/app/routes/api/v2/_aux_functions.py,sha256=NJ6_1biN_hhIEK1w8Vj6XhLmdkQ5kMVd_MX5JC_nHLU,11524
47
47
  fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=c8eqPXdMhc3nIixX50B1Ka5n7LgbOZm2JbEs7lICQ04,6767
48
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=PuapLtvSk9yhBAsKNEp1w2oagOMr0YZTo247-CU3hdM,11008
48
+ fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=uhNSs-jcS7ndIUFKiOC1yrDiViw3uvKEXi9UL04BMks,11642
49
49
  fractal_server/app/routes/api/v2/dataset.py,sha256=Y6uZz--YSEGgnPYu05rZ9sr1Ug08bNl2v1h3VeApBe8,9441
50
- fractal_server/app/routes/api/v2/images.py,sha256=EI2Gu4vNVepXDBRjQLtU2Il3ciQSY9fpEyIsGEm8UVU,8845
50
+ fractal_server/app/routes/api/v2/images.py,sha256=0qkItqPrAvWEaK3YHUmCCrKrO_tQuzAPf4Te0q8mON8,8832
51
51
  fractal_server/app/routes/api/v2/job.py,sha256=Bga2Kz1OjvDIdxZObWaaXVhNIhC_5JKhKRjEH2_ayEE,5157
52
52
  fractal_server/app/routes/api/v2/project.py,sha256=eWYFJ7F2ZYQcpi-_n-rhPF-Q4gJhzYBsVGYFhHZZXAE,6653
53
53
  fractal_server/app/routes/api/v2/status.py,sha256=_cDZW-ESYw6zpf-lLFFqko5bLpKhqKrCM6yv1OfqxN4,6300
54
- fractal_server/app/routes/api/v2/submit.py,sha256=Vxvqgu9nh0UCAXEYGEl_XvEfudCrvl_H2nmZwvsFzTo,8429
54
+ fractal_server/app/routes/api/v2/submit.py,sha256=UMPhWwk4FqZmYtVEu6WLPkSr6a2R4wwgPPeVrx0zRME,8622
55
55
  fractal_server/app/routes/api/v2/task.py,sha256=K0ik33t7vL8BAK5S7fqyJDNdRK4stGqb_73bSa8tvPE,7159
56
56
  fractal_server/app/routes/api/v2/task_collection.py,sha256=9p8w9UnN6RFszC1ohy9Uo3I4HIMVdfD8fYGWuQqzxMU,12682
57
57
  fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=cctW61-C2QYF2KXluS15lLhZJS_kt30Ca6UGLFO32z0,6207
58
58
  fractal_server/app/routes/api/v2/task_group.py,sha256=4o2N0z7jK7VUVlJZMM4GveCCc4JKxYJx9-PMmsYIlJQ,8256
59
59
  fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=3o9bCC8ubMwffQPPaxQZy-CjH9IB2RkIReIecI6L2_w,9300
60
60
  fractal_server/app/routes/api/v2/workflow.py,sha256=vjCNRzMHaAB4YWbAEWGlELHXDN4GjtE26IkIiB15RGM,8682
61
- fractal_server/app/routes/api/v2/workflow_import.py,sha256=-7Er3FWGF_1xI2qHFO9gfLVQAok5bojd7mbzQxa9Ofw,10858
62
- fractal_server/app/routes/api/v2/workflowtask.py,sha256=NueHDKbrPWxP4Jo2hpBvuU_XBCccAyoeZOBNybF74zg,10709
61
+ fractal_server/app/routes/api/v2/workflow_import.py,sha256=DHoHZvxndJQav6l_p5JJW9c9pSRlMEm7bv62h0M5evI,11187
62
+ fractal_server/app/routes/api/v2/workflowtask.py,sha256=coYBy-21CbJNIkpmwC84BtPTw3r4DYBrBwaFWiM0dJA,11335
63
63
  fractal_server/app/routes/auth/__init__.py,sha256=fao6CS0WiAjHDTvBzgBVV_bSXFpEAeDBF6Z6q7rRkPc,1658
64
64
  fractal_server/app/routes/auth/_aux_auth.py,sha256=ifkNocTYatBSMYGwiR14qohmvR9SfMldceiEj6uJBrU,4783
65
65
  fractal_server/app/routes/auth/current_user.py,sha256=I3aVY5etWAJ_SH6t65Mj5TjvB2X8sAGuu1KG7FxLyPU,5883
@@ -81,12 +81,12 @@ fractal_server/app/runner/exceptions.py,sha256=_qZ_t8O4umAdJ1ikockiF5rDJuxnEskrG
81
81
  fractal_server/app/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
82
82
  fractal_server/app/runner/executors/slurm/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
83
83
  fractal_server/app/runner/executors/slurm/_batching.py,sha256=3mfeFuYm3UA4EXh4VWuqZTF-dcINECZgTHoPOaOszDo,8840
84
- fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=P0TDfIFf07h0hIhVNZUcY3t5vgdjptU-2T0uC_ZBEB4,15688
84
+ fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=Qa5UgcMZfYEj95dMA5GF3WgNx8zQ_XH6ufNYYmhI4rs,15684
85
85
  fractal_server/app/runner/executors/slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
86
86
  fractal_server/app/runner/executors/slurm/ssh/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
87
87
  fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py,sha256=bKo5Ja0IGxJWpPWyh9dN0AG-PwzTDZzD5LyaEHB3YU4,3742
88
88
  fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py,sha256=rwlqZzoGo4SAb4nSlFjsQJdaCgfM1J6YGcjb8yYxlqc,4506
89
- fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=U2-tNE_5ECHFIoXjEvBlaSXKaIf-1IXZlDs0c34mab8,54110
89
+ fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=gtjXKTY0cP3h5AtTXRZChtFOP-tbJTBmvxs_6VnEWao,54082
90
90
  fractal_server/app/runner/executors/slurm/sudo/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
91
91
  fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py,sha256=wAgwpVcr6JIslKHOuS0FhRa_6T1KCManyRJqA-fifzw,1909
92
92
  fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py,sha256=uRRyVHQtK9McHCB6OsjYfDnQsu2E8At9K_UYb_pe2pg,4682
@@ -127,14 +127,14 @@ fractal_server/app/runner/v2/_slurm_sudo/__init__.py,sha256=sGc1wGMPD6yQv6-MmTTG
127
127
  fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py,sha256=a5_FDPH_yxYmrjAjMRLgh_Y4DSG3mRslCLQodGM3-t4,2838
128
128
  fractal_server/app/runner/v2/deduplicate_list.py,sha256=-imwO7OB7ATADEnqVbTElUwoY0YIJCTf_SbWJNN9OZg,639
129
129
  fractal_server/app/runner/v2/handle_failed_job.py,sha256=-zFWw4d208bQEFUF_sAdH2LdHEARyg1FC8BENr1SjhU,2045
130
- fractal_server/app/runner/v2/merge_outputs.py,sha256=DW1d-fT0UcSnJUmz8xfU-AEeI7p_G0aQ_lNpDAe9C2o,1226
131
- fractal_server/app/runner/v2/runner.py,sha256=tdEjF2SWwDKXCHXz0wFRtKTRCG2I5BSjuqY08YSjMHs,12906
130
+ fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
131
+ fractal_server/app/runner/v2/runner.py,sha256=ll0-nglUytoxDnMwMRgjY2901UnFb37bkSsxWK1KLA0,11595
132
132
  fractal_server/app/runner/v2/runner_functions.py,sha256=BLREIcQaE6FSc2AEJyZuiYk6rGazEz_9gprUqUZDljs,9488
133
133
  fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=1fWvQ6YZUUnDhO_mipXC5hnaT-zK-GHxg8ayoxZX82k,3648
134
- fractal_server/app/runner/v2/task_interface.py,sha256=DU0fppTqvwhVkKwjPvfWPuZt09rV7AV2U5Vpu5CRqX8,3268
134
+ fractal_server/app/runner/v2/task_interface.py,sha256=d6HPwPzrytUMVjExTU6fuCEwtvvWGRaSje0iXcRD45w,1728
135
135
  fractal_server/app/runner/versions.py,sha256=dSaPRWqmFPHjg20kTCHmi_dmGNcCETflDtDLronNanU,852
136
136
  fractal_server/app/schemas/__init__.py,sha256=stURAU_t3AOBaH0HSUbV-GKhlPKngnnIMoqWc3orFyI,135
137
- fractal_server/app/schemas/_filter_validators.py,sha256=9oGzf1yghFkIMbzluvxeRCjPei8V7dn7ot4ggQUmz8w,1731
137
+ fractal_server/app/schemas/_filter_validators.py,sha256=WcfQ3ARc-2pj2oQFB4lWA0X5wtoOPGzpD4hJq4BblXs,1727
138
138
  fractal_server/app/schemas/_validators.py,sha256=3dotVxUHWKAmUO3aeoluYDLRKrw1OS-NxcZ4Fg_HOYk,3560
139
139
  fractal_server/app/schemas/user.py,sha256=icjox9gK_invW44Nh_L4CvqfRa92qghyQhmevyg09nQ,2243
140
140
  fractal_server/app/schemas/user_group.py,sha256=t30Kd07PY43G_AqFDb8vjdInTeLeU9WvFZDx8fVLPSI,1750
@@ -165,14 +165,14 @@ fractal_server/app/security/__init__.py,sha256=qn6idYgl-p5HWea0gTVnz4JnkoxGEkmQj
165
165
  fractal_server/app/security/signup_email.py,sha256=DrL51UdTSrgjleynMD5CRZwTSOpPrZ96fasRV0fvxDE,1165
166
166
  fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
167
167
  fractal_server/config.py,sha256=9rAzw7OO6ZeHEz-I8NJHuGoHf4xCHxfFLyRNZQD9ytY,27019
168
- fractal_server/data_migrations/2_11_0.py,sha256=PPFg1GxpfW5hElwzr_kx0_fYuEaNSyH23uCxztExO14,2411
168
+ fractal_server/data_migrations/2_11_0.py,sha256=glS3BkhumrA6SpHiE_QFBgA7Bm2cbDCUlQyY3BjEub8,2464
169
169
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
170
170
  fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
171
171
  fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
172
172
  fractal_server/images/__init__.py,sha256=-_wjoKtSX02P1KjDxDP_EXKvmbONTRmbf7iGVTsyBpM,154
173
173
  fractal_server/images/models.py,sha256=fAecChXhs4utRX4123Lgz5e_b_H0YtHrvNHCenR7tOs,3359
174
- fractal_server/images/tools.py,sha256=VFjm5pCOWjc2ms0t0s4nH6bK53ZU867JDL5oTEm4M1Q,2648
175
- fractal_server/logger.py,sha256=zwg_AjIHkNP0ruciXjm5lI5UFP3n6tMHullsM9lDjz4,5039
174
+ fractal_server/images/tools.py,sha256=iqFx_pp46OoHsHjXxX6GrkXJPPfTo_c1WYvRur0olaE,3455
175
+ fractal_server/logger.py,sha256=5Z3rfsFwl8UysVljTOaaIvt8Pyp6CVH492ez3jE8WAw,5113
176
176
  fractal_server/main.py,sha256=gStLT9Du5QMpc9SyvRvtKU21EKwp-dG4HL3zGHzE06A,4908
177
177
  fractal_server/migrations/env.py,sha256=9t_OeKVlhM8WRcukmTrLbWNup-imiBGP_9xNgwCbtpI,2730
178
178
  fractal_server/migrations/naming_convention.py,sha256=htbKrVdetx3pklowb_9Cdo5RqeF0fJ740DNecY5de_M,265
@@ -219,14 +219,14 @@ fractal_server/tasks/v1/utils.py,sha256=HYFyNAyZofmf--mVgdwGC5TJpGShIWIDaS01yRr4
219
219
  fractal_server/tasks/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
220
220
  fractal_server/tasks/v2/local/__init__.py,sha256=9RVItnS7OyLsJOuJjWMCicaky4ASUPQEYD4SzDs0hOE,141
221
221
  fractal_server/tasks/v2/local/_utils.py,sha256=EvhmVwYjqaNyDCUMEsTWYOUXLgEwR1xr6bu32apCEI8,2491
222
- fractal_server/tasks/v2/local/collect.py,sha256=JuMplfREqrPvVEGlT5kJhcmZXC_iYlwvNlkgFrCaCC0,12107
223
- fractal_server/tasks/v2/local/deactivate.py,sha256=SOFtOaR5yYm3IkbOw48TrQgzEpONQ9647KvyD_zImr8,9899
224
- fractal_server/tasks/v2/local/reactivate.py,sha256=MeUZHx8IKrfTEf-pXlfYms8I4o-26co3jdNgSNAvw60,6053
222
+ fractal_server/tasks/v2/local/collect.py,sha256=Mr4BzscBY8zBRCzWq8ozTjFYy1-VFPXrMCBF7vd9Ods,12143
223
+ fractal_server/tasks/v2/local/deactivate.py,sha256=uAV-tBgfKiN4chkfk6dYCZfF67POwhNizyxuCk_WJK8,9935
224
+ fractal_server/tasks/v2/local/reactivate.py,sha256=DwtOZrSk6jhUHEmAiMcstK3YzqPQ12pfgxmUNdrSgfk,6088
225
225
  fractal_server/tasks/v2/ssh/__init__.py,sha256=aSQbVi6Ummt9QzcSLWNmSqYjfdxrn9ROmqgH6bDpI7k,135
226
226
  fractal_server/tasks/v2/ssh/_utils.py,sha256=LjaEYVUJDChilu3YuhxuGWYRNnVJ_zqNE9SDHdRTIHY,2824
227
- fractal_server/tasks/v2/ssh/collect.py,sha256=2XXEPpl4LS22A75v_k4Bd46k46tmnLNZfceHyPi3kXo,13457
228
- fractal_server/tasks/v2/ssh/deactivate.py,sha256=D8rfnC46davmDKZCipPdWZHDD4TIZ-4nr9vxZSV2aC0,11261
229
- fractal_server/tasks/v2/ssh/reactivate.py,sha256=cmdT2P1J0FwS1NYYRrhxHsSRyUZ5uu78hS3fDrSVbKo,7837
227
+ fractal_server/tasks/v2/ssh/collect.py,sha256=yLVcilvU7uMH8woc__qG_3a0wyT2mNTCuq9I93HVKNM,13493
228
+ fractal_server/tasks/v2/ssh/deactivate.py,sha256=bFlcpZpGiTZcwG845YiLEIIYpiG7vslcSp6_NkXtHGw,11297
229
+ fractal_server/tasks/v2/ssh/reactivate.py,sha256=RoXM5HpIc0rVz4-8UCr3uWv-9zA8bobGSTNJamYsMOo,7873
230
230
  fractal_server/tasks/v2/templates/1_create_venv.sh,sha256=PK0jdHKtQpda1zULebBaVPORt4t6V17wa4N1ohcj5ac,548
231
231
  fractal_server/tasks/v2/templates/2_pip_install.sh,sha256=Gpk2io8u9YaflFUlQu2NgkDQw5AA4m4AOVG1sB4yrHQ,1822
232
232
  fractal_server/tasks/v2/templates/3_pip_freeze.sh,sha256=JldREScEBI4cD_qjfX4UK7V4aI-FnX9ZvVNxgpSOBFc,168
@@ -239,10 +239,10 @@ fractal_server/tasks/v2/utils_package_names.py,sha256=RDg__xrvQs4ieeVzmVdMcEh95v
239
239
  fractal_server/tasks/v2/utils_python_interpreter.py,sha256=5_wrlrTqXyo1YuLZvAW9hrSoh5MyLOzdPVUlUwM7uDQ,955
240
240
  fractal_server/tasks/v2/utils_templates.py,sha256=07TZpJ0Mh_A4lXVXrrH2o1VLFFGwxeRumA6DdgMgCWk,2947
241
241
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
242
- fractal_server/utils.py,sha256=utvmBx8K9I8hRWFquxna2pBaOqe0JifDL_NVPmihEJI,3525
242
+ fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
243
243
  fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
244
- fractal_server-2.11.0a3.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
245
- fractal_server-2.11.0a3.dist-info/METADATA,sha256=PpEo4R-FpyRduTHTutRl1UdRHHwtb1dyFUKmLT2Zm1I,4564
246
- fractal_server-2.11.0a3.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
247
- fractal_server-2.11.0a3.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
248
- fractal_server-2.11.0a3.dist-info/RECORD,,
244
+ fractal_server-2.11.0a5.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
245
+ fractal_server-2.11.0a5.dist-info/METADATA,sha256=i_O42zmSGOyXWu-sT-rHUCDZ-flG0SiuK3oVNKwrmpE,4564
246
+ fractal_server-2.11.0a5.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
247
+ fractal_server-2.11.0a5.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
248
+ fractal_server-2.11.0a5.dist-info/RECORD,,