fractal-server 2.11.0a4__py3-none-any.whl → 2.11.0a6__py3-none-any.whl

This diff compares the contents of two publicly released package versions, as they appear in their public registry, and is provided for informational purposes only.
@@ -1 +1 @@
- __VERSION__ = "2.11.0a4"
+ __VERSION__ = "2.11.0a6"
@@ -22,6 +22,7 @@ from fractal_server.app.routes.auth._aux_auth import (
  _verify_user_belongs_to_group,
  )
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
+ from fractal_server.images.tools import merge_type_filters
  from fractal_server.logger import set_logger

  logger = set_logger(__name__)
@@ -351,3 +352,23 @@ async def _add_warnings_to_workflow_tasks(
  wftask_data["warning"] = "Current user has no access to this task."
  wftask_list_with_warnings.append(wftask_data)
  return wftask_list_with_warnings
+
+
+ def _check_type_filters_compatibility(
+ *,
+ task_input_types: dict[str, bool],
+ wftask_type_filters: dict[str, bool],
+ ) -> None:
+ """
+ Wrap `merge_type_filters` and raise `HTTPException` if needed.
+ """
+ try:
+ merge_type_filters(
+ task_input_types=task_input_types,
+ wftask_type_filters=wftask_type_filters,
+ )
+ except ValueError as e:
+ raise HTTPException(
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=f"Incompatible type filters.\nOriginal error: {str(e)}",
+ )
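Illustrative note (not part of the package diff): the new `_check_type_filters_compatibility` helper only wraps `merge_type_filters` and converts its `ValueError` into an HTTP 422 response. The sketch below uses a hypothetical stand-in for `merge_type_filters` and made-up filter keys, assuming only what the hunk shows: compatible filters merge silently, while conflicting boolean values raise `ValueError`.

def toy_merge_type_filters(
    *,
    task_input_types: dict[str, bool],
    wftask_type_filters: dict[str, bool],
) -> dict[str, bool]:
    # Hypothetical stand-in, not the real fractal_server.images.tools code:
    # merge the two dicts and reject keys with conflicting values.
    merged = dict(wftask_type_filters)
    for key, value in task_input_types.items():
        if key in merged and merged[key] != value:
            raise ValueError(f"Conflicting values for type filter '{key}'.")
        merged[key] = value
    return merged

# Compatible filters merge without error:
toy_merge_type_filters(
    task_input_types={"is_3D": True},
    wftask_type_filters={"illumination_corrected": True},
)

# Conflicting filters raise ValueError, which the endpoints below surface
# as an HTTPException with status 422:
try:
    toy_merge_type_filters(
        task_input_types={"is_3D": True},
        wftask_type_filters={"is_3D": False},
    )
except ValueError as e:
    print(f"Incompatible type filters.\nOriginal error: {str(e)}")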
@@ -207,7 +207,7 @@ async def query_dataset_images(
  if page > last_page:
  page = last_page
  offset = (page - 1) * page_size
- images = images[offset : offset + page_size] # noqa E203
+ images = images[offset : offset + page_size]

  return ImagePage(
  total_count=total_count,
@@ -28,6 +28,7 @@ from ...aux.validate_user_settings import validate_user_settings
  from ._aux_functions import _get_dataset_check_owner
  from ._aux_functions import _get_workflow_check_owner
  from ._aux_functions import clean_app_job_list_v2
+ from ._aux_functions_tasks import _check_type_filters_compatibility
  from fractal_server.app.models import TaskGroupV2
  from fractal_server.app.models import UserOAuth
  from fractal_server.app.routes.api.v2._aux_functions_tasks import (
@@ -108,15 +109,17 @@ async def apply_workflow(

  # Check that tasks have read-access and are `active`
  used_task_group_ids = set()
- for wftask in workflow.task_list[
- first_task_index : last_task_index + 1 # noqa: E203
- ]:
+ for wftask in workflow.task_list[first_task_index : last_task_index + 1]:
  task = await _get_task_read_access(
  user_id=user.id,
  task_id=wftask.task_id,
  require_active=True,
  db=db,
  )
+ _check_type_filters_compatibility(
+ task_input_types=task.input_types,
+ wftask_type_filters=wftask.type_filters,
+ )
  used_task_group_ids.add(task.taskgroupv2_id)

  # Validate user settings
@@ -19,6 +19,7 @@ from ._aux_functions import _check_workflow_exists
  from ._aux_functions import _get_project_check_owner
  from ._aux_functions import _workflow_insert_task
  from ._aux_functions_tasks import _add_warnings_to_workflow_tasks
+ from ._aux_functions_tasks import _check_type_filters_compatibility
  from fractal_server.app.models import LinkUserGroup
  from fractal_server.app.models import UserOAuth
  from fractal_server.app.models.v2 import TaskGroupV2
@@ -325,6 +326,13 @@ async def import_workflow(
  list_wf_tasks.append(new_wf_task)
  list_task_ids.append(task_id)

+ for wftask, task_id in zip(list_wf_tasks, list_task_ids):
+ task = await db.get(TaskV2, task_id)
+ _check_type_filters_compatibility(
+ task_input_types=task.input_types,
+ wftask_type_filters=wftask.type_filters,
+ )
+
  # Create new Workflow
  db_workflow = WorkflowV2(
  project_id=project_id,
@@ -12,6 +12,7 @@ from ....db import get_async_db
  from ._aux_functions import _get_workflow_check_owner
  from ._aux_functions import _get_workflow_task_check_owner
  from ._aux_functions import _workflow_insert_task
+ from ._aux_functions_tasks import _check_type_filters_compatibility
  from ._aux_functions_tasks import _get_task_read_access
  from fractal_server.app.models import UserOAuth
  from fractal_server.app.models.v2 import WorkflowTaskV2
@@ -47,24 +48,29 @@ async def replace_workflowtask(
  db=db,
  )

- task = await _get_task_read_access(
+ new_task = await _get_task_read_access(
  task_id=task_id, user_id=user.id, db=db, require_active=True
  )

- if task.type != old_workflow_task.task.type:
+ if new_task.type != old_workflow_task.task.type:
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
  detail=(
  f"Cannot replace a Task '{old_workflow_task.task.type}' with a"
- f" Task '{task.type}'."
+ f" Task '{new_task.type}'."
  ),
  )

+ _check_type_filters_compatibility(
+ task_input_types=new_task.input_types,
+ wftask_type_filters=old_workflow_task.type_filters,
+ )
+
  _args_non_parallel = old_workflow_task.args_non_parallel
  _args_parallel = old_workflow_task.args_parallel
  if replace is not None:
  if replace.args_non_parallel is not None:
- if task.type == "parallel":
+ if new_task.type == "parallel":
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
  detail=(
@@ -76,7 +82,7 @@ async def replace_workflowtask(
  _args_non_parallel = replace.args_non_parallel

  if replace.args_parallel is not None:
- if task.type == "non_parallel":
+ if new_task.type == "non_parallel":
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
  detail=(
@@ -92,22 +98,24 @@ async def replace_workflowtask(
  if (
  old_workflow_task.meta_non_parallel
  != old_workflow_task.task.meta_non_parallel
- ) and (old_workflow_task.task.meta_non_parallel == task.meta_non_parallel):
+ ) and (
+ old_workflow_task.task.meta_non_parallel == new_task.meta_non_parallel
+ ):
  _meta_non_parallel = old_workflow_task.meta_non_parallel
  else:
- _meta_non_parallel = task.meta_non_parallel
+ _meta_non_parallel = new_task.meta_non_parallel
  # Same for `meta_parallel`
  if (
  old_workflow_task.meta_parallel != old_workflow_task.task.meta_parallel
- ) and (old_workflow_task.task.meta_parallel == task.meta_parallel):
+ ) and (old_workflow_task.task.meta_parallel == new_task.meta_parallel):
  _meta_parallel = old_workflow_task.meta_parallel
  else:
- _meta_parallel = task.meta_parallel
+ _meta_parallel = new_task.meta_parallel

  new_workflow_task = WorkflowTaskV2(
- task_id=task.id,
- task_type=task.type,
- task=task,
+ task_id=new_task.id,
+ task_type=new_task.type,
+ task=new_task,
  # old-task values
  type_filters=old_workflow_task.type_filters,
  # possibly new values
@@ -134,7 +142,7 @@ async def create_workflowtask(
  project_id: int,
  workflow_id: int,
  task_id: int,
- new_task: WorkflowTaskCreateV2,
+ wftask: WorkflowTaskCreateV2,
  user: UserOAuth = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
  ) -> Optional[WorkflowTaskReadV2]:
@@ -152,8 +160,8 @@ async def create_workflowtask(

  if task.type == "parallel":
  if (
- new_task.meta_non_parallel is not None
- or new_task.args_non_parallel is not None
+ wftask.meta_non_parallel is not None
+ or wftask.args_non_parallel is not None
  ):
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -165,8 +173,8 @@ async def create_workflowtask(
  )
  elif task.type == "non_parallel":
  if (
- new_task.meta_parallel is not None
- or new_task.args_parallel is not None
+ wftask.meta_parallel is not None
+ or wftask.args_parallel is not None
  ):
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
@@ -176,20 +184,26 @@ async def create_workflowtask(
  "is `non_parallel`."
  ),
  )
- workflow_task = await _workflow_insert_task(
+
+ _check_type_filters_compatibility(
+ task_input_types=task.input_types,
+ wftask_type_filters=wftask.type_filters,
+ )
+
+ wftask_db = await _workflow_insert_task(
  workflow_id=workflow.id,
  task_id=task_id,
- meta_non_parallel=new_task.meta_non_parallel,
- meta_parallel=new_task.meta_parallel,
- args_non_parallel=new_task.args_non_parallel,
- args_parallel=new_task.args_parallel,
- type_filters=new_task.type_filters,
+ meta_non_parallel=wftask.meta_non_parallel,
+ meta_parallel=wftask.meta_parallel,
+ args_non_parallel=wftask.args_non_parallel,
+ args_parallel=wftask.args_parallel,
+ type_filters=wftask.type_filters,
  db=db,
  )

  await db.close()

- return workflow_task
+ return wftask_db


  @router.get(
@@ -236,6 +250,11 @@ async def update_workflowtask(
  user_id=user.id,
  db=db,
  )
+ if workflow_task_update.type_filters is not None:
+ _check_type_filters_compatibility(
+ task_input_types=db_wf_task.task.input_types,
+ wftask_type_filters=workflow_task_update.type_filters,
+ )

  if db_wf_task.task_type == "parallel" and (
  workflow_task_update.args_non_parallel is not None
@@ -966,8 +966,8 @@ class FractalSlurmSSHExecutor(SlurmExecutor):
  # Retrieve job and future objects
  job = jobs[ind_job]
  future = futures[ind_job]
- remaining_job_ids = job_ids[ind_job + 1 :] # noqa: E203
- remaining_futures = futures[ind_job + 1 :] # noqa: E203
+ remaining_job_ids = job_ids[ind_job + 1 :]
+ remaining_futures = futures[ind_job + 1 :]

  outputs = []

@@ -22,14 +22,10 @@ def validate_attribute_filters(

  attribute_filters = valdict_keys("attribute_filters")(attribute_filters)
  for key, values in attribute_filters.items():
- if values is None:
- # values=None corresponds to not applying any filter for
- # attribute `key`
- pass
- elif values == []:
- # WARNING: in this case, no image can match with the current
- # filter. In the future we may deprecate this possibility.
- pass
+ if values == []:
+ raise ValueError(
+ f"attribute_filters[{key}] cannot be an empty list."
+ )
  else:
  # values is a non-empty list, and its items must all be of the
  # same scalar non-None type
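Illustrative note (not part of the package diff): the validator change above turns an empty attribute-filter list from a silently accepted "matches nothing" case into a hard error, and the `values is None` branch disappears together with the `Optional` in `AttributeFiltersType` further below. A minimal sketch of the stricter rule, with the unchanged same-type check left out:

def toy_validate_attribute_filters(
    attribute_filters: dict[str, list],
) -> dict[str, list]:
    for key, values in attribute_filters.items():
        if values == []:
            # New behavior: empty lists are rejected outright.
            raise ValueError(
                f"attribute_filters[{key}] cannot be an empty list."
            )
        # The unchanged else-branch (see context lines above) still requires
        # all items to share one scalar, non-None type.
    return attribute_filters

toy_validate_attribute_filters({"plate": ["plate_1.zarr"]})  # passes
# toy_validate_attribute_filters({"plate": []})  # raises ValueError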
@@ -1,4 +1,5 @@
  import logging
+ from typing import Union

  from sqlalchemy.orm.attributes import flag_modified
  from sqlmodel import select
@@ -7,23 +8,52 @@ from fractal_server.app.db import get_sync_db
  from fractal_server.app.models import DatasetV2
  from fractal_server.app.models import JobV2
  from fractal_server.app.models import WorkflowTaskV2
+ from fractal_server.app.schemas.v2 import DatasetReadV2
+ from fractal_server.app.schemas.v2 import JobReadV2
+ from fractal_server.app.schemas.v2 import ProjectReadV2
+ from fractal_server.app.schemas.v2 import TaskReadV2
+ from fractal_server.app.schemas.v2 import WorkflowTaskReadV2

  logger = logging.getLogger("fix_db")
  logger.setLevel(logging.INFO)


- def fix_db():
+ def dict_values_to_list(
+ input_dict: dict[str, Union[int, float, bool, str, None]],
+ identifier: str,
+ ) -> dict[str, list[Union[int, float, bool, str]]]:
+ for k, v in input_dict.items():
+ if not isinstance(v, (int, float, bool, str, type(None))):
+ error_msg = (
+ f"Attribute '{k}' from '{identifier}' "
+ f"has invalid type '{type(v)}'."
+ )
+ logger.error(error_msg)
+ raise RuntimeError(error_msg)
+ elif v is None:
+ logger.warning(
+ f"Attribute '{k}' from '{identifier}' is None and it "
+ "will be removed."
+ )
+ else:
+ input_dict[k] = [v]
+ return input_dict
+

+ def fix_db():
  logger.info("START execution of fix_db function")

  with next(get_sync_db()) as db:
-
  # DatasetV2.filters
  # DatasetV2.history[].workflowtask.input_filters
  stm = select(DatasetV2).order_by(DatasetV2.id)
  datasets = db.execute(stm).scalars().all()
  for ds in datasets:
- ds.attribute_filters = ds.filters["attributes"]
+ logger.info(f"DatasetV2[{ds.id}] START")
+ ds.attribute_filters = dict_values_to_list(
+ ds.filters["attributes"],
+ f"Dataset[{ds.id}].filters.attributes",
+ )
  ds.type_filters = ds.filters["types"]
  ds.filters = None
  for i, h in enumerate(ds.history):
@@ -31,37 +61,54 @@ def fix_db():
  "workflowtask"
  ]["input_filters"]["types"]
  flag_modified(ds, "history")
+ DatasetReadV2(
+ **ds.model_dump(),
+ project=ProjectReadV2(**ds.project.model_dump()),
+ )
  db.add(ds)
- logger.info(f"Fixed filters in DatasetV2[{ds.id}]")
+ logger.info(f"DatasetV2[{ds.id}] END - fixed filters")
+
+ logger.info("------ switch from dataset to workflowtasks ------")

  # WorkflowTaskV2.input_filters
  stm = select(WorkflowTaskV2).order_by(WorkflowTaskV2.id)
  wftasks = db.execute(stm).scalars().all()
  for wft in wftasks:
+ logger.info(f"WorkflowTaskV2[{wft.id}] START")
  wft.type_filters = wft.input_filters["types"]
  if wft.input_filters["attributes"]:
  logger.warning(
- f"Removing WorkflowTaskV2[{wft.id}].input_filters"
- f"['attributes'] = {wft.input_filters['attributes']}"
+ "Removing input_filters['attributes']. "
+ f"(previous value: {wft.input_filters['attributes']})"
  )
  wft.input_filters = None
  flag_modified(wft, "input_filters")
+ WorkflowTaskReadV2(
+ **wft.model_dump(),
+ task=TaskReadV2(**wft.task.model_dump()),
+ )
  db.add(wft)
- logger.info(f"Fixed filters in WorkflowTaskV2[{wft.id}]")
+ logger.info(f"WorkflowTaskV2[{wft.id}] END - fixed filters")
+
+ logger.info("------ switch from workflowtasks to jobs ------")

  # JOBS V2
  stm = select(JobV2).order_by(JobV2.id)
  jobs = db.execute(stm).scalars().all()
  for job in jobs:
+ logger.info(f"JobV2[{job.id}] START")
  job.dataset_dump["type_filters"] = job.dataset_dump["filters"][
  "types"
  ]
- job.dataset_dump["attribute_filters"] = job.dataset_dump[
- "filters"
- ]["attributes"]
+ job.dataset_dump["attribute_filters"] = dict_values_to_list(
+ job.dataset_dump["filters"]["attributes"],
+ f"JobV2[{job.id}].dataset_dump.filters.attributes",
+ )
  job.dataset_dump.pop("filters")
  flag_modified(job, "dataset_dump")
- logger.info(f"Fixed filters in JobV2[{job.id}].datasetdump")
+ JobReadV2(**job.model_dump())
+ db.add(job)
+ logger.info(f"JobV2[{job.id}] END - fixed filters")

  db.commit()
  logger.info("Changes committed.")
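Illustrative note (not part of the package diff): the migration above splits the legacy `filters` column into `attribute_filters` and `type_filters`, wrapping scalar attribute values into single-element lists via `dict_values_to_list` and validating the result against the read schemas. A before/after sketch with made-up values; only the field names come from the hunk:

# Legacy DatasetV2.filters (one JSON column, scalar attribute values):
old_filters = {
    "attributes": {"plate": "plate_1.zarr", "registered": True},
    "types": {"is_3D": False},
}

# After fix_db: two separate columns, attribute values wrapped into lists,
# and the legacy column cleared.
attribute_filters = {"plate": ["plate_1.zarr"], "registered": [True]}
type_filters = {"is_3D": False}
filters = None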
@@ -9,7 +9,7 @@ from pydantic import validator
  from fractal_server.app.schemas._validators import valdict_keys
  from fractal_server.urls import normalize_url

- AttributeFiltersType = dict[str, Optional[list[Any]]]
+ AttributeFiltersType = dict[str, list[Any]]


  class _SingleImageBase(BaseModel):
@@ -57,8 +57,6 @@ def match_filter(

  # Verify match with attributes (only for not-None filters)
  for key, values in attribute_filters.items():
- if values is None:
- continue
  if image["attributes"].get(key) not in values:
  return False

fractal_server/logger.py CHANGED
@@ -109,7 +109,10 @@ def set_logger(
  if isinstance(handler, logging.FileHandler)
  ]
  if len(current_file_handlers) > 1:
- logger.warning(f"Logger {logger_name} has multiple file handlers.")
+ logger.warning(
+ f"Logger {logger_name} has multiple file handlers: "
+ f"{current_file_handlers}"
+ )

  return logger

@@ -33,8 +33,6 @@ from fractal_server.tasks.v2.utils_templates import (
  from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
  from fractal_server.utils import get_timestamp

- LOGGER_NAME = __name__
-

  def collect_local(
  *,
@@ -59,6 +57,8 @@ def collect_local(
  wheel_file:
  """

+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
+
  with TemporaryDirectory() as tmpdir:
  log_file_path = get_log_path(Path(tmpdir))
  logger = set_logger(
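Illustrative note (not part of the package diff): this and the following hunks repeat one pattern, replacing the shared module-level `LOGGER_NAME = __name__` with a per-call name derived from the activity id, presumably so that concurrent lifecycle operations no longer attach file handlers to the same logger (cf. the `set_logger` warning above). Sketch with a made-up id:

task_group_activity_id = 123  # made-up id
LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
# e.g. "fractal_server.tasks.v2.local.collect.ID123" inside the package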
@@ -21,8 +21,6 @@ from fractal_server.tasks.v2.utils_background import get_current_log
  from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
  from fractal_server.utils import get_timestamp

- LOGGER_NAME = __name__
-

  def deactivate_local(
  *,
@@ -40,6 +38,8 @@ def deactivate_local(
  task_group_activity_id:
  """

+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
+
  with TemporaryDirectory() as tmpdir:
  log_file_path = get_log_path(Path(tmpdir))
  logger = set_logger(
@@ -23,9 +23,6 @@ from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
  from fractal_server.utils import get_timestamp


- LOGGER_NAME = __name__
-
-
  def reactivate_local(
  *,
  task_group_activity_id: int,
@@ -42,6 +39,8 @@ def reactivate_local(
  task_group_activity_id:
  """

+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
+
  with TemporaryDirectory() as tmpdir:
  log_file_path = get_log_path(Path(tmpdir))
  logger = set_logger(
@@ -30,8 +30,6 @@ from fractal_server.tasks.v2.utils_templates import (
  from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
  from fractal_server.utils import get_timestamp

- LOGGER_NAME = __name__
-

  def collect_ssh(
  *,
@@ -62,6 +60,8 @@ def collect_ssh(
  wheel_file:
  """

+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
+
  # Work within a temporary folder, where also logs will be placed
  with TemporaryDirectory() as tmpdir:
  LOGGER_NAME = "task_collection_ssh"
@@ -22,8 +22,6 @@ from fractal_server.tasks.v2.utils_background import get_current_log
  from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
  from fractal_server.utils import get_timestamp

- LOGGER_NAME = __name__
-

  def deactivate_ssh(
  *,
@@ -47,6 +45,8 @@ def deactivate_ssh(
  `user_settings.ssh_tasks_dir`.
  """

+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
+
  with TemporaryDirectory() as tmpdir:
  log_file_path = get_log_path(Path(tmpdir))
  logger = set_logger(
@@ -22,8 +22,6 @@ from fractal_server.tasks.v2.utils_python_interpreter import (
  from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
  from fractal_server.utils import get_timestamp

- LOGGER_NAME = __name__
-

  def reactivate_ssh(
  *,
@@ -47,6 +45,8 @@ def reactivate_ssh(
  `user_settings.ssh_tasks_dir`.
  """

+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
+
  with TemporaryDirectory() as tmpdir:
  log_file_path = get_log_path(Path(tmpdir))
  logger = set_logger(
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: fractal-server
- Version: 2.11.0a4
+ Version: 2.11.0a6
  Summary: Backend component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause
@@ -1,4 +1,4 @@
- fractal_server/__init__.py,sha256=DZWnRj7O86D5FPeOgamWSQFlyHVz7jh1SwTyyBfs6GQ,25
+ fractal_server/__init__.py,sha256=aC0yx2HX8xIU9XVrkikksq9g-IXOskfofOk8lZZtKNc,25
  fractal_server/__main__.py,sha256=D2YTmSowmXNyvqOjW_HeItCZT2UliWlySl_owicaZg0,8026
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -45,21 +45,21 @@ fractal_server/app/routes/api/v1/workflowtask.py,sha256=OYYConwJbmNULDw5I3T-UbSJ
  fractal_server/app/routes/api/v2/__init__.py,sha256=w4c9WzagaVV5d4TWBX5buu5ENk8jf3YftMQYmhavz9Q,2172
  fractal_server/app/routes/api/v2/_aux_functions.py,sha256=NJ6_1biN_hhIEK1w8Vj6XhLmdkQ5kMVd_MX5JC_nHLU,11524
  fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=c8eqPXdMhc3nIixX50B1Ka5n7LgbOZm2JbEs7lICQ04,6767
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=PuapLtvSk9yhBAsKNEp1w2oagOMr0YZTo247-CU3hdM,11008
+ fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=uhNSs-jcS7ndIUFKiOC1yrDiViw3uvKEXi9UL04BMks,11642
  fractal_server/app/routes/api/v2/dataset.py,sha256=Y6uZz--YSEGgnPYu05rZ9sr1Ug08bNl2v1h3VeApBe8,9441
- fractal_server/app/routes/api/v2/images.py,sha256=EI2Gu4vNVepXDBRjQLtU2Il3ciQSY9fpEyIsGEm8UVU,8845
+ fractal_server/app/routes/api/v2/images.py,sha256=0qkItqPrAvWEaK3YHUmCCrKrO_tQuzAPf4Te0q8mON8,8832
  fractal_server/app/routes/api/v2/job.py,sha256=Bga2Kz1OjvDIdxZObWaaXVhNIhC_5JKhKRjEH2_ayEE,5157
  fractal_server/app/routes/api/v2/project.py,sha256=eWYFJ7F2ZYQcpi-_n-rhPF-Q4gJhzYBsVGYFhHZZXAE,6653
  fractal_server/app/routes/api/v2/status.py,sha256=_cDZW-ESYw6zpf-lLFFqko5bLpKhqKrCM6yv1OfqxN4,6300
- fractal_server/app/routes/api/v2/submit.py,sha256=Vxvqgu9nh0UCAXEYGEl_XvEfudCrvl_H2nmZwvsFzTo,8429
+ fractal_server/app/routes/api/v2/submit.py,sha256=UMPhWwk4FqZmYtVEu6WLPkSr6a2R4wwgPPeVrx0zRME,8622
  fractal_server/app/routes/api/v2/task.py,sha256=K0ik33t7vL8BAK5S7fqyJDNdRK4stGqb_73bSa8tvPE,7159
  fractal_server/app/routes/api/v2/task_collection.py,sha256=9p8w9UnN6RFszC1ohy9Uo3I4HIMVdfD8fYGWuQqzxMU,12682
  fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=cctW61-C2QYF2KXluS15lLhZJS_kt30Ca6UGLFO32z0,6207
  fractal_server/app/routes/api/v2/task_group.py,sha256=4o2N0z7jK7VUVlJZMM4GveCCc4JKxYJx9-PMmsYIlJQ,8256
  fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=3o9bCC8ubMwffQPPaxQZy-CjH9IB2RkIReIecI6L2_w,9300
  fractal_server/app/routes/api/v2/workflow.py,sha256=vjCNRzMHaAB4YWbAEWGlELHXDN4GjtE26IkIiB15RGM,8682
- fractal_server/app/routes/api/v2/workflow_import.py,sha256=-7Er3FWGF_1xI2qHFO9gfLVQAok5bojd7mbzQxa9Ofw,10858
- fractal_server/app/routes/api/v2/workflowtask.py,sha256=NueHDKbrPWxP4Jo2hpBvuU_XBCccAyoeZOBNybF74zg,10709
+ fractal_server/app/routes/api/v2/workflow_import.py,sha256=DHoHZvxndJQav6l_p5JJW9c9pSRlMEm7bv62h0M5evI,11187
+ fractal_server/app/routes/api/v2/workflowtask.py,sha256=coYBy-21CbJNIkpmwC84BtPTw3r4DYBrBwaFWiM0dJA,11335
  fractal_server/app/routes/auth/__init__.py,sha256=fao6CS0WiAjHDTvBzgBVV_bSXFpEAeDBF6Z6q7rRkPc,1658
  fractal_server/app/routes/auth/_aux_auth.py,sha256=ifkNocTYatBSMYGwiR14qohmvR9SfMldceiEj6uJBrU,4783
  fractal_server/app/routes/auth/current_user.py,sha256=I3aVY5etWAJ_SH6t65Mj5TjvB2X8sAGuu1KG7FxLyPU,5883
@@ -86,7 +86,7 @@ fractal_server/app/runner/executors/slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77
  fractal_server/app/runner/executors/slurm/ssh/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
  fractal_server/app/runner/executors/slurm/ssh/_executor_wait_thread.py,sha256=bKo5Ja0IGxJWpPWyh9dN0AG-PwzTDZzD5LyaEHB3YU4,3742
  fractal_server/app/runner/executors/slurm/ssh/_slurm_job.py,sha256=rwlqZzoGo4SAb4nSlFjsQJdaCgfM1J6YGcjb8yYxlqc,4506
- fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=U2-tNE_5ECHFIoXjEvBlaSXKaIf-1IXZlDs0c34mab8,54110
+ fractal_server/app/runner/executors/slurm/ssh/executor.py,sha256=gtjXKTY0cP3h5AtTXRZChtFOP-tbJTBmvxs_6VnEWao,54082
  fractal_server/app/runner/executors/slurm/sudo/__init__.py,sha256=Cjn1rYvljddi96tAwS-qqGkNfOcfPzjChdaEZEObCcM,65
  fractal_server/app/runner/executors/slurm/sudo/_check_jobs_status.py,sha256=wAgwpVcr6JIslKHOuS0FhRa_6T1KCManyRJqA-fifzw,1909
  fractal_server/app/runner/executors/slurm/sudo/_executor_wait_thread.py,sha256=uRRyVHQtK9McHCB6OsjYfDnQsu2E8At9K_UYb_pe2pg,4682
@@ -134,7 +134,7 @@ fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=1fWvQ6YZUUnDhO
  fractal_server/app/runner/v2/task_interface.py,sha256=d6HPwPzrytUMVjExTU6fuCEwtvvWGRaSje0iXcRD45w,1728
  fractal_server/app/runner/versions.py,sha256=dSaPRWqmFPHjg20kTCHmi_dmGNcCETflDtDLronNanU,852
  fractal_server/app/schemas/__init__.py,sha256=stURAU_t3AOBaH0HSUbV-GKhlPKngnnIMoqWc3orFyI,135
- fractal_server/app/schemas/_filter_validators.py,sha256=WcfQ3ARc-2pj2oQFB4lWA0X5wtoOPGzpD4hJq4BblXs,1727
+ fractal_server/app/schemas/_filter_validators.py,sha256=0wJuZzMa-hJsMCEMxtBalW3lSk1Qey25uSVmS7GVcPM,1534
  fractal_server/app/schemas/_validators.py,sha256=3dotVxUHWKAmUO3aeoluYDLRKrw1OS-NxcZ4Fg_HOYk,3560
  fractal_server/app/schemas/user.py,sha256=icjox9gK_invW44Nh_L4CvqfRa92qghyQhmevyg09nQ,2243
  fractal_server/app/schemas/user_group.py,sha256=t30Kd07PY43G_AqFDb8vjdInTeLeU9WvFZDx8fVLPSI,1750
@@ -165,14 +165,14 @@ fractal_server/app/security/__init__.py,sha256=qn6idYgl-p5HWea0gTVnz4JnkoxGEkmQj
  fractal_server/app/security/signup_email.py,sha256=DrL51UdTSrgjleynMD5CRZwTSOpPrZ96fasRV0fvxDE,1165
  fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
  fractal_server/config.py,sha256=9rAzw7OO6ZeHEz-I8NJHuGoHf4xCHxfFLyRNZQD9ytY,27019
- fractal_server/data_migrations/2_11_0.py,sha256=glS3BkhumrA6SpHiE_QFBgA7Bm2cbDCUlQyY3BjEub8,2464
+ fractal_server/data_migrations/2_11_0.py,sha256=hkOZsvTWi30TqpSsAWuPyL4XxEJlqLwAv02PkVIk4Rw,4337
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
  fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
  fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
  fractal_server/images/__init__.py,sha256=-_wjoKtSX02P1KjDxDP_EXKvmbONTRmbf7iGVTsyBpM,154
- fractal_server/images/models.py,sha256=fAecChXhs4utRX4123Lgz5e_b_H0YtHrvNHCenR7tOs,3359
- fractal_server/images/tools.py,sha256=iqFx_pp46OoHsHjXxX6GrkXJPPfTo_c1WYvRur0olaE,3455
- fractal_server/logger.py,sha256=zwg_AjIHkNP0ruciXjm5lI5UFP3n6tMHullsM9lDjz4,5039
+ fractal_server/images/models.py,sha256=t4zcUFmWxhAzGgy7kkxs9Ctq8SAhVs0v910UcXcHIUw,3349
+ fractal_server/images/tools.py,sha256=4kfPAFJJnvg7fM-cL0JMx97Dc1Npva_0ghitEji3JUU,3407
+ fractal_server/logger.py,sha256=5Z3rfsFwl8UysVljTOaaIvt8Pyp6CVH492ez3jE8WAw,5113
  fractal_server/main.py,sha256=gStLT9Du5QMpc9SyvRvtKU21EKwp-dG4HL3zGHzE06A,4908
  fractal_server/migrations/env.py,sha256=9t_OeKVlhM8WRcukmTrLbWNup-imiBGP_9xNgwCbtpI,2730
  fractal_server/migrations/naming_convention.py,sha256=htbKrVdetx3pklowb_9Cdo5RqeF0fJ740DNecY5de_M,265
@@ -219,14 +219,14 @@ fractal_server/tasks/v1/utils.py,sha256=HYFyNAyZofmf--mVgdwGC5TJpGShIWIDaS01yRr4
  fractal_server/tasks/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/tasks/v2/local/__init__.py,sha256=9RVItnS7OyLsJOuJjWMCicaky4ASUPQEYD4SzDs0hOE,141
  fractal_server/tasks/v2/local/_utils.py,sha256=EvhmVwYjqaNyDCUMEsTWYOUXLgEwR1xr6bu32apCEI8,2491
- fractal_server/tasks/v2/local/collect.py,sha256=JuMplfREqrPvVEGlT5kJhcmZXC_iYlwvNlkgFrCaCC0,12107
- fractal_server/tasks/v2/local/deactivate.py,sha256=SOFtOaR5yYm3IkbOw48TrQgzEpONQ9647KvyD_zImr8,9899
- fractal_server/tasks/v2/local/reactivate.py,sha256=MeUZHx8IKrfTEf-pXlfYms8I4o-26co3jdNgSNAvw60,6053
+ fractal_server/tasks/v2/local/collect.py,sha256=Mr4BzscBY8zBRCzWq8ozTjFYy1-VFPXrMCBF7vd9Ods,12143
+ fractal_server/tasks/v2/local/deactivate.py,sha256=uAV-tBgfKiN4chkfk6dYCZfF67POwhNizyxuCk_WJK8,9935
+ fractal_server/tasks/v2/local/reactivate.py,sha256=DwtOZrSk6jhUHEmAiMcstK3YzqPQ12pfgxmUNdrSgfk,6088
  fractal_server/tasks/v2/ssh/__init__.py,sha256=aSQbVi6Ummt9QzcSLWNmSqYjfdxrn9ROmqgH6bDpI7k,135
  fractal_server/tasks/v2/ssh/_utils.py,sha256=LjaEYVUJDChilu3YuhxuGWYRNnVJ_zqNE9SDHdRTIHY,2824
- fractal_server/tasks/v2/ssh/collect.py,sha256=2XXEPpl4LS22A75v_k4Bd46k46tmnLNZfceHyPi3kXo,13457
- fractal_server/tasks/v2/ssh/deactivate.py,sha256=D8rfnC46davmDKZCipPdWZHDD4TIZ-4nr9vxZSV2aC0,11261
- fractal_server/tasks/v2/ssh/reactivate.py,sha256=cmdT2P1J0FwS1NYYRrhxHsSRyUZ5uu78hS3fDrSVbKo,7837
+ fractal_server/tasks/v2/ssh/collect.py,sha256=yLVcilvU7uMH8woc__qG_3a0wyT2mNTCuq9I93HVKNM,13493
+ fractal_server/tasks/v2/ssh/deactivate.py,sha256=bFlcpZpGiTZcwG845YiLEIIYpiG7vslcSp6_NkXtHGw,11297
+ fractal_server/tasks/v2/ssh/reactivate.py,sha256=RoXM5HpIc0rVz4-8UCr3uWv-9zA8bobGSTNJamYsMOo,7873
  fractal_server/tasks/v2/templates/1_create_venv.sh,sha256=PK0jdHKtQpda1zULebBaVPORt4t6V17wa4N1ohcj5ac,548
  fractal_server/tasks/v2/templates/2_pip_install.sh,sha256=Gpk2io8u9YaflFUlQu2NgkDQw5AA4m4AOVG1sB4yrHQ,1822
  fractal_server/tasks/v2/templates/3_pip_freeze.sh,sha256=JldREScEBI4cD_qjfX4UK7V4aI-FnX9ZvVNxgpSOBFc,168
@@ -241,8 +241,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=07TZpJ0Mh_A4lXVXrrH2o1VLFFGwxe
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
  fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
  fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
- fractal_server-2.11.0a4.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.11.0a4.dist-info/METADATA,sha256=Y_VNqSmaDdR4YhuzzFtSRAbS4nPicovQyWytyGfKB18,4564
- fractal_server-2.11.0a4.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
- fractal_server-2.11.0a4.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.11.0a4.dist-info/RECORD,,
+ fractal_server-2.11.0a6.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.11.0a6.dist-info/METADATA,sha256=jtJXV-kHK3rf-ha0pEYyhIzwCRKox9fHBQ38M1xti38,4564
+ fractal_server-2.11.0a6.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
+ fractal_server-2.11.0a6.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.11.0a6.dist-info/RECORD,,