fractal-server 2.11.0a5__py3-none-any.whl → 2.11.0a7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1 +1 @@
1
- __VERSION__ = "2.11.0a5"
1
+ __VERSION__ = "2.11.0a7"
@@ -22,14 +22,10 @@ def validate_attribute_filters(
22
22
 
23
23
  attribute_filters = valdict_keys("attribute_filters")(attribute_filters)
24
24
  for key, values in attribute_filters.items():
25
- if values is None:
26
- # values=None corresponds to not applying any filter for
27
- # attribute `key`
28
- pass
29
- elif values == []:
30
- # WARNING: in this case, no image can match with the current
31
- # filter. In the future we may deprecate this possibility.
32
- pass
25
+ if values == []:
26
+ raise ValueError(
27
+ f"attribute_filters[{key}] cannot be an empty list."
28
+ )
33
29
  else:
34
30
  # values is a non-empty list, and its items must all be of the
35
31
  # same scalar non-None type
@@ -1,4 +1,5 @@
1
1
  import logging
2
+ from typing import Union
2
3
 
3
4
  from sqlalchemy.orm.attributes import flag_modified
4
5
  from sqlmodel import select
@@ -6,24 +7,67 @@ from sqlmodel import select
6
7
  from fractal_server.app.db import get_sync_db
7
8
  from fractal_server.app.models import DatasetV2
8
9
  from fractal_server.app.models import JobV2
10
+ from fractal_server.app.models import ProjectV2
9
11
  from fractal_server.app.models import WorkflowTaskV2
12
+ from fractal_server.app.models import WorkflowV2
13
+ from fractal_server.app.schemas.v2 import DatasetReadV2
14
+ from fractal_server.app.schemas.v2 import JobReadV2
15
+ from fractal_server.app.schemas.v2 import ProjectReadV2
16
+ from fractal_server.app.schemas.v2 import TaskReadV2
17
+ from fractal_server.app.schemas.v2 import WorkflowTaskReadV2
18
+ from fractal_server.images.models import AttributeFiltersType
10
19
 
11
20
  logger = logging.getLogger("fix_db")
12
21
  logger.setLevel(logging.INFO)
13
22
 
14
23
 
15
- def fix_db():
24
def dict_values_to_list(
    input_dict: dict[str, Union[int, float, bool, str, None]],
    identifier: str,
) -> "tuple[AttributeFiltersType, bool]":
    """Convert legacy scalar attribute filters into single-item lists.

    Part of the 2.11.0 data migration: transforms ``{key: scalar}`` into the
    new ``AttributeFiltersType`` form ``{key: [scalar]}``, in place.

    Args:
        input_dict: Legacy filters mapping; values must be scalar or ``None``.
        identifier: Human-readable origin of the dict, used in log messages.

    Returns:
        Tuple of the converted dict and a flag telling whether at least one
        warning was logged (used by callers to emit extra context).

    Raises:
        RuntimeError: If any value has a non-scalar, non-``None`` type.
    """
    # Same singleton as the module-level logger (getLogger is keyed by name).
    logger = logging.getLogger("fix_db")
    was_there_a_warning = False
    # Iterate over a snapshot of the items so keys can be removed while
    # looping over the dict.
    for k, v in list(input_dict.items()):
        if not isinstance(v, (int, float, bool, str, type(None))):
            error_msg = (
                f"Attribute '{k}' from '{identifier}' "
                # FIX: this fragment was missing the f-prefix, so the literal
                # text "{type(v)}" was logged instead of the actual type.
                f"has invalid type '{type(v)}'."
            )
            logger.error(error_msg)
            raise RuntimeError(error_msg)
        elif v is None:
            logger.warning(
                f"Attribute '{k}' from '{identifier}' is "
                "None and it will be removed."
            )
            # FIX: actually drop the key, as the warning message promises.
            # Leaving a None value would violate the new
            # AttributeFiltersType (dict[str, list[Any]]) and break the
            # downstream pydantic validation of the migrated rows.
            del input_dict[k]
            was_there_a_warning = True
        else:
            # Scalar value: wrap it in a single-item list.
            input_dict[k] = [v]
    return input_dict, was_there_a_warning
46
+
16
47
 
48
+ def fix_db():
17
49
  logger.info("START execution of fix_db function")
18
50
 
19
51
  with next(get_sync_db()) as db:
20
-
21
52
  # DatasetV2.filters
22
- # DatasetV2.history[].workflowtask.input_filters
23
53
  stm = select(DatasetV2).order_by(DatasetV2.id)
24
54
  datasets = db.execute(stm).scalars().all()
25
55
  for ds in datasets:
26
- ds.attribute_filters = ds.filters["attributes"]
56
+ logger.info(f"DatasetV2[{ds.id}] START")
57
+ ds.attribute_filters, warning = dict_values_to_list(
58
+ ds.filters["attributes"],
59
+ f"Dataset[{ds.id}].filters.attributes",
60
+ )
61
+ if warning:
62
+ proj = db.get(ProjectV2, ds.project_id)
63
+ logger.warning(
64
+ "Additional information: "
65
+ f"{proj.id=}, "
66
+ f"{proj.name=}, "
67
+ f"{proj.user_list[0].email=}, "
68
+ f"{ds.id=}, "
69
+ f"{ds.name=}"
70
+ )
27
71
  ds.type_filters = ds.filters["types"]
28
72
  ds.filters = None
29
73
  for i, h in enumerate(ds.history):
@@ -31,37 +75,83 @@ def fix_db():
31
75
  "workflowtask"
32
76
  ]["input_filters"]["types"]
33
77
  flag_modified(ds, "history")
78
+ DatasetReadV2(
79
+ **ds.model_dump(),
80
+ project=ProjectReadV2(**ds.project.model_dump()),
81
+ )
34
82
  db.add(ds)
35
- logger.info(f"Fixed filters in DatasetV2[{ds.id}]")
83
+ logger.info(f"DatasetV2[{ds.id}] END - fixed filters")
84
+
85
+ logger.info("------ switch from dataset to workflowtasks ------")
36
86
 
37
87
  # WorkflowTaskV2.input_filters
38
88
  stm = select(WorkflowTaskV2).order_by(WorkflowTaskV2.id)
39
89
  wftasks = db.execute(stm).scalars().all()
40
90
  for wft in wftasks:
91
+ logger.info(f"WorkflowTaskV2[{wft.id}] START")
41
92
  wft.type_filters = wft.input_filters["types"]
42
93
  if wft.input_filters["attributes"]:
43
94
  logger.warning(
44
- f"Removing WorkflowTaskV2[{wft.id}].input_filters"
45
- f"['attributes'] = {wft.input_filters['attributes']}"
95
+ "Removing input_filters['attributes']. "
96
+ f"(previous value: {wft.input_filters['attributes']})"
97
+ )
98
+ wf = db.get(WorkflowV2, wft.workflow_id)
99
+ proj = db.get(ProjectV2, wf.project_id)
100
+ logger.warning(
101
+ "Additional information: "
102
+ f"{proj.id=}, "
103
+ f"{proj.name=}, "
104
+ f"{proj.user_list[0].email=}, "
105
+ f"{wf.id=}, "
106
+ f"{wf.name=}, "
107
+ f"{wft.task.name=}"
46
108
  )
47
109
  wft.input_filters = None
48
110
  flag_modified(wft, "input_filters")
111
+ WorkflowTaskReadV2(
112
+ **wft.model_dump(),
113
+ task=TaskReadV2(**wft.task.model_dump()),
114
+ )
49
115
  db.add(wft)
50
- logger.info(f"Fixed filters in WorkflowTaskV2[{wft.id}]")
116
+ logger.info(f"WorkflowTaskV2[{wft.id}] END - fixed filters")
117
+
118
+ logger.info("------ switch from workflowtasks to jobs ------")
51
119
 
52
120
  # JOBS V2
53
121
  stm = select(JobV2).order_by(JobV2.id)
54
122
  jobs = db.execute(stm).scalars().all()
55
123
  for job in jobs:
124
+ logger.info(f"JobV2[{job.id}] START")
56
125
  job.dataset_dump["type_filters"] = job.dataset_dump["filters"][
57
126
  "types"
58
127
  ]
59
- job.dataset_dump["attribute_filters"] = job.dataset_dump[
60
- "filters"
61
- ]["attributes"]
128
+ (
129
+ job.dataset_dump["attribute_filters"],
130
+ warning,
131
+ ) = dict_values_to_list(
132
+ job.dataset_dump["filters"]["attributes"],
133
+ f"JobV2[{job.id}].dataset_dump.filters.attributes",
134
+ )
135
+ if warning and job.project_id is not None:
136
+ proj = db.get(ProjectV2, job.project_id)
137
+ logger.warning(
138
+ "Additional information: "
139
+ f"{proj.id=}, "
140
+ f"{proj.name=}, "
141
+ f"{proj.user_list[0].email=}, "
142
+ f"{job.id=}, "
143
+ f"{job.start_timestamp=}, "
144
+ f"{job.end_timestamp=}, "
145
+ f"{job.dataset_id=}, "
146
+ f"{job.workflow_id=}."
147
+ )
148
+ # FIXME
149
+ pass
62
150
  job.dataset_dump.pop("filters")
63
151
  flag_modified(job, "dataset_dump")
64
- logger.info(f"Fixed filters in JobV2[{job.id}].datasetdump")
152
+ JobReadV2(**job.model_dump())
153
+ db.add(job)
154
+ logger.info(f"JobV2[{job.id}] END - fixed filters")
65
155
 
66
156
  db.commit()
67
157
  logger.info("Changes committed.")
@@ -9,7 +9,7 @@ from pydantic import validator
9
9
  from fractal_server.app.schemas._validators import valdict_keys
10
10
  from fractal_server.urls import normalize_url
11
11
 
12
- AttributeFiltersType = dict[str, Optional[list[Any]]]
12
+ AttributeFiltersType = dict[str, list[Any]]
13
13
 
14
14
 
15
15
  class _SingleImageBase(BaseModel):
@@ -57,8 +57,6 @@ def match_filter(
57
57
 
58
58
  # Verify match with attributes (only for not-None filters)
59
59
  for key, values in attribute_filters.items():
60
- if values is None:
61
- continue
62
60
  if image["attributes"].get(key) not in values:
63
61
  return False
64
62
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: fractal-server
3
- Version: 2.11.0a5
3
+ Version: 2.11.0a7
4
4
  Summary: Backend component of the Fractal analytics platform
5
5
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
6
6
  License: BSD-3-Clause
@@ -1,4 +1,4 @@
1
- fractal_server/__init__.py,sha256=P2nIVsQnIcZXk3AZLCDwdPsHsGyWJzqhBzjjXpYldU0,25
1
+ fractal_server/__init__.py,sha256=mM9PejPd0ZnCDoi6-w05W_qaatAjbN_mn-vxn7wialA,25
2
2
  fractal_server/__main__.py,sha256=D2YTmSowmXNyvqOjW_HeItCZT2UliWlySl_owicaZg0,8026
3
3
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
4
4
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -134,7 +134,7 @@ fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=1fWvQ6YZUUnDhO
134
134
  fractal_server/app/runner/v2/task_interface.py,sha256=d6HPwPzrytUMVjExTU6fuCEwtvvWGRaSje0iXcRD45w,1728
135
135
  fractal_server/app/runner/versions.py,sha256=dSaPRWqmFPHjg20kTCHmi_dmGNcCETflDtDLronNanU,852
136
136
  fractal_server/app/schemas/__init__.py,sha256=stURAU_t3AOBaH0HSUbV-GKhlPKngnnIMoqWc3orFyI,135
137
- fractal_server/app/schemas/_filter_validators.py,sha256=WcfQ3ARc-2pj2oQFB4lWA0X5wtoOPGzpD4hJq4BblXs,1727
137
+ fractal_server/app/schemas/_filter_validators.py,sha256=0wJuZzMa-hJsMCEMxtBalW3lSk1Qey25uSVmS7GVcPM,1534
138
138
  fractal_server/app/schemas/_validators.py,sha256=3dotVxUHWKAmUO3aeoluYDLRKrw1OS-NxcZ4Fg_HOYk,3560
139
139
  fractal_server/app/schemas/user.py,sha256=icjox9gK_invW44Nh_L4CvqfRa92qghyQhmevyg09nQ,2243
140
140
  fractal_server/app/schemas/user_group.py,sha256=t30Kd07PY43G_AqFDb8vjdInTeLeU9WvFZDx8fVLPSI,1750
@@ -165,13 +165,13 @@ fractal_server/app/security/__init__.py,sha256=qn6idYgl-p5HWea0gTVnz4JnkoxGEkmQj
165
165
  fractal_server/app/security/signup_email.py,sha256=DrL51UdTSrgjleynMD5CRZwTSOpPrZ96fasRV0fvxDE,1165
166
166
  fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
167
167
  fractal_server/config.py,sha256=9rAzw7OO6ZeHEz-I8NJHuGoHf4xCHxfFLyRNZQD9ytY,27019
168
- fractal_server/data_migrations/2_11_0.py,sha256=glS3BkhumrA6SpHiE_QFBgA7Bm2cbDCUlQyY3BjEub8,2464
168
+ fractal_server/data_migrations/2_11_0.py,sha256=Zlmo4e39q5IiSM8p38T5NCnUEAsUxe08lt6TeUx-fRE,5997
169
169
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
170
170
  fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
171
171
  fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
172
172
  fractal_server/images/__init__.py,sha256=-_wjoKtSX02P1KjDxDP_EXKvmbONTRmbf7iGVTsyBpM,154
173
- fractal_server/images/models.py,sha256=fAecChXhs4utRX4123Lgz5e_b_H0YtHrvNHCenR7tOs,3359
174
- fractal_server/images/tools.py,sha256=iqFx_pp46OoHsHjXxX6GrkXJPPfTo_c1WYvRur0olaE,3455
173
+ fractal_server/images/models.py,sha256=t4zcUFmWxhAzGgy7kkxs9Ctq8SAhVs0v910UcXcHIUw,3349
174
+ fractal_server/images/tools.py,sha256=4kfPAFJJnvg7fM-cL0JMx97Dc1Npva_0ghitEji3JUU,3407
175
175
  fractal_server/logger.py,sha256=5Z3rfsFwl8UysVljTOaaIvt8Pyp6CVH492ez3jE8WAw,5113
176
176
  fractal_server/main.py,sha256=gStLT9Du5QMpc9SyvRvtKU21EKwp-dG4HL3zGHzE06A,4908
177
177
  fractal_server/migrations/env.py,sha256=9t_OeKVlhM8WRcukmTrLbWNup-imiBGP_9xNgwCbtpI,2730
@@ -241,8 +241,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=07TZpJ0Mh_A4lXVXrrH2o1VLFFGwxe
241
241
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
242
242
  fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
243
243
  fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
244
- fractal_server-2.11.0a5.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
245
- fractal_server-2.11.0a5.dist-info/METADATA,sha256=i_O42zmSGOyXWu-sT-rHUCDZ-flG0SiuK3oVNKwrmpE,4564
246
- fractal_server-2.11.0a5.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
247
- fractal_server-2.11.0a5.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
248
- fractal_server-2.11.0a5.dist-info/RECORD,,
244
+ fractal_server-2.11.0a7.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
245
+ fractal_server-2.11.0a7.dist-info/METADATA,sha256=3j0iczgnV5Ok6R3O8Zq56z41Hto-jwVnx8ujk2ikeUQ,4564
246
+ fractal_server-2.11.0a7.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
247
+ fractal_server-2.11.0a7.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
248
+ fractal_server-2.11.0a7.dist-info/RECORD,,