fractal-server 2.11.0a10__py3-none-any.whl → 2.11.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
--- a/fractal_server/__init__.py
+++ b/fractal_server/__init__.py
@@ -1 +1 @@
- __VERSION__ = "2.11.0a10"
+ __VERSION__ = "2.11.1"
--- a/fractal_server/app/models/v2/dataset.py
+++ b/fractal_server/app/models/v2/dataset.py
@@ -1,6 +1,5 @@
  from datetime import datetime
  from typing import Any
- from typing import Literal
  from typing import Optional

  from sqlalchemy import Column
@@ -42,9 +41,6 @@ class DatasetV2(SQLModel, table=True):
          sa_column=Column(JSON, server_default="[]", nullable=False)
      )

-     filters: Optional[
-         dict[Literal["attributes", "types"], dict[str, Any]]
-     ] = Field(sa_column=Column(JSON, nullable=True, server_default="null"))
      type_filters: dict[str, bool] = Field(
          sa_column=Column(JSON, nullable=False, server_default="{}")
      )
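
For context: the dropped `filters` column packed both halves of an image-filter specification into a single JSON object, while 2.11.1 keeps only the split representation (`type_filters` above, plus list-valued attribute filters). A minimal sketch of the mapping, with hypothetical filter values, following the logic of the removed `fix_db` script at the end of this diff:

```python
# Hypothetical pre-2.11 value of DatasetV2.filters
old_filters = {
    "attributes": {"plate": "plate_1.zarr"},
    "types": {"is_3D": True},
}

# New split representation: "types" is copied verbatim into type_filters,
# and each scalar attribute value is wrapped in a one-element list
# (see dict_values_to_list in the removed fix_db script below).
type_filters = old_filters["types"]  # {"is_3D": True}
attribute_filters = {k: [v] for k, v in old_filters["attributes"].items()}
assert attribute_filters == {"plate": ["plate_1.zarr"]}
```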
--- a/fractal_server/app/models/v2/workflowtask.py
+++ b/fractal_server/app/models/v2/workflowtask.py
@@ -1,5 +1,4 @@
  from typing import Any
- from typing import Literal
  from typing import Optional

  from sqlalchemy import Column
@@ -25,9 +24,6 @@ class WorkflowTaskV2(SQLModel, table=True):
      args_parallel: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
      args_non_parallel: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))

-     input_filters: Optional[
-         dict[Literal["attributes", "types"], dict[str, Any]]
-     ] = Field(sa_column=Column(JSON, nullable=True, server_default="null"))
      type_filters: dict[str, bool] = Field(
          sa_column=Column(JSON, nullable=False, server_default="{}")
      )
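
WorkflowTaskV2 gets the same treatment, with one asymmetry visible in the removed `fix_db` script: only the `"types"` half of `input_filters` has a successor column, while a non-empty `input_filters["attributes"]` was discarded with a warning. A hedged sketch with hypothetical values:

```python
# Hypothetical pre-2.11 value of WorkflowTaskV2.input_filters
input_filters = {"types": {"is_3D": False}, "attributes": {"plate": "p1"}}

# Only the "types" half survives, as the new type_filters column; the
# "attributes" half has no counterpart on workflowtaskv2 and was dropped
# by the removed fix_db script (with a logged warning).
type_filters = input_filters["types"]  # {"is_3D": False}
```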
--- /dev/null
+++ b/fractal_server/migrations/versions/af8673379a5c_drop_old_filter_columns.py
@@ -0,0 +1,54 @@
+ """drop old filter columns
+
+ Revision ID: af8673379a5c
+ Revises: db09233ad13a
+ Create Date: 2025-01-30 14:44:04.302795
+
+ """
+ import sqlalchemy as sa
+ from alembic import op
+ from sqlalchemy.dialects import postgresql
+
+ # revision identifiers, used by Alembic.
+ revision = "af8673379a5c"
+ down_revision = "db09233ad13a"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+         batch_op.drop_column("filters")
+
+     with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+         batch_op.drop_column("input_filters")
+
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+         batch_op.add_column(
+             sa.Column(
+                 "input_filters",
+                 postgresql.JSON(astext_type=sa.Text()),
+                 server_default=sa.text("'null'::json"),
+                 autoincrement=False,
+                 nullable=True,
+             )
+         )
+
+     with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+         batch_op.add_column(
+             sa.Column(
+                 "filters",
+                 postgresql.JSON(astext_type=sa.Text()),
+                 server_default=sa.text("'null'::json"),
+                 autoincrement=False,
+                 nullable=True,
+             )
+         )
+
+     # ### end Alembic commands ###
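
The revision above only drops and (on downgrade) re-creates the two JSON columns; it does not restore their contents, so it is meant to run after the data migration that copied the old values elsewhere (the now-removed `2_11_0.py` script, shown at the end of this diff). A rough sketch of applying or reverting it with Alembic's command API follows; the config path is an assumption (the wheel ships `fractal_server/alembic.ini`, per the RECORD section below):

```python
from alembic import command
from alembic.config import Config

# Assumed location of the Alembic config; adjust to your installation.
cfg = Config("fractal_server/alembic.ini")

# Apply migrations up to and including this revision.
command.upgrade(cfg, "af8673379a5c")

# Revert this single revision; the nullable JSON columns come back
# empty, since drop_column discarded their data.
command.downgrade(cfg, "db09233ad13a")
```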
--- a/fractal_server-2.11.0a10.dist-info/METADATA
+++ b/fractal_server-2.11.1.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: fractal-server
- Version: 2.11.0a10
+ Version: 2.11.1
  Summary: Backend component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause
@@ -1,4 +1,4 @@
1
- fractal_server/__init__.py,sha256=clpVem1dBwk8HjXFnS1878guQBQS-4Cvk0DBQPNJpWQ,26
1
+ fractal_server/__init__.py,sha256=IDMsn_8tp36mX2SfY8u--GT2UfFEEyrK2bOGt3Pg-e0,23
2
2
  fractal_server/__main__.py,sha256=D2YTmSowmXNyvqOjW_HeItCZT2UliWlySl_owicaZg0,8026
3
3
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
4
4
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -16,13 +16,13 @@ fractal_server/app/models/v1/state.py,sha256=m9gMZqqnm3oDpJNJp-Lht4kM7oO7pcEI7sL
16
16
  fractal_server/app/models/v1/task.py,sha256=uFXam7eu3Ye1Yt7_g7llCzY8BetmDRilsq5hR2C1Zbg,2640
17
17
  fractal_server/app/models/v1/workflow.py,sha256=dnY5eMaOe3oZv8arn00RNX9qVkBtTLG-vYdWXcQuyo4,3950
18
18
  fractal_server/app/models/v2/__init__.py,sha256=63THGEZQlxWcosGCI74SEvJU7wOoOn1j1byTjf4NFOI,526
19
- fractal_server/app/models/v2/dataset.py,sha256=gO_V_Fsmw44uB1Nc6MLBVOGxQeutxpDi8nvhhwkggbE,1696
19
+ fractal_server/app/models/v2/dataset.py,sha256=RuqTHXWEgs4A3OSk8Pq9DTq9Xr7w1IJNnyXhOzrDiR0,1509
20
20
  fractal_server/app/models/v2/job.py,sha256=BMmu5oXdZvN7jEIAMZvQMB3PQBcCYzxn6Qm6HdRWre4,1725
21
21
  fractal_server/app/models/v2/project.py,sha256=rAHoh5KfYwIaW7rTX0_O0jvWmxEvfo1BafvmcXuSSRk,786
22
22
  fractal_server/app/models/v2/task.py,sha256=jebD28Pz8tGcsWCItxj6uKjcD8BMMnnU8dqYhvhEB6c,1520
23
23
  fractal_server/app/models/v2/task_group.py,sha256=Sd-fb7EN18eOxrS-RT4ekczLWp-tQcbX5C4LrcmjoIM,3443
24
24
  fractal_server/app/models/v2/workflow.py,sha256=YBgFGCziUgU0aJ5EM3Svu9W2c46AewZO9VBlFCHiSps,1069
25
- fractal_server/app/models/v2/workflowtask.py,sha256=nidHo87GNgMVvL2WDm0HOfGLwGY3m65GTDAInwbqk1Q,1312
25
+ fractal_server/app/models/v2/workflowtask.py,sha256=sBnKiEx9KqhprUaZVY6R8lyfCdwRaX8obYm6bXdn6_E,1119
26
26
  fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
27
27
  fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
28
28
  fractal_server/app/routes/admin/v1.py,sha256=ggJZMeKhRijfVe2h2VzfIcpR15FqkKImANhkTXl0mSk,12908
@@ -165,7 +165,6 @@ fractal_server/app/security/__init__.py,sha256=qn6idYgl-p5HWea0gTVnz4JnkoxGEkmQj
165
165
  fractal_server/app/security/signup_email.py,sha256=DrL51UdTSrgjleynMD5CRZwTSOpPrZ96fasRV0fvxDE,1165
166
166
  fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
167
167
  fractal_server/config.py,sha256=9rAzw7OO6ZeHEz-I8NJHuGoHf4xCHxfFLyRNZQD9ytY,27019
168
- fractal_server/data_migrations/2_11_0.py,sha256=Cc-P_eXKnlQxc2AnRzV0K4B4DDeEHEFm1WltFjtRMnA,6400
169
168
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
170
169
  fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
171
170
  fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
@@ -196,6 +195,7 @@ fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py,sha256=0i
196
195
  fractal_server/migrations/versions/9c5ae74c9b98_add_user_settings_table.py,sha256=syONdZNf4-OnAcWIsbzXpYwpXPsXZ4SsmjwVvmVG0PU,2256
197
196
  fractal_server/migrations/versions/9fd26a2b0de4_add_workflow_timestamp_created.py,sha256=4l1AHGUsa0ONoJVZlr3fTXw_xbbQ8O7wlD92Az2aRfM,1849
198
197
  fractal_server/migrations/versions/a7f4d6137b53_add_workflow_dump_to_applyworkflow.py,sha256=ekDUML7ILpmdoqEclKbEUdyLi4uw9HSG_sTjG2hp_JE,867
198
+ fractal_server/migrations/versions/af8673379a5c_drop_old_filter_columns.py,sha256=9sLd0F7nO5chHHm7RZ4wBA-9bvWomS-av_odKwODADM,1551
199
199
  fractal_server/migrations/versions/d256a7379ab8_taskgroup_activity_and_venv_info_to_.py,sha256=HN3_Pk8G81SzdYjg4K1RZAyjKSlsZGvcYE2nWOUbwxQ,3861
200
200
  fractal_server/migrations/versions/d4fe3708d309_make_applyworkflow_workflow_dump_non_.py,sha256=6cHEZFuTXiQg9yu32Y3RH1XAl71av141WQ6UMbiITIg,949
201
201
  fractal_server/migrations/versions/da2cb2ac4255_user_group_viewer_paths.py,sha256=yGWSA2HIHUybcVy66xBITk08opV2DFYSCIIrulaUZhI,901
@@ -241,8 +241,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=07TZpJ0Mh_A4lXVXrrH2o1VLFFGwxe
241
241
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
242
242
  fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
243
243
  fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
244
- fractal_server-2.11.0a10.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
245
- fractal_server-2.11.0a10.dist-info/METADATA,sha256=oQdGPsnAFNH-PEtO239jJjVsxCDWPztSto5qizFAxbw,4565
246
- fractal_server-2.11.0a10.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
247
- fractal_server-2.11.0a10.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
248
- fractal_server-2.11.0a10.dist-info/RECORD,,
244
+ fractal_server-2.11.1.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
245
+ fractal_server-2.11.1.dist-info/METADATA,sha256=2-8AQ21JsDrgiHyn28rTFVP8-viNbAJu9rJxBC5z2nA,4562
246
+ fractal_server-2.11.1.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
247
+ fractal_server-2.11.1.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
248
+ fractal_server-2.11.1.dist-info/RECORD,,
--- a/fractal_server/data_migrations/2_11_0.py
+++ /dev/null
@@ -1,168 +0,0 @@
- import logging
- from typing import Union
-
- from sqlalchemy.orm.attributes import flag_modified
- from sqlmodel import select
-
- from fractal_server.app.db import get_sync_db
- from fractal_server.app.models import DatasetV2
- from fractal_server.app.models import JobV2
- from fractal_server.app.models import ProjectV2
- from fractal_server.app.models import WorkflowTaskV2
- from fractal_server.app.models import WorkflowV2
- from fractal_server.app.schemas.v2 import DatasetReadV2
- from fractal_server.app.schemas.v2 import JobReadV2
- from fractal_server.app.schemas.v2 import ProjectReadV2
- from fractal_server.app.schemas.v2 import TaskReadV2
- from fractal_server.app.schemas.v2 import WorkflowTaskReadV2
- from fractal_server.images.models import AttributeFiltersType
-
- logger = logging.getLogger("fix_db")
- logger.setLevel(logging.INFO)
-
-
- def dict_values_to_list(
-     input_dict: dict[str, Union[int, float, bool, str, None]],
-     identifier: str,
- ) -> tuple[AttributeFiltersType, bool]:
-     was_there_a_warning = False
-     for k, v in input_dict.items():
-         if not isinstance(v, (int, float, bool, str, type(None))):
-             error_msg = (
-                 f"Attribute '{k}' from '{identifier}' "
-                 "has invalid type '{type(v)}'."
-             )
-             logger.error(error_msg)
-             raise RuntimeError(error_msg)
-         elif v is None:
-             logger.warning(
-                 f"Attribute '{k}' from '{identifier}' is "
-                 "None and it will be removed."
-             )
-             was_there_a_warning = True
-         else:
-             input_dict[k] = [v]
-     return input_dict, was_there_a_warning
-
-
- def fix_db():
-     logger.info("START execution of fix_db function")
-
-     with next(get_sync_db()) as db:
-         # DatasetV2.filters
-         stm = select(DatasetV2).order_by(DatasetV2.id)
-         datasets = db.execute(stm).scalars().all()
-         for ds in datasets:
-             logger.info(f"DatasetV2[{ds.id}] START")
-             if ds.filters is None:
-                 logger.info(f"DatasetV2[{ds.id}] SKIP")
-                 continue
-
-             ds.attribute_filters, warning = dict_values_to_list(
-                 ds.filters["attributes"],
-                 f"Dataset[{ds.id}].filters.attributes",
-             )
-             if warning:
-                 proj = db.get(ProjectV2, ds.project_id)
-                 logger.warning(
-                     "Additional information: "
-                     f"{proj.id=}, "
-                     f"{proj.name=}, "
-                     f"{proj.user_list[0].email=}, "
-                     f"{ds.id=}, "
-                     f"{ds.name=}"
-                 )
-             ds.type_filters = ds.filters["types"]
-             ds.filters = None
-             for i, h in enumerate(ds.history):
-                 ds.history[i]["workflowtask"]["type_filters"] = h[
-                     "workflowtask"
-                 ]["input_filters"]["types"]
-                 ds.history[i]["workflowtask"].pop("input_filters")
-             flag_modified(ds, "history")
-             DatasetReadV2(
-                 **ds.model_dump(),
-                 project=ProjectReadV2(**ds.project.model_dump()),
-             )
-             db.add(ds)
-             logger.info(f"DatasetV2[{ds.id}] END - fixed filters")
-
-         logger.info("------ switch from dataset to workflowtasks ------")
-
-         # WorkflowTaskV2.input_filters
-         stm = select(WorkflowTaskV2).order_by(WorkflowTaskV2.id)
-         wftasks = db.execute(stm).scalars().all()
-         for wft in wftasks:
-             logger.info(f"WorkflowTaskV2[{wft.id}] START")
-             if wft.input_filters is None:
-                 logger.info(f"WorkflowTaskV2[{wft.id}] SKIP")
-                 continue
-             wft.type_filters = wft.input_filters["types"]
-             if wft.input_filters["attributes"]:
-                 logger.warning(
-                     "Removing input_filters['attributes']. "
-                     f"(previous value: {wft.input_filters['attributes']})"
-                 )
-                 wf = db.get(WorkflowV2, wft.workflow_id)
-                 proj = db.get(ProjectV2, wf.project_id)
-                 logger.warning(
-                     "Additional information: "
-                     f"{proj.id=}, "
-                     f"{proj.name=}, "
-                     f"{proj.user_list[0].email=}, "
-                     f"{wf.id=}, "
-                     f"{wf.name=}, "
-                     f"{wft.task.name=}"
-                 )
-             wft.input_filters = None
-             flag_modified(wft, "input_filters")
-             WorkflowTaskReadV2(
-                 **wft.model_dump(),
-                 task=TaskReadV2(**wft.task.model_dump()),
-             )
-             db.add(wft)
-             logger.info(f"WorkflowTaskV2[{wft.id}] END - fixed filters")
-
-         logger.info("------ switch from workflowtasks to jobs ------")
-
-         # JOBS V2
-         stm = select(JobV2).order_by(JobV2.id)
-         jobs = db.execute(stm).scalars().all()
-         for job in jobs:
-             logger.info(f"JobV2[{job.id}] START")
-             if "filters" not in job.dataset_dump.keys():
-                 logger.info(f"JobV2[{job.id}] SKIP")
-                 continue
-             job.dataset_dump["type_filters"] = job.dataset_dump["filters"][
-                 "types"
-             ]
-             (
-                 job.dataset_dump["attribute_filters"],
-                 warning,
-             ) = dict_values_to_list(
-                 job.dataset_dump["filters"]["attributes"],
-                 f"JobV2[{job.id}].dataset_dump.filters.attributes",
-             )
-             if warning and job.project_id is not None:
-                 proj = db.get(ProjectV2, job.project_id)
-                 logger.warning(
-                     "Additional information: "
-                     f"{proj.id=}, "
-                     f"{proj.name=}, "
-                     f"{proj.user_list[0].email=}, "
-                     f"{job.id=}, "
-                     f"{job.start_timestamp=}, "
-                     f"{job.end_timestamp=}, "
-                     f"{job.dataset_id=}, "
-                     f"{job.workflow_id=}."
-                 )
-             job.dataset_dump.pop("filters")
-             flag_modified(job, "dataset_dump")
-             JobReadV2(**job.model_dump())
-             db.add(job)
-             logger.info(f"JobV2[{job.id}] END - fixed filters")
-
-         db.commit()
-         logger.info("Changes committed.")
-
-     logger.info("END execution of fix_db function")