fractal-server 2.11.0a9__py3-none-any.whl → 2.11.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
fractal_server/__init__.py
@@ -1 +1 @@
- __VERSION__ = "2.11.0a9"
+ __VERSION__ = "2.11.1"
fractal_server/app/models/v2/dataset.py
@@ -1,6 +1,5 @@
  from datetime import datetime
  from typing import Any
- from typing import Literal
  from typing import Optional

  from sqlalchemy import Column
@@ -42,9 +41,6 @@ class DatasetV2(SQLModel, table=True):
          sa_column=Column(JSON, server_default="[]", nullable=False)
      )

-     filters: Optional[
-         dict[Literal["attributes", "types"], dict[str, Any]]
-     ] = Field(sa_column=Column(JSON, nullable=True, server_default="null"))
      type_filters: dict[str, bool] = Field(
          sa_column=Column(JSON, nullable=False, server_default="{}")
      )
fractal_server/app/models/v2/workflowtask.py
@@ -1,5 +1,4 @@
  from typing import Any
- from typing import Literal
  from typing import Optional

  from sqlalchemy import Column
@@ -25,9 +24,6 @@ class WorkflowTaskV2(SQLModel, table=True):
      args_parallel: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
      args_non_parallel: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))

-     input_filters: Optional[
-         dict[Literal["attributes", "types"], dict[str, Any]]
-     ] = Field(sa_column=Column(JSON, nullable=True, server_default="null"))
      type_filters: dict[str, bool] = Field(
          sa_column=Column(JSON, nullable=False, server_default="{}")
      )
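
For orientation, the dropped columns stored attribute filters and type filters together in a single JSON object, while the surviving `type_filters` column keeps only the boolean type map. A minimal sketch of the two shapes (the filter keys and values below are illustrative, not taken from any real dataset):

```python
# Illustrative payloads only; the key names below are made-up examples.

# Legacy shape of DatasetV2.filters / WorkflowTaskV2.input_filters:
legacy_filters = {
    "attributes": {"plate": "plate_1.zarr"},  # attribute filters
    "types": {"is_3D": True},  # type filters
}

# As of 2.11, only the boolean type map remains on these two tables:
type_filters: dict[str, bool] = legacy_filters["types"]
assert type_filters == {"is_3D": True}
```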
fractal_server/app/routes/api/v2/job.py
@@ -1,4 +1,6 @@
+ import asyncio
  from pathlib import Path
+ from typing import Iterator
  from typing import Optional

  from fastapi import APIRouter
@@ -24,6 +26,14 @@ from ._aux_functions import _get_workflow_check_owner
  from fractal_server.app.models import UserOAuth
  from fractal_server.app.routes.auth import current_active_user

+
+ # https://docs.python.org/3/library/asyncio-task.html#asyncio.to_thread
+ # This moves the function execution to a separate thread,
+ # preventing it from blocking the event loop.
+ async def zip_folder_threaded(folder: str) -> Iterator[bytes]:
+     return await asyncio.to_thread(_zip_folder_to_byte_stream_iterator, folder)
+
+
  router = APIRouter()


@@ -128,8 +138,11 @@ async def download_job_logs(
      )
      job = output["job"]
      zip_name = f"{Path(job.working_dir).name}_archive.zip"
+
+     zip_bytes_iterator = await zip_folder_threaded(job.working_dir)
+
      return StreamingResponse(
-         _zip_folder_to_byte_stream_iterator(folder=job.working_dir),
+         zip_bytes_iterator,
          media_type="application/x-zip-compressed",
          headers={"Content-Disposition": f"attachment;filename={zip_name}"},
      )
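
The comment in the hunk above points at the general pattern: `asyncio.to_thread` runs a blocking callable in a worker thread and awaits its result, so building the zip archive no longer stalls the event loop. A self-contained sketch of the same pattern, with hypothetical names standing in for fractal-server's helpers:

```python
import asyncio
import time
from typing import Iterator


def build_archive(folder: str) -> Iterator[bytes]:
    # Stand-in for a blocking zip builder: it sleeps, then yields chunks.
    time.sleep(1)
    return iter([b"chunk-1", b"chunk-2"])


async def main() -> None:
    # to_thread() suspends this coroutine while the blocking call runs
    # in a worker thread, so other tasks keep using the event loop.
    chunks = await asyncio.to_thread(build_archive, "/some/folder")
    print(list(chunks))


asyncio.run(main())
```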
fractal_server/app/schemas/v2/workflowtask.py
@@ -180,7 +180,7 @@ class WorkflowTaskImportV2(BaseModel, extra=Extra.forbid):
      args_non_parallel: Optional[dict[str, Any]] = None
      args_parallel: Optional[dict[str, Any]] = None
      type_filters: Optional[dict[str, bool]] = None
-     filters: Optional[dict[str, Any]] = None
+     input_filters: Optional[dict[str, Any]] = None

      task: Union[TaskImportV2, TaskImportV2Legacy]

@@ -191,7 +191,7 @@ class WorkflowTaskImportV2(BaseModel, extra=Extra.forbid):
          Transform legacy filters (created with fractal-server<2.11.0)
          into type filters
          """
-         if values.get("filters") is not None:
+         if values.get("input_filters") is not None:
              if "type_filters" in values.keys():
                  raise ValueError(
                      "Cannot set filters both through the legacy field "
@@ -201,13 +201,15 @@ class WorkflowTaskImportV2(BaseModel, extra=Extra.forbid):
              else:
                  # As of 2.11.0, WorkflowTask do not have attribute filters
                  # any more.
-                 if values["filters"]["attributes"] != {}:
+                 if values["input_filters"]["attributes"] != {}:
                      raise ValueError(
                          "Cannot set attribute filters for WorkflowTasks."
                      )
                  # Convert legacy filters.types into new type_filters
-                 values["type_filters"] = values["filters"].get("types", {})
-                 values["filters"] = None
+                 values["type_filters"] = values["input_filters"].get(
+                     "types", {}
+                 )
+                 values["input_filters"] = None

          return values

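The net effect of the renamed field is easier to see outside the Pydantic model. A standalone mirror of the logic above (illustrative only; the real check lives in the `WorkflowTaskImportV2` validator):

```python
from typing import Any


def update_legacy_filters(values: dict[str, Any]) -> dict[str, Any]:
    # Mirror of the validator above: reject mixed usage and attribute
    # filters, then fold legacy "types" into the new type_filters field.
    if values.get("input_filters") is not None:
        if "type_filters" in values:
            raise ValueError("Cannot set both legacy and new filters.")
        if values["input_filters"]["attributes"] != {}:
            raise ValueError("Cannot set attribute filters for WorkflowTasks.")
        values["type_filters"] = values["input_filters"].get("types", {})
        values["input_filters"] = None
    return values


# A legacy export is rewritten into the new shape:
payload = {"input_filters": {"attributes": {}, "types": {"is_3D": False}}}
payload = update_legacy_filters(payload)
assert payload["type_filters"] == {"is_3D": False}
assert payload["input_filters"] is None
```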
fractal_server/migrations/versions/af8673379a5c_drop_old_filter_columns.py (new file)
@@ -0,0 +1,54 @@
+ """drop old filter columns
+
+ Revision ID: af8673379a5c
+ Revises: db09233ad13a
+ Create Date: 2025-01-30 14:44:04.302795
+
+ """
+ import sqlalchemy as sa
+ from alembic import op
+ from sqlalchemy.dialects import postgresql
+
+ # revision identifiers, used by Alembic.
+ revision = "af8673379a5c"
+ down_revision = "db09233ad13a"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+         batch_op.drop_column("filters")
+
+     with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+         batch_op.drop_column("input_filters")
+
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+         batch_op.add_column(
+             sa.Column(
+                 "input_filters",
+                 postgresql.JSON(astext_type=sa.Text()),
+                 server_default=sa.text("'null'::json"),
+                 autoincrement=False,
+                 nullable=True,
+             )
+         )
+
+     with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+         batch_op.add_column(
+             sa.Column(
+                 "filters",
+                 postgresql.JSON(astext_type=sa.Text()),
+                 server_default=sa.text("'null'::json"),
+                 autoincrement=False,
+                 nullable=True,
+             )
+         )
+
+     # ### end Alembic commands ###
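
This revision is applied like any other Alembic migration. A minimal sketch of driving it programmatically (the ini path is an assumption; deployments normally go through fractal-server's own startup machinery):

```python
from alembic import command
from alembic.config import Config

# Assumed location of the project's alembic.ini; adjust per deployment.
cfg = Config("fractal_server/alembic.ini")

# Apply all pending revisions, including af8673379a5c.
command.upgrade(cfg, "head")

# Reverting one revision restores the nullable JSON columns (with
# server default 'null'::json), but not their previous contents:
# command.downgrade(cfg, "-1")
```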
fractal_server-2.11.0a9.dist-info/METADATA → fractal_server-2.11.1.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: fractal-server
- Version: 2.11.0a9
+ Version: 2.11.1
  Summary: Backend component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause
fractal_server-2.11.0a9.dist-info/RECORD → fractal_server-2.11.1.dist-info/RECORD
@@ -1,4 +1,4 @@
- fractal_server/__init__.py,sha256=Cq36Bza-Whq5ZsTq5ttVGI_RHoX8KyV7NfthnFizHn8,25
+ fractal_server/__init__.py,sha256=IDMsn_8tp36mX2SfY8u--GT2UfFEEyrK2bOGt3Pg-e0,23
  fractal_server/__main__.py,sha256=D2YTmSowmXNyvqOjW_HeItCZT2UliWlySl_owicaZg0,8026
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -16,13 +16,13 @@ fractal_server/app/models/v1/state.py,sha256=m9gMZqqnm3oDpJNJp-Lht4kM7oO7pcEI7sL
  fractal_server/app/models/v1/task.py,sha256=uFXam7eu3Ye1Yt7_g7llCzY8BetmDRilsq5hR2C1Zbg,2640
  fractal_server/app/models/v1/workflow.py,sha256=dnY5eMaOe3oZv8arn00RNX9qVkBtTLG-vYdWXcQuyo4,3950
  fractal_server/app/models/v2/__init__.py,sha256=63THGEZQlxWcosGCI74SEvJU7wOoOn1j1byTjf4NFOI,526
- fractal_server/app/models/v2/dataset.py,sha256=gO_V_Fsmw44uB1Nc6MLBVOGxQeutxpDi8nvhhwkggbE,1696
+ fractal_server/app/models/v2/dataset.py,sha256=RuqTHXWEgs4A3OSk8Pq9DTq9Xr7w1IJNnyXhOzrDiR0,1509
  fractal_server/app/models/v2/job.py,sha256=BMmu5oXdZvN7jEIAMZvQMB3PQBcCYzxn6Qm6HdRWre4,1725
  fractal_server/app/models/v2/project.py,sha256=rAHoh5KfYwIaW7rTX0_O0jvWmxEvfo1BafvmcXuSSRk,786
  fractal_server/app/models/v2/task.py,sha256=jebD28Pz8tGcsWCItxj6uKjcD8BMMnnU8dqYhvhEB6c,1520
  fractal_server/app/models/v2/task_group.py,sha256=Sd-fb7EN18eOxrS-RT4ekczLWp-tQcbX5C4LrcmjoIM,3443
  fractal_server/app/models/v2/workflow.py,sha256=YBgFGCziUgU0aJ5EM3Svu9W2c46AewZO9VBlFCHiSps,1069
- fractal_server/app/models/v2/workflowtask.py,sha256=nidHo87GNgMVvL2WDm0HOfGLwGY3m65GTDAInwbqk1Q,1312
+ fractal_server/app/models/v2/workflowtask.py,sha256=sBnKiEx9KqhprUaZVY6R8lyfCdwRaX8obYm6bXdn6_E,1119
  fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/v1.py,sha256=ggJZMeKhRijfVe2h2VzfIcpR15FqkKImANhkTXl0mSk,12908
@@ -48,7 +48,7 @@ fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=c8eqPXd
  fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=uhNSs-jcS7ndIUFKiOC1yrDiViw3uvKEXi9UL04BMks,11642
  fractal_server/app/routes/api/v2/dataset.py,sha256=Y6uZz--YSEGgnPYu05rZ9sr1Ug08bNl2v1h3VeApBe8,9441
  fractal_server/app/routes/api/v2/images.py,sha256=0qkItqPrAvWEaK3YHUmCCrKrO_tQuzAPf4Te0q8mON8,8832
- fractal_server/app/routes/api/v2/job.py,sha256=Bga2Kz1OjvDIdxZObWaaXVhNIhC_5JKhKRjEH2_ayEE,5157
+ fractal_server/app/routes/api/v2/job.py,sha256=m89FTh9Px25oXCeWj2k2NdGWQaO2oxMh-6lZppcsJOY,5551
  fractal_server/app/routes/api/v2/project.py,sha256=eWYFJ7F2ZYQcpi-_n-rhPF-Q4gJhzYBsVGYFhHZZXAE,6653
  fractal_server/app/routes/api/v2/status.py,sha256=_cDZW-ESYw6zpf-lLFFqko5bLpKhqKrCM6yv1OfqxN4,6300
  fractal_server/app/routes/api/v2/submit.py,sha256=UMPhWwk4FqZmYtVEu6WLPkSr6a2R4wwgPPeVrx0zRME,8622
@@ -160,12 +160,11 @@ fractal_server/app/schemas/v2/task.py,sha256=9W4xe-p19pbLpjsNJrYVIMpkrlwuCxGAJNO
  fractal_server/app/schemas/v2/task_collection.py,sha256=9c_yyFcVBXdAZpQQniy1bROhYnQT7G1BflOpMY1joPE,6250
  fractal_server/app/schemas/v2/task_group.py,sha256=zZfvAH7c3MZC4_An09TMuOkNE_e1Z9XsYEnmN-axHdU,3217
  fractal_server/app/schemas/v2/workflow.py,sha256=-KWvXnbHBFA3pj5n7mfSyLKJQSqkJmoziIEe7mpLl3M,1875
- fractal_server/app/schemas/v2/workflowtask.py,sha256=RLAoKIpc7WFYp20N3Pr6lUu6BkOHpPf4Yvs77FH_Uzg,7658
+ fractal_server/app/schemas/v2/workflowtask.py,sha256=xjFTmnKuHSetJvN-9k_GTMbPKwPg-J00zUkc_96QO7E,7726
  fractal_server/app/security/__init__.py,sha256=qn6idYgl-p5HWea0gTVnz4JnkoxGEkmQjPzvKpDWT0I,14035
  fractal_server/app/security/signup_email.py,sha256=DrL51UdTSrgjleynMD5CRZwTSOpPrZ96fasRV0fvxDE,1165
  fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
  fractal_server/config.py,sha256=9rAzw7OO6ZeHEz-I8NJHuGoHf4xCHxfFLyRNZQD9ytY,27019
- fractal_server/data_migrations/2_11_0.py,sha256=Cc-P_eXKnlQxc2AnRzV0K4B4DDeEHEFm1WltFjtRMnA,6400
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
  fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
  fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
@@ -196,6 +195,7 @@ fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py,sha256=0i
  fractal_server/migrations/versions/9c5ae74c9b98_add_user_settings_table.py,sha256=syONdZNf4-OnAcWIsbzXpYwpXPsXZ4SsmjwVvmVG0PU,2256
  fractal_server/migrations/versions/9fd26a2b0de4_add_workflow_timestamp_created.py,sha256=4l1AHGUsa0ONoJVZlr3fTXw_xbbQ8O7wlD92Az2aRfM,1849
  fractal_server/migrations/versions/a7f4d6137b53_add_workflow_dump_to_applyworkflow.py,sha256=ekDUML7ILpmdoqEclKbEUdyLi4uw9HSG_sTjG2hp_JE,867
+ fractal_server/migrations/versions/af8673379a5c_drop_old_filter_columns.py,sha256=9sLd0F7nO5chHHm7RZ4wBA-9bvWomS-av_odKwODADM,1551
  fractal_server/migrations/versions/d256a7379ab8_taskgroup_activity_and_venv_info_to_.py,sha256=HN3_Pk8G81SzdYjg4K1RZAyjKSlsZGvcYE2nWOUbwxQ,3861
  fractal_server/migrations/versions/d4fe3708d309_make_applyworkflow_workflow_dump_non_.py,sha256=6cHEZFuTXiQg9yu32Y3RH1XAl71av141WQ6UMbiITIg,949
  fractal_server/migrations/versions/da2cb2ac4255_user_group_viewer_paths.py,sha256=yGWSA2HIHUybcVy66xBITk08opV2DFYSCIIrulaUZhI,901
@@ -241,8 +241,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=07TZpJ0Mh_A4lXVXrrH2o1VLFFGwxe
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
  fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
  fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
- fractal_server-2.11.0a9.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.11.0a9.dist-info/METADATA,sha256=xTEE_9-GBVNq4xonqeNLqXtQ62YJvN9IR8ExYa_L9bc,4564
- fractal_server-2.11.0a9.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
- fractal_server-2.11.0a9.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.11.0a9.dist-info/RECORD,,
+ fractal_server-2.11.1.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.11.1.dist-info/METADATA,sha256=2-8AQ21JsDrgiHyn28rTFVP8-viNbAJu9rJxBC5z2nA,4562
+ fractal_server-2.11.1.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
+ fractal_server-2.11.1.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.11.1.dist-info/RECORD,,
fractal_server/data_migrations/2_11_0.py (removed)
@@ -1,168 +0,0 @@
- import logging
- from typing import Union
-
- from sqlalchemy.orm.attributes import flag_modified
- from sqlmodel import select
-
- from fractal_server.app.db import get_sync_db
- from fractal_server.app.models import DatasetV2
- from fractal_server.app.models import JobV2
- from fractal_server.app.models import ProjectV2
- from fractal_server.app.models import WorkflowTaskV2
- from fractal_server.app.models import WorkflowV2
- from fractal_server.app.schemas.v2 import DatasetReadV2
- from fractal_server.app.schemas.v2 import JobReadV2
- from fractal_server.app.schemas.v2 import ProjectReadV2
- from fractal_server.app.schemas.v2 import TaskReadV2
- from fractal_server.app.schemas.v2 import WorkflowTaskReadV2
- from fractal_server.images.models import AttributeFiltersType
-
- logger = logging.getLogger("fix_db")
- logger.setLevel(logging.INFO)
-
-
- def dict_values_to_list(
-     input_dict: dict[str, Union[int, float, bool, str, None]],
-     identifier: str,
- ) -> tuple[AttributeFiltersType, bool]:
-     was_there_a_warning = False
-     for k, v in input_dict.items():
-         if not isinstance(v, (int, float, bool, str, type(None))):
-             error_msg = (
-                 f"Attribute '{k}' from '{identifier}' "
-                 "has invalid type '{type(v)}'."
-             )
-             logger.error(error_msg)
-             raise RuntimeError(error_msg)
-         elif v is None:
-             logger.warning(
-                 f"Attribute '{k}' from '{identifier}' is "
-                 "None and it will be removed."
-             )
-             was_there_a_warning = True
-         else:
-             input_dict[k] = [v]
-     return input_dict, was_there_a_warning
-
-
- def fix_db():
-     logger.info("START execution of fix_db function")
-
-     with next(get_sync_db()) as db:
-         # DatasetV2.filters
-         stm = select(DatasetV2).order_by(DatasetV2.id)
-         datasets = db.execute(stm).scalars().all()
-         for ds in datasets:
-             logger.info(f"DatasetV2[{ds.id}] START")
-             if ds.filters is None:
-                 logger.info(f"DatasetV2[{ds.id}] SKIP")
-                 continue
-
-             ds.attribute_filters, warning = dict_values_to_list(
-                 ds.filters["attributes"],
-                 f"Dataset[{ds.id}].filters.attributes",
-             )
-             if warning:
-                 proj = db.get(ProjectV2, ds.project_id)
-                 logger.warning(
-                     "Additional information: "
-                     f"{proj.id=}, "
-                     f"{proj.name=}, "
-                     f"{proj.user_list[0].email=}, "
-                     f"{ds.id=}, "
-                     f"{ds.name=}"
-                 )
-             ds.type_filters = ds.filters["types"]
-             ds.filters = None
-             for i, h in enumerate(ds.history):
-                 ds.history[i]["workflowtask"]["type_filters"] = h[
-                     "workflowtask"
-                 ]["input_filters"]["types"]
-                 ds.history[i]["workflowtask"].pop("input_filters")
-             flag_modified(ds, "history")
-             DatasetReadV2(
-                 **ds.model_dump(),
-                 project=ProjectReadV2(**ds.project.model_dump()),
-             )
-             db.add(ds)
-             logger.info(f"DatasetV2[{ds.id}] END - fixed filters")
-
-         logger.info("------ switch from dataset to workflowtasks ------")
-
-         # WorkflowTaskV2.input_filters
-         stm = select(WorkflowTaskV2).order_by(WorkflowTaskV2.id)
-         wftasks = db.execute(stm).scalars().all()
-         for wft in wftasks:
-             logger.info(f"WorkflowTaskV2[{wft.id}] START")
-             if wft.input_filters is None:
-                 logger.info(f"WorkflowTaskV2[{wft.id}] SKIP")
-                 continue
-             wft.type_filters = wft.input_filters["types"]
-             if wft.input_filters["attributes"]:
-                 logger.warning(
-                     "Removing input_filters['attributes']. "
-                     f"(previous value: {wft.input_filters['attributes']})"
-                 )
-                 wf = db.get(WorkflowV2, wft.workflow_id)
-                 proj = db.get(ProjectV2, wf.project_id)
-                 logger.warning(
-                     "Additional information: "
-                     f"{proj.id=}, "
-                     f"{proj.name=}, "
-                     f"{proj.user_list[0].email=}, "
-                     f"{wf.id=}, "
-                     f"{wf.name=}, "
-                     f"{wft.task.name=}"
-                 )
-             wft.input_filters = None
-             flag_modified(wft, "input_filters")
-             WorkflowTaskReadV2(
-                 **wft.model_dump(),
-                 task=TaskReadV2(**wft.task.model_dump()),
-             )
-             db.add(wft)
-             logger.info(f"WorkflowTaskV2[{wft.id}] END - fixed filters")
-
-         logger.info("------ switch from workflowtasks to jobs ------")
-
-         # JOBS V2
-         stm = select(JobV2).order_by(JobV2.id)
-         jobs = db.execute(stm).scalars().all()
-         for job in jobs:
-             logger.info(f"JobV2[{job.id}] START")
-             if "filters" not in job.dataset_dump.keys():
-                 logger.info(f"JobV2[{job.id}] SKIP")
-                 continue
-             job.dataset_dump["type_filters"] = job.dataset_dump["filters"][
-                 "types"
-             ]
-             (
-                 job.dataset_dump["attribute_filters"],
-                 warning,
-             ) = dict_values_to_list(
-                 job.dataset_dump["filters"]["attributes"],
-                 f"JobV2[{job.id}].dataset_dump.filters.attributes",
-             )
-             if warning and job.project_id is not None:
-                 proj = db.get(ProjectV2, job.project_id)
-                 logger.warning(
-                     "Additional information: "
-                     f"{proj.id=}, "
-                     f"{proj.name=}, "
-                     f"{proj.user_list[0].email=}, "
-                     f"{job.id=}, "
-                     f"{job.start_timestamp=}, "
-                     f"{job.end_timestamp=}, "
-                     f"{job.dataset_id=}, "
-                     f"{job.workflow_id=}."
-                 )
-             job.dataset_dump.pop("filters")
-             flag_modified(job, "dataset_dump")
-             JobReadV2(**job.model_dump())
-             db.add(job)
-             logger.info(f"JobV2[{job.id}] END - fixed filters")
-
-         db.commit()
-         logger.info("Changes committed.")
-
-     logger.info("END execution of fix_db function")