fractal-server 2.6.3__py3-none-any.whl → 2.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/__main__.py +1 -1
  3. fractal_server/app/models/linkusergroup.py +11 -0
  4. fractal_server/app/models/v2/__init__.py +2 -0
  5. fractal_server/app/models/v2/collection_state.py +1 -0
  6. fractal_server/app/models/v2/task.py +67 -2
  7. fractal_server/app/routes/admin/v2/__init__.py +16 -0
  8. fractal_server/app/routes/admin/{v2.py → v2/job.py} +20 -191
  9. fractal_server/app/routes/admin/v2/project.py +43 -0
  10. fractal_server/app/routes/admin/v2/task.py +133 -0
  11. fractal_server/app/routes/admin/v2/task_group.py +162 -0
  12. fractal_server/app/routes/api/v1/task_collection.py +4 -4
  13. fractal_server/app/routes/api/v2/__init__.py +8 -0
  14. fractal_server/app/routes/api/v2/_aux_functions.py +1 -68
  15. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +343 -0
  16. fractal_server/app/routes/api/v2/submit.py +16 -35
  17. fractal_server/app/routes/api/v2/task.py +85 -110
  18. fractal_server/app/routes/api/v2/task_collection.py +184 -196
  19. fractal_server/app/routes/api/v2/task_collection_custom.py +70 -64
  20. fractal_server/app/routes/api/v2/task_group.py +173 -0
  21. fractal_server/app/routes/api/v2/workflow.py +39 -102
  22. fractal_server/app/routes/api/v2/workflow_import.py +360 -0
  23. fractal_server/app/routes/api/v2/workflowtask.py +4 -8
  24. fractal_server/app/routes/auth/_aux_auth.py +86 -40
  25. fractal_server/app/routes/auth/current_user.py +5 -5
  26. fractal_server/app/routes/auth/group.py +73 -23
  27. fractal_server/app/routes/auth/router.py +0 -2
  28. fractal_server/app/routes/auth/users.py +8 -7
  29. fractal_server/app/runner/executors/slurm/ssh/executor.py +82 -63
  30. fractal_server/app/runner/v2/__init__.py +13 -7
  31. fractal_server/app/runner/v2/task_interface.py +4 -9
  32. fractal_server/app/schemas/user.py +1 -2
  33. fractal_server/app/schemas/v2/__init__.py +7 -0
  34. fractal_server/app/schemas/v2/dataset.py +2 -7
  35. fractal_server/app/schemas/v2/dumps.py +1 -2
  36. fractal_server/app/schemas/v2/job.py +1 -1
  37. fractal_server/app/schemas/v2/manifest.py +25 -1
  38. fractal_server/app/schemas/v2/project.py +1 -1
  39. fractal_server/app/schemas/v2/task.py +95 -36
  40. fractal_server/app/schemas/v2/task_collection.py +8 -6
  41. fractal_server/app/schemas/v2/task_group.py +85 -0
  42. fractal_server/app/schemas/v2/workflow.py +7 -2
  43. fractal_server/app/schemas/v2/workflowtask.py +9 -6
  44. fractal_server/app/security/__init__.py +8 -1
  45. fractal_server/config.py +8 -28
  46. fractal_server/data_migrations/2_7_0.py +323 -0
  47. fractal_server/images/models.py +2 -4
  48. fractal_server/main.py +1 -1
  49. fractal_server/migrations/env.py +4 -1
  50. fractal_server/migrations/versions/034a469ec2eb_task_groups.py +184 -0
  51. fractal_server/ssh/_fabric.py +186 -73
  52. fractal_server/string_tools.py +6 -2
  53. fractal_server/tasks/utils.py +19 -5
  54. fractal_server/tasks/v1/_TaskCollectPip.py +1 -1
  55. fractal_server/tasks/v1/background_operations.py +5 -5
  56. fractal_server/tasks/v1/get_collection_data.py +2 -2
  57. fractal_server/tasks/v2/_venv_pip.py +67 -70
  58. fractal_server/tasks/v2/background_operations.py +180 -69
  59. fractal_server/tasks/v2/background_operations_ssh.py +57 -70
  60. fractal_server/tasks/v2/database_operations.py +44 -0
  61. fractal_server/tasks/v2/endpoint_operations.py +104 -116
  62. fractal_server/tasks/v2/templates/_1_create_venv.sh +9 -5
  63. fractal_server/tasks/v2/templates/{_2_upgrade_pip.sh → _2_preliminary_pip_operations.sh} +1 -0
  64. fractal_server/tasks/v2/utils.py +5 -0
  65. fractal_server/utils.py +3 -2
  66. {fractal_server-2.6.3.dist-info → fractal_server-2.7.0.dist-info}/METADATA +3 -7
  67. {fractal_server-2.6.3.dist-info → fractal_server-2.7.0.dist-info}/RECORD +70 -61
  68. fractal_server/app/routes/auth/group_names.py +0 -34
  69. fractal_server/tasks/v2/_TaskCollectPip.py +0 -132
  70. {fractal_server-2.6.3.dist-info → fractal_server-2.7.0.dist-info}/LICENSE +0 -0
  71. {fractal_server-2.6.3.dist-info → fractal_server-2.7.0.dist-info}/WHEEL +0 -0
  72. {fractal_server-2.6.3.dist-info → fractal_server-2.7.0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,323 @@
1
+ import asyncio
2
+ import logging
3
+ import os
4
+ import sys
5
+ from pathlib import Path
6
+ from typing import Any
7
+ from typing import Optional
8
+
9
+ from fastapi import HTTPException
10
+ from sqlalchemy import select
11
+ from sqlalchemy.orm import Session
12
+
13
+ from fractal_server.app.db import get_async_db
14
+ from fractal_server.app.db import get_sync_db
15
+ from fractal_server.app.models import TaskGroupV2
16
+ from fractal_server.app.models import TaskV2
17
+ from fractal_server.app.models import UserGroup
18
+ from fractal_server.app.models import UserOAuth
19
+ from fractal_server.app.models import UserSettings
20
+ from fractal_server.app.routes.api.v2._aux_functions_tasks import (
21
+ _verify_non_duplication_group_constraint,
22
+ )
23
+ from fractal_server.app.routes.api.v2._aux_functions_tasks import (
24
+ _verify_non_duplication_user_constraint,
25
+ )
26
+ from fractal_server.app.security import FRACTAL_DEFAULT_GROUP_NAME
27
+ from fractal_server.data_migrations.tools import _check_current_version
28
+ from fractal_server.tasks.utils import _normalize_package_name
29
+ from fractal_server.utils import get_timestamp
30
+
31
+ logger = logging.getLogger("fix_db")
32
+
33
+
34
async def check_non_duplication_constraints(
    *,
    user_id: int,
    pkg_name: str,
    version: Optional[str] = None,
    user_group_id: Optional[int] = None,
):
    """
    Abort the migration if creating a `TaskGroupV2` with these attributes
    would violate either the per-user or the per-group non-duplication
    constraint.

    On violation, the underlying helpers raise `HTTPException`; we log the
    offending attributes and exit the process (this runs as a one-off data
    migration, not inside the web server).
    """
    try:
        async for async_session in get_async_db():
            await _verify_non_duplication_user_constraint(
                user_id=user_id,
                pkg_name=pkg_name,
                version=version,
                db=async_session,
            )
            await _verify_non_duplication_group_constraint(
                user_group_id=user_group_id,
                pkg_name=pkg_name,
                version=version,
                db=async_session,
            )
    except HTTPException as exc:
        logger.error(
            "Adding a `TaskGroupV2` with "
            f"{user_id=}, {pkg_name=}, {version=} and {user_group_id=} "
            "would break the non-duplication constraint."
        )
        logger.error(f"Original error: {str(exc)}")

        sys.exit("ERROR")
64
+
65
+
66
def get_unique_value(list_of_objects: list[dict[str, Any]], key: str):
    """
    Return the single value of `key` shared by every item of
    `list_of_objects` (missing keys count as `None`).

    Raises:
        RuntimeError: if the items do not all agree on one value.
    """
    unique_values = {this_obj.get(key, None) for this_obj in list_of_objects}
    if len(unique_values) != 1:
        raise RuntimeError(
            f"There must be a single taskgroup `{key}`, but {unique_values=}"
        )
    return unique_values.pop()
79
+
80
+
81
def get_users_mapping(db) -> dict[str, int]:
    """
    Build a mapping from each user's name to their user ID.

    The name is the user's `username` if set, otherwise the `slurm_user`
    from their settings. Raises `ValueError` when a user has neither, or
    when two users resolve to the same name.
    """
    logger.warning("START _check_users")
    print()

    all_users = (
        db.execute(select(UserOAuth).order_by(UserOAuth.id))
        .scalars()
        .unique()
        .all()
    )
    name_to_user_id: dict[str, int] = {}
    for user in all_users:
        logger.warning(f"START handling user {user.id}: '{user.email}'")
        # Resolve the "name" attribute, preferring `username`
        settings = db.get(UserSettings, user.user_settings_id)
        name = user.username or settings.slurm_user
        logger.warning(f"{name=}")
        if name is None:
            # Neither `username` nor `slurm_user` is available
            raise ValueError(
                f"User with {user.id=} and {user.email=} has no "
                "`username` or `slurm_user` set."
                "Please fix this issue manually."
            )
        if name_to_user_id.get(name, None) is not None:
            # Two users would collide on the same name
            raise ValueError(
                f"User with {user.id=} and {user.email=} has same "
                f"`(username or slurm_user)={name}` as another user. "
                "Please fix this issue manually."
            )
        name_to_user_id[name] = user.id
        logger.warning(f"END handling user {user.id}: '{user.email}'")
        print()
    logger.warning("END _check_users")
    print()
    return name_to_user_id
116
+
117
+
118
def get_default_user_group_id(db):
    """Return the ID of the default user group, failing loudly if absent."""
    default_group_id = (
        db.execute(
            select(UserGroup.id).where(
                UserGroup.name == FRACTAL_DEFAULT_GROUP_NAME
            )
        )
        .scalars()
        .one_or_none()
    )
    if default_group_id is None:
        raise RuntimeError("Default user group is missing.")
    return default_group_id
128
+
129
+
130
def get_default_user_id(db):
    """
    Return the ID of the user who will own previously-global tasks.

    The user is identified by the `FRACTAL_V27_DEFAULT_USER_EMAIL`
    environment variable, which must be set and must match an existing
    user's email.
    """
    DEFAULT_USER_EMAIL = os.getenv("FRACTAL_V27_DEFAULT_USER_EMAIL")
    if DEFAULT_USER_EMAIL is None:
        raise ValueError(
            "FRACTAL_V27_DEFAULT_USER_EMAIL env variable is not set. "
            "Please set it to be the email of the user who will own "
            "all previously-global tasks."
        )

    default_user_id = (
        db.execute(
            select(UserOAuth.id).where(UserOAuth.email == DEFAULT_USER_EMAIL)
        )
        .scalars()
        .one_or_none()
    )
    if default_user_id is None:
        raise RuntimeError(
            f"Default user with email {DEFAULT_USER_EMAIL} is missing."
        )
    return default_user_id
148
+
149
+
150
def prepare_task_groups(
    *,
    user_mapping: dict[str, int],
    default_user_group_id: int,
    default_user_id: int,
    db: Session,
):
    """
    Group all existing `TaskV2` rows into `TaskGroupV2` objects and commit
    them.

    Tasks whose `source` looks like a pip collection
    (`pip_remote:...`/`pip_local:...` with exactly 6 colon-separated fields)
    are grouped by (pkg_name, version, extras, python_version) and assigned
    to `default_user_id`. All other tasks get a one-task group owned by the
    user resolved from `task.owner` via `user_mapping`.

    Args:
        user_mapping: Maps a user's name (username or slurm_user) to ID.
        default_user_group_id: Group ID assigned to every new task group.
        default_user_id: Owner of all pip-collected task groups.
        db: Synchronous database session.

    Raises:
        RuntimeError: on malformed sources, missing owners/users, or
            duplicated non-pip group keys.
    """
    stm_tasks = select(TaskV2).order_by(TaskV2.id)
    res = db.execute(stm_tasks).scalars().all()
    task_groups = {}
    for task in res:
        if (
            task.source.startswith(("pip_remote", "pip_local"))
            and task.source.count(":") == 5
        ):
            # Pip-collected task: source encodes the collection metadata
            source_fields = task.source.split(":")
            (
                collection_mode,
                pkg_name,
                version,
                extras,
                python_version,
                name,
            ) = source_fields
            pkg_name = _normalize_package_name(pkg_name)
            task_group_key = ":".join(
                [pkg_name, version, extras, python_version]
            )
            if collection_mode == "pip_remote":
                origin = "pypi"
            elif collection_mode == "pip_local":
                origin = "wheel-file"
            else:
                raise RuntimeError(
                    f"Invalid {collection_mode=} for {task.source=}."
                )
            new_obj = dict(
                task=task,
                user_id=default_user_id,
                origin=origin,
                pkg_name=pkg_name,
                version=version,
                pip_extras=extras,
                python_version=python_version,
            )

            if task_group_key in task_groups:
                task_groups[task_group_key].append(new_obj)
            else:
                task_groups[task_group_key] = [new_obj]
        else:
            # Non-pip task: build a single-task group owned by `task.owner`
            owner = task.owner
            if owner is None:
                raise RuntimeError(
                    "Error: `owner` is `None` for "
                    f"{task.id=}, {task.source=}, {task.owner=}."
                )
            user_id = user_mapping.get(owner, None)
            if user_id is None:
                raise RuntimeError(
                    "Error: `user_id` is `None` for "
                    f"{task.id=}, {task.source=}, {task.owner=}"
                )
            # Key is unique per task by construction (it embeds task.id)
            task_group_key = "-".join(
                [
                    "NOT_PIP",
                    str(task.id),
                    str(task.version),
                    task.source,
                    str(task.owner),
                ]
            )
            if task_group_key in task_groups:
                raise RuntimeError(
                    f"ERROR: Duplicated {task_group_key=} for "
                    f"{task.id=}, {task.source=}, {task.owner=}"
                )
            else:
                task_groups[task_group_key] = [
                    dict(
                        task=task,
                        user_id=user_id,
                        origin="other",
                        pkg_name=task.source,
                        version=task.version,
                    )
                ]

    for task_group_key, task_group_objects in task_groups.items():
        print("-" * 80)
        # FIX: the key was printed with an unbalanced quote
        # (f"... key '{task_group_key}") — close the quote.
        print(f"Start handling task group with key '{task_group_key}'")
        task_group_task_list = [item["task"] for item in task_group_objects]
        print("List of tasks to be included")
        for task in task_group_task_list:
            print(f" {task.id=}, {task.source=}")

        # `get_unique_value` enforces that all objects in the group agree;
        # non-pip groups lack pip-specific keys, which resolve to None.
        task_group_attributes = dict(
            pkg_name=get_unique_value(task_group_objects, "pkg_name"),
            version=get_unique_value(task_group_objects, "version"),
            origin=get_unique_value(task_group_objects, "origin"),
            user_id=get_unique_value(task_group_objects, "user_id"),
            user_group_id=default_user_group_id,
            python_version=get_unique_value(
                task_group_objects, "python_version"
            ),
            pip_extras=get_unique_value(task_group_objects, "pip_extras"),
            task_list=task_group_task_list,
            active=True,
            timestamp_created=get_timestamp(),
        )

        if not task_group_key.startswith("NOT_PIP"):
            # Recover venv/package paths from the task command's python
            # executable (e.g. /base/path/venv/bin/python -> venv_path and
            # its parent directory).
            cmd = next(
                getattr(task_group_task_list[0], attr_name)
                for attr_name in ["command_non_parallel", "command_parallel"]
                if getattr(task_group_task_list[0], attr_name) is not None
            )
            python_bin = cmd.split()[0]
            venv_path = Path(python_bin).parents[1].as_posix()
            path = Path(python_bin).parents[2].as_posix()
            task_group_attributes["venv_path"] = venv_path
            task_group_attributes["path"] = path

        print()
        print("List of task-group attributes")
        for key, value in task_group_attributes.items():
            if key != "task_list":
                print(f" {key}: {value}")

        print()

        # Verify non-duplication constraints
        asyncio.run(
            check_non_duplication_constraints(
                user_id=task_group_attributes["user_id"],
                user_group_id=task_group_attributes["user_group_id"],
                pkg_name=task_group_attributes["pkg_name"],
                version=task_group_attributes["version"],
            )
        )
        logger.warning(
            "Non-duplication-constraint check is OK, "
            "proceed and create TaskGroupV2."
        )

        # Create the TaskGroupV2 object and commit it
        task_group = TaskGroupV2(**task_group_attributes)
        db.add(task_group)
        db.commit()
        db.refresh(task_group)
        logger.warning(f"Created task group {task_group.id=}")
        print()

    return
304
+
305
+
306
def fix_db():
    """
    Entry point of the 2.7.0 data migration: build a `TaskGroupV2` for
    every group of existing tasks and attach the tasks to it.
    """
    logger.warning("START execution of fix_db function")
    _check_current_version("2.7.0")

    with next(get_sync_db()) as db:
        # Keyword arguments are evaluated in order, preserving the
        # original sequence of database lookups.
        prepare_task_groups(
            user_mapping=get_users_mapping(db),
            default_user_id=get_default_user_id(db),
            default_user_group_id=get_default_user_group_id(db),
            db=db,
        )

    logger.warning("END of execution of fix_db function")
    print()
@@ -3,6 +3,7 @@ from typing import Optional
3
3
  from typing import Union
4
4
 
5
5
  from pydantic import BaseModel
6
+ from pydantic import Extra
6
7
  from pydantic import Field
7
8
  from pydantic import validator
8
9
 
@@ -109,13 +110,10 @@ class SingleImageUpdate(BaseModel):
109
110
  _types = validator("types", allow_reuse=True)(valdictkeys("types"))
110
111
 
111
112
 
112
- class Filters(BaseModel):
113
+ class Filters(BaseModel, extra=Extra.forbid):
113
114
  attributes: dict[str, Any] = Field(default_factory=dict)
114
115
  types: dict[str, bool] = Field(default_factory=dict)
115
116
 
116
- class Config:
117
- extra = "forbid"
118
-
119
117
  # Validators
120
118
  _attributes = validator("attributes", allow_reuse=True)(
121
119
  valdictkeys("attributes")
fractal_server/main.py CHANGED
@@ -20,7 +20,7 @@ from contextlib import asynccontextmanager
20
20
 
21
21
  from fastapi import FastAPI
22
22
 
23
- from .app.routes.aux._runner import _backend_supports_shutdown # FIXME: change
23
+ from .app.routes.aux._runner import _backend_supports_shutdown
24
24
  from .app.runner.shutdown import cleanup_after_shutdown
25
25
  from .config import get_settings
26
26
  from .logger import config_uvicorn_loggers
@@ -5,7 +5,6 @@ from alembic import context
5
5
  from sqlalchemy.engine import Connection
6
6
  from sqlmodel import SQLModel
7
7
 
8
- from fractal_server.app import models # noqa
9
8
  from fractal_server.config import get_settings
10
9
  from fractal_server.migrations.naming_convention import NAMING_CONVENTION
11
10
  from fractal_server.syringe import Inject
@@ -27,6 +26,10 @@ if config.config_file_name is not None:
27
26
  # target_metadata = mymodel.Base.metadata
28
27
  target_metadata = SQLModel.metadata
29
28
  target_metadata.naming_convention = NAMING_CONVENTION
29
+ # Importing `fractal_server.app.models` after defining
30
+ # `SQLModel.metadata.naming_convention` in order to apply the naming convention
31
+ # when autogenerating migrations (see issue #1819).
32
+ from fractal_server.app import models # noqa
30
33
 
31
34
  # other values from the config, defined by the needs of env.py,
32
35
  # can be acquired:
@@ -0,0 +1,184 @@
1
+ """task groups
2
+
3
+ Revision ID: 034a469ec2eb
4
+ Revises: da2cb2ac4255
5
+ Create Date: 2024-10-10 16:14:13.976231
6
+
7
+ """
8
+ from datetime import datetime
9
+ from datetime import timezone
10
+
11
+ import sqlalchemy as sa
12
+ import sqlmodel
13
+ from alembic import op
14
+
15
+
16
+ # revision identifiers, used by Alembic.
17
+ revision = "034a469ec2eb"
18
+ down_revision = "da2cb2ac4255"
19
+ branch_labels = None
20
+ depends_on = None
21
+
22
+
23
def upgrade() -> None:
    """Apply the 2.7.0 schema changes (task groups)."""
    # New table holding one row per group of tasks (a collected package
    # version, or a single legacy task).
    op.create_table(
        "taskgroupv2",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("user_group_id", sa.Integer(), nullable=True),
        sa.Column(
            "origin", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column(
            "pkg_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column(
            "version", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            "python_version", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column("path", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column(
            "venv_path", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            "wheel_path", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            "pip_extras", sqlmodel.sql.sqltypes.AutoString(), nullable=True
        ),
        sa.Column(
            "pinned_package_versions",
            sa.JSON(),
            server_default="{}",
            nullable=True,
        ),
        sa.Column("active", sa.Boolean(), nullable=False),
        sa.Column(
            "timestamp_created", sa.DateTime(timezone=True), nullable=False
        ),
        sa.ForeignKeyConstraint(
            ["user_group_id"],
            ["usergroup.id"],
            name=op.f("fk_taskgroupv2_user_group_id_usergroup"),
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["user_oauth.id"],
            name=op.f("fk_taskgroupv2_user_id_user_oauth"),
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_taskgroupv2")),
    )
    # Link collection states to their task group.
    with op.batch_alter_table("collectionstatev2", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column("taskgroupv2_id", sa.Integer(), nullable=True)
        )
        batch_op.create_foreign_key(
            batch_op.f("fk_collectionstatev2_taskgroupv2_id_taskgroupv2"),
            "taskgroupv2",
            ["taskgroupv2_id"],
            ["id"],
        )

    # Non-null column on an existing table needs a server default for the
    # backfill; a fixed epoch marks pre-existing rows.
    with op.batch_alter_table("linkusergroup", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column(
                "timestamp_created",
                sa.DateTime(timezone=True),
                nullable=False,
                server_default=str(datetime(2000, 1, 1, tzinfo=timezone.utc)),
            )
        )

    # New task attributes; `source` becomes optional.
    with op.batch_alter_table("taskv2", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column("taskgroupv2_id", sa.Integer(), nullable=True)
        )
        batch_op.add_column(
            sa.Column(
                "category", sqlmodel.sql.sqltypes.AutoString(), nullable=True
            )
        )
        batch_op.add_column(
            sa.Column(
                "modality", sqlmodel.sql.sqltypes.AutoString(), nullable=True
            )
        )
        batch_op.add_column(
            sa.Column(
                "authors", sqlmodel.sql.sqltypes.AutoString(), nullable=True
            )
        )
        batch_op.add_column(
            sa.Column("tags", sa.JSON(), server_default="[]", nullable=False)
        )
        batch_op.alter_column(
            "source", existing_type=sa.VARCHAR(), nullable=True
        )

    # Dropping the unique constraint may fail on SQLite databases created
    # before the naming convention existed (the constraint is unnamed).
    try:
        with op.batch_alter_table("taskv2", schema=None) as batch_op:
            batch_op.drop_constraint("uq_taskv2_source", type_="unique")
    except BaseException as e:
        # Only tolerate the failure on SQLite; re-raise elsewhere.
        if op.get_bind().dialect.name != "sqlite":
            raise e
        import sqlite3
        import logging

        logger = logging.getLogger("alembic.runtime.migration")
        logger.warning(
            f"Using sqlite, with {sqlite3.version=} and "
            f"{sqlite3.sqlite_version=}"
        )

        logger.warning(
            "Could not drop 'uq_taskv2_source' constraint; this is expected "
            "when the database was created before the naming convention "
            "was added."
        )
        logger.warning(
            "As a workaround, we recreate the constraint before dropping it."
        )
        # Recreating gives the constraint a known name so it can be dropped.
        with op.batch_alter_table("taskv2", schema=None) as batch_op:
            batch_op.create_unique_constraint("uq_taskv2_source", ["source"])
            batch_op.drop_constraint("uq_taskv2_source", type_="unique")

    with op.batch_alter_table("taskv2", schema=None) as batch_op:
        batch_op.create_foreign_key(
            batch_op.f("fk_taskv2_taskgroupv2_id_taskgroupv2"),
            "taskgroupv2",
            ["taskgroupv2_id"],
            ["id"],
        )
154
+
155
+
156
def downgrade() -> None:
    """Revert the 2.7.0 schema changes, mirroring `upgrade` in reverse."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("taskv2", schema=None) as batch_op:
        batch_op.drop_constraint(
            batch_op.f("fk_taskv2_taskgroupv2_id_taskgroupv2"),
            type_="foreignkey",
        )
        # Restore the unique constraint and NOT NULL on `source`
        batch_op.create_unique_constraint("uq_taskv2_source", ["source"])
        batch_op.alter_column(
            "source", existing_type=sa.VARCHAR(), nullable=False
        )
        batch_op.drop_column("tags")
        batch_op.drop_column("authors")
        batch_op.drop_column("modality")
        batch_op.drop_column("category")
        batch_op.drop_column("taskgroupv2_id")

    with op.batch_alter_table("linkusergroup", schema=None) as batch_op:
        batch_op.drop_column("timestamp_created")

    with op.batch_alter_table("collectionstatev2", schema=None) as batch_op:
        batch_op.drop_constraint(
            batch_op.f("fk_collectionstatev2_taskgroupv2_id_taskgroupv2"),
            type_="foreignkey",
        )
        batch_op.drop_column("taskgroupv2_id")

    # Drop the table last, after all foreign keys referencing it are gone.
    op.drop_table("taskgroupv2")
    # ### end Alembic commands ###