fractal-server 2.19.0__py3-none-any.whl → 2.19.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/v2/dataset.py +4 -0
  3. fractal_server/app/models/v2/job.py +0 -4
  4. fractal_server/app/models/v2/task.py +1 -0
  5. fractal_server/app/models/v2/task_group.py +0 -4
  6. fractal_server/app/models/v2/workflow.py +0 -2
  7. fractal_server/app/models/v2/workflowtask.py +0 -3
  8. fractal_server/app/routes/admin/v2/task.py +5 -0
  9. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +0 -6
  10. fractal_server/app/routes/api/v2/__init__.py +2 -0
  11. fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +20 -1
  12. fractal_server/app/routes/api/v2/status_legacy.py +156 -0
  13. fractal_server/app/routes/api/v2/submit.py +1 -3
  14. fractal_server/app/routes/api/v2/task_collection.py +0 -2
  15. fractal_server/app/routes/api/v2/task_collection_pixi.py +0 -2
  16. fractal_server/app/routes/api/v2/task_group.py +24 -2
  17. fractal_server/app/routes/api/v2/task_group_lifecycle.py +0 -6
  18. fractal_server/app/routes/api/v2/workflow.py +1 -1
  19. fractal_server/app/routes/api/v2/workflow_import.py +43 -12
  20. fractal_server/app/routes/api/v2/workflowtask.py +6 -1
  21. fractal_server/app/routes/auth/current_user.py +0 -8
  22. fractal_server/app/schemas/v2/__init__.py +1 -0
  23. fractal_server/app/schemas/v2/dumps.py +1 -0
  24. fractal_server/app/schemas/v2/task.py +5 -0
  25. fractal_server/app/schemas/v2/workflow.py +0 -2
  26. fractal_server/app/schemas/v2/workflowtask.py +2 -6
  27. fractal_server/app/security/__init__.py +3 -8
  28. fractal_server/runner/v2/submit_workflow.py +2 -0
  29. {fractal_server-2.19.0.dist-info → fractal_server-2.19.0a0.dist-info}/METADATA +2 -2
  30. {fractal_server-2.19.0.dist-info → fractal_server-2.19.0a0.dist-info}/RECORD +33 -37
  31. fractal_server/app/routes/aux/_versions.py +0 -42
  32. fractal_server/migrations/versions/18a26fcdea5d_drop_dataset_history.py +0 -41
  33. fractal_server/migrations/versions/1bf8785755f9_add_description_to_workflow_and_.py +0 -53
  34. fractal_server/migrations/versions/5fb08bf05b14_drop_taskv2_source.py +0 -36
  35. fractal_server/migrations/versions/cfd13f7954e7_add_fractal_server_version_to_jobv2_and_.py +0 -52
  36. {fractal_server-2.19.0.dist-info → fractal_server-2.19.0a0.dist-info}/WHEEL +0 -0
  37. {fractal_server-2.19.0.dist-info → fractal_server-2.19.0a0.dist-info}/entry_points.txt +0 -0
  38. {fractal_server-2.19.0.dist-info → fractal_server-2.19.0a0.dist-info}/licenses/LICENSE +0 -0
fractal_server/__init__.py
@@ -1 +1 @@
- __VERSION__ = "2.19.0"
+ __VERSION__ = "2.19.0a0"
fractal_server/app/models/v2/dataset.py
@@ -27,6 +27,10 @@ class DatasetV2(SQLModel, table=True):
          sa_relationship_kwargs=dict(lazy="selectin"),
      )

+     history: list[dict[str, Any]] = Field(
+         sa_column=Column(JSONB, server_default="[]", nullable=False)
+     )
+
      timestamp_created: datetime = Field(
          default_factory=get_timestamp,
          sa_column=Column(DateTime(timezone=True), nullable=False),
fractal_server/app/models/v2/job.py
@@ -5,7 +5,6 @@ from pydantic import ConfigDict
  from sqlalchemy import Column
  from sqlalchemy.dialects.postgresql import JSONB
  from sqlalchemy.types import DateTime
- from sqlalchemy.types import String
  from sqlmodel import Field
  from sqlmodel import Index
  from sqlmodel import SQLModel
@@ -45,9 +44,6 @@ class JobV2(SQLModel, table=True):
      project_dump: dict[str, Any] = Field(
          sa_column=Column(JSONB, nullable=False)
      )
-     fractal_server_version: str = Field(
-         sa_column=Column(String, server_default="pre-2.19.0", nullable=False)
-     )

      worker_init: str | None = None
      working_dir: str | None = None
fractal_server/app/models/v2/task.py
@@ -14,6 +14,7 @@ class TaskV2(SQLModel, table=True):
      type: str
      command_non_parallel: str | None = None
      command_parallel: str | None = None
+     source: str | None = None

      meta_non_parallel: dict[str, Any] = Field(
          sa_column=Column(JSON, server_default="{}", default={}, nullable=False)
fractal_server/app/models/v2/task_group.py
@@ -4,7 +4,6 @@ from datetime import timezone
  from sqlalchemy import Column
  from sqlalchemy.dialects.postgresql import JSONB
  from sqlalchemy.types import DateTime
- from sqlalchemy.types import String
  from sqlmodel import Field
  from sqlmodel import Relationship
  from sqlmodel import SQLModel
@@ -152,6 +151,3 @@ class TaskGroupActivityV2(SQLModel, table=True):
          default=None,
          sa_column=Column(DateTime(timezone=True)),
      )
-     fractal_server_version: str = Field(
-         sa_column=Column(String, server_default="pre-2.19.0", nullable=False)
-     )
fractal_server/app/models/v2/workflow.py
@@ -32,5 +32,3 @@ class WorkflowV2(SQLModel, table=True):
          default_factory=get_timestamp,
          sa_column=Column(DateTime(timezone=True), nullable=False),
      )
-
-     description: str | None = Field(default=None, nullable=True)
fractal_server/app/models/v2/workflowtask.py
@@ -39,6 +39,3 @@ class WorkflowTaskV2(SQLModel, table=True):
      task_type: str
      task_id: int = Field(foreign_key="taskv2.id")
      task: TaskV2 = Relationship(sa_relationship_kwargs=dict(lazy="selectin"))
-
-     alias: str | None = Field(default=None, nullable=True)
-     description: str | None = Field(default=None, nullable=True)
fractal_server/app/routes/admin/v2/task.py
@@ -30,6 +30,7 @@ class TaskMinimal(BaseModel):
      taskgroupv2_id: int
      command_non_parallel: str | None = None
      command_parallel: str | None = None
+     source: str | None = None
      version: str | None = None


@@ -54,6 +55,7 @@ class TaskInfo(BaseModel):
  @router.get("/", response_model=PaginationResponse[TaskInfo])
  async def query_tasks(
      id: int | None = None,
+     source: str | None = None,
      version: str | None = None,
      name: str | None = None,
      task_type: TaskType | None = None,
@@ -79,6 +81,9 @@ async def query_tasks(
      if id is not None:
          stm = stm.where(TaskV2.id == id)
          stm_count = stm_count.where(TaskV2.id == id)
+     if source is not None:
+         stm = stm.where(TaskV2.source.icontains(source))
+         stm_count = stm_count.where(TaskV2.source.icontains(source))
      if version is not None:
          stm = stm.where(TaskV2.version == version)
          stm_count = stm_count.where(TaskV2.version == version)
fractal_server/app/routes/admin/v2/task_group_lifecycle.py
@@ -5,7 +5,6 @@ from fastapi import HTTPException
  from fastapi import Response
  from fastapi import status

- from fractal_server import __VERSION__
  from fractal_server.app.db import AsyncSession
  from fractal_server.app.db import get_async_db
  from fractal_server.app.models import UserOAuth
@@ -94,7 +93,6 @@ async def deactivate_task_group(
          ),
          timestamp_started=get_timestamp(),
          timestamp_ended=get_timestamp(),
-         fractal_server_version=__VERSION__,
      )
      db.add(task_group)
      db.add(task_group_activity)
@@ -110,7 +108,6 @@ async def deactivate_task_group(
          pkg_name=task_group.pkg_name,
          version=task_group.version,
          timestamp_started=get_timestamp(),
-         fractal_server_version=__VERSION__,
      )
      db.add(task_group_activity)
      await db.commit()
@@ -191,7 +188,6 @@ async def reactivate_task_group(
          ),
          timestamp_started=get_timestamp(),
          timestamp_ended=get_timestamp(),
-         fractal_server_version=__VERSION__,
      )
      db.add(task_group)
      db.add(task_group_activity)
@@ -215,7 +211,6 @@ async def reactivate_task_group(
          pkg_name=task_group.pkg_name,
          version=task_group.version,
          timestamp_started=get_timestamp(),
-         fractal_server_version=__VERSION__,
      )
      db.add(task_group_activity)
      await db.commit()
@@ -269,7 +264,6 @@ async def delete_task_group(
          pkg_name=task_group.pkg_name,
          version=(task_group.version or "N/A"),
          timestamp_started=get_timestamp(),
-         fractal_server_version=__VERSION__,
      )
      db.add(task_group_activity)
      await db.commit()
fractal_server/app/routes/api/v2/__init__.py
@@ -14,6 +14,7 @@ from .job import router as job_router
  from .pre_submission_checks import router as pre_submission_checks_router
  from .project import router as project_router
  from .sharing import router as sharing_router
+ from .status_legacy import router as status_legacy_router
  from .submit import router as submit_job_router
  from .task import router as task_router
  from .task_collection import router as task_collection_router
@@ -36,6 +37,7 @@ router_api.include_router(sharing_router, tags=["Project Sharing"])
  router_api.include_router(project_router, tags=["Project"])
  router_api.include_router(submit_job_router, tags=["Job"])
  router_api.include_router(history_router, tags=["History"])
+ router_api.include_router(status_legacy_router, tags=["Status Legacy"])


  settings = Inject(get_settings)
fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py
@@ -2,6 +2,8 @@ from fastapi import HTTPException
  from fastapi import status
  from httpx import AsyncClient
  from httpx import TimeoutException
+ from packaging.version import InvalidVersion
+ from packaging.version import Version
  from sqlmodel import func
  from sqlmodel import select

@@ -12,7 +14,6 @@ from fractal_server.app.models.v2 import TaskGroupV2
  from fractal_server.app.models.v2 import TaskV2
  from fractal_server.app.models.v2 import WorkflowTaskV2
  from fractal_server.app.models.v2 import WorkflowV2
- from fractal_server.app.routes.aux._versions import _find_latest_version_or_422
  from fractal_server.app.schemas.v2 import JobStatusType
  from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
  from fractal_server.logger import set_logger
@@ -25,6 +26,24 @@ PYPI_JSON_HEADERS = {"Accept": "application/vnd.pypi.simple.v1+json"}
  logger = set_logger(__name__)


+ def _find_latest_version_or_422(versions: list[str]) -> str:
+     """
+     > For PEP 440 versions, this is easy enough for the client to do (using
+     > the `packaging` library [...]. For non-standard versions, there is no
+     > well-defined ordering, and clients will need to decide on what rule is
+     > appropriate for their needs.
+     (https://peps.python.org/pep-0700/#why-not-provide-a-latest-version-value)
+     """
+     try:
+         latest = max(versions, key=lambda v_str: Version(v_str))
+         return latest
+     except InvalidVersion as e:
+         raise HTTPException(
+             status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
+             detail=f"Cannot find latest version (original error: {str(e)}).",
+         )
+
+
  async def get_package_version_from_pypi(
      name: str,
      version: str | None = None,
fractal_server/app/routes/api/v2/status_legacy.py
@@ -0,0 +1,156 @@
+ from fastapi import APIRouter
+ from fastapi import Depends
+
+ from fractal_server.app.db import AsyncSession
+ from fractal_server.app.db import get_async_db
+ from fractal_server.app.models import UserOAuth
+ from fractal_server.app.models.v2 import JobV2
+ from fractal_server.app.routes.auth import get_api_guest
+ from fractal_server.app.schemas.v2.sharing import ProjectPermissions
+ from fractal_server.app.schemas.v2.status_legacy import LegacyStatusRead
+ from fractal_server.app.schemas.v2.status_legacy import WorkflowTaskStatusType
+ from fractal_server.logger import set_logger
+
+ from ._aux_functions import _get_dataset_check_access
+ from ._aux_functions import _get_submitted_jobs_statement
+ from ._aux_functions import _get_workflow_check_access
+
+ router = APIRouter()
+
+ logger = set_logger(__name__)
+
+
+ @router.get(
+     "/project/{project_id}/status-legacy/",
+     response_model=LegacyStatusRead,
+ )
+ async def get_workflowtask_status(
+     project_id: int,
+     dataset_id: int,
+     workflow_id: int,
+     user: UserOAuth = Depends(get_api_guest),
+     db: AsyncSession = Depends(get_async_db),
+ ) -> LegacyStatusRead | None:
+     """
+     Extract the status of all `WorkflowTaskV2` of a given `WorkflowV2` that ran
+     on a given `DatasetV2`.
+
+     *NOTE*: the current endpoint is not guaranteed to provide consistent
+     results if the workflow task list is modified in a non-trivial way
+     (that is, by adding intermediate tasks, removing tasks, or changing their
+     order). See fractal-server GitHub issues: 793, 1083.
+     """
+     # Get the dataset DB entry
+     output = await _get_dataset_check_access(
+         project_id=project_id,
+         dataset_id=dataset_id,
+         user_id=user.id,
+         required_permissions=ProjectPermissions.READ,
+         db=db,
+     )
+     dataset = output["dataset"]
+
+     # Get the workflow DB entry
+     workflow = await _get_workflow_check_access(
+         project_id=project_id,
+         workflow_id=workflow_id,
+         user_id=user.id,
+         required_permissions=ProjectPermissions.READ,
+         db=db,
+     )
+
+     # Check whether there exists a submitted job associated to this
+     # workflow/dataset pair. If it does exist, it will be used later.
+     # If there are multiple jobs, raise an error.
+     res = await db.execute(
+         _get_submitted_jobs_statement()
+         .where(JobV2.dataset_id == dataset_id)
+         .where(JobV2.workflow_id == workflow_id)
+     )
+     running_job = res.scalars().one_or_none()
+
+     # Initialize empty dictionary for WorkflowTaskV2 status
+     workflow_tasks_status_dict: dict = {}
+
+     # Lowest priority: read status from DB, which corresponds to jobs that are
+     # not running
+     history = dataset.history
+     for history_item in history:
+         wftask_id = history_item["workflowtask"]["id"]
+         wftask_status = history_item["status"]
+         workflow_tasks_status_dict[wftask_id] = wftask_status
+
+     if running_job is None:
+         # If no job is running, the chronological-last history item is also the
+         # positional-last workflow task to be included in the response.
+         if len(history) > 0:
+             last_valid_wftask_id = history[-1]["workflowtask"]["id"]
+         else:
+             last_valid_wftask_id = None
+     else:
+         # If a job is running, then gather more up-to-date information
+
+         # Mid priority: Set all WorkflowTask's that are part of the running job
+         # as "submitted"
+         start = running_job.first_task_index
+         end = running_job.last_task_index + 1
+
+         running_job_wftasks = workflow.task_list[start:end]
+         running_job_statuses = [
+             workflow_tasks_status_dict.get(wft.id, None)
+             for wft in running_job_wftasks
+         ]
+         try:
+             first_submitted_index = running_job_statuses.index(
+                 WorkflowTaskStatusType.SUBMITTED
+             )
+         except ValueError:
+             logger.warning(
+                 f"Job {running_job.id} is submitted but its task list does not"
+                 f" contain a {WorkflowTaskStatusType.SUBMITTED} task."
+             )
+             first_submitted_index = 0
+
+         for wftask in running_job_wftasks[first_submitted_index:]:
+             workflow_tasks_status_dict[wftask.id] = (
+                 WorkflowTaskStatusType.SUBMITTED
+             )
+
+         # The last workflow task that is included in the submitted job is also
+         # the positional-last workflow task to be included in the response.
+         try:
+             last_valid_wftask_id = workflow.task_list[end - 1].id
+         except IndexError as e:
+             logger.warning(
+                 f"Handled IndexError in `get_workflowtask_status` ({str(e)})."
+             )
+             logger.warning(
+                 "Additional information: "
+                 f"{running_job.first_task_index=}; "
+                 f"{running_job.last_task_index=}; "
+                 f"{len(workflow.task_list)=}; "
+                 f"{dataset_id=}; "
+                 f"{workflow_id=}."
+             )
+             last_valid_wftask_id = None
+             logger.warning(f"Now setting {last_valid_wftask_id=}.")
+
+     # Based on previously-gathered information, clean up the response body
+     clean_workflow_tasks_status_dict = {}
+     for wf_task in workflow.task_list:
+         wf_task_status = workflow_tasks_status_dict.get(wf_task.id)
+         if wf_task_status is None:
+             # If a wftask ID was not found, ignore it and continue
+             continue
+         clean_workflow_tasks_status_dict[str(wf_task.id)] = wf_task_status
+         if wf_task_status == WorkflowTaskStatusType.FAILED:
+             # Starting from the beginning of `workflow.task_list`, stop the
+             # first time that you hit a failed job
+             break
+         if wf_task.id == last_valid_wftask_id:
+             # Starting from the beginning of `workflow.task_list`, stop the
+             # first time that you hit `last_valid_wftask_id``
+             break
+
+     response_body = LegacyStatusRead(status=clean_workflow_tasks_status_dict)
+     return response_body
fractal_server/app/routes/api/v2/submit.py
@@ -11,7 +11,6 @@ from fastapi import status
  from sqlmodel import select
  from sqlmodel import update

- from fractal_server import __VERSION__
  from fractal_server.app.db import AsyncSession
  from fractal_server.app.db import get_async_db
  from fractal_server.app.models import Profile
@@ -217,12 +216,11 @@ async def submit_job(
              dataset.model_dump_json(exclude={"images", "history"})
          ),
          workflow_dump=json.loads(
-             workflow.model_dump_json(exclude={"task_list", "description"})
+             workflow.model_dump_json(exclude={"task_list"})
          ),
          project_dump=json.loads(
              project.model_dump_json(exclude={"resource_id"})
          ),
-         fractal_server_version=__VERSION__,
          **job_create.model_dump(),
      )

fractal_server/app/routes/api/v2/task_collection.py
@@ -14,7 +14,6 @@ from pydantic import BaseModel
  from pydantic import ValidationError
  from pydantic import model_validator

- from fractal_server import __VERSION__
  from fractal_server.app.db import AsyncSession
  from fractal_server.app.db import get_async_db
  from fractal_server.app.models import UserOAuth
@@ -333,7 +332,6 @@ async def collect_tasks_pip(
          action=TaskGroupActivityAction.COLLECT,
          pkg_name=task_group.pkg_name,
          version=task_group.version,
-         fractal_server_version=__VERSION__,
      )
      db.add(task_group_activity)
      await db.commit()
fractal_server/app/routes/api/v2/task_collection_pixi.py
@@ -10,7 +10,6 @@ from fastapi import Response
  from fastapi import UploadFile
  from fastapi import status

- from fractal_server import __VERSION__
  from fractal_server.app.db import AsyncSession
  from fractal_server.app.db import get_async_db
  from fractal_server.app.models import UserOAuth
@@ -183,7 +182,6 @@ async def collect_task_pixi(
          action=TaskGroupActivityAction.COLLECT,
          pkg_name=task_group.pkg_name,
          version=task_group.version,
-         fractal_server_version=__VERSION__,
      )
      db.add(task_group_activity)
      await db.commit()
fractal_server/app/routes/api/v2/task_group.py
@@ -4,6 +4,9 @@ from fastapi import APIRouter
  from fastapi import Depends
  from fastapi import HTTPException
  from fastapi import status
+ from packaging.version import InvalidVersion
+ from packaging.version import Version
+ from packaging.version import parse
  from pydantic.types import AwareDatetime
  from sqlmodel import or_
  from sqlmodel import select
@@ -22,7 +25,6 @@ from fractal_server.app.routes.auth._aux_auth import (
  from fractal_server.app.routes.auth._aux_auth import (
      _verify_user_belongs_to_group,
  )
- from fractal_server.app.routes.aux._versions import _version_sort_key
  from fractal_server.app.schemas.v2 import TaskGroupActivityAction
  from fractal_server.app.schemas.v2 import TaskGroupActivityRead
  from fractal_server.app.schemas.v2 import TaskGroupActivityStatus
@@ -41,6 +43,26 @@ router = APIRouter()
  logger = set_logger(__name__)


+ def _version_sort_key(
+     task_group: TaskGroupV2,
+ ) -> tuple[int, Version | str | None]:
+     """
+     Returns a tuple used as (reverse) ordering key for TaskGroups in
+     `get_task_group_list`.
+     The TaskGroups with a parsable versions are the first in order,
+     sorted according to the sorting rules of packaging.version.Version.
+     Next in order we have the TaskGroups with non-null non-parsable versions,
+     sorted alphabetically.
+     Last we have the TaskGroups with null version.
+     """
+     if task_group.version is None:
+         return (0, task_group.version)
+     try:
+         return (2, parse(task_group.version))
+     except InvalidVersion:
+         return (1, task_group.version)
+
+
  @router.get("/activity/", response_model=list[TaskGroupActivityRead])
  async def get_task_group_activity_list(
      task_group_activity_id: int | None = None,
@@ -154,7 +176,7 @@ async def get_task_group_list(
      await remove_duplicate_task_groups(
          task_groups=sorted(
              list(groups),
-             key=lambda group: _version_sort_key(group.version),
+             key=_version_sort_key,
              reverse=True,
          ),
          user_id=user.id,
fractal_server/app/routes/api/v2/task_group_lifecycle.py
@@ -5,7 +5,6 @@ from fastapi import HTTPException
  from fastapi import Response
  from fastapi import status

- from fractal_server import __VERSION__
  from fractal_server.app.db import AsyncSession
  from fractal_server.app.db import get_async_db
  from fractal_server.app.models import UserOAuth
@@ -100,7 +99,6 @@ async def deactivate_task_group(
          ),
          timestamp_started=get_timestamp(),
          timestamp_ended=get_timestamp(),
-         fractal_server_version=__VERSION__,
      )
      db.add(task_group)
      db.add(task_group_activity)
@@ -116,7 +114,6 @@ async def deactivate_task_group(
          pkg_name=task_group.pkg_name,
          version=task_group.version,
          timestamp_started=get_timestamp(),
-         fractal_server_version=__VERSION__,
      )
      task_group.active = False
      db.add(task_group)
@@ -205,7 +202,6 @@ async def reactivate_task_group(
          ),
          timestamp_started=get_timestamp(),
          timestamp_ended=get_timestamp(),
-         fractal_server_version=__VERSION__,
      )
      db.add(task_group)
      db.add(task_group_activity)
@@ -229,7 +225,6 @@ async def reactivate_task_group(
          pkg_name=task_group.pkg_name,
          version=task_group.version,
          timestamp_started=get_timestamp(),
-         fractal_server_version=__VERSION__,
      )
      db.add(task_group_activity)
      await db.commit()
@@ -293,7 +288,6 @@ async def delete_task_group(
          pkg_name=task_group.pkg_name,
          version=(task_group.version or "N/A"),
          timestamp_started=get_timestamp(),
-         fractal_server_version=__VERSION__,
      )
      db.add(task_group_activity)
      await db.commit()
fractal_server/app/routes/api/v2/workflow.py
@@ -149,7 +149,7 @@ async def update_workflow(
          db=db,
      )

-     if patch.name and patch.name != workflow.name:
+     if patch.name:
          await _check_workflow_exists(
              name=patch.name, project_id=project_id, db=db
          )
fractal_server/app/routes/api/v2/workflow_import.py
@@ -19,8 +19,8 @@ from fractal_server.app.routes.auth import get_api_user
  from fractal_server.app.routes.auth._aux_auth import (
      _get_default_usergroup_id_or_none,
  )
- from fractal_server.app.routes.aux._versions import _version_sort_key
  from fractal_server.app.schemas.v2 import TaskImport
+ from fractal_server.app.schemas.v2 import TaskImportLegacy
  from fractal_server.app.schemas.v2 import WorkflowImport
  from fractal_server.app.schemas.v2 import WorkflowReadWithWarnings
  from fractal_server.app.schemas.v2 import WorkflowTaskCreate
@@ -73,6 +73,32 @@ async def _get_user_accessible_taskgroups(
      return accessible_task_groups


+ async def _get_task_by_source(
+     source: str,
+     task_groups_list: list[TaskGroupV2],
+ ) -> int | None:
+     """
+     Find task with a given source.
+
+     Args:
+         source: `source` of the task to be imported.
+         task_groups_list: Current list of valid task groups.
+
+     Return:
+         `id` of the matching task, or `None`.
+     """
+     task_id = next(
+         iter(
+             task.id
+             for task_group in task_groups_list
+             for task in task_group.task_list
+             if task.source == source
+         ),
+         None,
+     )
+     return task_id
+
+
  async def _get_task_by_taskimport(
      *,
      task_import: TaskImport,
@@ -115,15 +141,14 @@ async def _get_task_by_taskimport(
          return None

      # Determine target `version`
+     # Note that task_import.version cannot be "", due to a validator
      if task_import.version is None:
          logger.debug(
              "[_get_task_by_taskimport] "
              "No version requested, looking for latest."
          )
-         version = max(
-             [tg.version for tg in matching_task_groups],
-             key=_version_sort_key,
-         )
+         latest_task = max(matching_task_groups, key=lambda tg: tg.version or "")
+         version = latest_task.version
          logger.debug(
              f"[_get_task_by_taskimport] Latest version set to {version}."
          )
@@ -221,13 +246,19 @@ async def import_workflow(
      list_task_ids = []
      for wf_task in workflow_import.task_list:
          task_import = wf_task.task
-         task_id = await _get_task_by_taskimport(
-             task_import=task_import,
-             user_id=user.id,
-             default_group_id=default_group_id,
-             task_groups_list=task_group_list,
-             db=db,
-         )
+         if isinstance(task_import, TaskImportLegacy):
+             task_id = await _get_task_by_source(
+                 source=task_import.source,
+                 task_groups_list=task_group_list,
+             )
+         else:
+             task_id = await _get_task_by_taskimport(
+                 task_import=task_import,
+                 user_id=user.id,
+                 default_group_id=default_group_id,
+                 task_groups_list=task_group_list,
+                 db=db,
+             )
          if task_id is None:
              raise HTTPException(
                  status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
fractal_server/app/routes/api/v2/workflowtask.py
@@ -193,8 +193,13 @@ async def update_workflowtask(
              if not actual_args:
                  actual_args = None
              setattr(db_wf_task, key, actual_args)
-         else:
+         elif key in ["meta_parallel", "meta_non_parallel", "type_filters"]:
              setattr(db_wf_task, key, value)
+         else:
+             raise HTTPException(
+                 status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
+                 detail=f"patch_workflow_task endpoint cannot set {key=}",
+             )

      await db.commit()
      await db.refresh(db_wf_task)
fractal_server/app/routes/auth/current_user.py
@@ -4,8 +4,6 @@ Definition of `/auth/current-user/` endpoints

  from fastapi import APIRouter
  from fastapi import Depends
- from fastapi import HTTPException
- from fastapi import status
  from sqlalchemy.ext.asyncio import AsyncSession
  from sqlmodel import select

@@ -54,12 +52,6 @@ async def patch_current_user(
      Note: a user cannot patch their own password (as enforced within the
      `UserUpdateStrict` schema).
      """
-     if current_user.is_guest:
-         raise HTTPException(
-             status_code=status.HTTP_403_FORBIDDEN,
-             detail="This feature is not available for guest users.",
-         )
-
      update = UserUpdate(**user_update.model_dump(exclude_unset=True))

      # NOTE: here it would be relevant to catch an `InvalidPasswordException`
fractal_server/app/schemas/v2/__init__.py
@@ -47,6 +47,7 @@ from .status_legacy import WorkflowTaskStatusType # noqa F401
  from .task import TaskCreate # noqa F401
  from .task import TaskExport # noqa F401
  from .task import TaskImport # noqa F401
+ from .task import TaskImportLegacy # noqa F401
  from .task import TaskRead # noqa F401
  from .task import TaskType # noqa F401
  from .task import TaskUpdate # noqa F401
fractal_server/app/schemas/v2/dumps.py
@@ -30,6 +30,7 @@ class TaskDump(BaseModel):

      command_non_parallel: str | None = None
      command_parallel: str | None = None
+     source: str | None = None
      version: str | None = None

      input_types: dict[str, bool]
fractal_server/app/schemas/v2/task.py
@@ -94,6 +94,7 @@ class TaskRead(BaseModel):
      id: int
      name: str
      type: TaskType
+     source: str | None = None
      version: str | None = None

      command_non_parallel: str | None = None
@@ -138,6 +139,10 @@ class TaskImport(BaseModel):
      name: NonEmptyStr


+ class TaskImportLegacy(BaseModel):
+     source: NonEmptyStr
+
+
  class TaskExport(BaseModel):
      pkg_name: NonEmptyStr
      version: NonEmptyStr | None = None
fractal_server/app/schemas/v2/workflow.py
@@ -29,7 +29,6 @@ class WorkflowRead(BaseModel):
      task_list: list[WorkflowTaskRead]
      project: ProjectRead
      timestamp_created: AwareDatetime
-     description: str | None

      @field_serializer("timestamp_created")
      def serialize_datetime(v: datetime) -> str:
@@ -45,7 +44,6 @@ class WorkflowUpdate(BaseModel):

      name: NonEmptyStr = None
      reordered_workflowtask_ids: ListUniqueNonNegativeInt | None = None
-     description: str | None = None


  class WorkflowImport(BaseModel):
fractal_server/app/schemas/v2/workflowtask.py
@@ -11,6 +11,7 @@ from fractal_server.types import WorkflowTaskArgument

  from .task import TaskExport
  from .task import TaskImport
+ from .task import TaskImportLegacy
  from .task import TaskRead
  from .task import TaskType

@@ -49,9 +50,6 @@ class WorkflowTaskRead(BaseModel):
      task_id: int
      task: TaskRead

-     alias: str | None = None
-     description: str | None = None
-

  class WorkflowTaskReadWithWarning(WorkflowTaskRead):
      warning: str | None = None
@@ -65,8 +63,6 @@ class WorkflowTaskUpdate(BaseModel):
      args_non_parallel: WorkflowTaskArgument | None = None
      args_parallel: WorkflowTaskArgument | None = None
      type_filters: TypeFilters = None
-     description: str | None = None
-     alias: str | None = None


  class WorkflowTaskImport(BaseModel):
@@ -79,7 +75,7 @@ class WorkflowTaskImport(BaseModel):
      type_filters: TypeFilters | None = None
      input_filters: dict[str, Any] | None = None

-     task: TaskImport
+     task: TaskImport | TaskImportLegacy

      @model_validator(mode="before")
      @classmethod
fractal_server/app/security/__init__.py
@@ -21,7 +21,6 @@ from collections.abc import AsyncGenerator
  from typing import Any
  from typing import Generic
  from typing import Self
- from typing import override

  from fastapi import Depends
  from fastapi import Request
@@ -189,22 +188,19 @@ class UserManager(IntegerIDMixin, BaseUserManager[UserOAuth, int]):
              password_helper=password_helper,
          )

-     @override
      async def validate_password(self, password: str, user: UserOAuth) -> None:
          # check password length
          min_length = 4
-         max_length = 72
+         max_length = 100
          if len(password) < min_length:
              raise InvalidPasswordException(
                  f"The password is too short (minimum length: {min_length})."
              )
-         if len(password.encode("utf-8")) > max_length:
+         elif len(password) > max_length:
              raise InvalidPasswordException(
-                 "The password is too long "
-                 f"(maximum length: {max_length} bytes)."
+                 f"The password is too long (maximum length: {min_length})."
              )

-     @override
      async def oauth_callback(
          self: Self,
          oauth_name: str,
@@ -328,7 +324,6 @@ class UserManager(IntegerIDMixin, BaseUserManager[UserOAuth, int]):

          return user

-     @override
      async def on_after_register(
          self, user: UserOAuth, request: Request | None = None
      ):
fractal_server/runner/v2/submit_workflow.py
@@ -13,6 +13,7 @@ from typing import Protocol

  from sqlalchemy.orm import Session as DBSyncSession

+ from fractal_server import __VERSION__
  from fractal_server.app.db import DB
  from fractal_server.app.models.v2 import DatasetV2
  from fractal_server.app.models.v2 import JobV2
@@ -223,6 +224,7 @@ def submit_workflow(
          f'Start execution of workflow "{workflow.name}"; '
          f"more logs at {str(log_file_path)}"
      )
+     logger.debug(f"fractal_server.__VERSION__: {__VERSION__}")
      logger.debug(f"Resource name: {resource.name}")
      logger.debug(f"Profile name: {profile.name}")
      logger.debug(f"Username: {profile.username}")
{fractal_server-2.19.0.dist-info → fractal_server-2.19.0a0.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: fractal-server
- Version: 2.19.0
+ Version: 2.19.0a0
  Summary: Backend component of the Fractal analytics platform
  Author: Tommaso Comparin, Marco Franzon, Yuri Chiucconi, Jacopo Nespolo
  Author-email: Tommaso Comparin <tommaso.comparin@exact-lab.it>, Marco Franzon <marco.franzon@exact-lab.it>, Yuri Chiucconi <yuri.chiucconi@exact-lab.it>, Jacopo Nespolo <jacopo.nespolo@exact-lab.it>
@@ -17,7 +17,7 @@ Requires-Dist: pydantic>=2.12.0,<2.13.0
  Requires-Dist: pydantic-settings==2.12.0
  Requires-Dist: packaging>=25.0.0,<26.0.0
  Requires-Dist: fabric>=3.2.2,<3.3.0
- Requires-Dist: gunicorn>=24.1,<25
+ Requires-Dist: gunicorn>=23.0,<24.0
  Requires-Dist: psycopg[binary]>=3.1.0,<4.0.0
  Requires-Dist: tomli-w>=1.2.0,<1.3.0
  Requires-Python: >=3.12, <3.15
{fractal_server-2.19.0.dist-info → fractal_server-2.19.0a0.dist-info}/RECORD
@@ -1,4 +1,4 @@
- fractal_server/__init__.py,sha256=1UbL9iwVuF5UCZ96vJT-FT78kHbhBr9fS7C7nat-FUo,23
+ fractal_server/__init__.py,sha256=eojcLMU9LbD_qBD5WbjnoYryBbA4evbtq1WKYGIlYlE,25
  fractal_server/__main__.py,sha256=QeKoAgqoiozLJDa8kSVe-Aso1WWgrk1yLUYWS8RxZVM,11405
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -9,16 +9,16 @@ fractal_server/app/models/linkuserproject.py,sha256=Bk0VzjzG7RbnVnOwrztqxKIPxz_A
  fractal_server/app/models/security.py,sha256=2gKF2JQX2Xpb58fW-s7UgkgUneEy5SspgawawWoju3M,4970
  fractal_server/app/models/v2/__init__.py,sha256=xL05Mvdx0dqUFhJf694oPfuqkUQxZbxOkoUgRuNIXl4,949
  fractal_server/app/models/v2/accounting.py,sha256=VNweFARrvY3mj5LI0834Ku061S2aGC61kuVHzi_tZhc,1187
- fractal_server/app/models/v2/dataset.py,sha256=5UEV75LZTYAOq5d8UqRAuIIVBh3rAer3D5qvWmGbw8s,1154
+ fractal_server/app/models/v2/dataset.py,sha256=BL5elDU0UXnUSwvuXSO4JeKa9gje0QFerU_LP7sI754,1273
  fractal_server/app/models/v2/history.py,sha256=869RQzBssHFYnBLBvCGlKrLV_HAXwsdO5DgwG9U8D-U,2339
- fractal_server/app/models/v2/job.py,sha256=-9PKE5Gh9UCBStYE0DZj3I8XTPDa5qd7g2eVufp-9hw,2521
+ fractal_server/app/models/v2/job.py,sha256=IwlOPgx4FU-6PsPd_aBJRGsp5qVXddRBv6xxSMdh524,2360
  fractal_server/app/models/v2/profile.py,sha256=YajSmV4J_-zC4RX917s-A_lJt4mxYdPRVS3RH_uvJ48,1006
  fractal_server/app/models/v2/project.py,sha256=VvLXrgzKYLH585mYg_txrO8q3JoSoSEy4XkWjex4sDU,585
  fractal_server/app/models/v2/resource.py,sha256=XaHlJj9CladIahkrpywWXn8JBSx7_qEHp_wnkFuQ0rU,3896
- fractal_server/app/models/v2/task.py,sha256=v0brBHnAbcHrgDiuRHMWYahklq0fgyezZ0c9krKSA10,1463
- fractal_server/app/models/v2/task_group.py,sha256=8UcyFMG-6J-_hkgSkrLIxFLSwVCyrWB0Fl-LCgkeo54,4756
- fractal_server/app/models/v2/workflow.py,sha256=VKXIX3L3jP8eCdIQhVf5AhhwKGO_LTxn7uB4_sCirLs,1135
- fractal_server/app/models/v2/workflowtask.py,sha256=jEt81PGHuXSY8MYid0t15CntIIg0Lzq39uvG4X4Mx6s,1391
+ fractal_server/app/models/v2/task.py,sha256=iBIQB8POQE5MyKvLZhw7jZWlBhbrThzCDzRTcgiAczQ,1493
+ fractal_server/app/models/v2/task_group.py,sha256=C03LGKIO61Asj7Qz7qeIrZwWdfzoXBliLCeb9jzT5WI,4595
+ fractal_server/app/models/v2/workflow.py,sha256=AsL7p8UMGbow--21IG2lYZnOjQ--m85dRWaNCHqb35I,1069
+ fractal_server/app/models/v2/workflowtask.py,sha256=qkTc-hcFLpJUVsEUbnDq2BJL0qg9jagy2doZeusF1ek,1266
  fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/v2/__init__.py,sha256=VF4wg09fvz6gVgIFe-r7LoCU9tlF9kBcEhkQRQPhatY,1134
@@ -29,17 +29,17 @@ fractal_server/app/routes/admin/v2/job.py,sha256=VaVMUrHV7edHyjp2rsqoWf-wh5zlXFM
  fractal_server/app/routes/admin/v2/profile.py,sha256=DwLlA9K3hkl9BqzyifIDiaWeHOM_N_17kqB5CSJOhSI,3165
  fractal_server/app/routes/admin/v2/resource.py,sha256=c2z6b_D_W6_dqVnxNF8F8OdlI5Z4asex8Zgfwzjbi2Q,6330
  fractal_server/app/routes/admin/v2/sharing.py,sha256=I2BoyO0rZNWxFKyq3Em9sjz_0n3kfRynC-WmUZxSzzU,5455
- fractal_server/app/routes/admin/v2/task.py,sha256=9OglWWYjGbA1k2wdE8k8vr2ynLn6zqdp_BxwPGfjj9A,5873
+ fractal_server/app/routes/admin/v2/task.py,sha256=M4cetPkzn73-6faftk49AU_PeaPLqKE_dZxrrQ_x_98,6087
  fractal_server/app/routes/admin/v2/task_group.py,sha256=3LxH2LEstj7L9qGNN3kkLo4JdFw4GXDlnlM6s4ulc_0,9377
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=mC7zKuoyDZSOOuOiXiQeGfFv78JYHFBQ9RNtNAdAQfQ,10202
+ fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=LkHSQVXRRUkicpIDQpc7dwT944uabtCTTrZCoESHjik,9935
  fractal_server/app/routes/api/__init__.py,sha256=ZQtqy-HGJZsehAL8UjnTvRhWd9MI9Noi2NvwD5hE_xA,231
  fractal_server/app/routes/api/alive.py,sha256=hE42dfh1naix9EKvpyKCoLMA6WqThObgA3jIro2rx-M,206
  fractal_server/app/routes/api/settings.py,sha256=9wV4To1FjW3jCcRtFp-QGGu3235s1oUg4HwKQIw-TWc,1210
- fractal_server/app/routes/api/v2/__init__.py,sha256=_pNH8odiUYpv2Okiqbqe0VNjbvK2jWqIoaImlgYzI84,2580
+ fractal_server/app/routes/api/v2/__init__.py,sha256=_J8ekQqNGJ3DC1mKum8m8SNOcv66oZ7A260MFjcEtGE,2710
  fractal_server/app/routes/api/v2/_aux_functions.py,sha256=pxXcauCMZEVKkft8nOCK_Nq5m7hkx7BVUXch_j2KVtg,15131
  fractal_server/app/routes/api/v2/_aux_functions_history.py,sha256=vB8AwSBf3Dp1sxpTAYtWj4s9kgAp_D5Hd1BX6Z8JTxc,6057
  fractal_server/app/routes/api/v2/_aux_functions_sharing.py,sha256=IvDII3Sl00eypdD3QRELQ4SLyC3gq6-HsXhuCx5Bp5I,2995
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=EffjYdQCHoUc5i59c7KmyH8hoLYxQsPh4RPo8qY8CYA,7796
+ fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=qTJdKC3nKLwLYfuKbzJW6tREmzy-dNk57xcmvgi_UDA,8529
  fractal_server/app/routes/api/v2/_aux_functions_task_version_update.py,sha256=PKjV7r8YsPRXoNiVSnOK4KBYVV3l_Yb_ZPrqAkMkXrQ,1182
  fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=Hrumknv0vH5VX7SFp8WZDzsJv_z7quvFyNoDYmYoD7A,13623
  fractal_server/app/routes/api/v2/_aux_task_group_disambiguation.py,sha256=vdvMTa3San1HMTzctN5Vk7zxpqe4ccByrFBQyHfgWW8,4889
@@ -50,20 +50,21 @@ fractal_server/app/routes/api/v2/job.py,sha256=gulXJyP0Kc2_dNkPHPpc_XZlWoBEA1lrr
  fractal_server/app/routes/api/v2/pre_submission_checks.py,sha256=wqZ44rCJ8f2WcpyFOQ0FZN3g-66EzDnuYCrJJXawSdY,5179
  fractal_server/app/routes/api/v2/project.py,sha256=dirGShqcuEnK1sVh-Bl6kemj_z6A32s8ARNqSLIFhM8,5729
  fractal_server/app/routes/api/v2/sharing.py,sha256=W8Zf_rmTJwbFYz3XCrL9IFN6KXph5c97YS6Ok8oHTHM,9658
- fractal_server/app/routes/api/v2/submit.py,sha256=fhWLAsD-XgEunUfZpoSDsTpuYl3wReKAPg811iAmFRU,9642
+ fractal_server/app/routes/api/v2/status_legacy.py,sha256=qSV1wibqleOR084CTYNlXMuCkieL1aqlQXogTShd-1c,6002
+ fractal_server/app/routes/api/v2/submit.py,sha256=1J2QABGo8jzMxWqMFTG8nM4Ru-88h8Zjf9rJa3xuaoA,9544
  fractal_server/app/routes/api/v2/task.py,sha256=xKeGon7aRBOu1gnYd9EnWW1V-pnIqSVpXkN3dnubH3A,7418
- fractal_server/app/routes/api/v2/task_collection.py,sha256=VkxPwkKxQxvKJJL8ShkGVT4YolX5KjP8yLBaNS7WkP8,12405
+ fractal_server/app/routes/api/v2/task_collection.py,sha256=IFbFMadudfqBOu7CgMaQUr9dAhev_qiP-e43ZHV0gIE,12322
  fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=yZ0c3hWkq3nR5CKYP63yU6D1--xWjS2aimqoYWrQT5I,6894
- fractal_server/app/routes/api/v2/task_collection_pixi.py,sha256=IT-vhTFPUpMA2kY4z8i5us03vQCTquEvjjH7k7bgYVg,7229
- fractal_server/app/routes/api/v2/task_group.py,sha256=Jemgrc_1qA6b8at48BePMLJennterLIpJwoa077jMmc,7632
- fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=0iv4MOCesCngujiMUU9i3EdPADnoGIilXrR2Rc1yY04,10738
+ fractal_server/app/routes/api/v2/task_collection_pixi.py,sha256=qNqKvXAwQg9c6kwIJ7kbR1KA3TqPWthyQkOaJrVCLUk,7146
+ fractal_server/app/routes/api/v2/task_group.py,sha256=Q9KqhsnpT7Y4R0vg0oYzoJGX8b21Npe9pa3yIuN0Zcg,8348
+ fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=GOwIq78_M-HZylHGkGZJcu0yLIyjM4wpDsB8jobpWwI,10471
  fractal_server/app/routes/api/v2/task_version_update.py,sha256=dFzUetvIkd0RXw-cgOGGQGu8843y62AG30RbCQL15Tc,8506
- fractal_server/app/routes/api/v2/workflow.py,sha256=lHssUNN3ekqvXp4wWX8a33-5m2ZwLuLPkoL3keCsFhA,10741
- fractal_server/app/routes/api/v2/workflow_import.py,sha256=UND3U9zE-2o_85BW0uWNsm3_9125PgDtnUlki13jDT4,8740
- fractal_server/app/routes/api/v2/workflowtask.py,sha256=vn3pvaCIC9ydJYwboDAFT61IWRIJ6uYe5p3u4W9iloQ,7962
+ fractal_server/app/routes/api/v2/workflow.py,sha256=dHNytIVJgMqF-bGEzDrc2xFxgivFpF_xrDdUuYf_tyg,10709
+ fractal_server/app/routes/api/v2/workflow_import.py,sha256=FOpWT68FxDawDiuLa1Jlkt5mfMeLe3AM6MdGMS5EKXg,9620
+ fractal_server/app/routes/api/v2/workflowtask.py,sha256=f9tleS7TZlku-Z6G8rsyscXlO-bTF5TBHQbx9tAs1Gc,8226
  fractal_server/app/routes/auth/__init__.py,sha256=JL4MUBjPiNsNJLlb0lbn6_LbIzdRLosquQNqpn6niFw,2836
  fractal_server/app/routes/auth/_aux_auth.py,sha256=gKdYTWUzxcU44Iep787zReWwdAs4kW5baNDXCPmiKn8,9195
- fractal_server/app/routes/auth/current_user.py,sha256=gOLk-jUnkXTrBj8aN_yRlUcvCoJMHxxoWz13M8DBCbg,3502
+ fractal_server/app/routes/auth/current_user.py,sha256=uDWttWo9isG69Jv1EGnnr2Ki5ZGd0D76jgjVDQMkn8c,3251
  fractal_server/app/routes/auth/group.py,sha256=uR98vdQHH-7BFl-Czj85ESPxT2yQymy4qtagaMrnUPU,6491
  fractal_server/app/routes/auth/login.py,sha256=buVa5Y8T0cd_SW1CqC-zMv-3SfPxGJknf7MYlUyKOl0,567
  fractal_server/app/routes/auth/oauth.py,sha256=dOt1bWz1viW36CAnHVLmLkYzdCHUMdOhdTNgoQp-VvU,3663
@@ -74,16 +75,15 @@ fractal_server/app/routes/auth/viewer_paths.py,sha256=uDIwP3AWjLOskG2ZSMUokmn6DW
  fractal_server/app/routes/aux/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/aux/_job.py,sha256=n-UhONvomKyKkQDDqd0lFh2kCMhlCGXpfdMNW39R1E4,644
  fractal_server/app/routes/aux/_runner.py,sha256=-SvcXCVEV7Mb6q4PbbxuTCCruX6sAlR5QGXk9CzBVv8,979
- fractal_server/app/routes/aux/_versions.py,sha256=6ZnH5KcLlJXACfZsFXhgARQcp2AwekIaaTVz68H6HWs,1527
  fractal_server/app/routes/aux/validate_user_profile.py,sha256=fGqJDdAFkbQoEIjqZ5F9-SDY_4os63R2EUMqODC7eBg,1969
  fractal_server/app/routes/pagination.py,sha256=C4XW6cnyDfyu1XMHXRN4wgk72lsS0UtlINZmwGZFb4Y,1174
  fractal_server/app/schemas/__init__.py,sha256=VIWJCaqokte3OljDLX00o-EC2d12rFoPb5HOLKQI94Y,86
  fractal_server/app/schemas/user.py,sha256=ncPPxl6DsF_YDsEKJ1idTpAZTsVyh1iC9p4KfK25WZ4,3421
  fractal_server/app/schemas/user_group.py,sha256=irel29GbffKCXNcyrAYbNSN3pCgmoUQ1wG32_s6jvos,1082
- fractal_server/app/schemas/v2/__init__.py,sha256=cYyOsr-6jttq1jzRNa_tOuCXPp3zwV3jBDR72F4u4vU,3957
+ fractal_server/app/schemas/v2/__init__.py,sha256=6W1uSthuLGXs9oOYnjmScoqJYkWyUTT-9cNHFZoTmkM,4005
  fractal_server/app/schemas/v2/accounting.py,sha256=6EVUdPTkFY6Wb9-Vc0cIEZYVXwGEvJ3tP4YOXYE1hao,546
  fractal_server/app/schemas/v2/dataset.py,sha256=SBS3TwHxPRHtLvFu-Bm4eQlI96DIkCiFF7oKvfcfTOc,2736
- fractal_server/app/schemas/v2/dumps.py,sha256=x3LK-Xq1OQgp7waAjy-5gaQVNbcF5JQNTqqPw86xkpM,2246
+ fractal_server/app/schemas/v2/dumps.py,sha256=UPtb1Rqkd48AFpWsVfcHcAjKGzF2ZoHLdYrAJzPdsSM,2276
  fractal_server/app/schemas/v2/history.py,sha256=pZiMKfh6nMWbTp5MUtrnGySPKbeRFf5tM1VLFaTgGcw,1784
  fractal_server/app/schemas/v2/job.py,sha256=YnnxnrbI_l7EgZNzk_KgnuEuh0COg-RPoph2foHUvZo,3308
  fractal_server/app/schemas/v2/manifest.py,sha256=I8KyVZvW6r6_DrcKX5aZ9zJwa-Kk_u3gCKlz6HVPF5o,6655
@@ -92,12 +92,12 @@ fractal_server/app/schemas/v2/project.py,sha256=jTc4jhwdkO1Ht2K27AguA0z9wWX0CYKv
  fractal_server/app/schemas/v2/resource.py,sha256=LPi1D67vGngOn5BWNicqAIHCKExaf2XyzuZKByo7wfc,6841
  fractal_server/app/schemas/v2/sharing.py,sha256=wHBiEmqhU53NokQ2rmm6xkH3lumBR6TdWw4nvDz6uww,1818
  fractal_server/app/schemas/v2/status_legacy.py,sha256=ajLm2p0wNfJ_lQX9Oq3NJn0jxQj50U3eZxuRjOIdOpg,949
- fractal_server/app/schemas/v2/task.py,sha256=le62bHu4nRrXlN-cCOdpkStLQNLtkR_myqK1j8fLoNs,4260
+ fractal_server/app/schemas/v2/task.py,sha256=Fd4n6vitliOuQyoofQ0daFy25QzIoWe9NPbXanNyrrE,4351
  fractal_server/app/schemas/v2/task_collection.py,sha256=ljGnZOmYg9pQ9PbYnNxLJDf4O2BDym-BQ_cXr-NWSd4,4590
  fractal_server/app/schemas/v2/task_group.py,sha256=sbg6AkvonU7F_-QC4G9kDxO6YVcz7wUPY3k3n9jYkRY,3392
- fractal_server/app/schemas/v2/workflow.py,sha256=m-udZHvOFokjaD3cGdTMHdtC8qHLNlmx7PsCEgcvN7U,1801
- fractal_server/app/schemas/v2/workflowtask.py,sha256=sAzu6ZHWGgwvQgviIZJB3J6X79TxchlClkqzNHMlxTo,3683
- fractal_server/app/security/__init__.py,sha256=Z-xQjt5jx6ldBBrz1iJ0IQ-SLKbv8Gq7fQ4U18NAxXc,18471
+ fractal_server/app/schemas/v2/workflow.py,sha256=87Aa92H6ceBbkDUsDhDqVNJyuBZuVRRAgFqNeg_djwE,1738
+ fractal_server/app/schemas/v2/workflowtask.py,sha256=1k56KHwzZDZGjl7FG1yslj-MKtKKR5fZ5RKGlJbopNc,3608
+ fractal_server/app/security/__init__.py,sha256=sblIH9DFCt_iyk22WzV6k4LuKdbvNPtS1HqPCHIiBJ4,18363
  fractal_server/app/security/signup_email.py,sha256=R69U5eTi9X7gZHSTfZ26SaHMQAeqReYEpGnB8r3AVig,1992
  fractal_server/app/shutdown.py,sha256=bfEmf6Xdc906ES0zDDWsihmd6neQpGFyIc7qnadnNu8,2283
  fractal_server/config/__init__.py,sha256=WvcoE3qiY1qnkumv3qspcemCFw5iFG5NkSFR78vN4ks,562
@@ -122,10 +122,8 @@ fractal_server/migrations/versions/034a469ec2eb_task_groups.py,sha256=uuf0sJibC4
  fractal_server/migrations/versions/068496367952_drop_taskgroup_venv_size_and_files_.py,sha256=rVFmB7eO7LtOfJivNnfnniQecD8DebAWnSvYEE2yq7k,1239
  fractal_server/migrations/versions/091b01f51f88_add_usergroup_and_linkusergroup_table.py,sha256=cSz3Jc2X79dN7I-rh0OSefOd5WOJU65wNWFNMnX2LR4,1450
  fractal_server/migrations/versions/0f5f85bb2ae7_add_pre_pinned_packages.py,sha256=XH6msE3On7P7S2gz-Xec20bWAI6vR29zRT1ZafFwStI,1056
- fractal_server/migrations/versions/18a26fcdea5d_drop_dataset_history.py,sha256=AzCnz-PJCzKw8Sm28zN1cIrHexgv-5Bcw5dGIxkzDOU,1064
  fractal_server/migrations/versions/19eca0dd47a9_user_settings_project_dir.py,sha256=5OzcIQjTfwiqh9vK_yLQHJlNtIHxKiEPr-IjZ9iir-Y,961
  fractal_server/migrations/versions/1a83a5260664_rename.py,sha256=BGZrM2UfccWc0s0kNlpACEw1LB1K6AemHPR5irkJ1gA,790
- fractal_server/migrations/versions/1bf8785755f9_add_description_to_workflow_and_.py,sha256=OVugy2ftELqAE8_6zTGiwwpDxj1STmf8XNHOa8TBr6o,1507
  fractal_server/migrations/versions/1eac13a26c83_drop_v1_tables.py,sha256=DGdm1Q58UHfQ-6GAg0ucyKCiMxXb1JzELYQ50d1f5bo,1605
  fractal_server/migrations/versions/316140ff7ee1_remove_usersettings_cache_dir.py,sha256=5gIIZp3Apc2fTiUoHEvd-k34W-HIq7cjFt7Fwqjq6lE,911
  fractal_server/migrations/versions/40d6d6511b20_add_index_to_history_models.py,sha256=dZglAP0b3_YAeYc7Pphxs9iI73icSaMkgku7R6MaPL0,1357
@@ -137,7 +135,6 @@ fractal_server/migrations/versions/4cedeb448a53_workflowtask_foreign_keys_not_nu
  fractal_server/migrations/versions/501961cfcd85_remove_link_between_v1_and_v2_tasks_.py,sha256=SeBUvMEikyl4qkCjiOgkkqbcIeaim57EPuufjFh_X8k,3271
  fractal_server/migrations/versions/50a13d6138fd_initial_schema.py,sha256=sh0tB5K35ui8bfvBPI2jwDkeNVDtdlGvWrvTeumA0iY,8654
  fractal_server/migrations/versions/5bf02391cfef_v2.py,sha256=jTNyZ8H5VDh4eRvCEy-WliXAu55kUybddsaTeexXr6c,8367
- fractal_server/migrations/versions/5fb08bf05b14_drop_taskv2_source.py,sha256=bUs6xhVFGeCU4rR7yRdQXEXsns28lFUKBdcDsd-Bj4Q,880
  fractal_server/migrations/versions/70e77f1c38b0_add_applyworkflow_first_task_index_and_.py,sha256=vJ6nDb7UnkCMIPg2zNM7ZE0JOTvaqFL3Fe9UarP-ivM,1633
  fractal_server/migrations/versions/71eefd1dd202_add_slurm_accounts.py,sha256=qpHZC97AduFk5_G0xHs9akhnhpzb1LZooYCTPHy7n28,1353
  fractal_server/migrations/versions/7673fe18c05d_remove_project_dir_server_default.py,sha256=PwTfY9Kq3_cwb5G4E0sM9u7UjzOhOwsYCspymmPgipQ,795
@@ -165,7 +162,6 @@ fractal_server/migrations/versions/b7477cc98f45_2_18_1.py,sha256=clKGCwh95sWFTeU
  fractal_server/migrations/versions/bc0e8b3327a7_project_sharing.py,sha256=5h8ogjfQPbKbVwN0-pfh5ixPQSCCYsiVnQoOveUKKUA,2145
  fractal_server/migrations/versions/c90a7c76e996_job_id_in_history_run.py,sha256=CPQNKHqsx22wSY4ylqM8UMhDOWkQeC9eLAHlQQJYSfQ,1102
  fractal_server/migrations/versions/caba9fb1ea5e_drop_useroauth_user_settings_id.py,sha256=7MpunfOBk0LM6u-xrwca8GUHIjinAJZrS9AUT3l62qU,1320
- fractal_server/migrations/versions/cfd13f7954e7_add_fractal_server_version_to_jobv2_and_.py,sha256=rqF1f3j5exatdKyeEvccX-TMC_bcZEFcHmixguOQRqw,1447
  fractal_server/migrations/versions/d256a7379ab8_taskgroup_activity_and_venv_info_to_.py,sha256=bFMJUFJAnOaHYyYYKISHpbQWKBoQopiEKRT0PSidqhk,3796
  fractal_server/migrations/versions/d4fe3708d309_make_applyworkflow_workflow_dump_non_.py,sha256=zLHqar9iduiLs5Ib50B9RKrdLLbWSffahWxXDDePnI8,950
  fractal_server/migrations/versions/da2cb2ac4255_user_group_viewer_paths.py,sha256=-ihE-KJEVemb8ZhRejg6xdC5TPTW8GkKWnzcl1ReAHQ,901
@@ -220,7 +216,7 @@ fractal_server/runner/v2/deduplicate_list.py,sha256=TWxHDucal0VZPswy_H7IFaEb4ddG
  fractal_server/runner/v2/merge_outputs.py,sha256=0ahaSwdMFAoEhxVaEaO9nSJuKIcWg9pDZ356ktSHcC0,897
  fractal_server/runner/v2/runner.py,sha256=aKz5ocgsMcUUsvaz00db8cWbBHMBA_g_PJhwV973pdY,20884
  fractal_server/runner/v2/runner_functions.py,sha256=1wW2ByskwPtx_mhyJiCpKMXDnDyZ_y5fDWv8hktFZXI,19564
- fractal_server/runner/v2/submit_workflow.py,sha256=MNUyBlXKfPjSbJgB5HlpAhXPvDeuMtPXc2h6HHmJkU0,11761
+ fractal_server/runner/v2/submit_workflow.py,sha256=FSqXF0_BzhNMRGqYxdjwSVD5rdonk4fTL6Fkp31O_fQ,11867
  fractal_server/runner/v2/task_interface.py,sha256=ftPPpOU16rbJD8q-QV7o_3ey8W7MQTFuWJiYUr4OmF4,2532
  fractal_server/runner/versions.py,sha256=uz59Dxj7BphnFnr-p0kyaZRH0h4w5Xkd0UJNVGtt4ds,474
  fractal_server/ssh/__init__.py,sha256=sVUmzxf7_DuXG1xoLQ1_00fo5NPhi2LJipSmU5EAkPs,124
@@ -274,8 +270,8 @@ fractal_server/types/validators/_workflow_task_arguments_validators.py,sha256=zt
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
  fractal_server/utils.py,sha256=-rjg8QTXQcKweXjn0NcmETFs1_uM9PGnbl0Q7c4ERPM,2181
  fractal_server/zip_tools.py,sha256=Uhn-ax4_9g1PJ32BdyaX30hFpAeVOv2tZYTUK-zVn1E,5719
- fractal_server-2.19.0.dist-info/licenses/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.19.0.dist-info/WHEEL,sha256=eycQt0QpYmJMLKpE3X9iDk8R04v2ZF0x82ogq-zP6bQ,79
- fractal_server-2.19.0.dist-info/entry_points.txt,sha256=3TpdcjmETRYWJxFyAh3z-9955EWua9jdkSnBwxES1uE,60
- fractal_server-2.19.0.dist-info/METADATA,sha256=IxX_tJLqbH-qtYWusf2Q846BSHL9pIm6Pr7EcFTFuY0,4181
- fractal_server-2.19.0.dist-info/RECORD,,
+ fractal_server-2.19.0a0.dist-info/licenses/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.19.0a0.dist-info/WHEEL,sha256=eycQt0QpYmJMLKpE3X9iDk8R04v2ZF0x82ogq-zP6bQ,79
+ fractal_server-2.19.0a0.dist-info/entry_points.txt,sha256=3TpdcjmETRYWJxFyAh3z-9955EWua9jdkSnBwxES1uE,60
+ fractal_server-2.19.0a0.dist-info/METADATA,sha256=pmh1KIC1bSUPthf2eJlfOJq_J3Yw88vWkvPr4hgtYNA,4185
+ fractal_server-2.19.0a0.dist-info/RECORD,,
fractal_server/app/routes/aux/_versions.py
@@ -1,42 +0,0 @@
- from fastapi import HTTPException
- from fastapi import status
- from packaging.version import InvalidVersion
- from packaging.version import Version
-
-
- def _version_sort_key(version: str | None) -> tuple[int, Version | str | None]:
-     """
-     Returns a tuple used as (reverse) ordering key for TaskGroups in
-     `get_task_group_list`.
-     The parsable versions are the first in order, sorted according to the
-     sorting rules of packaging.version.Version.
-     Next in order we have the non-null non-parsable versions, sorted
-     alphabetically.
-     """
-     if version is None:
-         return (0, None)
-     try:
-         return (2, Version(version))
-     except InvalidVersion:
-         return (1, version)
-
-
- def _find_latest_version_or_422(versions: list[str]) -> str:
-     """
-     > For PEP 440 versions, this is easy enough for the client to do (using
-     > the `packaging` library [...]. For non-standard versions, there is no
-     > well-defined ordering, and clients will need to decide on what rule is
-     > appropriate for their needs.
-     (https://peps.python.org/pep-0700/#why-not-provide-a-latest-version-value)
-
-     The `versions` array is coming from the PyPI API, and its elements are
-     assumed parsable.
-     """
-     try:
-         latest = max(versions, key=lambda v_str: Version(v_str))
-         return latest
-     except InvalidVersion as e:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
-             detail=f"Cannot find latest version (original error: {str(e)}).",
-         )
fractal_server/migrations/versions/18a26fcdea5d_drop_dataset_history.py
@@ -1,41 +0,0 @@
- """drop dataset.history
-
- Revision ID: 18a26fcdea5d
- Revises: 1bf8785755f9
- Create Date: 2026-01-29 10:15:18.467384
-
- """
-
- import sqlalchemy as sa
- from alembic import op
- from sqlalchemy.dialects import postgresql
-
- # revision identifiers, used by Alembic.
- revision = "18a26fcdea5d"
- down_revision = "1bf8785755f9"
- branch_labels = None
- depends_on = None
-
-
- def upgrade() -> None:
-     # ### commands auto generated by Alembic - please adjust! ###
-     with op.batch_alter_table("datasetv2", schema=None) as batch_op:
-         batch_op.drop_column("history")
-
-     # ### end Alembic commands ###
-
-
- def downgrade() -> None:
-     # ### commands auto generated by Alembic - please adjust! ###
-     with op.batch_alter_table("datasetv2", schema=None) as batch_op:
-         batch_op.add_column(
-             sa.Column(
-                 "history",
-                 postgresql.JSONB(astext_type=sa.Text()),
-                 server_default=sa.text("'[]'::json"),
-                 autoincrement=False,
-                 nullable=False,
-             )
-         )
-
-     # ### end Alembic commands ###
fractal_server/migrations/versions/1bf8785755f9_add_description_to_workflow_and_.py
@@ -1,53 +0,0 @@
- """Add description to workflow and description and alias to workflow task
-
- Revision ID: 1bf8785755f9
- Revises: 5fb08bf05b14
- Create Date: 2026-01-26 09:03:18.396841
-
- """
-
- import sqlalchemy as sa
- import sqlmodel
- from alembic import op
-
- # revision identifiers, used by Alembic.
- revision = "1bf8785755f9"
- down_revision = "5fb08bf05b14"
- branch_labels = None
- depends_on = None
-
-
- def upgrade() -> None:
-     # ### commands auto generated by Alembic - please adjust! ###
-     with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
-         batch_op.add_column(
-             sa.Column(
-                 "alias", sqlmodel.sql.sqltypes.AutoString(), nullable=True
-             )
-         )
-         batch_op.add_column(
-             sa.Column(
-                 "description", sqlmodel.sql.sqltypes.AutoString(), nullable=True
-             )
-         )
-
-     with op.batch_alter_table("workflowv2", schema=None) as batch_op:
-         batch_op.add_column(
-             sa.Column(
-                 "description", sqlmodel.sql.sqltypes.AutoString(), nullable=True
-             )
-         )
-
-     # ### end Alembic commands ###
-
-
- def downgrade() -> None:
-     # ### commands auto generated by Alembic - please adjust! ###
-     with op.batch_alter_table("workflowv2", schema=None) as batch_op:
-         batch_op.drop_column("description")
-
-     with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
-         batch_op.drop_column("description")
-         batch_op.drop_column("alias")
-
-     # ### end Alembic commands ###
fractal_server/migrations/versions/5fb08bf05b14_drop_taskv2_source.py
@@ -1,36 +0,0 @@
- """drop TaskV2.source
-
- Revision ID: 5fb08bf05b14
- Revises: e53dc51fdf93
- Create Date: 2026-01-21 12:50:39.072816
-
- """
-
- import sqlalchemy as sa
- from alembic import op
-
- # revision identifiers, used by Alembic.
- revision = "5fb08bf05b14"
- down_revision = "e53dc51fdf93"
- branch_labels = None
- depends_on = None
-
-
- def upgrade() -> None:
-     # ### commands auto generated by Alembic - please adjust! ###
-     with op.batch_alter_table("taskv2", schema=None) as batch_op:
-         batch_op.drop_column("source")
-
-     # ### end Alembic commands ###
-
-
- def downgrade() -> None:
-     # ### commands auto generated by Alembic - please adjust! ###
-     with op.batch_alter_table("taskv2", schema=None) as batch_op:
-         batch_op.add_column(
-             sa.Column(
-                 "source", sa.VARCHAR(), autoincrement=False, nullable=True
-             )
-         )
-
-     # ### end Alembic commands ###
fractal_server/migrations/versions/cfd13f7954e7_add_fractal_server_version_to_jobv2_and_.py
@@ -1,52 +0,0 @@
- """add fractal_server_version to jobv2 and taskgroupactivityv2
-
- Revision ID: cfd13f7954e7
- Revises: 18a26fcdea5d
- Create Date: 2026-01-29 12:33:00.064562
-
- """
-
- import sqlalchemy as sa
- from alembic import op
-
- # revision identifiers, used by Alembic.
- revision = "cfd13f7954e7"
- down_revision = "18a26fcdea5d"
- branch_labels = None
- depends_on = None
-
-
- def upgrade() -> None:
-     # ### commands auto generated by Alembic - please adjust! ###
-     with op.batch_alter_table("jobv2", schema=None) as batch_op:
-         batch_op.add_column(
-             sa.Column(
-                 "fractal_server_version",
-                 sa.String(),
-                 server_default="pre-2.19.0",
-                 nullable=False,
-             )
-         )
-
-     with op.batch_alter_table("taskgroupactivityv2", schema=None) as batch_op:
-         batch_op.add_column(
-             sa.Column(
-                 "fractal_server_version",
-                 sa.String(),
-                 server_default="pre-2.19.0",
-                 nullable=False,
-             )
-         )
-
-     # ### end Alembic commands ###
-
-
- def downgrade() -> None:
-     # ### commands auto generated by Alembic - please adjust! ###
-     with op.batch_alter_table("taskgroupactivityv2", schema=None) as batch_op:
-         batch_op.drop_column("fractal_server_version")
-
-     with op.batch_alter_table("jobv2", schema=None) as batch_op:
-         batch_op.drop_column("fractal_server_version")
-
-     # ### end Alembic commands ###