fractal-server 2.8.1__py3-none-any.whl → 2.9.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/db/__init__.py +2 -35
  3. fractal_server/app/models/v2/__init__.py +3 -3
  4. fractal_server/app/models/v2/task.py +0 -72
  5. fractal_server/app/models/v2/task_group.py +102 -0
  6. fractal_server/app/routes/admin/v1.py +1 -20
  7. fractal_server/app/routes/admin/v2/job.py +1 -20
  8. fractal_server/app/routes/admin/v2/task_group.py +53 -13
  9. fractal_server/app/routes/api/v2/__init__.py +11 -2
  10. fractal_server/app/routes/api/v2/{_aux_functions_task_collection.py → _aux_functions_task_lifecycle.py} +43 -0
  11. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +27 -17
  12. fractal_server/app/routes/api/v2/task_collection.py +30 -55
  13. fractal_server/app/routes/api/v2/task_collection_custom.py +3 -3
  14. fractal_server/app/routes/api/v2/task_group.py +83 -14
  15. fractal_server/app/routes/api/v2/task_group_lifecycle.py +221 -0
  16. fractal_server/app/routes/api/v2/workflow.py +1 -1
  17. fractal_server/app/routes/api/v2/workflow_import.py +2 -2
  18. fractal_server/app/routes/aux/_timestamp.py +18 -0
  19. fractal_server/app/schemas/_validators.py +1 -2
  20. fractal_server/app/schemas/v2/__init__.py +3 -2
  21. fractal_server/app/schemas/v2/task_collection.py +0 -21
  22. fractal_server/app/schemas/v2/task_group.py +31 -8
  23. fractal_server/config.py +11 -56
  24. fractal_server/migrations/versions/3082479ac4ea_taskgroup_activity_and_venv_info_to_.py +105 -0
  25. fractal_server/ssh/_fabric.py +18 -0
  26. fractal_server/tasks/utils.py +2 -12
  27. fractal_server/tasks/v2/local/__init__.py +3 -0
  28. fractal_server/tasks/v2/local/collect.py +291 -0
  29. fractal_server/tasks/v2/local/deactivate.py +210 -0
  30. fractal_server/tasks/v2/local/reactivate.py +159 -0
  31. fractal_server/tasks/v2/local/utils_local.py +45 -0
  32. fractal_server/tasks/v2/ssh/__init__.py +0 -0
  33. fractal_server/tasks/v2/ssh/collect.py +386 -0
  34. fractal_server/tasks/v2/ssh/deactivate.py +2 -0
  35. fractal_server/tasks/v2/ssh/reactivate.py +2 -0
  36. fractal_server/tasks/v2/templates/{_2_preliminary_pip_operations.sh → 1_create_venv.sh} +6 -7
  37. fractal_server/tasks/v2/templates/{_3_pip_install.sh → 2_pip_install.sh} +8 -1
  38. fractal_server/tasks/v2/templates/{_4_pip_freeze.sh → 3_pip_freeze.sh} +0 -7
  39. fractal_server/tasks/v2/templates/{_5_pip_show.sh → 4_pip_show.sh} +5 -6
  40. fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh +10 -0
  41. fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh +35 -0
  42. fractal_server/tasks/v2/utils_background.py +42 -103
  43. fractal_server/tasks/v2/utils_templates.py +32 -2
  44. fractal_server/utils.py +4 -2
  45. {fractal_server-2.8.1.dist-info → fractal_server-2.9.0a1.dist-info}/METADATA +2 -3
  46. {fractal_server-2.8.1.dist-info → fractal_server-2.9.0a1.dist-info}/RECORD +50 -39
  47. fractal_server/app/models/v2/collection_state.py +0 -22
  48. fractal_server/tasks/v2/collection_local.py +0 -357
  49. fractal_server/tasks/v2/collection_ssh.py +0 -352
  50. fractal_server/tasks/v2/templates/_1_create_venv.sh +0 -42
  51. /fractal_server/tasks/v2/{database_operations.py → utils_database.py} +0 -0
  52. {fractal_server-2.8.1.dist-info → fractal_server-2.9.0a1.dist-info}/LICENSE +0 -0
  53. {fractal_server-2.8.1.dist-info → fractal_server-2.9.0a1.dist-info}/WHEEL +0 -0
  54. {fractal_server-2.8.1.dist-info → fractal_server-2.9.0a1.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
@@ -1 +1 @@
- __VERSION__ = "2.8.1"
+ __VERSION__ = "2.9.0a1"
fractal_server/app/db/__init__.py
@@ -2,17 +2,14 @@
  `db` module, loosely adapted from
  https://testdriven.io/blog/fastapi-sqlmodel/#async-sqlmodel
  """
- import sqlite3
  from typing import AsyncGenerator
  from typing import Generator

  from sqlalchemy import create_engine
- from sqlalchemy import event
  from sqlalchemy.ext.asyncio import AsyncSession
  from sqlalchemy.ext.asyncio import create_async_engine
  from sqlalchemy.orm import Session as DBSyncSession
  from sqlalchemy.orm import sessionmaker
- from sqlalchemy.pool import StaticPool

  from ...config import get_settings
  from ...logger import set_logger
@@ -21,14 +18,6 @@ from ...syringe import Inject

  logger = set_logger(__name__)

- SQLITE_WARNING_MESSAGE = (
-     "SQLite is supported (supported version >=3.37, "
-     f"current {sqlite3.sqlite_version=}) "
-     "but discouraged in production. "
-     "Given its partial support for ForeignKey constraints, "
-     "database consistency cannot be guaranteed."
- )
-

  class DB:
      """
@@ -56,14 +45,7 @@ class DB:
          settings = Inject(get_settings)
          settings.check_db()

-         if settings.DB_ENGINE == "sqlite":
-             logger.warning(SQLITE_WARNING_MESSAGE)
-             # Set some sqlite-specific options
-             engine_kwargs_async = dict(poolclass=StaticPool)
-         else:
-             engine_kwargs_async = {
-                 "pool_pre_ping": True,
-             }
+         engine_kwargs_async = {"pool_pre_ping": True}

          cls._engine_async = create_async_engine(
              settings.DATABASE_ASYNC_URL,
@@ -83,15 +65,7 @@ class DB:
          settings = Inject(get_settings)
          settings.check_db()

-         if settings.DB_ENGINE == "sqlite":
-             logger.warning(SQLITE_WARNING_MESSAGE)
-             # Set some sqlite-specific options
-             engine_kwargs_sync = dict(
-                 poolclass=StaticPool,
-                 connect_args={"check_same_thread": False},
-             )
-         else:
-             engine_kwargs_sync = {}
+         engine_kwargs_sync = {}

          cls._engine_sync = create_engine(
              settings.DATABASE_SYNC_URL,
@@ -107,13 +81,6 @@ class DB:
              future=True,
          )

-         @event.listens_for(cls._engine_sync, "connect")
-         def set_sqlite_pragma(dbapi_connection, connection_record):
-             if settings.DB_ENGINE == "sqlite":
-                 cursor = dbapi_connection.cursor()
-                 cursor.execute("PRAGMA journal_mode=WAL")
-                 cursor.close()
-
      @classmethod
      async def get_async_db(cls) -> AsyncGenerator[AsyncSession, None]:
          """
fractal_server/app/models/v2/__init__.py
@@ -2,12 +2,12 @@
  v2 `models` module
  """
  from ..linkuserproject import LinkUserProjectV2
- from .collection_state import CollectionStateV2
  from .dataset import DatasetV2
  from .job import JobV2
  from .project import ProjectV2
- from .task import TaskGroupV2
  from .task import TaskV2
+ from .task_group import TaskGroupActivityV2
+ from .task_group import TaskGroupV2
  from .workflow import WorkflowV2
  from .workflowtask import WorkflowTaskV2

@@ -16,8 +16,8 @@ __all__ = [
      "DatasetV2",
      "JobV2",
      "ProjectV2",
-     "CollectionStateV2",
      "TaskGroupV2",
+     "TaskGroupActivityV2",
      "TaskV2",
      "WorkflowTaskV2",
      "WorkflowV2",
fractal_server/app/models/v2/task.py
@@ -1,17 +1,12 @@
- from datetime import datetime
  from typing import Any
  from typing import Optional

  from pydantic import HttpUrl
  from sqlalchemy import Column
- from sqlalchemy.types import DateTime
  from sqlalchemy.types import JSON
  from sqlmodel import Field
- from sqlmodel import Relationship
  from sqlmodel import SQLModel

- from fractal_server.utils import get_timestamp
-

  class TaskV2(SQLModel, table=True):
      id: Optional[int] = Field(default=None, primary_key=True)
@@ -51,70 +46,3 @@ class TaskV2(SQLModel, table=True):
      tags: list[str] = Field(
          sa_column=Column(JSON, server_default="[]", nullable=False)
      )
-
-
- class TaskGroupV2(SQLModel, table=True):
-     id: Optional[int] = Field(default=None, primary_key=True)
-     task_list: list[TaskV2] = Relationship(
-         sa_relationship_kwargs=dict(
-             lazy="selectin", cascade="all, delete-orphan"
-         ),
-     )
-
-     user_id: int = Field(foreign_key="user_oauth.id")
-     user_group_id: Optional[int] = Field(foreign_key="usergroup.id")
-
-     origin: str
-     pkg_name: str
-     version: Optional[str] = None
-     python_version: Optional[str] = None
-     path: Optional[str] = None
-     venv_path: Optional[str] = None
-     wheel_path: Optional[str] = None
-     pip_extras: Optional[str] = None
-     pinned_package_versions: dict[str, str] = Field(
-         sa_column=Column(
-             JSON,
-             server_default="{}",
-             default={},
-             nullable=True,
-         ),
-     )
-
-     active: bool = True
-     timestamp_created: datetime = Field(
-         default_factory=get_timestamp,
-         sa_column=Column(DateTime(timezone=True), nullable=False),
-     )
-
-     @property
-     def pip_install_string(self) -> str:
-         """
-         Prepare string to be used in `python -m pip install`.
-         """
-         extras = f"[{self.pip_extras}]" if self.pip_extras is not None else ""
-
-         if self.wheel_path is not None:
-             return f"{self.wheel_path}{extras}"
-         else:
-             if self.version is None:
-                 raise ValueError(
-                     "Cannot run `pip_install_string` with "
-                     f"{self.pkg_name=}, {self.wheel_path=}, {self.version=}."
-                 )
-             return f"{self.pkg_name}{extras}=={self.version}"
-
-     @property
-     def pinned_package_versions_string(self) -> str:
-         """
-         Prepare string to be used in `python -m pip install`.
-         """
-         if self.pinned_package_versions is None:
-             return ""
-         output = " ".join(
-             [
-                 f"{key}=={value}"
-                 for key, value in self.pinned_package_versions.items()
-             ]
-         )
-         return output
fractal_server/app/models/v2/task_group.py
@@ -0,0 +1,102 @@
+ from datetime import datetime
+ from typing import Optional
+
+ from sqlalchemy import Column
+ from sqlalchemy.types import DateTime
+ from sqlalchemy.types import JSON
+ from sqlmodel import Field
+ from sqlmodel import Relationship
+ from sqlmodel import SQLModel
+
+ from .task import TaskV2
+ from fractal_server.utils import get_timestamp
+
+
+ class TaskGroupV2(SQLModel, table=True):
+     id: Optional[int] = Field(default=None, primary_key=True)
+     task_list: list[TaskV2] = Relationship(
+         sa_relationship_kwargs=dict(
+             lazy="selectin", cascade="all, delete-orphan"
+         ),
+     )
+
+     user_id: int = Field(foreign_key="user_oauth.id")
+     user_group_id: Optional[int] = Field(foreign_key="usergroup.id")
+
+     origin: str
+     pkg_name: str
+     version: Optional[str] = None
+     python_version: Optional[str] = None
+     path: Optional[str] = None
+     wheel_path: Optional[str] = None
+     pip_extras: Optional[str] = None
+     pinned_package_versions: dict[str, str] = Field(
+         sa_column=Column(
+             JSON,
+             server_default="{}",
+             default={},
+             nullable=True,
+         ),
+     )
+     pip_freeze: Optional[str] = None
+     venv_path: Optional[str] = None
+     venv_size_in_kB: Optional[int] = None
+     venv_file_number: Optional[int] = None
+
+     active: bool = True
+     timestamp_created: datetime = Field(
+         default_factory=get_timestamp,
+         sa_column=Column(DateTime(timezone=True), nullable=False),
+     )
+
+     @property
+     def pip_install_string(self) -> str:
+         """
+         Prepare string to be used in `python -m pip install`.
+         """
+         extras = f"[{self.pip_extras}]" if self.pip_extras is not None else ""
+
+         if self.wheel_path is not None:
+             return f"{self.wheel_path}{extras}"
+         else:
+             if self.version is None:
+                 raise ValueError(
+                     "Cannot run `pip_install_string` with "
+                     f"{self.pkg_name=}, {self.wheel_path=}, {self.version=}."
+                 )
+             return f"{self.pkg_name}{extras}=={self.version}"
+
+     @property
+     def pinned_package_versions_string(self) -> str:
+         """
+         Prepare string to be used in `python -m pip install`.
+         """
+         if self.pinned_package_versions is None:
+             return ""
+         output = " ".join(
+             [
+                 f"{key}=={value}"
+                 for key, value in self.pinned_package_versions.items()
+             ]
+         )
+         return output
+
+
+ class TaskGroupActivityV2(SQLModel, table=True):
+
+     id: Optional[int] = Field(default=None, primary_key=True)
+     user_id: int = Field(foreign_key="user_oauth.id")
+     taskgroupv2_id: Optional[int] = Field(foreign_key="taskgroupv2.id")
+     timestamp_started: datetime = Field(
+         default_factory=get_timestamp,
+         sa_column=Column(DateTime(timezone=True), nullable=False),
+     )
+     pkg_name: str
+     version: str
+     status: str
+     action: str
+     log: Optional[str] = None
+     timestamp_ended: Optional[datetime] = Field(
+         default=None,
+         sa_column=Column(DateTime(timezone=True)),
+     )
fractal_server/app/routes/admin/v1.py
@@ -2,7 +2,6 @@
  Definition of `/admin` routes.
  """
  from datetime import datetime
- from datetime import timezone
  from pathlib import Path
  from typing import Optional

@@ -15,8 +14,6 @@ from fastapi.responses import StreamingResponse
  from sqlalchemy import func
  from sqlmodel import select

- from ....config import get_settings
- from ....syringe import Inject
  from ....utils import get_timestamp
  from ....zip_tools import _zip_folder_to_byte_stream_iterator
  from ...db import AsyncSession
@@ -36,27 +33,11 @@ from ..aux._job import _write_shutdown_file
  from ..aux._runner import _check_shutdown_is_supported
  from fractal_server.app.models import UserOAuth
  from fractal_server.app.routes.auth import current_active_superuser
+ from fractal_server.app.routes.aux._timestamp import _convert_to_db_timestamp

  router_admin_v1 = APIRouter()


- def _convert_to_db_timestamp(dt: datetime) -> datetime:
-     """
-     This function takes a timezone-aware datetime and converts it to UTC.
-     If using SQLite, it also removes the timezone information in order to make
-     the datetime comparable with datetimes in the database.
-     """
-     if dt.tzinfo is None:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=f"The timestamp provided has no timezone information: {dt}",
-         )
-     _dt = dt.astimezone(timezone.utc)
-     if Inject(get_settings).DB_ENGINE == "sqlite":
-         return _dt.replace(tzinfo=None)
-     return _dt
-
-
  @router_admin_v1.get("/project/", response_model=list[ProjectReadV1])
  async def view_project(
      id: Optional[int] = None,
fractal_server/app/routes/admin/v2/job.py
@@ -1,5 +1,4 @@
  from datetime import datetime
- from datetime import timezone
  from pathlib import Path
  from typing import Optional

@@ -19,35 +18,17 @@ from fractal_server.app.models.v2 import ProjectV2
  from fractal_server.app.routes.auth import current_active_superuser
  from fractal_server.app.routes.aux._job import _write_shutdown_file
  from fractal_server.app.routes.aux._runner import _check_shutdown_is_supported
+ from fractal_server.app.routes.aux._timestamp import _convert_to_db_timestamp
  from fractal_server.app.runner.filenames import WORKFLOW_LOG_FILENAME
  from fractal_server.app.schemas.v2 import JobReadV2
  from fractal_server.app.schemas.v2 import JobStatusTypeV2
  from fractal_server.app.schemas.v2 import JobUpdateV2
- from fractal_server.config import get_settings
- from fractal_server.syringe import Inject
  from fractal_server.utils import get_timestamp
  from fractal_server.zip_tools import _zip_folder_to_byte_stream_iterator

  router = APIRouter()


- def _convert_to_db_timestamp(dt: datetime) -> datetime:
-     """
-     This function takes a timezone-aware datetime and converts it to UTC.
-     If using SQLite, it also removes the timezone information in order to make
-     the datetime comparable with datetimes in the database.
-     """
-     if dt.tzinfo is None:
-         raise HTTPException(
-             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-             detail=f"The timestamp provided has no timezone information: {dt}",
-         )
-     _dt = dt.astimezone(timezone.utc)
-     if Inject(get_settings).DB_ENGINE == "sqlite":
-         return _dt.replace(tzinfo=None)
-     return _dt
-
-
  @router.get("/", response_model=list[JobReadV2])
  async def view_job(
      id: Optional[int] = None,
fractal_server/app/routes/admin/v2/task_group.py
@@ -1,3 +1,4 @@
+ from datetime import datetime
  from typing import Optional

  from fastapi import APIRouter
@@ -12,13 +13,16 @@ from sqlmodel import select
  from fractal_server.app.db import AsyncSession
  from fractal_server.app.db import get_async_db
  from fractal_server.app.models import UserOAuth
- from fractal_server.app.models.v2 import CollectionStateV2
+ from fractal_server.app.models.v2 import TaskGroupActivityV2
  from fractal_server.app.models.v2 import TaskGroupV2
  from fractal_server.app.models.v2 import WorkflowTaskV2
  from fractal_server.app.routes.auth import current_active_superuser
  from fractal_server.app.routes.auth._aux_auth import (
      _verify_user_belongs_to_group,
  )
+ from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
+ from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
+ from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
  from fractal_server.app.schemas.v2 import TaskGroupReadV2
  from fractal_server.app.schemas.v2 import TaskGroupUpdateV2
  from fractal_server.app.schemas.v2 import TaskGroupV2OriginEnum
@@ -29,6 +33,42 @@ router = APIRouter()
  logger = set_logger(__name__)


+ @router.get("/activity/", response_model=list[TaskGroupActivityV2Read])
+ async def get_task_group_activity_list(
+     task_group_activity_id: Optional[int] = None,
+     user_id: Optional[int] = None,
+     taskgroupv2_id: Optional[int] = None,
+     pkg_name: Optional[str] = None,
+     status: Optional[TaskGroupActivityStatusV2] = None,
+     action: Optional[TaskGroupActivityActionV2] = None,
+     timestamp_started_min: Optional[datetime] = None,
+     superuser: UserOAuth = Depends(current_active_superuser),
+     db: AsyncSession = Depends(get_async_db),
+ ) -> list[TaskGroupActivityV2Read]:
+
+     stm = select(TaskGroupActivityV2)
+     if task_group_activity_id is not None:
+         stm = stm.where(TaskGroupActivityV2.id == task_group_activity_id)
+     if user_id:
+         stm = stm.where(TaskGroupActivityV2.user_id == user_id)
+     if taskgroupv2_id:
+         stm = stm.where(TaskGroupActivityV2.taskgroupv2_id == taskgroupv2_id)
+     if pkg_name:
+         stm = stm.where(TaskGroupActivityV2.pkg_name.icontains(pkg_name))
+     if status:
+         stm = stm.where(TaskGroupActivityV2.status == status)
+     if action:
+         stm = stm.where(TaskGroupActivityV2.action == action)
+     if timestamp_started_min is not None:
+         stm = stm.where(
+             TaskGroupActivityV2.timestamp_started >= timestamp_started_min
+         )
+
+     res = await db.execute(stm)
+     activities = res.scalars().all()
+     return activities
+
+
  @router.get("/{task_group_id}/", response_model=TaskGroupReadV2)
  async def query_task_group(
      task_group_id: int,
@@ -139,22 +179,22 @@ async def delete_task_group(
              detail=f"TaskV2 {workflow_tasks[0].task_id} is still in use",
          )

-     # Cascade operations: set foreign-keys to null for CollectionStateV2 which
-     # are in relationship with the current TaskGroupV2
-     logger.debug("Start of cascade operations on CollectionStateV2.")
-     stm = select(CollectionStateV2).where(
-         CollectionStateV2.taskgroupv2_id == task_group_id
+     # Cascade operations: set foreign-keys to null for TaskGroupActivityV2
+     # which are in relationship with the current TaskGroupV2
+     logger.debug("Start of cascade operations on TaskGroupActivityV2.")
+     stm = select(TaskGroupActivityV2).where(
+         TaskGroupActivityV2.taskgroupv2_id == task_group_id
      )
      res = await db.execute(stm)
-     collection_states = res.scalars().all()
-     for collection_state in collection_states:
+     task_group_activity_list = res.scalars().all()
+     for task_group_activity in task_group_activity_list:
          logger.debug(
-             f"Setting CollectionStateV2[{collection_state.id}].taskgroupv2_id "
-             "to None."
+             f"Setting TaskGroupActivityV2[{task_group_activity.id}]"
+             ".taskgroupv2_id to None."
          )
-         collection_state.taskgroupv2_id = None
-         db.add(collection_state)
-     logger.debug("End of cascade operations on CollectionStateV2.")
+         task_group_activity.taskgroupv2_id = None
+         db.add(task_group_activity)
+     logger.debug("End of cascade operations on TaskGroupActivityV2.")

      await db.delete(task_group)
      await db.commit()
fractal_server/app/routes/api/v2/__init__.py
@@ -13,6 +13,7 @@ from .task import router as task_router_v2
  from .task_collection import router as task_collection_router_v2
  from .task_collection_custom import router as task_collection_router_v2_custom
  from .task_group import router as task_group_router_v2
+ from .task_group_lifecycle import router as task_group_lifecycle_router_v2
  from .workflow import router as workflow_router_v2
  from .workflow_import import router as workflow_import_router_v2
  from .workflowtask import router as workflowtask_router_v2
@@ -31,13 +32,21 @@ router_api_v2.include_router(submit_job_router_v2, tags=["V2 Job"])

  settings = Inject(get_settings)
  router_api_v2.include_router(
-     task_collection_router_v2, prefix="/task", tags=["V2 Task Collection"]
+     task_collection_router_v2,
+     prefix="/task",
+     tags=["V2 Task Lifecycle"],
  )
  router_api_v2.include_router(
      task_collection_router_v2_custom,
      prefix="/task",
-     tags=["V2 Task Collection"],
+     tags=["V2 Task Lifecycle"],
+ )
+ router_api_v2.include_router(
+     task_group_lifecycle_router_v2,
+     prefix="/task-group",
+     tags=["V2 Task Lifecycle"],
  )
+
  router_api_v2.include_router(task_router_v2, prefix="/task", tags=["V2 Task"])
  router_api_v2.include_router(
      task_group_router_v2, prefix="/task-group", tags=["V2 TaskGroup"]
fractal_server/app/routes/api/v2/{_aux_functions_task_collection.py → _aux_functions_task_lifecycle.py}
@@ -4,7 +4,11 @@ from fastapi import HTTPException
  from fastapi import status
  from httpx import AsyncClient
  from httpx import TimeoutException
+ from sqlmodel import select

+ from fractal_server.app.db import AsyncSession
+ from fractal_server.app.models.v2 import TaskGroupActivityV2
+ from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
  from fractal_server.logger import set_logger


@@ -122,3 +126,42 @@ async def get_package_version_from_pypi(
      # Case 3: `version` is unset and we use latest
      logger.info(f"No version requested, returning {latest_version=}.")
      return latest_version
+
+
+ async def check_no_ongoing_activity(
+     *,
+     task_group_id: int,
+     db: AsyncSession,
+ ) -> None:
+     """
+     Find ongoing activities for the same task group.
+
+     Arguments:
+         task_group_id:
+         db:
+     """
+     # DB query
+     stm = (
+         select(TaskGroupActivityV2)
+         .where(TaskGroupActivityV2.taskgroupv2_id == task_group_id)
+         .where(TaskGroupActivityV2.status == TaskGroupActivityStatusV2.ONGOING)
+     )
+     res = await db.execute(stm)
+     ongoing_activities = res.scalars().all()
+
+     if ongoing_activities == []:
+         # All good, exit
+         return
+
+     msg = "Found ongoing activities for the same task-group:"
+     for ind, activity in enumerate(ongoing_activities):
+         msg = (
+             f"{msg}\n{ind + 1}) "
+             f"Action={activity.action}, "
+             f"status={activity.status}, "
+             f"timestamp_started={activity.timestamp_started}."
+         )
+     raise HTTPException(
+         status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+         detail=msg,
+     )
fractal_server/app/routes/api/v2/_aux_functions_tasks.py
@@ -13,7 +13,7 @@ from fractal_server.app.db import AsyncSession
  from fractal_server.app.models import LinkUserGroup
  from fractal_server.app.models import UserGroup
  from fractal_server.app.models import UserOAuth
- from fractal_server.app.models.v2 import CollectionStateV2
+ from fractal_server.app.models.v2 import TaskGroupActivityV2
  from fractal_server.app.models.v2 import TaskGroupV2
  from fractal_server.app.models.v2 import TaskV2
  from fractal_server.app.models.v2 import WorkflowTaskV2
@@ -21,6 +21,7 @@ from fractal_server.app.routes.auth._aux_auth import _get_default_usergroup_id
  from fractal_server.app.routes.auth._aux_auth import (
      _verify_user_belongs_to_group,
  )
+ from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
  from fractal_server.logger import set_logger

  logger = set_logger(__name__)
@@ -219,27 +220,32 @@ async def _get_valid_user_group_id(
      return user_group_id


- async def _get_collection_status_message(
-     task_group: TaskGroupV2, db: AsyncSession
+ async def _get_collection_task_group_activity_status_message(
+     task_group_id: int,
+     db: AsyncSession,
  ) -> str:
+
      res = await db.execute(
-         select(CollectionStateV2).where(
-             CollectionStateV2.taskgroupv2_id == task_group.id
-         )
+         select(TaskGroupActivityV2)
+         .where(TaskGroupActivityV2.taskgroupv2_id == task_group_id)
+         .where(TaskGroupActivityV2.action == TaskGroupActivityActionV2.COLLECT)
      )
-     states = res.scalars().all()
-     if len(states) > 1:
+     task_group_activity_list = res.scalars().all()
+     if len(task_group_activity_list) > 1:
          msg = (
-             "Expected one CollectionStateV2 associated to TaskGroup "
-             f"{task_group.id}, found {len(states)} "
-             f"(IDs: {[state.id for state in states]}).\n"
+             "\nWarning: "
+             "Expected only one TaskGroupActivityV2 associated to TaskGroup "
+             f"{task_group_id}, found {len(task_group_activity_list)} "
+             f"(IDs: {[tga.id for tga in task_group_activity_list]})."
              "Warning: this should have not happened, please contact an admin."
          )
-     elif len(states) == 1:
+     elif len(task_group_activity_list) == 1:
          msg = (
-             f"\nThere exists a task-collection state (ID={states[0].id}) for "
-             f"such task group (ID={task_group.id}), with status "
-             f"'{states[0].data.get('status')}'."
+             "\nNote:"
+             "There exists another task-group collection "
+             f"(activity ID={task_group_activity_list[0].id}) for "
+             f"this task group (ID={task_group_id}), with status "
+             f"'{task_group_activity_list[0].status}'."
          )
      else:
          msg = ""
@@ -273,7 +279,9 @@ async def _verify_non_duplication_user_constraint(
                  "This should have not happened: please contact an admin."
              ),
          )
-     state_msg = await _get_collection_status_message(duplicate[0], db)
+     state_msg = await _get_collection_task_group_activity_status_message(
+         duplicate[0].id, db
+     )
      raise HTTPException(
          status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
          detail=(
@@ -313,7 +321,9 @@ async def _verify_non_duplication_group_constraint(
                  "This should have not happened: please contact an admin."
              ),
          )
-     state_msg = await _get_collection_status_message(duplicate[0], db)
+     state_msg = await _get_collection_task_group_activity_status_message(
+         duplicate[0].id, db
+     )
      raise HTTPException(
          status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
          detail=(