fractal-server 2.14.0a2__py3-none-any.whl → 2.14.0a4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/__main__.py +3 -1
  3. fractal_server/app/history/__init__.py +4 -4
  4. fractal_server/app/history/image_updates.py +124 -142
  5. fractal_server/app/history/status_enum.py +2 -2
  6. fractal_server/app/models/v2/__init__.py +6 -4
  7. fractal_server/app/models/v2/history.py +44 -20
  8. fractal_server/app/routes/admin/v2/task.py +1 -1
  9. fractal_server/app/routes/api/__init__.py +1 -1
  10. fractal_server/app/routes/api/v2/__init__.py +4 -0
  11. fractal_server/app/routes/api/v2/_aux_functions_history.py +49 -0
  12. fractal_server/app/routes/api/v2/dataset.py +0 -12
  13. fractal_server/app/routes/api/v2/history.py +302 -176
  14. fractal_server/app/routes/api/v2/project.py +1 -26
  15. fractal_server/app/routes/api/v2/status_legacy.py +168 -0
  16. fractal_server/app/routes/api/v2/workflow.py +2 -17
  17. fractal_server/app/routes/api/v2/workflowtask.py +41 -71
  18. fractal_server/app/routes/auth/oauth.py +5 -3
  19. fractal_server/app/runner/executors/base_runner.py +2 -1
  20. fractal_server/app/runner/executors/local/_submit_setup.py +5 -13
  21. fractal_server/app/runner/executors/local/runner.py +10 -55
  22. fractal_server/app/runner/executors/slurm_common/_slurm_config.py +1 -1
  23. fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +1 -1
  24. fractal_server/app/runner/executors/slurm_common/remote.py +1 -1
  25. fractal_server/app/runner/executors/slurm_sudo/runner.py +171 -108
  26. fractal_server/app/runner/v2/__init__.py +2 -22
  27. fractal_server/app/runner/v2/_slurm_ssh.py +1 -1
  28. fractal_server/app/runner/v2/_slurm_sudo.py +1 -1
  29. fractal_server/app/runner/v2/runner.py +47 -59
  30. fractal_server/app/runner/v2/runner_functions.py +185 -69
  31. fractal_server/app/schemas/_validators.py +13 -24
  32. fractal_server/app/schemas/user.py +10 -7
  33. fractal_server/app/schemas/user_settings.py +9 -21
  34. fractal_server/app/schemas/v2/dataset.py +8 -6
  35. fractal_server/app/schemas/v2/job.py +9 -5
  36. fractal_server/app/schemas/v2/manifest.py +3 -7
  37. fractal_server/app/schemas/v2/project.py +9 -7
  38. fractal_server/app/schemas/v2/task.py +41 -77
  39. fractal_server/app/schemas/v2/task_collection.py +14 -32
  40. fractal_server/app/schemas/v2/task_group.py +10 -9
  41. fractal_server/app/schemas/v2/workflow.py +10 -11
  42. fractal_server/app/security/__init__.py +3 -3
  43. fractal_server/app/security/signup_email.py +2 -2
  44. fractal_server/config.py +33 -34
  45. fractal_server/migrations/versions/fbce16ff4e47_new_history_items.py +120 -0
  46. fractal_server/tasks/v2/templates/2_pip_install.sh +1 -1
  47. fractal_server/tasks/v2/templates/4_pip_show.sh +1 -1
  48. fractal_server/tasks/v2/utils_templates.py +6 -0
  49. {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a4.dist-info}/METADATA +1 -1
  50. {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a4.dist-info}/RECORD +53 -54
  51. fractal_server/app/runner/executors/slurm_sudo/_executor_wait_thread.py +0 -130
  52. fractal_server/app/schemas/v2/history.py +0 -23
  53. fractal_server/migrations/versions/87cd72a537a2_add_historyitem_table.py +0 -68
  54. fractal_server/migrations/versions/954ddc64425a_image_status.py +0 -63
  55. {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a4.dist-info}/LICENSE +0 -0
  56. {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a4.dist-info}/WHEEL +0 -0
  57. {fractal_server-2.14.0a2.dist-info → fractal_server-2.14.0a4.dist-info}/entry_points.txt +0 -0
@@ -1 +1 @@
1
- __VERSION__ = "2.14.0a2"
1
+ __VERSION__ = "2.14.0a4"
@@ -123,7 +123,9 @@ def set_db(skip_init_data: bool = False):
123
123
  asyncio.run(
124
124
  _create_first_user(
125
125
  email=settings.FRACTAL_DEFAULT_ADMIN_EMAIL,
126
- password=settings.FRACTAL_DEFAULT_ADMIN_PASSWORD,
126
+ password=(
127
+ settings.FRACTAL_DEFAULT_ADMIN_PASSWORD.get_secret_value()
128
+ ),
127
129
  username=settings.FRACTAL_DEFAULT_ADMIN_USERNAME,
128
130
  is_superuser=True,
129
131
  is_verified=True,
@@ -1,4 +1,4 @@
1
- from .image_updates import update_all_images # noqa: F401
2
- from .image_updates import update_single_image # noqa
3
- from .image_updates import update_single_image_logfile # noqa
4
- from .status_enum import HistoryItemImageStatus # noqa: F401
1
+ # from .image_updates import update_all_images # noqa: F401
2
+ # from .image_updates import update_single_image # noqa
3
+ # from .image_updates import update_single_image_logfile # noqa
4
+ # from .status_enum import XXXStatus # noqa: F401
@@ -1,142 +1,124 @@
1
- from typing import Optional
2
-
3
- from sqlalchemy.orm import Session
4
- from sqlalchemy.orm.attributes import flag_modified
5
- from sqlmodel import select
6
-
7
- from fractal_server.app.db import get_sync_db
8
- from fractal_server.app.history.status_enum import HistoryItemImageStatus
9
- from fractal_server.app.models.v2 import HistoryItemV2
10
- from fractal_server.app.models.v2 import ImageStatus
11
- from fractal_server.logger import set_logger
12
-
13
- logger = set_logger(__name__)
14
-
15
-
16
- def _update_single_image_status(
17
- *,
18
- zarr_url: str,
19
- workflowtask_id: int,
20
- dataset_id: int,
21
- status: HistoryItemImageStatus,
22
- db: Session,
23
- commit: bool = True,
24
- logfile: Optional[str] = None,
25
- ) -> None:
26
- image_status = db.get(
27
- ImageStatus,
28
- (
29
- zarr_url,
30
- workflowtask_id,
31
- dataset_id,
32
- ),
33
- )
34
- if image_status is None:
35
- raise RuntimeError("This should have not happened")
36
- image_status.status = status
37
- if logfile is not None:
38
- image_status.logfile = logfile
39
- db.add(image_status)
40
- if commit:
41
- db.commit()
42
-
43
-
44
- def update_single_image(
45
- *,
46
- history_item_id: int,
47
- zarr_url: str,
48
- status: HistoryItemImageStatus,
49
- ) -> None:
50
-
51
- logger.debug(
52
- f"[update_single_image] {history_item_id=}, {status=}, {zarr_url=}"
53
- )
54
-
55
- # Note: thanks to `with_for_update`, a lock is acquired and kept
56
- # until `db.commit()`
57
- with next(get_sync_db()) as db:
58
- stm = (
59
- select(HistoryItemV2)
60
- .where(HistoryItemV2.id == history_item_id)
61
- .with_for_update(nowait=False)
62
- )
63
- history_item = db.execute(stm).scalar_one()
64
- history_item.images[zarr_url] = status
65
- flag_modified(history_item, "images")
66
- db.commit()
67
-
68
- _update_single_image_status(
69
- zarr_url=zarr_url,
70
- dataset_id=history_item.dataset_id,
71
- workflowtask_id=history_item.workflowtask_id,
72
- commit=True,
73
- status=status,
74
- db=db,
75
- )
76
-
77
-
78
- def update_single_image_logfile(
79
- *,
80
- history_item_id: int,
81
- zarr_url: str,
82
- logfile: str,
83
- ) -> None:
84
-
85
- logger.debug(
86
- f"[update_single_image_logfile] {history_item_id=}, {logfile=}, {zarr_url=}"
87
- )
88
-
89
- with next(get_sync_db()) as db:
90
- history_item = db.get(HistoryItemV2, history_item_id)
91
- image_status = db.get(
92
- ImageStatus,
93
- (
94
- zarr_url,
95
- history_item.workflowtask_id,
96
- history_item.dataset_id,
97
- ),
98
- )
99
- if image_status is None:
100
- raise RuntimeError("This should have not happened")
101
- image_status.logfile = logfile
102
- db.merge(image_status)
103
- db.commit()
104
-
105
-
106
- def update_all_images(
107
- *,
108
- history_item_id: int,
109
- status: HistoryItemImageStatus,
110
- logfile: Optional[str] = None,
111
- ) -> None:
112
-
113
- logger.debug(f"[update_all_images] {history_item_id=}, {status=}")
114
-
115
- # Note: thanks to `with_for_update`, a lock is acquired and kept
116
- # until `db.commit()`
117
- stm = (
118
- select(HistoryItemV2)
119
- .where(HistoryItemV2.id == history_item_id)
120
- .with_for_update(nowait=False)
121
- )
122
- with next(get_sync_db()) as db:
123
- history_item = db.execute(stm).scalar_one()
124
- new_images = {
125
- zarr_url: status for zarr_url in history_item.images.keys()
126
- }
127
- history_item.images = new_images
128
- flag_modified(history_item, "images")
129
- db.commit()
130
-
131
- # FIXME: Make this a bulk edit, if possible
132
- for ind, zarr_url in enumerate(history_item.images.keys()):
133
- _update_single_image_status(
134
- zarr_url=zarr_url,
135
- dataset_id=history_item.dataset_id,
136
- workflowtask_id=history_item.workflowtask_id,
137
- commit=False,
138
- status=status,
139
- logfile=logfile,
140
- db=db,
141
- )
142
- db.commit()
1
+ # from typing import Optional
2
+ # from sqlalchemy.orm import Session
3
+ # from sqlalchemy.orm.attributes import flag_modified
4
+ # from sqlmodel import select
5
+ # from fractal_server.app.db import get_sync_db
6
+ # from fractal_server.app.history.status_enum import HistoryItemImageStatus
7
+ # from fractal_server.app.models.v2 import HistoryItemV2
8
+ # from fractal_server.app.models.v2 import ImageStatus
9
+ # from fractal_server.logger import set_logger
10
+ # logger = set_logger(__name__)
11
+ # def _update_single_image_status(
12
+ # *,
13
+ # zarr_url: str,
14
+ # workflowtask_id: int,
15
+ # dataset_id: int,
16
+ # status: HistoryItemImageStatus,
17
+ # db: Session,
18
+ # commit: bool = True,
19
+ # logfile: Optional[str] = None,
20
+ # ) -> None:
21
+ # image_status = db.get(
22
+ # ImageStatus,
23
+ # (
24
+ # zarr_url,
25
+ # workflowtask_id,
26
+ # dataset_id,
27
+ # ),
28
+ # )
29
+ # if image_status is None:
30
+ # raise RuntimeError("This should have not happened")
31
+ # image_status.status = status
32
+ # if logfile is not None:
33
+ # image_status.logfile = logfile
34
+ # db.add(image_status)
35
+ # if commit:
36
+ # db.commit()
37
+ # def update_single_image(
38
+ # *,
39
+ # history_item_id: int,
40
+ # zarr_url: str,
41
+ # status: HistoryItemImageStatus,
42
+ # ) -> None:
43
+ # logger.debug(
44
+ # f"[update_single_image] {history_item_id=}, {status=}, {zarr_url=}"
45
+ # )
46
+ # # Note: thanks to `with_for_update`, a lock is acquired and kept
47
+ # # until `db.commit()`
48
+ # with next(get_sync_db()) as db:
49
+ # stm = (
50
+ # select(HistoryItemV2)
51
+ # .where(HistoryItemV2.id == history_item_id)
52
+ # .with_for_update(nowait=False)
53
+ # )
54
+ # history_item = db.execute(stm).scalar_one()
55
+ # history_item.images[zarr_url] = status
56
+ # flag_modified(history_item, "images")
57
+ # db.commit()
58
+ # _update_single_image_status(
59
+ # zarr_url=zarr_url,
60
+ # dataset_id=history_item.dataset_id,
61
+ # workflowtask_id=history_item.workflowtask_id,
62
+ # commit=True,
63
+ # status=status,
64
+ # db=db,
65
+ # )
66
+ # def update_single_image_logfile(
67
+ # *,
68
+ # history_item_id: int,
69
+ # zarr_url: str,
70
+ # logfile: str,
71
+ # ) -> None:
72
+ # logger.debug(
73
+ # "[update_single_image_logfile] "
74
+ # f"{history_item_id=}, {logfile=}, {zarr_url=}"
75
+ # )
76
+ # with next(get_sync_db()) as db:
77
+ # history_item = db.get(HistoryItemV2, history_item_id)
78
+ # image_status = db.get(
79
+ # ImageStatus,
80
+ # (
81
+ # zarr_url,
82
+ # history_item.workflowtask_id,
83
+ # history_item.dataset_id,
84
+ # ),
85
+ # )
86
+ # if image_status is None:
87
+ # raise RuntimeError("This should have not happened")
88
+ # image_status.logfile = logfile
89
+ # db.merge(image_status)
90
+ # db.commit()
91
+ # def update_all_images(
92
+ # *,
93
+ # history_item_id: int,
94
+ # status: HistoryItemImageStatus,
95
+ # logfile: Optional[str] = None,
96
+ # ) -> None:
97
+ # logger.debug(f"[update_all_images] {history_item_id=}, {status=}")
98
+ # # Note: thanks to `with_for_update`, a lock is acquired and kept
99
+ # # until `db.commit()`
100
+ # stm = (
101
+ # select(HistoryItemV2)
102
+ # .where(HistoryItemV2.id == history_item_id)
103
+ # .with_for_update(nowait=False)
104
+ # )
105
+ # with next(get_sync_db()) as db:
106
+ # history_item = db.execute(stm).scalar_one()
107
+ # new_images = {
108
+ # zarr_url: status for zarr_url in history_item.images.keys()
109
+ # }
110
+ # history_item.images = new_images
111
+ # flag_modified(history_item, "images")
112
+ # db.commit()
113
+ # # FIXME: Make this a bulk edit, if possible
114
+ # for ind, zarr_url in enumerate(history_item.images.keys()):
115
+ # _update_single_image_status(
116
+ # zarr_url=zarr_url,
117
+ # dataset_id=history_item.dataset_id,
118
+ # workflowtask_id=history_item.workflowtask_id,
119
+ # commit=False,
120
+ # status=status,
121
+ # logfile=logfile,
122
+ # db=db,
123
+ # )
124
+ # db.commit()
@@ -1,9 +1,9 @@
1
1
  from enum import Enum
2
2
 
3
3
 
4
- class HistoryItemImageStatus(str, Enum):
4
+ class XXXStatus(str, Enum):
5
5
  """
6
- Available image-status values within a `HistoryItemV2`
6
+ Available status for images
7
7
 
8
8
  Attributes:
9
9
  SUBMITTED:
@@ -5,8 +5,9 @@ from ..linkuserproject import LinkUserProjectV2
5
5
  from .accounting import AccountingRecord
6
6
  from .accounting import AccountingRecordSlurm
7
7
  from .dataset import DatasetV2
8
- from .history import HistoryItemV2
9
- from .history import ImageStatus
8
+ from .history import HistoryImageCache
9
+ from .history import HistoryRun
10
+ from .history import HistoryUnit
10
11
  from .job import JobV2
11
12
  from .project import ProjectV2
12
13
  from .task import TaskV2
@@ -27,6 +28,7 @@ __all__ = [
27
28
  "TaskV2",
28
29
  "WorkflowV2",
29
30
  "WorkflowTaskV2",
30
- "HistoryItemV2",
31
- "ImageStatus",
31
+ "HistoryRun",
32
+ "HistoryUnit",
33
+ "HistoryImageCache",
32
34
  ]
@@ -4,6 +4,8 @@ from typing import Optional
4
4
 
5
5
  from pydantic import ConfigDict
6
6
  from sqlalchemy import Column
7
+ from sqlalchemy import String
8
+ from sqlalchemy.dialects.postgresql import ARRAY
7
9
  from sqlalchemy.dialects.postgresql import JSONB
8
10
  from sqlalchemy.types import DateTime
9
11
  from sqlmodel import Field
@@ -12,42 +14,64 @@ from sqlmodel import SQLModel
12
14
  from ....utils import get_timestamp
13
15
 
14
16
 
15
- class HistoryItemV2(SQLModel, table=True):
17
+ class HistoryRun(SQLModel, table=True):
16
18
  model_config = ConfigDict(arbitrary_types_allowed=True)
17
19
 
18
20
  id: Optional[int] = Field(default=None, primary_key=True)
19
- dataset_id: int = Field(foreign_key="datasetv2.id")
21
+ dataset_id: int = Field(
22
+ foreign_key="datasetv2.id",
23
+ ondelete="CASCADE",
24
+ )
20
25
  workflowtask_id: Optional[int] = Field(
21
26
  foreign_key="workflowtaskv2.id",
22
27
  default=None,
28
+ ondelete="SET NULL",
23
29
  )
24
- timestamp_started: datetime = Field(
25
- default_factory=get_timestamp,
26
- sa_column=Column(
27
- DateTime(timezone=True),
28
- nullable=False,
29
- ),
30
- )
30
+
31
31
  workflowtask_dump: dict[str, Any] = Field(
32
- sa_column=Column(JSONB, nullable=False)
32
+ sa_column=Column(JSONB, nullable=False),
33
33
  )
34
34
  task_group_dump: dict[str, Any] = Field(
35
- sa_column=Column(JSONB, nullable=False)
35
+ sa_column=Column(JSONB, nullable=False),
36
36
  )
37
- parameters_hash: str
37
+
38
+ timestamp_started: datetime = Field(
39
+ sa_column=Column(DateTime(timezone=True), nullable=False),
40
+ default_factory=get_timestamp,
41
+ )
42
+ status: str
38
43
  num_available_images: int
39
- num_current_images: int
40
- images: dict[str, str] = Field(sa_column=Column(JSONB, nullable=False))
41
44
 
42
45
 
43
- class ImageStatus(SQLModel, table=True):
46
+ class HistoryUnit(SQLModel, table=True):
47
+ id: Optional[int] = Field(default=None, primary_key=True)
48
+ history_run_id: int = Field(
49
+ foreign_key="historyrun.id",
50
+ ondelete="CASCADE",
51
+ )
52
+
53
+ logfile: Optional[str]
54
+ status: str
55
+ zarr_urls: list[str] = Field(
56
+ sa_column=Column(ARRAY(String)),
57
+ default_factory=list,
58
+ )
59
+
44
60
 
61
+ class HistoryImageCache(SQLModel, table=True):
45
62
  zarr_url: str = Field(primary_key=True)
63
+ dataset_id: int = Field(
64
+ primary_key=True,
65
+ foreign_key="datasetv2.id",
66
+ ondelete="CASCADE",
67
+ )
46
68
  workflowtask_id: int = Field(
47
- primary_key=True, foreign_key="workflowtaskv2.id"
69
+ primary_key=True,
70
+ foreign_key="workflowtaskv2.id",
71
+ ondelete="CASCADE",
48
72
  )
49
- dataset_id: int = Field(primary_key=True, foreign_key="datasetv2.id")
50
73
 
51
- parameters_hash: str
52
- status: str
53
- logfile: Optional[str]
74
+ latest_history_unit_id: int = Field(
75
+ foreign_key="historyunit.id",
76
+ ondelete="CASCADE",
77
+ )
@@ -68,7 +68,7 @@ async def query_tasks(
68
68
  db: AsyncSession = Depends(get_async_db),
69
69
  ) -> list[TaskV2Info]:
70
70
  """
71
- Query `TaskV2` table and get informations about related items
71
+ Query `TaskV2` table and get information about related items
72
72
  (WorkflowV2s and ProjectV2s)
73
73
 
74
74
  Args:
@@ -25,4 +25,4 @@ async def alive():
25
25
  @router_api.get("/settings/")
26
26
  async def view_settings(user: UserOAuth = Depends(current_active_superuser)):
27
27
  settings = Inject(get_settings)
28
- return settings.get_sanitized()
28
+ return settings.model_dump()
@@ -8,6 +8,7 @@ from .history import router as history_router_v2
8
8
  from .images import router as images_routes_v2
9
9
  from .job import router as job_router_v2
10
10
  from .project import router as project_router_v2
11
+ from .status_legacy import router as status_legacy_router_v2
11
12
  from .submit import router as submit_job_router_v2
12
13
  from .task import router as task_router_v2
13
14
  from .task_collection import router as task_collection_router_v2
@@ -29,6 +30,9 @@ router_api_v2.include_router(images_routes_v2, tags=["V2 Images"])
29
30
  router_api_v2.include_router(project_router_v2, tags=["V2 Project"])
30
31
  router_api_v2.include_router(submit_job_router_v2, tags=["V2 Job"])
31
32
  router_api_v2.include_router(history_router_v2, tags=["V2 History"])
33
+ router_api_v2.include_router(
34
+ status_legacy_router_v2, tags=["V2 Status Legacy"]
35
+ )
32
36
 
33
37
 
34
38
  settings = Inject(get_settings)
@@ -0,0 +1,49 @@
1
+ from pathlib import Path
2
+
3
+ from fastapi import HTTPException
4
+ from fastapi import status
5
+
6
+ from fractal_server.app.db import AsyncSession
7
+ from fractal_server.app.models import WorkflowTaskV2
8
+ from fractal_server.app.models.v2 import HistoryUnit
9
+
10
+
11
+ async def get_history_unit_or_404(
12
+ *, history_unit_id: int, db: AsyncSession
13
+ ) -> HistoryUnit:
14
+ """
15
+ Get an existing HistoryUnit or raise a 404.
16
+
17
+ Arguments:
18
+ history_unit_id: The `HistoryUnit` id
19
+ db: An asynchronous db session
20
+ """
21
+ history_unit = await db.get(HistoryUnit, history_unit_id)
22
+ if history_unit is None:
23
+ raise HTTPException(
24
+ status_code=status.HTTP_404_NOT_FOUND,
25
+ detail=f"HistoryUnit {history_unit_id} not found",
26
+ )
27
+ return history_unit
28
+
29
+
30
+ def read_log_file(
31
+ *,
32
+ logfile: str | None,
33
+ wftask: WorkflowTaskV2,
34
+ dataset_id: int,
35
+ ):
36
+ if logfile is None or not Path(logfile).exists():
37
+ return (
38
+ f"Logs for task '{wftask.task.name}' in dataset "
39
+ f"{dataset_id} are not available."
40
+ )
41
+
42
+ try:
43
+ with open(logfile, "r") as f:
44
+ return f.read()
45
+ except Exception as e:
46
+ return (
47
+ f"Error while retrieving logs for task '{wftask.task.name}' "
48
+ f"in dataset {dataset_id}. Original error: {str(e)}."
49
+ )
@@ -5,14 +5,11 @@ from fastapi import Depends
5
5
  from fastapi import HTTPException
6
6
  from fastapi import Response
7
7
  from fastapi import status
8
- from sqlmodel import delete
9
8
  from sqlmodel import select
10
9
 
11
10
  from ....db import AsyncSession
12
11
  from ....db import get_async_db
13
12
  from ....models.v2 import DatasetV2
14
- from ....models.v2 import HistoryItemV2
15
- from ....models.v2 import ImageStatus
16
13
  from ....models.v2 import JobV2
17
14
  from ....models.v2 import ProjectV2
18
15
  from ....schemas.v2 import DatasetCreateV2
@@ -223,15 +220,6 @@ async def delete_dataset(
223
220
  for job in jobs:
224
221
  job.dataset_id = None
225
222
 
226
- # Cascade operations: delete history items and image status which are in
227
- # relationship with the current dataset
228
-
229
- stm = delete(HistoryItemV2).where(HistoryItemV2.dataset_id == dataset_id)
230
- await db.execute(stm)
231
-
232
- stm = delete(ImageStatus).where(ImageStatus.dataset_id == dataset_id)
233
- await db.execute(stm)
234
-
235
223
  # Delete dataset
236
224
  await db.delete(dataset)
237
225
  await db.commit()