fractal-server 2.13.0__py3-none-any.whl → 2.13.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/v2/__init__.py +4 -0
  3. fractal_server/app/models/v2/accounting.py +35 -0
  4. fractal_server/app/routes/admin/v2/__init__.py +4 -0
  5. fractal_server/app/routes/admin/v2/accounting.py +108 -0
  6. fractal_server/app/routes/admin/v2/impersonate.py +35 -0
  7. fractal_server/app/routes/admin/v2/job.py +5 -13
  8. fractal_server/app/routes/admin/v2/task_group.py +4 -12
  9. fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +3 -3
  10. fractal_server/app/routes/api/v2/submit.py +1 -0
  11. fractal_server/app/routes/api/v2/task_group.py +2 -5
  12. fractal_server/app/routes/aux/__init__.py +0 -20
  13. fractal_server/app/runner/v2/__init__.py +4 -0
  14. fractal_server/app/runner/v2/_local/__init__.py +3 -0
  15. fractal_server/app/runner/v2/_slurm_ssh/__init__.py +2 -0
  16. fractal_server/app/runner/v2/_slurm_sudo/__init__.py +2 -0
  17. fractal_server/app/runner/v2/runner.py +16 -6
  18. fractal_server/app/runner/v2/runner_functions.py +12 -11
  19. fractal_server/app/schemas/v2/__init__.py +1 -0
  20. fractal_server/app/schemas/v2/accounting.py +18 -0
  21. fractal_server/config.py +56 -50
  22. fractal_server/migrations/versions/af1ef1c83c9b_add_accounting_tables.py +57 -0
  23. fractal_server/tasks/v2/utils_background.py +1 -1
  24. {fractal_server-2.13.0.dist-info → fractal_server-2.13.1.dist-info}/METADATA +1 -1
  25. {fractal_server-2.13.0.dist-info → fractal_server-2.13.1.dist-info}/RECORD +28 -23
  26. {fractal_server-2.13.0.dist-info → fractal_server-2.13.1.dist-info}/LICENSE +0 -0
  27. {fractal_server-2.13.0.dist-info → fractal_server-2.13.1.dist-info}/WHEEL +0 -0
  28. {fractal_server-2.13.0.dist-info → fractal_server-2.13.1.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py CHANGED
@@ -1 +1 @@
- __VERSION__ = "2.13.0"
+ __VERSION__ = "2.13.1"

fractal_server/app/models/v2/__init__.py CHANGED
@@ -2,6 +2,8 @@
  v2 `models` module
  """
  from ..linkuserproject import LinkUserProjectV2
+ from .accounting import AccountingRecord
+ from .accounting import AccountingRecordSlurm
  from .dataset import DatasetV2
  from .job import JobV2
  from .project import ProjectV2
@@ -12,6 +14,8 @@ from .workflow import WorkflowV2
  from .workflowtask import WorkflowTaskV2

  __all__ = [
+     "AccountingRecord",
+     "AccountingRecordSlurm",
      "LinkUserProjectV2",
      "DatasetV2",
      "JobV2",

fractal_server/app/models/v2/accounting.py ADDED
@@ -0,0 +1,35 @@
+ from datetime import datetime
+ from typing import Optional
+
+ from sqlalchemy import Column
+ from sqlalchemy import Integer
+ from sqlalchemy.dialects.postgresql import ARRAY
+ from sqlalchemy.types import DateTime
+ from sqlmodel import Field
+ from sqlmodel import SQLModel
+
+ from ....utils import get_timestamp
+
+
+ class AccountingRecord(SQLModel, table=True):
+     id: Optional[int] = Field(default=None, primary_key=True)
+     user_id: int = Field(foreign_key="user_oauth.id", nullable=False)
+     timestamp: datetime = Field(
+         default_factory=get_timestamp,
+         sa_column=Column(DateTime(timezone=True), nullable=False),
+     )
+     num_tasks: int
+     num_new_images: int
+
+
+ class AccountingRecordSlurm(SQLModel, table=True):
+     id: Optional[int] = Field(default=None, primary_key=True)
+     user_id: int = Field(foreign_key="user_oauth.id", nullable=False)
+     timestamp: datetime = Field(
+         default_factory=get_timestamp,
+         sa_column=Column(DateTime(timezone=True), nullable=False),
+     )
+     slurm_job_ids: list[int] = Field(
+         default_factory=list,
+         sa_column=Column(ARRAY(Integer)),
+     )

fractal_server/app/routes/admin/v2/__init__.py CHANGED
@@ -3,6 +3,8 @@
  """
  from fastapi import APIRouter

+ from .accounting import router as accounting_router
+ from .impersonate import router as impersonate_router
  from .job import router as job_router
  from .project import router as project_router
  from .task import router as task_router
@@ -11,6 +13,7 @@ from .task_group_lifecycle import router as task_group_lifecycle_router

  router_admin_v2 = APIRouter()

+ router_admin_v2.include_router(accounting_router, prefix="/accounting")
  router_admin_v2.include_router(job_router, prefix="/job")
  router_admin_v2.include_router(project_router, prefix="/project")
  router_admin_v2.include_router(task_router, prefix="/task")
@@ -18,3 +21,4 @@ router_admin_v2.include_router(task_group_router, prefix="/task-group")
  router_admin_v2.include_router(
      task_group_lifecycle_router, prefix="/task-group"
  )
+ router_admin_v2.include_router(impersonate_router, prefix="/impersonate")

fractal_server/app/routes/admin/v2/accounting.py ADDED
@@ -0,0 +1,108 @@
+ from itertools import chain
+ from typing import Optional
+
+ from fastapi import APIRouter
+ from fastapi import Depends
+ from fastapi import HTTPException
+ from fastapi import Query
+ from fastapi import status
+ from fastapi.responses import JSONResponse
+ from pydantic import BaseModel
+ from pydantic.types import AwareDatetime
+ from sqlmodel import func
+ from sqlmodel import select
+
+ from fractal_server.app.db import AsyncSession
+ from fractal_server.app.db import get_async_db
+ from fractal_server.app.models import UserOAuth
+ from fractal_server.app.models.v2 import AccountingRecord
+ from fractal_server.app.models.v2 import AccountingRecordSlurm
+ from fractal_server.app.routes.auth import current_active_superuser
+ from fractal_server.app.schemas.v2 import AccountingRecordRead
+
+
+ class AccountingQuery(BaseModel):
+     user_id: Optional[int] = None
+     timestamp_min: Optional[AwareDatetime] = None
+     timestamp_max: Optional[AwareDatetime] = None
+
+
+ class AccountingPage(BaseModel):
+     total_count: int
+     page_size: int
+     current_page: int
+     records: list[AccountingRecordRead]
+
+
+ router = APIRouter()
+
+
+ @router.post("/", response_model=AccountingPage)
+ async def query_accounting(
+     query: AccountingQuery,
+     # pagination
+     page: int = Query(default=1, ge=1),
+     page_size: Optional[int] = Query(default=None, ge=1),
+     # dependencies
+     superuser: UserOAuth = Depends(current_active_superuser),
+     db: AsyncSession = Depends(get_async_db),
+ ) -> AccountingPage:
+
+     if page_size is None and page > 1:
+         raise HTTPException(
+             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+             detail=(f"Invalid pagination parameters: {page=}, {page_size=}."),
+         )
+
+     stm = select(AccountingRecord).order_by(AccountingRecord.id)
+     stm_count = select(func.count(AccountingRecord.id))
+     if query.user_id is not None:
+         stm = stm.where(AccountingRecord.user_id == query.user_id)
+         stm_count = stm_count.where(AccountingRecord.user_id == query.user_id)
+     if query.timestamp_min is not None:
+         stm = stm.where(AccountingRecord.timestamp >= query.timestamp_min)
+         stm_count = stm_count.where(
+             AccountingRecord.timestamp >= query.timestamp_min
+         )
+     if query.timestamp_max is not None:
+         stm = stm.where(AccountingRecord.timestamp <= query.timestamp_max)
+         stm_count = stm_count.where(
+             AccountingRecord.timestamp <= query.timestamp_max
+         )
+     if page_size is not None:
+         stm = stm.offset((page - 1) * page_size).limit(page_size)
+
+     res = await db.execute(stm)
+     records = res.scalars().all()
+     res_total_count = await db.execute(stm_count)
+     total_count = res_total_count.scalar()
+
+     actual_page_size = page_size or len(records)
+     return AccountingPage(
+         total_count=total_count,
+         page_size=actual_page_size,
+         current_page=page,
+         records=[record.model_dump() for record in records],
+     )
+
+
+ @router.post("/slurm/")
+ async def query_accounting_slurm(
+     query: AccountingQuery,
+     # dependencies
+     superuser: UserOAuth = Depends(current_active_superuser),
+     db: AsyncSession = Depends(get_async_db),
+ ) -> JSONResponse:
+
+     stm = select(AccountingRecordSlurm.slurm_job_ids)
+     if query.user_id is not None:
+         stm = stm.where(AccountingRecordSlurm.user_id == query.user_id)
+     if query.timestamp_min is not None:
+         stm = stm.where(AccountingRecordSlurm.timestamp >= query.timestamp_min)
+     if query.timestamp_max is not None:
+         stm = stm.where(AccountingRecordSlurm.timestamp <= query.timestamp_max)
+
+     res = await db.execute(stm)
+     nested_slurm_job_ids = res.scalars().all()
+     aggregated_slurm_job_ids = list(chain(*nested_slurm_job_ids))
+     return JSONResponse(content=aggregated_slurm_job_ids, status_code=200)

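For reference, a minimal client-side sketch of querying the new accounting endpoint. Assumptions (not stated in the diff): the admin router is mounted under `/admin/v2`, the server runs at `localhost:8000`, and `TOKEN` is a valid superuser bearer token.

# Hypothetical usage sketch, not part of the package.
import httpx

BASE_URL = "http://localhost:8000"  # assumed server address
TOKEN = "..."  # superuser bearer token (placeholder)

response = httpx.post(
    f"{BASE_URL}/admin/v2/accounting/",
    headers={"Authorization": f"Bearer {TOKEN}"},
    params={"page": 1, "page_size": 50},
    json={
        # All filters are optional; timestamps must be timezone-aware.
        "user_id": 1,
        "timestamp_min": "2025-01-01T00:00:00+00:00",
    },
)
response.raise_for_status()
page = response.json()
print(page["total_count"], page["current_page"], len(page["records"]))
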
fractal_server/app/routes/admin/v2/impersonate.py ADDED
@@ -0,0 +1,35 @@
+ from fastapi import APIRouter
+ from fastapi import Depends
+ from fastapi.responses import JSONResponse
+ from fastapi_users.authentication import JWTStrategy
+
+ from fractal_server.app.db import AsyncSession
+ from fractal_server.app.db import get_async_db
+ from fractal_server.app.models import UserOAuth
+ from fractal_server.app.routes.auth import current_active_superuser
+ from fractal_server.app.routes.auth._aux_auth import _user_or_404
+ from fractal_server.config import get_settings
+ from fractal_server.syringe import Inject
+
+ router = APIRouter()
+
+
+ @router.get("/{user_id}/")
+ async def impersonate_user(
+     user_id: int,
+     superuser: UserOAuth = Depends(current_active_superuser),
+     db: AsyncSession = Depends(get_async_db),
+ ) -> JSONResponse:
+     user = await _user_or_404(user_id, db)
+
+     settings = Inject(get_settings)
+     jwt_strategy = JWTStrategy(
+         secret=settings.JWT_SECRET_KEY,  # type: ignore
+         lifetime_seconds=7200,  # 2 hours
+     )
+     token = await jwt_strategy.write_token(user)
+
+     return JSONResponse(
+         content={"access_token": token, "token_type": "bearer"},
+         status_code=200,
+     )

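A similarly hedged sketch of the impersonation flow, under the same assumptions as above (server address, `/admin/v2` mount point, valid superuser token; user id 42 is a placeholder): the superuser requests a token for the target user, then calls the API with it.

# Hypothetical usage sketch, not part of the package.
import httpx

BASE_URL = "http://localhost:8000"  # assumed server address
SUPERUSER_TOKEN = "..."  # superuser bearer token (placeholder)

res = httpx.get(
    f"{BASE_URL}/admin/v2/impersonate/42/",
    headers={"Authorization": f"Bearer {SUPERUSER_TOKEN}"},
)
res.raise_for_status()
impersonation_token = res.json()["access_token"]  # valid for 2 hours

# Subsequent requests made with this token are authenticated as user 42,
# e.g. (endpoint path assumed):
whoami = httpx.get(
    f"{BASE_URL}/auth/current-user/",
    headers={"Authorization": f"Bearer {impersonation_token}"},
)
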
fractal_server/app/routes/admin/v2/job.py CHANGED
@@ -1,4 +1,3 @@
- from datetime import datetime
  from pathlib import Path
  from typing import Optional

@@ -8,6 +7,7 @@ from fastapi import HTTPException
  from fastapi import Response
  from fastapi import status
  from fastapi.responses import StreamingResponse
+ from pydantic.types import AwareDatetime
  from sqlmodel import select

  from fractal_server.app.db import AsyncSession
@@ -16,7 +16,6 @@ from fractal_server.app.models import UserOAuth
  from fractal_server.app.models.v2 import JobV2
  from fractal_server.app.models.v2 import ProjectV2
  from fractal_server.app.routes.auth import current_active_superuser
- from fractal_server.app.routes.aux import _raise_if_naive_datetime
  from fractal_server.app.routes.aux._job import _write_shutdown_file
  from fractal_server.app.routes.aux._runner import _check_shutdown_is_supported
  from fractal_server.app.runner.filenames import WORKFLOW_LOG_FILENAME
@@ -37,10 +36,10 @@ async def view_job(
      dataset_id: Optional[int] = None,
      workflow_id: Optional[int] = None,
      status: Optional[JobStatusTypeV2] = None,
-     start_timestamp_min: Optional[datetime] = None,
-     start_timestamp_max: Optional[datetime] = None,
-     end_timestamp_min: Optional[datetime] = None,
-     end_timestamp_max: Optional[datetime] = None,
+     start_timestamp_min: Optional[AwareDatetime] = None,
+     start_timestamp_max: Optional[AwareDatetime] = None,
+     end_timestamp_min: Optional[AwareDatetime] = None,
+     end_timestamp_max: Optional[AwareDatetime] = None,
      log: bool = True,
      user: UserOAuth = Depends(current_active_superuser),
      db: AsyncSession = Depends(get_async_db),
@@ -67,13 +66,6 @@ async def view_job(
      `job.log` is set to `None`.
      """

-     _raise_if_naive_datetime(
-         start_timestamp_min,
-         start_timestamp_max,
-         end_timestamp_min,
-         end_timestamp_max,
-     )
-
      stm = select(JobV2)

      if id is not None:

fractal_server/app/routes/admin/v2/task_group.py CHANGED
@@ -1,4 +1,3 @@
- from datetime import datetime
  from typing import Optional

  from fastapi import APIRouter
@@ -6,6 +5,7 @@ from fastapi import Depends
  from fastapi import HTTPException
  from fastapi import Response
  from fastapi import status
+ from pydantic.types import AwareDatetime
  from sqlalchemy.sql.operators import is_
  from sqlalchemy.sql.operators import is_not
  from sqlmodel import select
@@ -20,7 +20,6 @@ from fractal_server.app.routes.auth import current_active_superuser
  from fractal_server.app.routes.auth._aux_auth import (
      _verify_user_belongs_to_group,
  )
- from fractal_server.app.routes.aux import _raise_if_naive_datetime
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
@@ -42,13 +41,11 @@ async def get_task_group_activity_list(
      pkg_name: Optional[str] = None,
      status: Optional[TaskGroupActivityStatusV2] = None,
      action: Optional[TaskGroupActivityActionV2] = None,
-     timestamp_started_min: Optional[datetime] = None,
+     timestamp_started_min: Optional[AwareDatetime] = None,
      superuser: UserOAuth = Depends(current_active_superuser),
      db: AsyncSession = Depends(get_async_db),
  ) -> list[TaskGroupActivityV2Read]:

-     _raise_if_naive_datetime(timestamp_started_min)
-
      stm = select(TaskGroupActivityV2)
      if task_group_activity_id is not None:
          stm = stm.where(TaskGroupActivityV2.id == task_group_activity_id)
@@ -96,19 +93,14 @@ async def query_task_group_list(
      active: Optional[bool] = None,
      pkg_name: Optional[str] = None,
      origin: Optional[TaskGroupV2OriginEnum] = None,
-     timestamp_last_used_min: Optional[datetime] = None,
-     timestamp_last_used_max: Optional[datetime] = None,
+     timestamp_last_used_min: Optional[AwareDatetime] = None,
+     timestamp_last_used_max: Optional[AwareDatetime] = None,
      user: UserOAuth = Depends(current_active_superuser),
      db: AsyncSession = Depends(get_async_db),
  ) -> list[TaskGroupReadV2]:

      stm = select(TaskGroupV2)

-     _raise_if_naive_datetime(
-         timestamp_last_used_max,
-         timestamp_last_used_min,
-     )
-
      if user_group_id is not None and private is True:
          raise HTTPException(
              status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,

fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py CHANGED
@@ -55,7 +55,7 @@ async def get_package_version_from_pypi(
              f"A TimeoutException occurred while getting {url}.\n"
              f"Original error: {str(e)}."
          )
-         logger.error(error_msg)
+         logger.warning(error_msg)
          raise HTTPException(
              status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
              detail=error_msg,
@@ -65,7 +65,7 @@ async def get_package_version_from_pypi(
              f"An unknown error occurred while getting {url}. "
              f"Original error: {str(e)}."
          )
-         logger.error(error_msg)
+         logger.warning(error_msg)
          raise HTTPException(
              status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
              detail=error_msg,
@@ -85,7 +85,7 @@ async def get_package_version_from_pypi(
          latest_version = response_data["info"]["version"]
          available_releases = response_data["releases"].keys()
      except KeyError as e:
-         logger.error(
+         logger.warning(
              f"A KeyError occurred while getting {url}. "
              f"Original error: {str(e)}."
          )

fractal_server/app/routes/api/v2/submit.py CHANGED
@@ -239,6 +239,7 @@ async def apply_workflow(
          workflow_id=workflow.id,
          dataset_id=dataset.id,
          job_id=job.id,
+         user_id=user.id,
          user_settings=user_settings,
          worker_init=job.worker_init,
          slurm_user=user_settings.slurm_user,

fractal_server/app/routes/api/v2/task_group.py CHANGED
@@ -1,4 +1,3 @@
- from datetime import datetime
  from typing import Optional

  from fastapi import APIRouter
@@ -6,6 +5,7 @@ from fastapi import Depends
  from fastapi import HTTPException
  from fastapi import Response
  from fastapi import status
+ from pydantic.types import AwareDatetime
  from sqlmodel import or_
  from sqlmodel import select

@@ -23,7 +23,6 @@ from fractal_server.app.routes.auth import current_active_user
  from fractal_server.app.routes.auth._aux_auth import (
      _verify_user_belongs_to_group,
  )
- from fractal_server.app.routes.aux import _raise_if_naive_datetime
  from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
@@ -43,13 +42,11 @@ async def get_task_group_activity_list(
      pkg_name: Optional[str] = None,
      status: Optional[TaskGroupActivityStatusV2] = None,
      action: Optional[TaskGroupActivityActionV2] = None,
-     timestamp_started_min: Optional[datetime] = None,
+     timestamp_started_min: Optional[AwareDatetime] = None,
      user: UserOAuth = Depends(current_active_user),
      db: AsyncSession = Depends(get_async_db),
  ) -> list[TaskGroupActivityV2Read]:

-     _raise_if_naive_datetime(timestamp_started_min)
-
      stm = select(TaskGroupActivityV2).where(
          TaskGroupActivityV2.user_id == user.id
      )

@@ -1,20 +0,0 @@
1
- from datetime import datetime
2
- from typing import Optional
3
-
4
- from fastapi import HTTPException
5
- from fastapi import status
6
-
7
-
8
- def _raise_if_naive_datetime(*timestamps: tuple[Optional[datetime]]) -> None:
9
- """
10
- Raise 422 if any not-null argument is a naive `datetime` object:
11
- https://docs.python.org/3/library/datetime.html#determining-if-an-object-is-aware-or-naive
12
- """
13
- for timestamp in filter(None, timestamps):
14
- if (timestamp.tzinfo is None) or (
15
- timestamp.tzinfo.utcoffset(timestamp) is None
16
- ):
17
- raise HTTPException(
18
- status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
19
- detail=f"{timestamp=} is naive. You must provide a timezone.",
20
- )
fractal_server/app/runner/v2/__init__.py CHANGED
@@ -70,6 +70,7 @@ def submit_workflow(
      workflow_id: int,
      dataset_id: int,
      job_id: int,
+     user_id: int,
      user_settings: UserSettings,
      worker_init: Optional[str] = None,
      slurm_user: Optional[str] = None,
@@ -90,6 +91,8 @@ def submit_workflow(
          job_id:
              Id of the job record which stores the state for the current
              workflow application.
+         user_id:
+             User ID.
          worker_init:
              Custom executor parameters that get parsed before the execution of
              each task.
@@ -312,6 +315,7 @@ def submit_workflow(
          process_workflow(
              workflow=workflow,
              dataset=dataset,
+             user_id=user_id,
              workflow_dir_local=WORKFLOW_DIR_LOCAL,
              workflow_dir_remote=WORKFLOW_DIR_REMOTE,
              logger_name=logger_name,

fractal_server/app/runner/v2/_local/__init__.py CHANGED
@@ -41,6 +41,7 @@ def process_workflow(
      last_task_index: Optional[int] = None,
      logger_name: str,
      job_attribute_filters: AttributeFiltersType,
+     user_id: int,
      # Slurm-specific
      user_cache_dir: Optional[str] = None,
      slurm_user: Optional[str] = None,
@@ -75,6 +76,7 @@ def process_workflow(
          Positional index of the last task to execute; if `None`, proceed
          until the last task.
      logger_name: Logger name
+     user_id:
      slurm_user:
          Username to impersonate to run the workflow. This argument is
          present for compatibility with the standard backend interface, but
@@ -126,4 +128,5 @@ def process_workflow(
          logger_name=logger_name,
          submit_setup_call=_local_submit_setup,
          job_attribute_filters=job_attribute_filters,
+         user_id=user_id,
      )

fractal_server/app/runner/v2/_slurm_ssh/__init__.py CHANGED
@@ -45,6 +45,7 @@ def process_workflow(
      job_attribute_filters: AttributeFiltersType,
      fractal_ssh: FractalSSH,
      worker_init: Optional[str] = None,
+     user_id: int,
      # Not used
      user_cache_dir: Optional[str] = None,
      slurm_user: Optional[str] = None,
@@ -94,4 +95,5 @@ def process_workflow(
          logger_name=logger_name,
          submit_setup_call=_slurm_submit_setup,
          job_attribute_filters=job_attribute_filters,
+         user_id=user_id,
      )

fractal_server/app/runner/v2/_slurm_sudo/__init__.py CHANGED
@@ -38,6 +38,7 @@ def process_workflow(
      last_task_index: Optional[int] = None,
      logger_name: str,
      job_attribute_filters: AttributeFiltersType,
+     user_id: int,
      # Slurm-specific
      user_cache_dir: Optional[str] = None,
      slurm_user: Optional[str] = None,
@@ -85,4 +86,5 @@ def process_workflow(
          logger_name=logger_name,
          submit_setup_call=_slurm_submit_setup,
          job_attribute_filters=job_attribute_filters,
+         user_id=user_id,
      )

fractal_server/app/runner/v2/runner.py CHANGED
@@ -18,6 +18,7 @@ from .runner_functions import run_v2_task_non_parallel
  from .runner_functions import run_v2_task_parallel
  from .task_interface import TaskOutput
  from fractal_server.app.db import get_sync_db
+ from fractal_server.app.models.v2 import AccountingRecord
  from fractal_server.app.models.v2 import DatasetV2
  from fractal_server.app.models.v2 import WorkflowTaskV2
  from fractal_server.app.schemas.v2.dataset import _DatasetHistoryItemV2
@@ -31,6 +32,7 @@ def execute_tasks_v2(
      wf_task_list: list[WorkflowTaskV2],
      dataset: DatasetV2,
      executor: ThreadPoolExecutor,
+     user_id: int,
      workflow_dir_local: Path,
      workflow_dir_remote: Optional[Path] = None,
      logger_name: Optional[str] = None,
@@ -88,7 +90,7 @@ def execute_tasks_v2(
              db.commit()
          # TASK EXECUTION (V2)
          if task.type == "non_parallel":
-             current_task_output = run_v2_task_non_parallel(
+             current_task_output, num_tasks = run_v2_task_non_parallel(
                  images=filtered_images,
                  zarr_dir=zarr_dir,
                  wftask=wftask,
@@ -96,22 +98,20 @@ def execute_tasks_v2(
                  workflow_dir_local=workflow_dir_local,
                  workflow_dir_remote=workflow_dir_remote,
                  executor=executor,
-                 logger_name=logger_name,
                  submit_setup_call=submit_setup_call,
              )
          elif task.type == "parallel":
-             current_task_output = run_v2_task_parallel(
+             current_task_output, num_tasks = run_v2_task_parallel(
                  images=filtered_images,
                  wftask=wftask,
                  task=task,
                  workflow_dir_local=workflow_dir_local,
                  workflow_dir_remote=workflow_dir_remote,
                  executor=executor,
-                 logger_name=logger_name,
                  submit_setup_call=submit_setup_call,
              )
          elif task.type == "compound":
-             current_task_output = run_v2_task_compound(
+             current_task_output, num_tasks = run_v2_task_compound(
                  images=filtered_images,
                  zarr_dir=zarr_dir,
                  wftask=wftask,
@@ -119,7 +119,6 @@ def execute_tasks_v2(
                  workflow_dir_local=workflow_dir_local,
                  workflow_dir_remote=workflow_dir_remote,
                  executor=executor,
-                 logger_name=logger_name,
                  submit_setup_call=submit_setup_call,
              )
          else:
@@ -144,6 +143,7 @@ def execute_tasks_v2(
              )

          # Update image list
+         num_new_images = 0
          current_task_output.check_zarr_urls_are_unique()
          for image_obj in current_task_output.image_list_updates:
              image = image_obj.model_dump()
@@ -246,6 +246,7 @@ def execute_tasks_v2(
              SingleImage(**new_image)
              # Add image into the dataset image list
              tmp_images.append(new_image)
+             num_new_images += 1

          # Remove images from tmp_images
          for img_zarr_url in current_task_output.image_list_removals:
@@ -281,4 +282,13 @@ def execute_tasks_v2(
              db.merge(db_dataset)
              db.commit()

+             # Create accounting record
+             record = AccountingRecord(
+                 user_id=user_id,
+                 num_tasks=num_tasks,
+                 num_new_images=num_new_images,
+             )
+             db.add(record)
+             db.commit()
+
          logger.debug(f'END {wftask.order}-th task (name="{task_name}")')

fractal_server/app/runner/v2/runner_functions.py CHANGED
@@ -115,9 +115,8 @@ def run_v2_task_non_parallel(
      workflow_dir_local: Path,
      workflow_dir_remote: Optional[Path] = None,
      executor: Executor,
-     logger_name: Optional[str] = None,
      submit_setup_call: Callable = no_op_submit_setup_call,
- ) -> TaskOutput:
+ ) -> tuple[TaskOutput, int]:
      """
      This runs server-side (see `executor` argument)
      """
@@ -154,10 +153,11 @@ def run_v2_task_non_parallel(
          **executor_options,
      )
      output = future.result()
+     num_tasks = 1
      if output is None:
-         return TaskOutput()
+         return (TaskOutput(), num_tasks)
      else:
-         return _cast_and_validate_TaskOutput(output)
+         return (_cast_and_validate_TaskOutput(output), num_tasks)


  def run_v2_task_parallel(
@@ -168,12 +168,11 @@ def run_v2_task_parallel(
      executor: Executor,
      workflow_dir_local: Path,
      workflow_dir_remote: Optional[Path] = None,
-     logger_name: Optional[str] = None,
      submit_setup_call: Callable = no_op_submit_setup_call,
- ) -> TaskOutput:
+ ) -> tuple[TaskOutput, int]:

      if len(images) == 0:
-         return TaskOutput()
+         return (TaskOutput(), 0)

      _check_parallelization_list_size(images)

@@ -216,8 +215,9 @@ def run_v2_task_parallel(
          else:
              outputs[ind] = _cast_and_validate_TaskOutput(output)

+     num_tasks = len(images)
      merged_output = merge_outputs(outputs)
-     return merged_output
+     return (merged_output, num_tasks)


  def run_v2_task_compound(
@@ -229,7 +229,6 @@ def run_v2_task_compound(
      executor: Executor,
      workflow_dir_local: Path,
      workflow_dir_remote: Optional[Path] = None,
-     logger_name: Optional[str] = None,
      submit_setup_call: Callable = no_op_submit_setup_call,
  ) -> TaskOutput:

@@ -273,11 +272,13 @@ def run_v2_task_compound(
      parallelization_list = init_task_output.parallelization_list
      parallelization_list = deduplicate_list(parallelization_list)

+     num_task = 1 + len(parallelization_list)
+
      # 3/B: parallel part of a compound task
      _check_parallelization_list_size(parallelization_list)

      if len(parallelization_list) == 0:
-         return TaskOutput()
+         return (TaskOutput(), 0)

      list_function_kwargs = []
      for ind, parallelization_item in enumerate(parallelization_list):
@@ -313,4 +314,4 @@ def run_v2_task_compound(
              outputs[ind] = validated_output

      merged_output = merge_outputs(outputs)
-     return merged_output
+     return (merged_output, num_task)

fractal_server/app/schemas/v2/__init__.py CHANGED
@@ -1,3 +1,4 @@
+ from .accounting import AccountingRecordRead  # noqa F401
  from .dataset import DatasetCreateV2  # noqa F401
  from .dataset import DatasetExportV2  # noqa F401
  from .dataset import DatasetImportV2  # noqa F401

fractal_server/app/schemas/v2/accounting.py ADDED
@@ -0,0 +1,18 @@
+ from datetime import datetime
+
+ from pydantic import BaseModel
+ from pydantic import field_serializer
+ from pydantic.types import AwareDatetime
+
+
+ class AccountingRecordRead(BaseModel):
+
+     id: int
+     user_id: int
+     timestamp: AwareDatetime
+     num_tasks: int
+     num_new_images: int
+
+     @field_serializer("timestamp")
+     def serialize_datetime(v: datetime) -> str:
+         return v.isoformat()

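Illustrative sketch (not part of the diff): `AccountingRecordRead` rejects naive datetimes via `AwareDatetime` and serializes the timestamp as an ISO-8601 string.

from datetime import datetime, timezone

from fractal_server.app.schemas.v2 import AccountingRecordRead

record = AccountingRecordRead(
    id=1,
    user_id=2,
    timestamp=datetime(2025, 2, 17, 14, 0, tzinfo=timezone.utc),  # timezone-aware
    num_tasks=3,
    num_new_images=5,
)
print(record.model_dump_json())  # timestamp rendered as "2025-02-17T14:00:00+00:00"
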
fractal_server/config.py CHANGED
@@ -611,74 +611,80 @@ class Settings(BaseSettings):
      """
      Comma-separated list of recipients of the OAuth-signup emails.
      """
-     FRACTAL_EMAIL_USE_STARTTLS: Optional[bool] = True
+     FRACTAL_EMAIL_USE_STARTTLS: Literal["true", "false"] = "true"
      """
      Whether to use StartTLS when using the SMTP server.
+     Accepted values: 'true', 'false'.
      """
-     FRACTAL_EMAIL_USE_LOGIN: Optional[bool] = True
+     FRACTAL_EMAIL_USE_LOGIN: Literal["true", "false"] = "true"
      """
      Whether to use login when using the SMTP server.
+     If 'true', FRACTAL_EMAIL_PASSWORD and FRACTAL_EMAIL_PASSWORD_KEY must be
+     provided.
+     Accepted values: 'true', 'false'.
      """
      email_settings: Optional[MailSettings] = None

-     @model_validator(mode="before")
-     @classmethod
-     def validate_email_settings(cls, values):
-         email_values = {
-             k: v for k, v in values.items() if k.startswith("FRACTAL_EMAIL")
-         }
-         if email_values:
-
-             def assert_key(key: str):
-                 if key not in email_values:
-                     raise ValueError(f"Missing '{key}'")
-
-             assert_key("FRACTAL_EMAIL_SENDER")
-             assert_key("FRACTAL_EMAIL_SMTP_SERVER")
-             assert_key("FRACTAL_EMAIL_SMTP_PORT")
-             assert_key("FRACTAL_EMAIL_INSTANCE_NAME")
-             assert_key("FRACTAL_EMAIL_RECIPIENTS")
+     @model_validator(mode="after")
+     def validate_email_settings(self):
+         email_values = [
+             self.FRACTAL_EMAIL_SENDER,
+             self.FRACTAL_EMAIL_SMTP_SERVER,
+             self.FRACTAL_EMAIL_SMTP_PORT,
+             self.FRACTAL_EMAIL_INSTANCE_NAME,
+             self.FRACTAL_EMAIL_RECIPIENTS,
+         ]
+         if len(set(email_values)) == 1:
+             # All required EMAIL attributes are None
+             pass
+         elif None in email_values:
+             # Not all required EMAIL attributes are set
+             error_msg = (
+                 "Invalid FRACTAL_EMAIL configuration. "
+                 f"Given values: {email_values}."
+             )
+             raise ValueError(error_msg)
+         else:
+             use_starttls = self.FRACTAL_EMAIL_USE_STARTTLS == "true"
+             use_login = self.FRACTAL_EMAIL_USE_LOGIN == "true"

-             if email_values.get("FRACTAL_EMAIL_USE_LOGIN", True):
-                 if "FRACTAL_EMAIL_PASSWORD" not in email_values:
+             if use_login:
+                 if self.FRACTAL_EMAIL_PASSWORD is None:
                      raise ValueError(
-                         "'FRACTAL_EMAIL_USE_LOGIN' is True but "
+                         "'FRACTAL_EMAIL_USE_LOGIN' is 'true' but "
                          "'FRACTAL_EMAIL_PASSWORD' is not provided."
                      )
-                 elif "FRACTAL_EMAIL_PASSWORD_KEY" not in email_values:
+                 if self.FRACTAL_EMAIL_PASSWORD_KEY is None:
                      raise ValueError(
-                         "'FRACTAL_EMAIL_USE_LOGIN' is True but "
+                         "'FRACTAL_EMAIL_USE_LOGIN' is 'true' but "
                          "'FRACTAL_EMAIL_PASSWORD_KEY' is not provided."
                      )
-                 else:
-                     try:
-                         (
-                             Fernet(email_values["FRACTAL_EMAIL_PASSWORD_KEY"])
-                             .decrypt(email_values["FRACTAL_EMAIL_PASSWORD"])
-                             .decode("utf-8")
-                         )
-                     except Exception as e:
-                         raise ValueError(
-                             "Invalid pair (FRACTAL_EMAIL_PASSWORD, "
-                             "FRACTAL_EMAIL_PASSWORD_KEY). "
-                             f"Original error: {str(e)}."
-                         )
+                 try:
+                     (
+                         Fernet(self.FRACTAL_EMAIL_PASSWORD_KEY)
+                         .decrypt(self.FRACTAL_EMAIL_PASSWORD)
+                         .decode("utf-8")
+                     )
+                 except Exception as e:
+                     raise ValueError(
+                         "Invalid pair (FRACTAL_EMAIL_PASSWORD, "
+                         "FRACTAL_EMAIL_PASSWORD_KEY). "
+                         f"Original error: {str(e)}."
+                     )

-             values["email_settings"] = MailSettings(
-                 sender=email_values["FRACTAL_EMAIL_SENDER"],
-                 recipients=email_values["FRACTAL_EMAIL_RECIPIENTS"].split(","),
-                 smtp_server=email_values["FRACTAL_EMAIL_SMTP_SERVER"],
-                 port=email_values["FRACTAL_EMAIL_SMTP_PORT"],
-                 encrypted_password=email_values.get("FRACTAL_EMAIL_PASSWORD"),
-                 encryption_key=email_values.get("FRACTAL_EMAIL_PASSWORD_KEY"),
-                 instance_name=email_values["FRACTAL_EMAIL_INSTANCE_NAME"],
-                 use_starttls=email_values.get(
-                     "FRACTAL_EMAIL_USE_STARTTLS", True
-                 ),
-                 use_login=email_values.get("FRACTAL_EMAIL_USE_LOGIN", True),
+             self.email_settings = MailSettings(
+                 sender=self.FRACTAL_EMAIL_SENDER,
+                 recipients=self.FRACTAL_EMAIL_RECIPIENTS.split(","),
+                 smtp_server=self.FRACTAL_EMAIL_SMTP_SERVER,
+                 port=self.FRACTAL_EMAIL_SMTP_PORT,
+                 encrypted_password=self.FRACTAL_EMAIL_PASSWORD,
+                 encryption_key=self.FRACTAL_EMAIL_PASSWORD_KEY,
+                 instance_name=self.FRACTAL_EMAIL_INSTANCE_NAME,
+                 use_starttls=use_starttls,
+                 use_login=use_login,
              )

-         return values
+         return self

      ###########################################################################
      # BUSINESS LOGIC

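As a hedged aside (not part of the diff): `FRACTAL_EMAIL_USE_STARTTLS` and `FRACTAL_EMAIL_USE_LOGIN` are now the literal strings 'true'/'false' rather than booleans, and the validator expects `FRACTAL_EMAIL_PASSWORD` to be a Fernet token that decrypts with `FRACTAL_EMAIL_PASSWORD_KEY`. A minimal sketch for producing such a pair (the plaintext password is a placeholder):

from cryptography.fernet import Fernet

key = Fernet.generate_key()
encrypted = Fernet(key).encrypt(b"my-smtp-password")

print("FRACTAL_EMAIL_PASSWORD_KEY =", key.decode("utf-8"))
print("FRACTAL_EMAIL_PASSWORD =", encrypted.decode("utf-8"))
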
fractal_server/migrations/versions/af1ef1c83c9b_add_accounting_tables.py ADDED
@@ -0,0 +1,57 @@
+ """Add accounting tables
+
+ Revision ID: af1ef1c83c9b
+ Revises: 1eac13a26c83
+ Create Date: 2025-02-17 14:22:32.701581
+
+ """
+ import sqlalchemy as sa
+ from alembic import op
+ from sqlalchemy.dialects import postgresql
+
+ # revision identifiers, used by Alembic.
+ revision = "af1ef1c83c9b"
+ down_revision = "1eac13a26c83"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.create_table(
+         "accountingrecord",
+         sa.Column("id", sa.Integer(), nullable=False),
+         sa.Column("user_id", sa.Integer(), nullable=False),
+         sa.Column("timestamp", sa.DateTime(timezone=True), nullable=False),
+         sa.Column("num_tasks", sa.Integer(), nullable=False),
+         sa.Column("num_new_images", sa.Integer(), nullable=False),
+         sa.ForeignKeyConstraint(
+             ["user_id"],
+             ["user_oauth.id"],
+             name=op.f("fk_accountingrecord_user_id_user_oauth"),
+         ),
+         sa.PrimaryKeyConstraint("id", name=op.f("pk_accountingrecord")),
+     )
+     op.create_table(
+         "accountingrecordslurm",
+         sa.Column("id", sa.Integer(), nullable=False),
+         sa.Column("user_id", sa.Integer(), nullable=False),
+         sa.Column("timestamp", sa.DateTime(timezone=True), nullable=False),
+         sa.Column(
+             "slurm_job_ids", postgresql.ARRAY(sa.Integer()), nullable=True
+         ),
+         sa.ForeignKeyConstraint(
+             ["user_id"],
+             ["user_oauth.id"],
+             name=op.f("fk_accountingrecordslurm_user_id_user_oauth"),
+         ),
+         sa.PrimaryKeyConstraint("id", name=op.f("pk_accountingrecordslurm")),
+     )
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     op.drop_table("accountingrecordslurm")
+     op.drop_table("accountingrecord")
+     # ### end Alembic commands ###

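For completeness, a hedged sketch of applying this revision programmatically with Alembic's command API (deployments would typically run the server's own database-setup command instead; the ini path below is a placeholder):

from alembic import command
from alembic.config import Config

cfg = Config("path/to/alembic.ini")  # placeholder path
command.upgrade(cfg, "af1ef1c83c9b")  # or simply "head"
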
fractal_server/tasks/v2/utils_background.py CHANGED
@@ -34,7 +34,7 @@ def fail_and_cleanup(
      db: DBSyncSession,
  ):
      logger = get_logger(logger_name)
-     logger.error(
+     logger.warning(
          f"Task {task_group_activity.action} failed. "
         f"Original error: {str(exception)}"
      )

{fractal_server-2.13.0.dist-info → fractal_server-2.13.1.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: fractal-server
- Version: 2.13.0
+ Version: 2.13.1
  Summary: Backend component of the Fractal analytics platform
  Home-page: https://github.com/fractal-analytics-platform/fractal-server
  License: BSD-3-Clause

{fractal_server-2.13.0.dist-info → fractal_server-2.13.1.dist-info}/RECORD RENAMED
@@ -1,4 +1,4 @@
- fractal_server/__init__.py,sha256=9ob5spirSIHc6BZ8Gib1GW_SY2DX0L2EX7f1ZzmBmCE,23
+ fractal_server/__init__.py,sha256=RVVgvOrf56lXdkkOitLzI3TL2D6aaRZrlU-iLjRWJ7w,23
  fractal_server/__main__.py,sha256=igfS2XL3e8JycuhASl2vsYuIPma0MG0cfPPFRuQfh14,6906
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -8,7 +8,8 @@ fractal_server/app/models/linkusergroup.py,sha256=LWTUfhH2uAnn_4moK7QdRUIHWtpw-h
  fractal_server/app/models/linkuserproject.py,sha256=hvaxh3Lkiy2uUCwB8gvn8RorCpvxSSdzWdCS_U1GL7g,315
  fractal_server/app/models/security.py,sha256=mMb_HiwWY74QZrs9xuyno0CVSmk4GYQWk5FxGixr8SU,3860
  fractal_server/app/models/user_settings.py,sha256=Y-ZV-uZAFLZqXxy8c5_Qeh_F7zQuZDWOgLpU6Zs6iqU,1316
- fractal_server/app/models/v2/__init__.py,sha256=63THGEZQlxWcosGCI74SEvJU7wOoOn1j1byTjf4NFOI,526
+ fractal_server/app/models/v2/__init__.py,sha256=TvY6VBvRG0XEfayCFNeaDxi0SZX_-rGPSvkoctl7wNM,666
+ fractal_server/app/models/v2/accounting.py,sha256=f2ALxfKKBNxFLJTtC2-YqRepVK253x68y7zkD2V_Nls,1115
  fractal_server/app/models/v2/dataset.py,sha256=O5_6YfNeX6JM7PUcEZhbeV4JCvuAhFCQbOOuefpVnqc,1544
  fractal_server/app/models/v2/job.py,sha256=L0P1mrztMqqb-6qdPEbuHXhCsf2mxVUct_ehcXrREGg,1844
  fractal_server/app/models/v2/project.py,sha256=rAHoh5KfYwIaW7rTX0_O0jvWmxEvfo1BafvmcXuSSRk,786
@@ -18,27 +19,29 @@ fractal_server/app/models/v2/workflow.py,sha256=YBgFGCziUgU0aJ5EM3Svu9W2c46AewZO
  fractal_server/app/models/v2/workflowtask.py,sha256=919L2jCm9y57MXiezGBb28uiXpxyiSHwA_DkDF9OVjg,1226
  fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- fractal_server/app/routes/admin/v2/__init__.py,sha256=KYrw0COmmMuIMp7c6YcYRXah4tEYplCWeROnPK1VTeg,681
- fractal_server/app/routes/admin/v2/job.py,sha256=cbkFIRIIXaWmNsUFI7RAu8HpQ0mWn_bgoxtvWZxr-IA,7624
+ fractal_server/app/routes/admin/v2/__init__.py,sha256=_5lqb6-M8-fZqE1HRMep6pAFYRUKMxrvbZOKs-RXWkw,933
+ fractal_server/app/routes/admin/v2/accounting.py,sha256=ueTGk9748d-gNorqCkkhddgGKzgpWVZQ4G0Loobl7eQ,3788
+ fractal_server/app/routes/admin/v2/impersonate.py,sha256=gc4lshfEPFR6W2asH7aKu6hqE6chzusdhAUVV9p51eU,1131
+ fractal_server/app/routes/admin/v2/job.py,sha256=4soc-5d99QEsir7U9AqpofgaGggSBwgMm7mXW5LBvSI,7439
  fractal_server/app/routes/admin/v2/project.py,sha256=luy-yiGX1JYTdPm1hpIdDUUqPm8xHuipLy9k2X6zu74,1223
  fractal_server/app/routes/admin/v2/task.py,sha256=h6O_DLYgp7KMExydPRO-1UsvsYj8S52E3CjslDNJZnA,4375
- fractal_server/app/routes/admin/v2/task_group.py,sha256=QD7Ui8bMJW698YAlhejWKj_ywjMwt8jymWppl2HnkLU,8126
+ fractal_server/app/routes/admin/v2/task_group.py,sha256=XTjdqgABXZcx9EenaoqSmHh12BXSentUus3SV0oxBMs,7929
  fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=0e0ZJ_k75TVHaT2o8Xk33DPDSgh-eBhZf-y4y7t-Adg,9429
  fractal_server/app/routes/api/__init__.py,sha256=2IDheFi0OFdsUg7nbUiyahqybvpgXqeHUXIL2QtWrQQ,641
  fractal_server/app/routes/api/v2/__init__.py,sha256=w4c9WzagaVV5d4TWBX5buu5ENk8jf3YftMQYmhavz9Q,2172
  fractal_server/app/routes/api/v2/_aux_functions.py,sha256=NJ6_1biN_hhIEK1w8Vj6XhLmdkQ5kMVd_MX5JC_nHLU,11524
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=c8eqPXdMhc3nIixX50B1Ka5n7LgbOZm2JbEs7lICQ04,6767
+ fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=qdXCb6IP8-qPEAxGZKljtjIqNzIAyRaAsQSRi5VqFHM,6773
  fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=uhNSs-jcS7ndIUFKiOC1yrDiViw3uvKEXi9UL04BMks,11642
  fractal_server/app/routes/api/v2/dataset.py,sha256=tyZjoncjzWd3Bl3t1vBFNMIY1FVo0z23aagdFlNKX0Q,9465
  fractal_server/app/routes/api/v2/images.py,sha256=32sSDSWri_A8uQHcdbVTPvGGEjj9vk5ILj8q0-2jLX8,8507
  fractal_server/app/routes/api/v2/job.py,sha256=m89FTh9Px25oXCeWj2k2NdGWQaO2oxMh-6lZppcsJOY,5551
  fractal_server/app/routes/api/v2/project.py,sha256=3kMp1ValIVwY-qogfRNgWmSMk0jgsk1VJlY8MhwvH1Q,6665
  fractal_server/app/routes/api/v2/status.py,sha256=Ls_TMPBE_pqOFsO1XBWf8NDSoQkle45XuI7clKwGouQ,6305
- fractal_server/app/routes/api/v2/submit.py,sha256=a2Jo9kEvFCAD8L1mHPkk5aj_xPWfrXrT0o-wf758z1s,8599
+ fractal_server/app/routes/api/v2/submit.py,sha256=K4OjcSg476JXIeeMUaYdTDk8Qpj5IO5UULvfErI7Y5Y,8624
  fractal_server/app/routes/api/v2/task.py,sha256=z3_SxsXoKsbM9GGNJUdIiZisQwAJSBqvCc7thaJIOTU,7191
  fractal_server/app/routes/api/v2/task_collection.py,sha256=IDNF6sjDuU37HIQ0TuQA-TZIuf7nfHAQXUUNmkrlhLM,12706
  fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=cctW61-C2QYF2KXluS15lLhZJS_kt30Ca6UGLFO32z0,6207
- fractal_server/app/routes/api/v2/task_group.py,sha256=LjX2X1sfqr3xlqhH8lfJmFG5ecpEzK_jCV548pU7s24,8268
+ fractal_server/app/routes/api/v2/task_group.py,sha256=j3zDvVZizB7NWEgVgZU42JCXETkaVkk2ImJPr0jS7BQ,8164
  fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=3o9bCC8ubMwffQPPaxQZy-CjH9IB2RkIReIecI6L2_w,9300
  fractal_server/app/routes/api/v2/workflow.py,sha256=Z71EI_WdLnaI0Z5OXy9RaAk_0xN9EHkhR5qieLtM8t0,11689
  fractal_server/app/routes/api/v2/workflow_import.py,sha256=INmnhlMEBJp-vHPR0f940DANPmIidts3OfcooeM_aNA,11205
@@ -52,7 +55,7 @@ fractal_server/app/routes/auth/oauth.py,sha256=AnFHbjqL2AgBX3eksI931xD6RTtmbciHB
  fractal_server/app/routes/auth/register.py,sha256=DlHq79iOvGd_gt2v9uwtsqIKeO6i_GKaW59VIkllPqY,587
  fractal_server/app/routes/auth/router.py,sha256=tzJrygXFZlmV_uWelVqTOJMEH-3Fr7ydwlgx1LxRjxY,527
  fractal_server/app/routes/auth/users.py,sha256=Zr1Bsa7Hpricb_1uFwKPCtgt3PzGnP0TaMLMdpbQDNs,7825
- fractal_server/app/routes/aux/__init__.py,sha256=LR4bR7RunHAK6jc9IR2bReQd-BdXADdnDccXI4uGeGY,731
+ fractal_server/app/routes/aux/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/routes/aux/_job.py,sha256=XWyWpOObcV55YyK7uzGRlaslmPDCBZy4hiSZBpoa_bg,616
  fractal_server/app/routes/aux/_runner.py,sha256=spNudutueHTBJPhm55RlOuYzb31DhyheSjl2rk6dloM,873
  fractal_server/app/routes/aux/validate_user_settings.py,sha256=FLVi__8YFcm_6c_K5uMQo7raWWXQLBcZtx8yaPO4jaE,2301
@@ -82,22 +85,22 @@ fractal_server/app/runner/run_subprocess.py,sha256=c3JbYXq3hX2aaflQU19qJ5Xs6J6oX
  fractal_server/app/runner/set_start_and_last_task_index.py,sha256=-q4zVybAj8ek2XlbENKlfOAJ39hT_zoJoZkqzDqiAMY,1254
  fractal_server/app/runner/shutdown.py,sha256=9pfSKHDNdIcm0eY-opgRTi7y0HmvfPmYiu9JR6Idark,2082
  fractal_server/app/runner/task_files.py,sha256=sd_MpJ01C8c9QTO8GzGMidFGdlq_hXX_ARDRhd_YMnI,3762
- fractal_server/app/runner/v2/__init__.py,sha256=hA5WyFh1GI2p56D86tf-8BFE9Dcj8f4Ua-X0XTbJIfQ,14770
- fractal_server/app/runner/v2/_local/__init__.py,sha256=QnQ9jfqpzShzjp6H7rfVx9Sqp03J1JB6fCpwNx2MDOw,5119
+ fractal_server/app/runner/v2/__init__.py,sha256=Qa2HsdtFfFNCUsKPhDPwjoy5jRpDlIWeraLYsF0ytsY,14855
+ fractal_server/app/runner/v2/_local/__init__.py,sha256=URU4bQIFfTEJLJnp4iJLkyGDLKha1wkhskajNmLQfd4,5183
  fractal_server/app/runner/v2/_local/_local_config.py,sha256=L54CYrkCyzwgUUra-YhnYR7fy2nyMl6GGu3-4hjoZ4U,3653
  fractal_server/app/runner/v2/_local/_submit_setup.py,sha256=MucNOo8Er0F5ZIwH7CnTeXgnFMc6d3pKPkv563QNVi0,1630
  fractal_server/app/runner/v2/_local/executor.py,sha256=QrJlD77G6q4WohoJQO7XXbvi2RlCUsNvMnPDEZIoAqA,3620
  fractal_server/app/runner/v2/_slurm_common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/runner/v2/_slurm_common/get_slurm_config.py,sha256=21Tl70L8oyo3H_r0vXW6KO9pq2IKEiV5ZkshsPsBjzI,6226
- fractal_server/app/runner/v2/_slurm_ssh/__init__.py,sha256=wDW58jBPcmOUeBMcuaOqTz1ElEqiyQn9ar7zp2xCPX4,3274
+ fractal_server/app/runner/v2/_slurm_ssh/__init__.py,sha256=qSRQrCSnPJJ-EwjdQP9sPPtn-Tm2e4zmgOJo2_bCtyU,3321
  fractal_server/app/runner/v2/_slurm_ssh/_submit_setup.py,sha256=a5_FDPH_yxYmrjAjMRLgh_Y4DSG3mRslCLQodGM3-t4,2838
- fractal_server/app/runner/v2/_slurm_sudo/__init__.py,sha256=16P_3nDRtXztBf0-JOZzNmpnp164kgzRTADaLX733NI,2921
+ fractal_server/app/runner/v2/_slurm_sudo/__init__.py,sha256=F9YsldPrGeEYQ27jz6bn-Nf6u0bwOpmgFhZPXYlJ1fE,2968
  fractal_server/app/runner/v2/_slurm_sudo/_submit_setup.py,sha256=a5_FDPH_yxYmrjAjMRLgh_Y4DSG3mRslCLQodGM3-t4,2838
  fractal_server/app/runner/v2/deduplicate_list.py,sha256=IVTE4abBU1bUprFTkxrTfYKnvkNTanWQ-KWh_etiT08,645
  fractal_server/app/runner/v2/handle_failed_job.py,sha256=-zFWw4d208bQEFUF_sAdH2LdHEARyg1FC8BENr1SjhU,2045
  fractal_server/app/runner/v2/merge_outputs.py,sha256=D1L4Taieq9i71SPQyNc1kMokgHh-sV_MqF3bv7QMDBc,907
- fractal_server/app/runner/v2/runner.py,sha256=EzmoF_7K6n1w1vo2hM12h2EppWW6WyrpT_MmCBbFlCw,11778
- fractal_server/app/runner/v2/runner_functions.py,sha256=BLREIcQaE6FSc2AEJyZuiYk6rGazEz_9gprUqUZDljs,9488
+ fractal_server/app/runner/v2/runner.py,sha256=L1zvv5f4IuCW7DaJTxmznQO7LLIO8rdzpN_0WNhlDMs,12088
+ fractal_server/app/runner/v2/runner_functions.py,sha256=EnyHkMj7boM1MbS0DEGk2R_pJbVpspqt6BEHcSU7jBE,9548
  fractal_server/app/runner/v2/runner_functions_low_level.py,sha256=1fWvQ6YZUUnDhO_mipXC5hnaT-zK-GHxg8ayoxZX82k,3648
  fractal_server/app/runner/v2/task_interface.py,sha256=e1GGQSYd0MyBj1EZvEVzqv-HpVE4YffXOq82WLrCaOc,1866
  fractal_server/app/runner/versions.py,sha256=dSaPRWqmFPHjg20kTCHmi_dmGNcCETflDtDLronNanU,852
@@ -107,7 +110,8 @@ fractal_server/app/schemas/_validators.py,sha256=ap0VWJzOfPcq_tbH3sglZClkuinNvMj
  fractal_server/app/schemas/user.py,sha256=vLijVyPIQJdpFc36S6qK2XtSKMzfkhULN1nBsb9tV18,2407
  fractal_server/app/schemas/user_group.py,sha256=Uao1igRYflBu7Dg6Zs0kaFU3zBFJzIwDLrkFfaJk6z8,2176
  fractal_server/app/schemas/user_settings.py,sha256=4XeXQ3rnDS1UvLPd9hvh-WVSbzBGcMDjChfiZ_WGi-w,3134
- fractal_server/app/schemas/v2/__init__.py,sha256=IT2a6fbRx3rt8h6jri_4gZWzTN9EVXewiWoIuBcZ-xA,2618
+ fractal_server/app/schemas/v2/__init__.py,sha256=pjRUOJwZGzsDbBbxfnCgQgShs5V8SaHRqwIQDW3STpY,2676
+ fractal_server/app/schemas/v2/accounting.py,sha256=Wylt7uWTiDIFlHJOh4XEtYitk2FjFlmnodDrJDxcr0E,397
  fractal_server/app/schemas/v2/dataset.py,sha256=FRJI1hWEjj7M_QhAq3mhWkmJ5lMKOtr1pXcUq-bH-Gk,5517
  fractal_server/app/schemas/v2/dumps.py,sha256=2GUjoqeblUvrSoojBz5odoUUf53IABtbY_5GvFZoMVc,1782
  fractal_server/app/schemas/v2/job.py,sha256=Dp_RRiC5uvJqq1fAJlBXztAFA-tS5FWuRnUbTnLtL6M,4226
@@ -122,7 +126,7 @@ fractal_server/app/schemas/v2/workflowtask.py,sha256=qMvwlnFCsnyD8uv8HJ4cFy2-QMm
  fractal_server/app/security/__init__.py,sha256=8dg7vBzLjU43p_eGoHuFBO97FtIDtNnbJJ5mzVSVRNI,14094
  fractal_server/app/security/signup_email.py,sha256=CR1VbsGFNshxsWquLDZPbUAYnGzkCHeJChtncq63RBc,1434
  fractal_server/app/user_settings.py,sha256=OP1yiYKtPadxwM51_Q0hdPk3z90TCN4z1BLpQsXyWiU,1316
- fractal_server/config.py,sha256=qNhfBYRAGFjT7rcg_LPLQXexzxWydJr4kHyrWaWoMew,28753
+ fractal_server/config.py,sha256=16IxtLytZcUZCY1Bog0Vv9t20cK3vG0XyQ_y94DaOh8,28810
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
  fractal_server/data_migrations/tools.py,sha256=LeMeASwYGtEqd-3wOLle6WARdTGAimoyMmRbbJl-hAM,572
  fractal_server/gunicorn_fractal.py,sha256=u6U01TLGlXgq1v8QmEpLih3QnsInZD7CqphgJ_GrGzc,1230
@@ -154,6 +158,7 @@ fractal_server/migrations/versions/99ea79d9e5d2_add_dataset_history.py,sha256=0i
  fractal_server/migrations/versions/9c5ae74c9b98_add_user_settings_table.py,sha256=syONdZNf4-OnAcWIsbzXpYwpXPsXZ4SsmjwVvmVG0PU,2256
  fractal_server/migrations/versions/9fd26a2b0de4_add_workflow_timestamp_created.py,sha256=4l1AHGUsa0ONoJVZlr3fTXw_xbbQ8O7wlD92Az2aRfM,1849
  fractal_server/migrations/versions/a7f4d6137b53_add_workflow_dump_to_applyworkflow.py,sha256=ekDUML7ILpmdoqEclKbEUdyLi4uw9HSG_sTjG2hp_JE,867
+ fractal_server/migrations/versions/af1ef1c83c9b_add_accounting_tables.py,sha256=BftudWuSGvKGBzIL5AMb3yWkgTAuaKPBGsYcOzp_gLQ,1899
  fractal_server/migrations/versions/af8673379a5c_drop_old_filter_columns.py,sha256=9sLd0F7nO5chHHm7RZ4wBA-9bvWomS-av_odKwODADM,1551
  fractal_server/migrations/versions/d256a7379ab8_taskgroup_activity_and_venv_info_to_.py,sha256=HN3_Pk8G81SzdYjg4K1RZAyjKSlsZGvcYE2nWOUbwxQ,3861
  fractal_server/migrations/versions/d4fe3708d309_make_applyworkflow_workflow_dump_non_.py,sha256=6cHEZFuTXiQg9yu32Y3RH1XAl71av141WQ6UMbiITIg,949
@@ -186,7 +191,7 @@ fractal_server/tasks/v2/templates/3_pip_freeze.sh,sha256=JldREScEBI4cD_qjfX4UK7V
  fractal_server/tasks/v2/templates/4_pip_show.sh,sha256=84NGHlg6JIbrQktgGKyfGsggPFzy6RBJuOmIpPUhsrw,1747
  fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh,sha256=q-6ZUvA6w6FDVEoSd9O63LaJ9tKZc7qAFH72SGPrd_k,284
  fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh,sha256=A2y8RngEjAcRhG-_owA6P7tAdrS_AszFuGXnaeMV8u0,1122
- fractal_server/tasks/v2/utils_background.py,sha256=6wM7Z6zedsR8oVGwrJ-HN2Hj1b7BgmaSBdvuB_r9hI8,4246
+ fractal_server/tasks/v2/utils_background.py,sha256=W_RvihI1aiYPJNsPo8z4wKuA_bPs0UT2huzLihRpjU4,4248
  fractal_server/tasks/v2/utils_database.py,sha256=iLbwkxMxTCgpyKe1JQzdfIR3zBfxohgmLwSdGps1AUo,1274
  fractal_server/tasks/v2/utils_package_names.py,sha256=RDg__xrvQs4ieeVzmVdMcEh95vGQYrv9Hfal-5EDBM8,2393
  fractal_server/tasks/v2/utils_python_interpreter.py,sha256=5_wrlrTqXyo1YuLZvAW9hrSoh5MyLOzdPVUlUwM7uDQ,955
@@ -194,8 +199,8 @@ fractal_server/tasks/v2/utils_templates.py,sha256=07TZpJ0Mh_A4lXVXrrH2o1VLFFGwxe
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
  fractal_server/utils.py,sha256=PMwrxWFxRTQRl1b9h-NRIbFGPKqpH_hXnkAT3NfZdpY,3571
  fractal_server/zip_tools.py,sha256=GjDgo_sf6V_DDg6wWeBlZu5zypIxycn_l257p_YVKGc,4876
- fractal_server-2.13.0.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.13.0.dist-info/METADATA,sha256=YY5WVT35OWNC2Ilx_EIBFAIfd7KXo5xZjDEVfe04R-E,4548
- fractal_server-2.13.0.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
- fractal_server-2.13.0.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.13.0.dist-info/RECORD,,
+ fractal_server-2.13.1.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.13.1.dist-info/METADATA,sha256=xWwzGuagIVtTBtHUpvpmo3jk5NPwmDq71eZgGrKnYjI,4548
+ fractal_server-2.13.1.dist-info/WHEEL,sha256=RaoafKOydTQ7I_I3JTrPCg6kUmTgtm4BornzOqyEfJ8,88
+ fractal_server-2.13.1.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.13.1.dist-info/RECORD,,