fractal-server 2.17.0a10__py3-none-any.whl → 2.17.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/__main__.py +1 -1
  3. fractal_server/app/models/__init__.py +0 -1
  4. fractal_server/app/models/security.py +1 -14
  5. fractal_server/app/models/v2/project.py +1 -4
  6. fractal_server/app/models/v2/task_group.py +1 -4
  7. fractal_server/app/routes/admin/v2/job.py +45 -9
  8. fractal_server/app/routes/admin/v2/resource.py +3 -1
  9. fractal_server/app/routes/admin/v2/task.py +49 -32
  10. fractal_server/app/schemas/v2/task_collection.py +11 -3
  11. fractal_server/app/shutdown.py +23 -19
  12. fractal_server/config/_database.py +3 -2
  13. fractal_server/config/_main.py +1 -1
  14. fractal_server/migrations/versions/45fbb391d7af_make_resource_id_fk_non_nullable.py +46 -0
  15. fractal_server/migrations/versions/49d0856e9569_drop_table.py +63 -0
  16. fractal_server/migrations/versions/7673fe18c05d_remove_project_dir_server_default.py +29 -0
  17. fractal_server/migrations/versions/caba9fb1ea5e_drop_useroauth_user_settings_id.py +49 -0
  18. fractal_server/runner/v2/runner.py +24 -0
  19. fractal_server/tasks/config/_python.py +1 -1
  20. fractal_server/tasks/v2/local/delete.py +1 -1
  21. {fractal_server-2.17.0a10.dist-info → fractal_server-2.17.1.dist-info}/METADATA +1 -1
  22. {fractal_server-2.17.0a10.dist-info → fractal_server-2.17.1.dist-info}/RECORD +25 -22
  23. fractal_server/app/models/user_settings.py +0 -37
  24. {fractal_server-2.17.0a10.dist-info → fractal_server-2.17.1.dist-info}/WHEEL +0 -0
  25. {fractal_server-2.17.0a10.dist-info → fractal_server-2.17.1.dist-info}/entry_points.txt +0 -0
  26. {fractal_server-2.17.0a10.dist-info → fractal_server-2.17.1.dist-info}/licenses/LICENSE +0 -0
fractal_server/__init__.py
@@ -1 +1 @@
- __VERSION__ = "2.17.0a10"
+ __VERSION__ = "2.17.1"
fractal_server/__main__.py
@@ -7,7 +7,6 @@ from pathlib import Path
  import uvicorn
  from pydantic import ValidationError

- from fractal_server.app.schemas.v2 import ResourceType

  parser = ap.ArgumentParser(description="fractal-server commands")

@@ -149,6 +148,7 @@ def init_db_data(
  from fractal_server.app.models import Resource, Profile
  from fractal_server.app.schemas.v2.resource import cast_serialize_resource
  from fractal_server.app.schemas.v2.profile import cast_serialize_profile
+ from fractal_server.app.schemas.v2 import ResourceType

  # Create default group and user
  print()
fractal_server/app/models/__init__.py
@@ -6,5 +6,4 @@ will not be picked up by alembic.
  from .linkusergroup import LinkUserGroup # noqa: F401
  from .linkuserproject import LinkUserProjectV2 # noqa: F401
  from .security import * # noqa
- from .user_settings import UserSettings # noqa
  from .v2 import * # noqa
fractal_server/app/models/security.py
@@ -95,24 +95,11 @@ class UserOAuth(SQLModel, table=True):
  ondelete="RESTRICT",
  )

- # TODO-2.17.1: update to `project_dir: str`
- project_dir: str = Field(
- sa_column=Column(
- String,
- server_default="/PLACEHOLDER",
- nullable=False,
- )
- )
+ project_dir: str
  slurm_accounts: list[str] = Field(
  sa_column=Column(ARRAY(String), server_default="{}"),
  )

- # TODO-2.17.1: remove
- user_settings_id: int | None = Field(
- foreign_key="user_settings.id",
- default=None,
- )
-

  class UserGroup(SQLModel, table=True):
  """
fractal_server/app/models/v2/project.py
@@ -15,10 +15,7 @@ class ProjectV2(SQLModel, table=True):
  id: int | None = Field(default=None, primary_key=True)
  name: str

- # TODO-2.17.1: make `resource_id` not nullable
- resource_id: int | None = Field(
- foreign_key="resource.id", default=None, ondelete="RESTRICT"
- )
+ resource_id: int = Field(foreign_key="resource.id", ondelete="RESTRICT")
  timestamp_created: datetime = Field(
  default_factory=get_timestamp,
  sa_column=Column(DateTime(timezone=True), nullable=False),
fractal_server/app/models/v2/task_group.py
@@ -42,10 +42,7 @@ class TaskGroupV2(SQLModel, table=True):
  user_group_id: int | None = Field(
  foreign_key="usergroup.id", default=None, ondelete="SET NULL"
  )
- # TODO-2.17.1: make `resource_id` not nullable
- resource_id: int | None = Field(
- foreign_key="resource.id", default=None, ondelete="RESTRICT"
- )
+ resource_id: int = Field(foreign_key="resource.id", ondelete="RESTRICT")

  origin: str
  pkg_name: str
fractal_server/app/routes/admin/v2/job.py
@@ -7,6 +7,7 @@ from fastapi import Response
  from fastapi import status
  from fastapi.responses import StreamingResponse
  from pydantic.types import AwareDatetime
+ from sqlalchemy import func
  from sqlmodel import select

  from fractal_server.app.db import AsyncSession
@@ -19,6 +20,9 @@ from fractal_server.app.models.v2 import ProjectV2
  from fractal_server.app.routes.auth import current_superuser_act
  from fractal_server.app.routes.aux._job import _write_shutdown_file
  from fractal_server.app.routes.aux._runner import _check_shutdown_is_supported
+ from fractal_server.app.routes.pagination import get_pagination_params
+ from fractal_server.app.routes.pagination import PaginationRequest
+ from fractal_server.app.routes.pagination import PaginationResponse
  from fractal_server.app.schemas.v2 import HistoryUnitStatus
  from fractal_server.app.schemas.v2 import JobReadV2
  from fractal_server.app.schemas.v2 import JobStatusTypeV2
@@ -30,7 +34,7 @@ from fractal_server.zip_tools import _zip_folder_to_byte_stream_iterator
  router = APIRouter()


- @router.get("/", response_model=list[JobReadV2])
+ @router.get("/", response_model=PaginationResponse[JobReadV2])
  async def view_job(
  id: int | None = None,
  user_id: int | None = None,
@@ -43,9 +47,10 @@ async def view_job(
  end_timestamp_min: AwareDatetime | None = None,
  end_timestamp_max: AwareDatetime | None = None,
  log: bool = True,
+ pagination: PaginationRequest = Depends(get_pagination_params),
  user: UserOAuth = Depends(current_superuser_act),
  db: AsyncSession = Depends(get_async_db),
- ) -> list[JobReadV2]:
+ ) -> PaginationResponse[JobReadV2]:
  """
  Query `ApplyWorkflow` table.

@@ -68,43 +73,74 @@ async def view_job(
  `job.log` is set to `None`.
  """

- stm = select(JobV2)
+ # Assign pagination parameters
+ page = pagination.page
+ page_size = pagination.page_size

+ # Prepare statements
+ stm = select(JobV2).order_by(JobV2.start_timestamp.desc())
+ stm_count = select(func.count(JobV2.id))
  if id is not None:
  stm = stm.where(JobV2.id == id)
+ stm_count = stm_count.where(JobV2.id == id)
  if user_id is not None:
  stm = stm.join(ProjectV2).where(
  ProjectV2.user_list.any(UserOAuth.id == user_id)
  )
+ stm_count = stm_count.join(ProjectV2).where(
+ ProjectV2.user_list.any(UserOAuth.id == user_id)
+ )
  if project_id is not None:
  stm = stm.where(JobV2.project_id == project_id)
+ stm_count = stm_count.where(JobV2.project_id == project_id)
  if dataset_id is not None:
  stm = stm.where(JobV2.dataset_id == dataset_id)
+ stm_count = stm_count.where(JobV2.dataset_id == dataset_id)
  if workflow_id is not None:
  stm = stm.where(JobV2.workflow_id == workflow_id)
+ stm_count = stm_count.where(JobV2.workflow_id == workflow_id)
  if status is not None:
  stm = stm.where(JobV2.status == status)
+ stm_count = stm_count.where(JobV2.status == status)
  if start_timestamp_min is not None:
- start_timestamp_min = start_timestamp_min
  stm = stm.where(JobV2.start_timestamp >= start_timestamp_min)
+ stm_count = stm_count.where(
+ JobV2.start_timestamp >= start_timestamp_min
+ )
  if start_timestamp_max is not None:
- start_timestamp_max = start_timestamp_max
  stm = stm.where(JobV2.start_timestamp <= start_timestamp_max)
+ stm_count = stm_count.where(
+ JobV2.start_timestamp <= start_timestamp_max
+ )
  if end_timestamp_min is not None:
- end_timestamp_min = end_timestamp_min
  stm = stm.where(JobV2.end_timestamp >= end_timestamp_min)
+ stm_count = stm_count.where(JobV2.end_timestamp >= end_timestamp_min)
  if end_timestamp_max is not None:
- end_timestamp_max = end_timestamp_max
  stm = stm.where(JobV2.end_timestamp <= end_timestamp_max)
+ stm_count = stm_count.where(JobV2.end_timestamp <= end_timestamp_max)

+ # Find total number of elements
+ res_total_count = await db.execute(stm_count)
+ total_count = res_total_count.scalar()
+ if page_size is None:
+ page_size = total_count
+ else:
+ stm = stm.offset((page - 1) * page_size).limit(page_size)
+
+ # Get `page_size` rows
  res = await db.execute(stm)
  job_list = res.scalars().all()
- await db.close()
+
  if not log:
  for job in job_list:
  setattr(job, "log", None)

- return job_list
+ return PaginationResponse[JobReadV2](
+ total_count=total_count,
+ page_size=page_size,
+ current_page=page,
+ items=[job.model_dump() for job in job_list],
+ )


  @router.get("/{job_id}/", response_model=JobReadV2)
fractal_server/app/routes/admin/v2/resource.py
@@ -180,7 +180,9 @@ async def get_resource_profiles(
  await _get_resource_or_404(resource_id=resource_id, db=db)

  res = await db.execute(
- select(Profile).where(Profile.resource_id == resource_id)
+ select(Profile)
+ .where(Profile.resource_id == resource_id)
+ .order_by(Profile.id)
  )
  profiles = res.scalars().all()

fractal_server/app/routes/admin/v2/task.py
@@ -1,7 +1,5 @@
  from fastapi import APIRouter
  from fastapi import Depends
- from fastapi import HTTPException
- from fastapi import status
  from pydantic import BaseModel
  from pydantic import EmailStr
  from pydantic import Field
@@ -16,6 +14,10 @@ from fractal_server.app.models.v2 import TaskV2
  from fractal_server.app.models.v2 import WorkflowTaskV2
  from fractal_server.app.models.v2 import WorkflowV2
  from fractal_server.app.routes.auth import current_superuser_act
+ from fractal_server.app.routes.pagination import get_pagination_params
+ from fractal_server.app.routes.pagination import PaginationRequest
+ from fractal_server.app.routes.pagination import PaginationResponse
+ from fractal_server.app.schemas.v2.task import TaskType

  router = APIRouter()

@@ -49,75 +51,85 @@ class TaskV2Info(BaseModel):
  relationships: list[TaskV2Relationship]


- @router.get("/", response_model=list[TaskV2Info])
+ @router.get("/", response_model=PaginationResponse[TaskV2Info])
  async def query_tasks(
  id: int | None = None,
  source: str | None = None,
  version: str | None = None,
  name: str | None = None,
- max_number_of_results: int = 25,
+ task_type: TaskType | None = None,
  category: str | None = None,
  modality: str | None = None,
  author: str | None = None,
  resource_id: int | None = None,
+ pagination: PaginationRequest = Depends(get_pagination_params),
  user: UserOAuth = Depends(current_superuser_act),
  db: AsyncSession = Depends(get_async_db),
- ) -> list[TaskV2Info]:
+ ) -> PaginationResponse[TaskV2Info]:
  """
- Query `TaskV2` table and get information about related items
- (WorkflowV2s and ProjectV2s)
-
- Args:
- id: If not `None`, query for matching `task.id`.
- source: If not `None`, query for contained case insensitive
- `task.source`.
- version: If not `None`, query for matching `task.version`.
- name: If not `None`, query for contained case insensitive `task.name`.
- max_number_of_results: The maximum length of the response.
- category:
- modality:
- author:
- resource_id:
+ Query `TaskV2` and get information about related workflows and projects.
  """

- stm = select(TaskV2)
+ # Assign pagination parameters
+ page = pagination.page
+ page_size = pagination.page_size

+ # Prepare statements
+ stm = select(TaskV2).order_by(TaskV2.id)
+ stm_count = select(func.count(TaskV2.id))
  if id is not None:
  stm = stm.where(TaskV2.id == id)
+ stm_count = stm_count.where(TaskV2.id == id)
  if source is not None:
  stm = stm.where(TaskV2.source.icontains(source))
+ stm_count = stm_count.where(TaskV2.source.icontains(source))
  if version is not None:
  stm = stm.where(TaskV2.version == version)
+ stm_count = stm_count.where(TaskV2.version == version)
  if name is not None:
  stm = stm.where(TaskV2.name.icontains(name))
+ stm_count = stm_count.where(TaskV2.name.icontains(name))
+ if task_type is not None:
+ stm = stm.where(TaskV2.type == task_type)
+ stm_count = stm_count.where(TaskV2.type == task_type)
  if category is not None:
  stm = stm.where(func.lower(TaskV2.category) == category.lower())
+ stm_count = stm_count.where(
+ func.lower(TaskV2.category) == category.lower()
+ )
  if modality is not None:
  stm = stm.where(func.lower(TaskV2.modality) == modality.lower())
+ stm_count = stm_count.where(
+ func.lower(TaskV2.modality) == modality.lower()
+ )
  if author is not None:
  stm = stm.where(TaskV2.authors.icontains(author))
+ stm_count = stm_count.where(TaskV2.authors.icontains(author))
  if resource_id is not None:
  stm = (
  stm.join(TaskGroupV2)
  .where(TaskGroupV2.id == TaskV2.taskgroupv2_id)
  .where(TaskGroupV2.resource_id == resource_id)
  )
+ stm_count = (
+ stm_count.join(TaskGroupV2)
+ .where(TaskGroupV2.id == TaskV2.taskgroupv2_id)
+ .where(TaskGroupV2.resource_id == resource_id)
+ )
+
+ # Find total number of elements
+ res_total_count = await db.execute(stm_count)
+ total_count = res_total_count.scalar()
+ if page_size is None:
+ page_size = total_count
+ else:
+ stm = stm.offset((page - 1) * page_size).limit(page_size)

- stm = stm.order_by(TaskV2.id)
+ # Get `page_size` rows
  res = await db.execute(stm)
  task_list = res.scalars().all()
- if len(task_list) > max_number_of_results:
- await db.close()
- raise HTTPException(
- status_code=status.HTTP_422_UNPROCESSABLE_CONTENT,
- detail=(
- f"Too many Tasks ({len(task_list)} > {max_number_of_results})."
- " Please add more query filters."
- ),
- )

  task_info_list = []
-
  for task in task_list:
  stm = (
  select(WorkflowV2)
@@ -147,4 +159,9 @@ async def query_tasks(
  )
  )

- return task_info_list
+ return PaginationResponse[TaskV2Info](
+ total_count=total_count,
+ page_size=page_size,
+ current_page=page,
+ items=task_info_list,
+ )
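Both paginated admin endpoints above keep a data statement and a count statement in lockstep, so that total_count reflects exactly the same filters as the returned page. A self-contained sketch of that pattern with SQLModel, using a toy Item table rather than the fractal-server models:

    from sqlalchemy import func
    from sqlmodel import Field, Session, SQLModel, create_engine, select


    class Item(SQLModel, table=True):
        id: int | None = Field(default=None, primary_key=True)
        name: str


    engine = create_engine("sqlite://")
    SQLModel.metadata.create_all(engine)


    def query_items(name: str | None, page: int, page_size: int):
        stm = select(Item).order_by(Item.id)
        stm_count = select(func.count(Item.id))
        if name is not None:
            # Every filter is applied to both statements, mirroring the endpoints above.
            stm = stm.where(Item.name == name)
            stm_count = stm_count.where(Item.name == name)
        with Session(engine) as session:
            total_count = session.exec(stm_count).one()
            rows = session.exec(
                stm.offset((page - 1) * page_size).limit(page_size)
            ).all()
        return total_count, rows


    # Seed a few rows and query the first page of matching items.
    with Session(engine) as session:
        session.add_all([Item(name="alpha"), Item(name="beta"), Item(name="alpha")])
        session.commit()

    total, first_page = query_items(name="alpha", page=1, page_size=10)
    print(total, [item.name for item in first_page])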
fractal_server/app/schemas/v2/task_collection.py
@@ -51,9 +51,17 @@ class TaskCollectPipV2(BaseModel):
  package: NonEmptyStr | None = None
  package_version: NonEmptyStr | None = None
  package_extras: NonEmptyStr | None = None
- python_version: Literal[
- "3.9", "3.10", "3.11", "3.12", "3.13"
- ] | None = None
+ python_version: (
+ Literal[
+ "3.9",
+ "3.10",
+ "3.11",
+ "3.12",
+ "3.13",
+ "3.14",
+ ]
+ | None
+ ) = None
  pinned_package_versions_pre: DictStrStr | None = None
  pinned_package_versions_post: DictStrStr | None = None

fractal_server/app/shutdown.py
@@ -12,44 +12,48 @@ from fractal_server.syringe import Inject


  async def cleanup_after_shutdown(*, jobsV2: list[int], logger_name: str):
+ settings = Inject(get_settings)
  logger = get_logger(logger_name)
  logger.info("Cleanup function after shutdown")
- stm_v2 = (
+ stm_objects = (
  select(JobV2)
  .where(JobV2.id.in_(jobsV2))
  .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
  )
+ stm_ids = (
+ select(JobV2.id)
+ .where(JobV2.id.in_(jobsV2))
+ .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
+ )

  async for session in get_async_db():
- jobsV2_db = (await session.execute(stm_v2)).scalars().all()
-
- for job in jobsV2_db:
+ # Write shutdown file for all jobs
+ jobs = (await session.execute(stm_objects)).scalars().all()
+ for job in jobs:
  _write_shutdown_file(job=job)

- settings = Inject(get_settings)
-
+ # Wait for completion of all job - with a timeout
+ interval = settings.FRACTAL_GRACEFUL_SHUTDOWN_TIME / 20
  t_start = time.perf_counter()
  while (
  time.perf_counter() - t_start
- ) < settings.FRACTAL_GRACEFUL_SHUTDOWN_TIME: # 30 seconds
- logger.info("Waiting 3 seconds before checking")
- time.sleep(3)
- jobsV2_db = (await session.execute(stm_v2)).scalars().all()
-
- if len(jobsV2_db) == 0:
- logger.info(
- "All jobs associated to this app are "
- "either done or failed. Exit."
- )
+ ) <= settings.FRACTAL_GRACEFUL_SHUTDOWN_TIME:
+ job_ids = (await session.execute(stm_ids)).scalars().all()
+ if len(job_ids) == 0:
+ logger.info("All jobs are either done or failed. Exit.")
  return
  else:
- logger.info(f"Some jobs are still 'submitted' {jobsV2_db=}")
+ logger.info(f"Some jobs are still 'submitted': {job_ids=}")
+ logger.info(f"Wait {interval:.4f} seconds before next check.")
+ time.sleep(interval)
  logger.info(
  "Graceful shutdown reached its maximum time, "
- "but some jobs are still submitted"
+ "but some jobs are still submitted."
  )

- for job in jobsV2_db:
+ # Mark jobs as failed and update their logs.
+ jobs = (await session.execute(stm_objects)).scalars().all()
+ for job in jobs:
  job.status = "failed"
  job.log = (job.log or "") + "\nJob stopped due to app shutdown\n"
  session.add(job)
fractal_server/config/_database.py
@@ -1,4 +1,5 @@
  from pydantic import SecretStr
+ from pydantic.types import NonNegativeInt
  from pydantic_settings import BaseSettings
  from pydantic_settings import SettingsConfigDict
  from sqlalchemy.engine import URL
@@ -27,11 +28,11 @@ class DatabaseSettings(BaseSettings):
  """
  Password to use when connecting to the PostgreSQL database.
  """
- POSTGRES_HOST: NonEmptyStr | None = "localhost"
+ POSTGRES_HOST: NonEmptyStr = "localhost"
  """
  URL to the PostgreSQL server or path to a UNIX domain socket.
  """
- POSTGRES_PORT: NonEmptyStr | None = "5432"
+ POSTGRES_PORT: NonNegativeInt = 5432
  """
  Port number to use when connecting to the PostgreSQL server.
  """
fractal_server/config/_main.py
@@ -57,7 +57,7 @@ class Settings(BaseSettings):
  `app.state`.
  """

- FRACTAL_GRACEFUL_SHUTDOWN_TIME: int = 30
+ FRACTAL_GRACEFUL_SHUTDOWN_TIME: float = 30.0
  """
  Waiting time for the shutdown phase of executors
  """
fractal_server/migrations/versions/45fbb391d7af_make_resource_id_fk_non_nullable.py
@@ -0,0 +1,46 @@
+ """Make resource_id FK non-nullable
+
+ Revision ID: 45fbb391d7af
+ Revises: caba9fb1ea5e
+ Create Date: 2025-11-11 16:39:12.813766
+
+ """
+ import sqlalchemy as sa
+ from alembic import op
+
+
+ # revision identifiers, used by Alembic.
+ revision = "45fbb391d7af"
+ down_revision = "caba9fb1ea5e"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("projectv2", schema=None) as batch_op:
+ batch_op.alter_column(
+ "resource_id", existing_type=sa.INTEGER(), nullable=False
+ )
+
+ with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+ batch_op.alter_column(
+ "resource_id", existing_type=sa.INTEGER(), nullable=False
+ )
+
+ # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+ batch_op.alter_column(
+ "resource_id", existing_type=sa.INTEGER(), nullable=True
+ )
+
+ with op.batch_alter_table("projectv2", schema=None) as batch_op:
+ batch_op.alter_column(
+ "resource_id", existing_type=sa.INTEGER(), nullable=True
+ )
+
+ # ### end Alembic commands ###
fractal_server/migrations/versions/49d0856e9569_drop_table.py
@@ -0,0 +1,63 @@
+ """Drop table
+
+ Revision ID: 49d0856e9569
+ Revises: 45fbb391d7af
+ Create Date: 2025-11-11 16:39:41.497832
+
+ """
+ import sqlalchemy as sa
+ from alembic import op
+ from sqlalchemy.dialects import postgresql
+
+ # revision identifiers, used by Alembic.
+ revision = "49d0856e9569"
+ down_revision = "45fbb391d7af"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table("user_settings")
+ # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.create_table(
+ "user_settings",
+ sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
+ sa.Column(
+ "slurm_accounts",
+ postgresql.JSONB(astext_type=sa.Text()),
+ server_default=sa.text("'[]'::json"),
+ autoincrement=False,
+ nullable=False,
+ ),
+ sa.Column(
+ "ssh_host", sa.VARCHAR(), autoincrement=False, nullable=True
+ ),
+ sa.Column(
+ "ssh_username", sa.VARCHAR(), autoincrement=False, nullable=True
+ ),
+ sa.Column(
+ "ssh_private_key_path",
+ sa.VARCHAR(),
+ autoincrement=False,
+ nullable=True,
+ ),
+ sa.Column(
+ "ssh_tasks_dir", sa.VARCHAR(), autoincrement=False, nullable=True
+ ),
+ sa.Column(
+ "ssh_jobs_dir", sa.VARCHAR(), autoincrement=False, nullable=True
+ ),
+ sa.Column(
+ "slurm_user", sa.VARCHAR(), autoincrement=False, nullable=True
+ ),
+ sa.Column(
+ "project_dir", sa.VARCHAR(), autoincrement=False, nullable=True
+ ),
+ sa.PrimaryKeyConstraint("id", name=op.f("pk_user_settings")),
+ )
+ # ### end Alembic commands ###
fractal_server/migrations/versions/7673fe18c05d_remove_project_dir_server_default.py
@@ -0,0 +1,29 @@
+ """Remove project_dir server_default
+
+ Revision ID: 7673fe18c05d
+ Revises: 49d0856e9569
+ Create Date: 2025-11-11 16:50:20.079193
+
+ """
+ from alembic import op
+
+
+ # revision identifiers, used by Alembic.
+ revision = "7673fe18c05d"
+ down_revision = "49d0856e9569"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+ """
+ Remove `server_default` for `project_dir` column - see
+ https://alembic.sqlalchemy.org/en/latest/ops.html#alembic.operations.Operations.alter_column.params.server_default
+ """
+ with op.batch_alter_table("user_oauth") as batch_op:
+ batch_op.alter_column("project_dir", server_default=None)
+
+
+ def downgrade() -> None:
+ with op.batch_alter_table("user_oauth") as batch_op:
+ batch_op.alter_column("project_dir", server_default="/PLACEHOLDER")
fractal_server/migrations/versions/caba9fb1ea5e_drop_useroauth_user_settings_id.py
@@ -0,0 +1,49 @@
+ """Drop UserOAuth.user_settings_id
+
+ Revision ID: caba9fb1ea5e
+ Revises: 83bc2ad3ffcc
+ Create Date: 2025-11-11 16:38:27.243693
+
+ """
+ import sqlalchemy as sa
+ from alembic import op
+
+
+ # revision identifiers, used by Alembic.
+ revision = "caba9fb1ea5e"
+ down_revision = "83bc2ad3ffcc"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("user_oauth", schema=None) as batch_op:
+ batch_op.drop_constraint(
+ batch_op.f("fk_user_oauth_user_settings_id_user_settings"),
+ type_="foreignkey",
+ )
+ batch_op.drop_column("user_settings_id")
+
+ # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ with op.batch_alter_table("user_oauth", schema=None) as batch_op:
+ batch_op.add_column(
+ sa.Column(
+ "user_settings_id",
+ sa.INTEGER(),
+ autoincrement=False,
+ nullable=True,
+ )
+ )
+ batch_op.create_foreign_key(
+ batch_op.f("fk_user_oauth_user_settings_id_user_settings"),
+ "user_settings",
+ ["user_settings_id"],
+ ["id"],
+ )
+
+ # ### end Alembic commands ###
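The four new revisions above form a linear chain on top of 83bc2ad3ffcc: caba9fb1ea5e → 45fbb391d7af → 49d0856e9569 → 7673fe18c05d. A minimal sketch of applying them through Alembic's Python API; in a real deployment you would normally rely on fractal-server's own startup/CLI machinery, and whether the config path below resolves correctly depends on the local setup (both are assumptions):

    from alembic import command
    from alembic.config import Config

    # Path to the alembic.ini shipped inside the wheel (see RECORD below);
    # relative paths inside it may need adjusting for your environment.
    cfg = Config("fractal_server/alembic.ini")

    # Apply all pending revisions, up to 7673fe18c05d for this release.
    command.upgrade(cfg, "head")

    # Show the revision the database is currently at.
    command.current(cfg, verbose=True)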
fractal_server/runner/v2/runner.py
@@ -192,6 +192,30 @@ def execute_tasks_v2(
  job_db.executor_error_log = None
  db.merge(job_db)
  db.commit()
+ db.expunge_all()
+
+ # Fail when running a non-converter task on an empty image list
+ if (
+ wftask.task_type
+ in [
+ TaskType.COMPOUND,
+ TaskType.PARALLEL,
+ TaskType.NON_PARALLEL,
+ ]
+ and len(filtered_images) == 0
+ ):
+ error_msg = (
+ f"Cannot run task '{task.name}' for an empty image list "
+ f"(obtained after applying {type_filters=} and "
+ f"attribute_filters={job_attribute_filters})."
+ )
+ logger.info(error_msg)
+ update_status_of_history_run(
+ history_run_id=history_run_id,
+ status=HistoryUnitStatus.FAILED,
+ db_sync=db,
+ )
+ raise JobExecutionError(error_msg)

  # TASK EXECUTION (V2)
  try:
fractal_server/tasks/config/_python.py
@@ -45,7 +45,7 @@ class TasksPythonSettings(BaseModel):
  if self.default_version not in self.versions.keys():
  raise ValueError(
  f"The default Python version ('{self.default_version}') is "
- f"not available versions in {list(self.versions.keys())}."
+ f"not available in {list(self.versions.keys())}."
  )

  return self
fractal_server/tasks/v2/local/delete.py
@@ -51,8 +51,8 @@ def delete_local(
  activity.log = get_current_log(log_file_path)
  activity = add_commit_refresh(obj=activity, db=db)

- db.commit()
  db.delete(task_group)
+ db.commit()
  logger.debug("Task group removed from database.")

  if task_group.origin != TaskGroupV2OriginEnum.OTHER:
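The change above moves db.delete(task_group) ahead of db.commit(), so the row removal is flushed as part of the transaction that gets committed. A generic sketch of that ordering with a plain SQLAlchemy session, using a toy model rather than the fractal-server code path:

    from sqlalchemy import create_engine
    from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


    class Base(DeclarativeBase):
        pass


    class TaskGroup(Base):
        __tablename__ = "task_group_demo"
        id: Mapped[int] = mapped_column(primary_key=True)


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as db:
        group = TaskGroup(id=1)
        db.add(group)
        db.commit()

        # Mark the object for deletion first, then commit, so the DELETE
        # statement is emitted inside the committed transaction.
        db.delete(group)
        db.commit()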
{fractal_server-2.17.0a10.dist-info → fractal_server-2.17.1.dist-info}/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: fractal-server
- Version: 2.17.0a10
+ Version: 2.17.1
  Summary: Backend component of the Fractal analytics platform
  License-Expression: BSD-3-Clause
  License-File: LICENSE
{fractal_server-2.17.0a10.dist-info → fractal_server-2.17.1.dist-info}/RECORD
@@ -1,23 +1,22 @@
- fractal_server/__init__.py,sha256=G5BYCg1etHirfVvA4-6BLaLisXMafnQOALbQLrNgQAU,26
- fractal_server/__main__.py,sha256=68FlTuST3zbzVofFI8JSYsSBrBQ07Bv3Mu3PsZX9Fw0,11423
+ fractal_server/__init__.py,sha256=CUZtFSmkfStuE3L_hKV87CswJmhI3gnUuekCrCEQ1Yk,23
+ fractal_server/__main__.py,sha256=qLbUicU1Ulaob_Eo5pspi-IH2xAkLfifJTH9gYEhZss,11427
  fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
  fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/app/db/__init__.py,sha256=sttX0mHVV0ESI1SJ1kcxUKiuEwqeP-BWsst0o_9Yo44,2810
- fractal_server/app/models/__init__.py,sha256=xJWiGAwpXmCpnFMC4c_HTqoUCzMOXrakoGLUH_uMvdA,415
+ fractal_server/app/models/__init__.py,sha256=93eFiAaiV6cU_Mf8WQDi_Ir7vqakAiS5H9Pl7qAb4JE,367
  fractal_server/app/models/linkusergroup.py,sha256=3KkkE4QIUAlTrBAZs_tVy0pGvAxUAq6yOEjflct_z2M,678
  fractal_server/app/models/linkuserproject.py,sha256=hvaxh3Lkiy2uUCwB8gvn8RorCpvxSSdzWdCS_U1GL7g,315
- fractal_server/app/models/security.py,sha256=VThWDEmzUP4SgLsAvd5WjJ1p2WVxBuc6D5TMQJaOyd8,3873
- fractal_server/app/models/user_settings.py,sha256=u0GOK1JdqDmXzA8hK2JV93rZxY_rF-0oKMkArRolnN8,1201
+ fractal_server/app/models/security.py,sha256=f44hOx4Tro0-KLj5N70_gv1UsFrr5ygHY7W_XHFaOJE,3546
  fractal_server/app/models/v2/__init__.py,sha256=A668GF4z_UPar6kAOwC-o_qUo3CIRJ3SmBGYTs3Xc7k,923
  fractal_server/app/models/v2/accounting.py,sha256=i-2TsjqyuclxFQ21C-TeDoss7ZBTRuXdzIJfVr2UxwE,1081
  fractal_server/app/models/v2/dataset.py,sha256=P_zy4dPQAqrCALQ6737VkAFk1SvcgYjnslGUZhPI8sc,1226
  fractal_server/app/models/v2/history.py,sha256=CBN2WVg9vW5pHU1RP8TkB_nnJrwnuifCcxgnd53UtEE,2163
  fractal_server/app/models/v2/job.py,sha256=YYzt3ef2CU1WXFNjlltR3ft2kM9T0Hq8oskSipQSxuM,2042
  fractal_server/app/models/v2/profile.py,sha256=QqOE7XGeq-ckQAbGhcgzDN5zFFaTNrtcuWgOXy9psR8,440
- fractal_server/app/models/v2/project.py,sha256=oXNcuNVDeNYZ60fwAx-Y_vnkS3xd9pwFdoT2pZmnBNI,918
+ fractal_server/app/models/v2/project.py,sha256=DJgTZG1NTA_pbLlY0Jy3WFBE8X8fBMkVALfDWK5ZuHY,832
  fractal_server/app/models/v2/resource.py,sha256=ReaBGtKb3e0_1PZOZncdGqrttkrC-bsgDCv3wPCGfOs,3512
  fractal_server/app/models/v2/task.py,sha256=iBIQB8POQE5MyKvLZhw7jZWlBhbrThzCDzRTcgiAczQ,1493
- fractal_server/app/models/v2/task_group.py,sha256=gHkuyIBw8hkoMCwHgo08SWVMc8T1MXC5xqoW2YNd5Sw,4753
+ fractal_server/app/models/v2/task_group.py,sha256=v9hI-R8mXhm0LvE_I_YG3M8VHz0V9DohhGNykdEgcB8,4667
  fractal_server/app/models/v2/workflow.py,sha256=gBjDXO-RytVT81aAlesImBhmVHrwNUrmsF_UsGa1qLM,1057
  fractal_server/app/models/v2/workflowtask.py,sha256=qkTc-hcFLpJUVsEUbnDq2BJL0qg9jagy2doZeusF1ek,1266
  fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -26,11 +25,11 @@ fractal_server/app/routes/admin/v2/__init__.py,sha256=SzLurEzKBRWHcIFh2mpac2wrGA
  fractal_server/app/routes/admin/v2/_aux_functions.py,sha256=fqA5sUCFuD2iVANQt2WUUfVOEVz5egQA7inzUKYGCw0,1684
  fractal_server/app/routes/admin/v2/accounting.py,sha256=DjgMqzdrL8hYMn19cZj2pkwtLKl3QfBwxd473kH4KaI,3584
  fractal_server/app/routes/admin/v2/impersonate.py,sha256=ictDjuvBr3iLv3YtwkVRMNQRq5qtPAeAXbbC7STSsEg,1125
- fractal_server/app/routes/admin/v2/job.py,sha256=sFgMbOtUCIJ-ri6YD3ZWP7XETZZDQsLqPfT1kaH9RHQ,8577
+ fractal_server/app/routes/admin/v2/job.py,sha256=CRt_a6tULsuzN5ojcClzVH3vd7OXT06jK81Qro9fuBk,10171
  fractal_server/app/routes/admin/v2/profile.py,sha256=0Y_1Qv-BA6cHVrxPTDDBOpttpfuJN8g1FqFlG6JiOD8,3164
  fractal_server/app/routes/admin/v2/project.py,sha256=rRq7ZDngr_29skASnte1xfycZCjK-WPdeTf7siBXiCU,1182
- fractal_server/app/routes/admin/v2/resource.py,sha256=eLK3PxvpibwQgVfgpMb_CdqkiB7hz8-RtPqqtP9ujz8,6310
- fractal_server/app/routes/admin/v2/task.py,sha256=9MMUI2PnyHQx08Xmt93O5rM60C_tlic27mP6t7ljpYo,4655
+ fractal_server/app/routes/admin/v2/resource.py,sha256=UWimApUcL9HPu8NN0ccDbReuKly3aCqff2SA-Y1iEzs,6349
+ fractal_server/app/routes/admin/v2/task.py,sha256=tHWCPZRpfvu7k8sQA-634CUC7QbgdJa2RHiMXQL8a58,5599
  fractal_server/app/routes/admin/v2/task_group.py,sha256=EDY9oliXq_xYVJ2HgRuE4-5MbL85j-y4LbWwupZxy38,6249
  fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=W7LjIBAheyjrn0fEz0SsWINqcZK5HMB5GRGMjPrc6a4,9994
  fractal_server/app/routes/api/__init__.py,sha256=ewprevw6hZ0FWM-GPHoQZU0w-yfItqLeQT-Jr_Nbjnw,1658
@@ -88,18 +87,18 @@ fractal_server/app/schemas/v2/project.py,sha256=7UC0aZLgtmkaAiPykeUj-9OZXhMkoyi3
  fractal_server/app/schemas/v2/resource.py,sha256=4iXzZJeHVLcXYY08-okoJM_4gqpzhG4KglRPBm24Jwc,3718
  fractal_server/app/schemas/v2/status_legacy.py,sha256=eQT1zGxbkzSwd0EqclsOdZ60n1x6J3DB1CZ3m4LYyxc,955
  fractal_server/app/schemas/v2/task.py,sha256=IJv8loB4kx9FBkaIHoiMsswQyq02FxvyAnHK1u074fU,4364
- fractal_server/app/schemas/v2/task_collection.py,sha256=BzHQXq2_zLZTbigWauOR5Zi-mlsqCIF2NEF_z12Nqxg,4480
+ fractal_server/app/schemas/v2/task_collection.py,sha256=S4fBQ3cbL4YmyL6ZTZYDbIBm6o8D-v5CuYQy4ng3nWU,4581
  fractal_server/app/schemas/v2/task_group.py,sha256=4hNZUXnWYSozpLXR3JqBvGzfZBG2TbjqydckHHu2Aq0,3506
  fractal_server/app/schemas/v2/workflow.py,sha256=L-dW6SzCH_VNoH6ENip44lTgGGqVYHHBk_3PtM-Ooy8,1772
  fractal_server/app/schemas/v2/workflowtask.py,sha256=6eweAMyziwaoMT-7R1fVJYunIeZKzT0-7fAVgPO_FEc,3639
  fractal_server/app/security/__init__.py,sha256=k-La8Da89C1hSUGsiidrWo6Az4u6dbe5PzN1Ctt1t34,18394
  fractal_server/app/security/signup_email.py,sha256=kphjq6TAygvPpYpg95QJWefyqmzdVrGz7fyRMctUJWE,1982
- fractal_server/app/shutdown.py,sha256=ViSNJyXWU_iWPSDOOMGNh_iQdUFrdPh_jvf8vVKLpAo,1950
+ fractal_server/app/shutdown.py,sha256=QU4DfNvqwUXlHiLORtYJit4DxlFQo014SKTfs4dcE2U,2295
  fractal_server/config/__init__.py,sha256=ZCmroNB50sUxJiFtkW0a4fFtmfyPnL4LWhtKY5FbQfg,737
  fractal_server/config/_data.py,sha256=9Jyt83yrSsr_0_9ANWDAXz88_jjyFlcB5VWJGXq8aUY,2311
- fractal_server/config/_database.py,sha256=YOBi3xuJno5wLGw1hKsjLm-bftaxVWiBNIQWVTMX3Ag,1661
+ fractal_server/config/_database.py,sha256=k1z__MrslQjmel34yFvge0sroPUs1vBtT_OSlPY8pN8,1690
  fractal_server/config/_email.py,sha256=j1QmZCyspNbD1xxkypc9Kv299tU3vTO1AqDFJ8-LZzQ,4201
- fractal_server/config/_main.py,sha256=9v64gJsvY1oGP70_AoJMnyMIeRo7FcIg6T8NDV-p9as,1992
+ fractal_server/config/_main.py,sha256=NmpNuNezVI7MpuAiZ9AOlo3Fc-4x73FIaFm-UMhppEw,1996
  fractal_server/config/_oauth.py,sha256=7J4FphGVFfVmtQycCkas6scEJQJGZUGEzQ-t2PZiqSo,1934
  fractal_server/config/_settings_config.py,sha256=tsyXQOnn9QKCFJD6hRo_dJXlQQyl70DbqgHMJoZ1xnY,144
  fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
@@ -121,7 +120,9 @@ fractal_server/migrations/versions/19eca0dd47a9_user_settings_project_dir.py,sha
  fractal_server/migrations/versions/1a83a5260664_rename.py,sha256=EkzTAjbJm7CfsLraIUbH9hkTj4M6XvmziVb4K9ZjKmQ,790
  fractal_server/migrations/versions/1eac13a26c83_drop_v1_tables.py,sha256=7OW3HmqAePHx53OWdEPzNxvtupxSR0lB_6tZF1b3JIM,1604
  fractal_server/migrations/versions/316140ff7ee1_remove_usersettings_cache_dir.py,sha256=lANgTox0rz459_yo1Rw7fGCT1qw5sUCUXTLUMc_Bzf8,911
+ fractal_server/migrations/versions/45fbb391d7af_make_resource_id_fk_non_nullable.py,sha256=y9zr161YIWgnWbaMg1rahKN4b-vHjT3f5VSeoOAHaqI,1296
  fractal_server/migrations/versions/47351f8c7ebc_drop_dataset_filters.py,sha256=vePkVm1iUHiPNKLQ3KR7BBLdHruqBdl87j_tUCbMbEA,1414
+ fractal_server/migrations/versions/49d0856e9569_drop_table.py,sha256=qoq7cGUQmrnUj_wpV2mRqVneyoKqglgbrgzW_8eS_5w,1835
  fractal_server/migrations/versions/4c308bcaea2b_add_task_args_schema_and_task_args_.py,sha256=-wHe-fOffmYeAm0JXVl_lxZ7hhDkaEVqxgxpHkb_uL8,954
  fractal_server/migrations/versions/4cedeb448a53_workflowtask_foreign_keys_not_nullables.py,sha256=Mob8McGYAcmgvrseyyYOa54E6Gsgr-4SiGdC-r9O4_A,1157
  fractal_server/migrations/versions/501961cfcd85_remove_link_between_v1_and_v2_tasks_.py,sha256=JOrVa6mGzqZ6H61YCFVOed64vFRjTWGWyN3z7NE3T08,3270
@@ -129,6 +130,7 @@ fractal_server/migrations/versions/50a13d6138fd_initial_schema.py,sha256=zwXegXs
  fractal_server/migrations/versions/5bf02391cfef_v2.py,sha256=axhNkr_H6R4rRbY7oGYazNbFvPXeSyBDWFVbKNmiqs8,8433
  fractal_server/migrations/versions/70e77f1c38b0_add_applyworkflow_first_task_index_and_.py,sha256=Q-DsMzG3IcUV2Ol1dhJWosDvKERamBE6QvA2zzS5zpQ,1632
  fractal_server/migrations/versions/71eefd1dd202_add_slurm_accounts.py,sha256=mbWuCkTpRAdGbRhW7lhXs_e5S6O37UAcCN6JfoY5H8A,1353
+ fractal_server/migrations/versions/7673fe18c05d_remove_project_dir_server_default.py,sha256=LAC1Uv4SeLkqjXPyqj5Mof8L0105gxqS1TYKzNVX4GE,795
  fractal_server/migrations/versions/791ce783d3d8_add_indices.py,sha256=gNE6AgJgeJZY99Fbd336Z9see3gRMQvuNBC0xDk_5sw,1154
  fractal_server/migrations/versions/83bc2ad3ffcc_2_17_0.py,sha256=U7t_8n58taRkd9sxCXOshrTr9M5AhlsQne8SGKa5Jt4,6377
  fractal_server/migrations/versions/84bf0fffde30_add_dumps_to_applyworkflow.py,sha256=NSCuhANChsg76vBkShBl-9tQ4VEHubOjtAv1etHhlvY,2684
@@ -148,6 +150,7 @@ fractal_server/migrations/versions/af8673379a5c_drop_old_filter_columns.py,sha25
  fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py,sha256=loDrqBB-9U3vqLKePEeJy4gK4EuPs_1F345mdrnoCt0,1293
  fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py,sha256=Q01lPlBNQgi3hpoUquWj2QUEF7cTsyQ7uikUhWunzWY,10035
  fractal_server/migrations/versions/c90a7c76e996_job_id_in_history_run.py,sha256=Y1cPwmFOZ4mx3v2XZM6adgu8u0L0VD_R4ADURyMb2ro,1102
+ fractal_server/migrations/versions/caba9fb1ea5e_drop_useroauth_user_settings_id.py,sha256=8tlWVmux-c-fB9hMO4JEsaPMXRwLN_X3PpC0rUuFrYw,1320
  fractal_server/migrations/versions/d256a7379ab8_taskgroup_activity_and_venv_info_to_.py,sha256=HN3_Pk8G81SzdYjg4K1RZAyjKSlsZGvcYE2nWOUbwxQ,3861
  fractal_server/migrations/versions/d4fe3708d309_make_applyworkflow_workflow_dump_non_.py,sha256=6cHEZFuTXiQg9yu32Y3RH1XAl71av141WQ6UMbiITIg,949
  fractal_server/migrations/versions/da2cb2ac4255_user_group_viewer_paths.py,sha256=yGWSA2HIHUybcVy66xBITk08opV2DFYSCIIrulaUZhI,901
@@ -197,7 +200,7 @@ fractal_server/runner/v2/_slurm_sudo.py,sha256=f34B6KWtn1Wwm7Y-D95Ecub0vKpA03zYL
  fractal_server/runner/v2/db_tools.py,sha256=kLB0sGYNuCMPPoP4XDSClPIf7x5lSseTKUjzGk6gAfc,3318
  fractal_server/runner/v2/deduplicate_list.py,sha256=vidkd7K6u3w0A4zVgsGZkc9mwoP6ihTYJZQUhBNorfE,667
  fractal_server/runner/v2/merge_outputs.py,sha256=0ahaSwdMFAoEhxVaEaO9nSJuKIcWg9pDZ356ktSHcC0,897
- fractal_server/runner/v2/runner.py,sha256=1vj6m_ir6RrYNr3Ew98h5hnEAms_TaHPhee52swQ_gA,19404
+ fractal_server/runner/v2/runner.py,sha256=PANeskwZpZbbaL8i5vk0voHfP-jli1sZaFKpiW1gq20,20245
  fractal_server/runner/v2/runner_functions.py,sha256=w_i74LCt_9f07w511wslTFhoDUtoE1R-IKcglEGWPIc,19618
  fractal_server/runner/v2/submit_workflow.py,sha256=s3mo5pLZH5x8V01IZjoJXcbpCl9geZwvISEr5StSBeI,11458
  fractal_server/runner/v2/task_interface.py,sha256=ftPPpOU16rbJD8q-QV7o_3ey8W7MQTFuWJiYUr4OmF4,2532
@@ -209,7 +212,7 @@ fractal_server/syringe.py,sha256=3YJeIALH-wibuJ9R5VMNYUWh7x1-MkWT0SqGcWG5MY8,279
  fractal_server/tasks/__init__.py,sha256=kadmVUoIghl8s190_Tt-8f-WBqMi8u8oU4Pvw39NHE8,23
  fractal_server/tasks/config/__init__.py,sha256=f1kARvKJPdM3b0HmJhIyMRxi8RCgPajUfS7RBhNfLKY,152
  fractal_server/tasks/config/_pixi.py,sha256=KVf0xYAf6hAnOKSrTmg_GYJ5VaA19kEMAydRJNHjeiA,3773
- fractal_server/tasks/config/_python.py,sha256=DKXHpSctTU0GJp4D9iF7DncAyrG6disMrECNX4Qu9_k,1362
+ fractal_server/tasks/config/_python.py,sha256=uEBm4Z9Ah-7A7pXBcSTBhFgefSuJPwN8WrWQHoNdp9M,1353
  fractal_server/tasks/utils.py,sha256=V7dj8o2AnoHhGSTYlqJHcRFhCIpmOrMOUhtiE_DvRVA,291
  fractal_server/tasks/v2/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  fractal_server/tasks/v2/local/__init__.py,sha256=2dJqJIwn9UEANg8lPFprxVLk3ug-4xYIb5pvIsqPb5s,353
@@ -218,7 +221,7 @@ fractal_server/tasks/v2/local/collect.py,sha256=2dvR90b57gCz8F_bmNoMLtnvlrW1ZLBz
  fractal_server/tasks/v2/local/collect_pixi.py,sha256=BTWwu07LZQB0zTvCpHJkyEHgUl8XlF1qSN5NsOC5MVo,11489
  fractal_server/tasks/v2/local/deactivate.py,sha256=cPJbB-zoHjmxL5goaNAUItkW0o_fg-YAd2YuXu_n8Rg,9925
  fractal_server/tasks/v2/local/deactivate_pixi.py,sha256=h1XdAwihACywC9qak5K2tlfz4y32zZJOA-OEkIVy67U,3627
- fractal_server/tasks/v2/local/delete.py,sha256=4uKf6WjbSy_KiJNm1ip8eXa9GgvZWiDbWQjlSKE4I20,2960
+ fractal_server/tasks/v2/local/delete.py,sha256=rmAjZ5i8x0jAIqOjG7eH9XFtUZkeo172S2WyZRFIVq0,2960
  fractal_server/tasks/v2/local/reactivate.py,sha256=ZQTaqPquevpwdchUJIp59Lw01vErA3T2XN8g1jMBNAE,6013
  fractal_server/tasks/v2/local/reactivate_pixi.py,sha256=R0D3bUwrAo3uyDNZRYsQ65NMl7-nhqd1GefBAh9Hk5c,8159
  fractal_server/tasks/v2/ssh/__init__.py,sha256=dPK6BtEZVh1GiFP05j1RKTEnZvjJez8o2KkMC2hWXaw,339
@@ -254,8 +257,8 @@ fractal_server/types/validators/_workflow_task_arguments_validators.py,sha256=HL
  fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
  fractal_server/utils.py,sha256=SYVVUuXe_nWyrJLsy7QA-KJscwc5PHEXjvsW4TK7XQI,2180
  fractal_server/zip_tools.py,sha256=H0w7wS5yE4ebj7hw1_77YQ959dl2c-L0WX6J_ro1TY4,4884
- fractal_server-2.17.0a10.dist-info/METADATA,sha256=-hVkYe_goWyUvm96LujeOSDCXsmkYPVPlojm9jxLHGs,4227
- fractal_server-2.17.0a10.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
- fractal_server-2.17.0a10.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
- fractal_server-2.17.0a10.dist-info/licenses/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
- fractal_server-2.17.0a10.dist-info/RECORD,,
+ fractal_server-2.17.1.dist-info/METADATA,sha256=fxvj3AoIvayiofwYRfl1dwAp2cQcdCZ0cZPnQoEoOBg,4224
+ fractal_server-2.17.1.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+ fractal_server-2.17.1.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+ fractal_server-2.17.1.dist-info/licenses/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+ fractal_server-2.17.1.dist-info/RECORD,,
fractal_server/app/models/user_settings.py
@@ -1,37 +0,0 @@
- from sqlalchemy import Column
- from sqlalchemy.dialects.postgresql import JSONB
- from sqlmodel import Field
- from sqlmodel import SQLModel
-
-
- # TODO-2.17.1: Drop `UserSettings`
- class UserSettings(SQLModel, table=True):
- """
- Comprehensive list of user settings.
-
- Attributes:
- id: ID of database object
- slurm_accounts:
- List of SLURM accounts, to be used upon Fractal job submission.
- ssh_host: SSH-reachable host where a SLURM client is available.
- ssh_username: User on `ssh_host`.
- ssh_private_key_path: Path of private SSH key for `ssh_username`.
- slurm_user: Local user, to be impersonated via `sudo -u`
- project_dir: Folder where `slurm_user` can write.
- """
-
- __tablename__ = "user_settings"
-
- id: int | None = Field(default=None, primary_key=True)
- slurm_accounts: list[str] = Field(
- sa_column=Column(JSONB, server_default="[]", nullable=False)
- )
- ssh_host: str | None = None
- ssh_username: str | None = None
- ssh_private_key_path: str | None = None
-
- slurm_user: str | None = None
- project_dir: str | None = None
-
- ssh_tasks_dir: str | None = None
- ssh_jobs_dir: str | None = None