fractal-server 2.17.2__py3-none-any.whl → 2.18.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/__main__.py +2 -1
  3. fractal_server/app/models/linkuserproject.py +40 -0
  4. fractal_server/app/models/security.py +7 -5
  5. fractal_server/app/models/v2/job.py +13 -2
  6. fractal_server/app/models/v2/resource.py +13 -0
  7. fractal_server/app/routes/admin/v2/__init__.py +11 -11
  8. fractal_server/app/routes/admin/v2/accounting.py +2 -2
  9. fractal_server/app/routes/admin/v2/job.py +34 -23
  10. fractal_server/app/routes/admin/v2/sharing.py +103 -0
  11. fractal_server/app/routes/admin/v2/task.py +9 -8
  12. fractal_server/app/routes/admin/v2/task_group.py +94 -16
  13. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +20 -20
  14. fractal_server/app/routes/api/__init__.py +0 -9
  15. fractal_server/app/routes/api/v2/__init__.py +47 -47
  16. fractal_server/app/routes/api/v2/_aux_functions.py +65 -64
  17. fractal_server/app/routes/api/v2/_aux_functions_history.py +8 -3
  18. fractal_server/app/routes/api/v2/_aux_functions_sharing.py +97 -0
  19. fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +4 -4
  20. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +2 -2
  21. fractal_server/app/routes/api/v2/dataset.py +89 -77
  22. fractal_server/app/routes/api/v2/history.py +28 -16
  23. fractal_server/app/routes/api/v2/images.py +22 -8
  24. fractal_server/app/routes/api/v2/job.py +40 -24
  25. fractal_server/app/routes/api/v2/pre_submission_checks.py +13 -6
  26. fractal_server/app/routes/api/v2/project.py +48 -25
  27. fractal_server/app/routes/api/v2/sharing.py +311 -0
  28. fractal_server/app/routes/api/v2/status_legacy.py +22 -33
  29. fractal_server/app/routes/api/v2/submit.py +76 -71
  30. fractal_server/app/routes/api/v2/task.py +15 -17
  31. fractal_server/app/routes/api/v2/task_collection.py +18 -18
  32. fractal_server/app/routes/api/v2/task_collection_custom.py +11 -13
  33. fractal_server/app/routes/api/v2/task_collection_pixi.py +9 -9
  34. fractal_server/app/routes/api/v2/task_group.py +18 -18
  35. fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -26
  36. fractal_server/app/routes/api/v2/task_version_update.py +12 -9
  37. fractal_server/app/routes/api/v2/workflow.py +41 -29
  38. fractal_server/app/routes/api/v2/workflow_import.py +25 -23
  39. fractal_server/app/routes/api/v2/workflowtask.py +25 -17
  40. fractal_server/app/routes/auth/_aux_auth.py +100 -0
  41. fractal_server/app/routes/auth/current_user.py +0 -63
  42. fractal_server/app/routes/auth/group.py +1 -30
  43. fractal_server/app/routes/auth/router.py +2 -0
  44. fractal_server/app/routes/auth/users.py +9 -0
  45. fractal_server/app/routes/auth/viewer_paths.py +43 -0
  46. fractal_server/app/schemas/user.py +29 -12
  47. fractal_server/app/schemas/user_group.py +0 -15
  48. fractal_server/app/schemas/v2/__init__.py +55 -48
  49. fractal_server/app/schemas/v2/dataset.py +35 -13
  50. fractal_server/app/schemas/v2/dumps.py +9 -9
  51. fractal_server/app/schemas/v2/job.py +11 -11
  52. fractal_server/app/schemas/v2/project.py +3 -3
  53. fractal_server/app/schemas/v2/resource.py +13 -4
  54. fractal_server/app/schemas/v2/sharing.py +99 -0
  55. fractal_server/app/schemas/v2/status_legacy.py +3 -3
  56. fractal_server/app/schemas/v2/task.py +6 -6
  57. fractal_server/app/schemas/v2/task_collection.py +4 -4
  58. fractal_server/app/schemas/v2/task_group.py +16 -16
  59. fractal_server/app/schemas/v2/workflow.py +16 -16
  60. fractal_server/app/schemas/v2/workflowtask.py +14 -14
  61. fractal_server/app/security/__init__.py +1 -1
  62. fractal_server/app/shutdown.py +6 -6
  63. fractal_server/config/__init__.py +0 -6
  64. fractal_server/config/_data.py +0 -79
  65. fractal_server/config/_main.py +6 -1
  66. fractal_server/data_migrations/2_18_0.py +30 -0
  67. fractal_server/images/models.py +1 -2
  68. fractal_server/main.py +72 -11
  69. fractal_server/migrations/versions/7910eed4cf97_user_project_dirs_and_usergroup_viewer_.py +60 -0
  70. fractal_server/migrations/versions/88270f589c9b_add_prevent_new_submissions.py +39 -0
  71. fractal_server/migrations/versions/bc0e8b3327a7_project_sharing.py +72 -0
  72. fractal_server/migrations/versions/f0702066b007_one_submitted_job_per_dataset.py +40 -0
  73. fractal_server/runner/config/_slurm.py +2 -0
  74. fractal_server/runner/executors/slurm_common/_batching.py +4 -10
  75. fractal_server/runner/executors/slurm_common/slurm_config.py +1 -0
  76. fractal_server/runner/executors/slurm_ssh/runner.py +1 -1
  77. fractal_server/runner/executors/slurm_sudo/runner.py +1 -1
  78. fractal_server/runner/v2/_local.py +4 -3
  79. fractal_server/runner/v2/_slurm_ssh.py +4 -3
  80. fractal_server/runner/v2/_slurm_sudo.py +4 -3
  81. fractal_server/runner/v2/runner.py +36 -17
  82. fractal_server/runner/v2/runner_functions.py +11 -14
  83. fractal_server/runner/v2/submit_workflow.py +22 -9
  84. fractal_server/tasks/v2/local/_utils.py +2 -2
  85. fractal_server/tasks/v2/local/collect.py +5 -6
  86. fractal_server/tasks/v2/local/collect_pixi.py +5 -6
  87. fractal_server/tasks/v2/local/deactivate.py +7 -7
  88. fractal_server/tasks/v2/local/deactivate_pixi.py +3 -3
  89. fractal_server/tasks/v2/local/delete.py +5 -5
  90. fractal_server/tasks/v2/local/reactivate.py +5 -5
  91. fractal_server/tasks/v2/local/reactivate_pixi.py +5 -5
  92. fractal_server/tasks/v2/ssh/collect.py +5 -5
  93. fractal_server/tasks/v2/ssh/collect_pixi.py +5 -5
  94. fractal_server/tasks/v2/ssh/deactivate.py +7 -7
  95. fractal_server/tasks/v2/ssh/deactivate_pixi.py +2 -2
  96. fractal_server/tasks/v2/ssh/delete.py +5 -5
  97. fractal_server/tasks/v2/ssh/reactivate.py +5 -5
  98. fractal_server/tasks/v2/ssh/reactivate_pixi.py +5 -5
  99. fractal_server/tasks/v2/utils_background.py +7 -7
  100. fractal_server/tasks/v2/utils_database.py +5 -5
  101. fractal_server/types/__init__.py +22 -0
  102. fractal_server/types/validators/__init__.py +3 -0
  103. fractal_server/types/validators/_common_validators.py +32 -0
  104. {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/METADATA +3 -2
  105. {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/RECORD +108 -98
  106. {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/WHEEL +0 -0
  107. {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/entry_points.txt +0 -0
  108. {fractal_server-2.17.2.dist-info → fractal_server-2.18.0.dist-info}/licenses/LICENSE +0 -0
fractal_server/main.py CHANGED
@@ -1,15 +1,20 @@
  import os
+ import time
  from contextlib import asynccontextmanager
+ from datetime import datetime
  from itertools import chain
 
  from fastapi import FastAPI
+ from starlette.types import Message
+ from starlette.types import Receive
+ from starlette.types import Scope
+ from starlette.types import Send
 
  from fractal_server import __VERSION__
  from fractal_server.app.schemas.v2 import ResourceType
 
  from .app.routes.aux._runner import _backend_supports_shutdown
  from .app.shutdown import cleanup_after_shutdown
- from .config import get_data_settings
  from .config import get_db_settings
  from .config import get_email_settings
  from .config import get_settings
@@ -28,16 +33,14 @@ def collect_routers(app: FastAPI) -> None:
          app:
              The application to register the routers to.
      """
-     from .app.routes.admin.v2 import router_admin_v2
+     from .app.routes.admin.v2 import router_admin
      from .app.routes.api import router_api
-     from .app.routes.api.v2 import router_api_v2
+     from .app.routes.api.v2 import router_api as router_api_v2
      from .app.routes.auth.router import router_auth
 
      app.include_router(router_api, prefix="/api")
      app.include_router(router_api_v2, prefix="/api/v2")
-     app.include_router(
-         router_admin_v2, prefix="/admin/v2", tags=["V2 Admin area"]
-     )
+     app.include_router(router_admin, prefix="/admin/v2", tags=["Admin area"])
      app.include_router(router_auth, prefix="/auth", tags=["Authentication"])
 
 
@@ -54,14 +57,12 @@ def check_settings() -> None:
      settings = Inject(get_settings)
      db_settings = Inject(get_db_settings)
      email_settings = Inject(get_email_settings)
-     data_settings = Inject(get_data_settings)
      logger = set_logger("fractal_server_settings")
      logger.debug("Fractal Settings:")
      for key, value in chain(
          db_settings.model_dump().items(),
          settings.model_dump().items(),
          email_settings.model_dump().items(),
-         data_settings.model_dump().items(),
      ):
          if any(s in key.upper() for s in ["PASSWORD", "SECRET", "KEY"]):
              value = "*****"
@@ -71,7 +72,7 @@ def check_settings() -> None:
 
  @asynccontextmanager
  async def lifespan(app: FastAPI):
-     app.state.jobsV2 = []
+     app.state.jobs = []
      logger = set_logger("fractal_server.lifespan")
      logger.info(f"[startup] START (fractal-server {__VERSION__})")
      check_settings()
@@ -108,12 +109,12 @@ async def lifespan(app: FastAPI):
 
      logger.info(
          f"[teardown] Current worker with pid {os.getpid()} is shutting down. "
-         f"Current jobs: {app.state.jobsV2=}"
+         f"Current jobs: {app.state.jobs=}"
      )
      if _backend_supports_shutdown(settings.FRACTAL_RUNNER_BACKEND):
          try:
              await cleanup_after_shutdown(
-                 jobsV2=app.state.jobsV2,
+                 jobs=app.state.jobs,
                  logger_name="fractal_server.lifespan",
              )
          except Exception as e:
@@ -131,6 +132,59 @@ async def lifespan(app: FastAPI):
      reset_logger_handlers(logger)
 
 
+ slow_response_logger = set_logger("slow-response")
+
+
+ def _endpoint_has_background_task(method: str, path: str) -> bool:
+     has_background_task = (method == "POST") and (
+         "/job/submit/" in path
+         or "/task/collect/pi" in path  # "/pip" and "/pixi"
+         or "/task-group/" in path
+     )
+     return has_background_task
+
+
+ class SlowResponseMiddleware:
+     def __init__(self, app: FastAPI, time_threshold: float):
+         self.app = app
+         self.time_threshold = time_threshold
+
+     async def __call__(self, scope: Scope, receive: Receive, send: Send):
+         if (
+             scope["type"] != "http"  # e.g. `scope["type"] == "lifespan"`
+             or _endpoint_has_background_task(scope["method"], scope["path"])
+         ):
+             await self.app(scope, receive, send)
+             return
+
+         # Mutable variable which can be updated from within `send_wrapper`
+         context = {"status_code": None}
+
+         async def send_wrapper(message: Message):
+             if message["type"] == "http.response.start":
+                 context["status_code"] = message["status"]
+             await send(message)
+
+         # Measure request time
+         start_timestamp = datetime.now()
+         start_time = time.perf_counter()
+         await self.app(scope, receive, send_wrapper)
+         stop_time = time.perf_counter()
+         request_time = stop_time - start_time
+
+         # Log if process time is too high
+         if request_time > self.time_threshold:
+             end_timestamp = datetime.now()
+             slow_response_logger.warning(
+                 f"{scope['method']} {scope['route'].path}"
+                 f"?{scope['query_string'].decode('utf-8')}, "
+                 f"{context['status_code']}, "
+                 f"{request_time:.2f}, "
+                 f"{start_timestamp.isoformat(timespec='milliseconds')}, "
+                 f"{end_timestamp.isoformat(timespec='milliseconds')}"
+             )
+
+
  def start_application() -> FastAPI:
      """
      Create the application, initialise it and collect all available routers.
@@ -140,6 +194,13 @@ def start_application() -> FastAPI:
          The fully initialised application.
      """
      app = FastAPI(lifespan=lifespan)
+
+     settings = Inject(get_settings)
+     app.add_middleware(
+         SlowResponseMiddleware,
+         time_threshold=settings.FRACTAL_LONG_REQUEST_TIME,
+     )
+
      collect_routers(app)
      return app
 
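The new `SlowResponseMiddleware` is a plain ASGI callable: it wraps `send` so that the `http.response.start` message reveals the status code, times the request with `time.perf_counter()`, and logs a warning only when the threshold is exceeded. Below is a minimal, self-contained sketch of the same wrap-the-send-callable pattern; the `TimingMiddleware` name, the `/ping` route, and the `print`-based logging are illustrative stand-ins, not fractal-server code.

import time

from fastapi import FastAPI
from fastapi.testclient import TestClient


class TimingMiddleware:
    def __init__(self, app, time_threshold: float = 0.0):
        self.app = app
        self.time_threshold = time_threshold

    async def __call__(self, scope, receive, send):
        # Pass non-HTTP events (e.g. lifespan) through untouched
        if scope["type"] != "http":
            await self.app(scope, receive, send)
            return

        context = {"status_code": None}

        async def send_wrapper(message):
            # The status code travels in the "http.response.start" message
            if message["type"] == "http.response.start":
                context["status_code"] = message["status"]
            await send(message)

        start = time.perf_counter()
        await self.app(scope, receive, send_wrapper)
        elapsed = time.perf_counter() - start
        if elapsed > self.time_threshold:
            print(
                f"{scope['method']} {scope['path']} "
                f"-> {context['status_code']} in {elapsed:.4f}s"
            )


app = FastAPI()
app.add_middleware(TimingMiddleware, time_threshold=0.0)


@app.get("/ping")
def ping():
    return {"ok": True}


if __name__ == "__main__":
    # Threshold 0.0 means every request gets reported
    print(TestClient(app).get("/ping").json())

Note that fractal-server deliberately skips this timing for endpoints that spawn background tasks (job submission, task collection, task-group lifecycle), since their response time says little about the work they trigger.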
fractal_server/migrations/versions/7910eed4cf97_user_project_dirs_and_usergroup_viewer_.py ADDED
@@ -0,0 +1,60 @@
+ """User project_dirs and UserGroup viewer paths
+
+ Revision ID: 7910eed4cf97
+ Revises: bc0e8b3327a7
+ Create Date: 2025-11-27 16:02:51.824653
+
+ """
+
+ import sqlalchemy as sa
+ from alembic import op
+ from sqlalchemy.dialects import postgresql
+
+ # revision identifiers, used by Alembic.
+ revision = "7910eed4cf97"
+ down_revision = "bc0e8b3327a7"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("user_oauth", schema=None) as batch_op:
+         batch_op.add_column(
+             sa.Column(
+                 "project_dirs",
+                 postgresql.ARRAY(sa.String()),
+                 server_default="{}",
+                 nullable=False,
+             )
+         )
+         batch_op.alter_column(
+             "project_dir", existing_type=sa.VARCHAR(), nullable=True
+         )
+
+     with op.batch_alter_table("usergroup", schema=None) as batch_op:
+         batch_op.drop_column("viewer_paths")
+
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("usergroup", schema=None) as batch_op:
+         batch_op.add_column(
+             sa.Column(
+                 "viewer_paths",
+                 postgresql.JSONB(astext_type=sa.Text()),
+                 server_default=sa.text("'[]'::json"),
+                 autoincrement=False,
+                 nullable=False,
+             )
+         )
+
+     with op.batch_alter_table("user_oauth", schema=None) as batch_op:
+         batch_op.alter_column(
+             "project_dir", existing_type=sa.VARCHAR(), nullable=False
+         )
+         batch_op.drop_column("project_dirs")
+
+     # ### end Alembic commands ###
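This upgrade moves users from a single `project_dir` string to a `project_dirs` Postgres array (and makes the legacy column nullable). The following is a hedged sketch of how such a column can be declared on a SQLModel table so that autogeneration produces exactly this DDL; the class name is illustrative, not the actual fractal-server model.

from sqlalchemy import Column, String
from sqlalchemy.dialects.postgresql import ARRAY
from sqlmodel import Field, SQLModel


class UserExample(SQLModel, table=True):
    id: int | None = Field(default=None, primary_key=True)
    # Legacy column, now nullable
    project_dir: str | None = None
    # New column: NOT NULL array of strings, defaulting to an empty array
    project_dirs: list[str] = Field(
        default_factory=list,
        sa_column=Column(ARRAY(String), server_default="{}", nullable=False),
    )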
fractal_server/migrations/versions/88270f589c9b_add_prevent_new_submissions.py ADDED
@@ -0,0 +1,39 @@
+ """add_prevent_new_submissions
+
+ Revision ID: 88270f589c9b
+ Revises: f0702066b007
+ Create Date: 2025-12-02 12:34:11.028259
+
+ """
+
+ import sqlalchemy as sa
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision = "88270f589c9b"
+ down_revision = "f0702066b007"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("resource", schema=None) as batch_op:
+         batch_op.add_column(
+             sa.Column(
+                 "prevent_new_submissions",
+                 sa.BOOLEAN(),
+                 server_default="false",
+                 nullable=False,
+             )
+         )
+
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("resource", schema=None) as batch_op:
+         batch_op.drop_column("prevent_new_submissions")
+
+     # ### end Alembic commands ###
fractal_server/migrations/versions/bc0e8b3327a7_project_sharing.py ADDED
@@ -0,0 +1,72 @@
+ """Project sharing
+
+ Revision ID: bc0e8b3327a7
+ Revises: e0e717ae2f26
+ Create Date: 2025-11-20 11:40:03.796112
+
+ """
+
+ import sqlalchemy as sa
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision = "bc0e8b3327a7"
+ down_revision = "e0e717ae2f26"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("linkuserprojectv2", schema=None) as batch_op:
+         batch_op.add_column(
+             sa.Column(
+                 "is_owner", sa.BOOLEAN(), server_default="true", nullable=False
+             )
+         )
+         batch_op.add_column(
+             sa.Column(
+                 "is_verified",
+                 sa.BOOLEAN(),
+                 server_default="true",
+                 nullable=False,
+             )
+         )
+         batch_op.add_column(
+             sa.Column(
+                 "permissions", sa.String(), server_default="rwx", nullable=False
+             )
+         )
+         batch_op.create_index(
+             "ix_linkuserprojectv2_one_owner_per_project",
+             ["project_id"],
+             unique=True,
+             postgresql_where=sa.text("is_owner IS true"),
+         )
+
+         # ### end Alembic commands ###
+
+         # Manually add check constraints
+         batch_op.create_check_constraint(
+             "owner_is_verified", "NOT (is_owner AND NOT is_verified)"
+         )
+         batch_op.create_check_constraint(
+             "owner_full_permissions", "NOT (is_owner AND permissions <> 'rwx')"
+         )
+         batch_op.create_check_constraint(
+             "valid_permissions", "permissions IN ('r', 'rw', 'rwx')"
+         )
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("linkuserprojectv2", schema=None) as batch_op:
+         batch_op.drop_index(
+             "ix_linkuserprojectv2_one_owner_per_project",
+             postgresql_where=sa.text("is_owner IS true"),
+         )
+         batch_op.drop_column("permissions")
+         batch_op.drop_column("is_verified")
+         batch_op.drop_column("is_owner")
+
+     # ### end Alembic commands ###
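The interesting part of this migration is the pair of mechanisms enforcing ownership invariants: a Postgres partial unique index (at most one `is_owner` row per project) plus three check constraints (owners are verified, owners hold full `rwx` permissions, and permissions come from a fixed vocabulary). A hedged sketch of how these could be expressed at the model level with SQLAlchemy/SQLModel; the class and index names here are illustrative:

from sqlalchemy import Boolean, CheckConstraint, Column, Index, String, text
from sqlmodel import Field, SQLModel


class LinkUserProjectExample(SQLModel, table=True):
    user_id: int = Field(primary_key=True)
    project_id: int = Field(primary_key=True)
    is_owner: bool = Field(
        sa_column=Column(Boolean, server_default="true", nullable=False)
    )
    is_verified: bool = Field(
        sa_column=Column(Boolean, server_default="true", nullable=False)
    )
    permissions: str = Field(
        sa_column=Column(String, server_default="rwx", nullable=False)
    )

    __table_args__ = (
        # Owners must be verified and must have full permissions
        CheckConstraint(
            "NOT (is_owner AND NOT is_verified)", name="owner_is_verified"
        ),
        CheckConstraint(
            "NOT (is_owner AND permissions <> 'rwx')",
            name="owner_full_permissions",
        ),
        CheckConstraint(
            "permissions IN ('r', 'rw', 'rwx')", name="valid_permissions"
        ),
        # Partial unique index: at most one owner row per project
        Index(
            "ix_example_one_owner_per_project",
            "project_id",
            unique=True,
            postgresql_where=text("is_owner IS true"),
        ),
    )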
fractal_server/migrations/versions/f0702066b007_one_submitted_job_per_dataset.py ADDED
@@ -0,0 +1,40 @@
+ """One submitted Job per Dataset
+
+ Revision ID: f0702066b007
+ Revises: 7910eed4cf97
+ Create Date: 2025-12-01 20:54:03.137093
+
+ """
+
+ import sqlalchemy as sa
+ from alembic import op
+
+ # revision identifiers, used by Alembic.
+ revision = "f0702066b007"
+ down_revision = "7910eed4cf97"
+ branch_labels = None
+ depends_on = None
+
+
+ def upgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("jobv2", schema=None) as batch_op:
+         batch_op.create_index(
+             "ix_jobv2_one_submitted_job_per_dataset",
+             ["dataset_id"],
+             unique=True,
+             postgresql_where=sa.text("status = 'submitted'"),
+         )
+
+     # ### end Alembic commands ###
+
+
+ def downgrade() -> None:
+     # ### commands auto generated by Alembic - please adjust! ###
+     with op.batch_alter_table("jobv2", schema=None) as batch_op:
+         batch_op.drop_index(
+             "ix_jobv2_one_submitted_job_per_dataset",
+             postgresql_where=sa.text("status = 'submitted'"),
+         )
+
+     # ### end Alembic commands ###
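This partial unique index turns "one submitted job per dataset" into a database-level guarantee: uniqueness of `dataset_id` is only enforced on rows whose `status` is `'submitted'`, so any number of finished or failed jobs can accumulate per dataset. The behaviour can be demonstrated with the standard-library `sqlite3` module, which also supports partial indexes; the table below is a toy stand-in for `jobv2`:

import sqlite3

db = sqlite3.connect(":memory:")
db.execute(
    "CREATE TABLE jobv2 (id INTEGER PRIMARY KEY, dataset_id INT, status TEXT)"
)
db.execute(
    "CREATE UNIQUE INDEX ix_jobv2_one_submitted_job_per_dataset "
    "ON jobv2 (dataset_id) WHERE status = 'submitted'"
)

# Any number of finished jobs per dataset is fine
db.execute("INSERT INTO jobv2 (dataset_id, status) VALUES (1, 'done')")
db.execute("INSERT INTO jobv2 (dataset_id, status) VALUES (1, 'done')")

# ... but only one job may be 'submitted' at a time
db.execute("INSERT INTO jobv2 (dataset_id, status) VALUES (1, 'submitted')")
try:
    db.execute("INSERT INTO jobv2 (dataset_id, status) VALUES (1, 'submitted')")
except sqlite3.IntegrityError as e:
    print(f"second submitted job rejected: {e}")

# Once the running job leaves 'submitted', a new submission is allowed again
db.execute("UPDATE jobv2 SET status = 'failed' WHERE status = 'submitted'")
db.execute("INSERT INTO jobv2 (dataset_id, status) VALUES (1, 'submitted')")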
fractal_server/runner/config/_slurm.py CHANGED
@@ -34,6 +34,7 @@ class SlurmConfigSet(BaseModel):
          account:
          extra_lines:
          gpus:
+         shebang_line: The shell shebang to use for SLURM jobs.
      """
 
      model_config = ConfigDict(extra="forbid")
@@ -49,6 +50,7 @@ class SlurmConfigSet(BaseModel):
      account: NonEmptyStr | None = None
      extra_lines: list[NonEmptyStr] = Field(default_factory=list)
      gpus: NonEmptyStr | None = None
+     shebang_line: str = "#!/bin/sh"
 
 
  class BatchingConfigSet(BaseModel):
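Illustrative usage of the new option, assuming (as the surrounding fields suggest) that the remaining `SlurmConfigSet` fields are all optional:

from fractal_server.runner.config._slurm import SlurmConfigSet

print(SlurmConfigSet().shebang_line)  # "#!/bin/sh" (the new default)
print(SlurmConfigSet(shebang_line="#!/bin/bash").shebang_line)  # per-config override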
fractal_server/runner/executors/slurm_common/_batching.py CHANGED
@@ -1,13 +1,3 @@
- # Copyright 2022 (C) Friedrich Miescher Institute for Biomedical Research and
- # University of Zurich
- #
- # Original authors:
- # Tommaso Comparin <tommaso.comparin@exact-lab.it>
- #
- # This file is part of Fractal and was originally developed by eXact lab S.r.l.
- # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
- # Institute for Biomedical Research and Pelkmans Lab from the University of
- # Zurich.
  """
  Submodule to determine the number of total/parallel tasks per SLURM job.
  """
@@ -20,6 +10,10 @@ logger = set_logger(__name__)
 
 
  class SlurmHeuristicsError(ValueError):
+     """
+     Error in SLURM-batching heuristics.
+     """
+
      pass
 
 
fractal_server/runner/executors/slurm_common/slurm_config.py CHANGED
@@ -60,6 +60,7 @@ class SlurmConfig(BaseModel):
          Key-value pairs to be included as `export`-ed variables in SLURM
          submission script, after prepending values with the user's cache
          directory.
+
      """
 
      model_config = ConfigDict(extra="forbid")
fractal_server/runner/executors/slurm_ssh/runner.py CHANGED
@@ -24,7 +24,7 @@ logger = set_logger(__name__)
 
  class SlurmSSHRunner(BaseSlurmRunner):
      """
-     Runner implementation for a computational `sudo_slurm` resource.
+     Runner implementation for a computational `slurm_ssh` resource.
      """
 
      fractal_ssh: FractalSSH
fractal_server/runner/executors/slurm_sudo/runner.py CHANGED
@@ -47,7 +47,7 @@ def _subprocess_run_or_raise(
 
  class SlurmSudoRunner(BaseSlurmRunner):
      """
-     Runner implementation for a computational `sudo_slurm` resource.
+     Runner implementation for a computational `slurm_sudo` resource.
      """
 
      slurm_user: str
fractal_server/runner/v2/_local.py CHANGED
@@ -14,7 +14,7 @@ from fractal_server.runner.set_start_and_last_task_index import (
  from fractal_server.ssh._fabric import FractalSSH
  from fractal_server.types import AttributeFilters
 
- from .runner import execute_tasks_v2
+ from .runner import execute_tasks
 
 
  def process_workflow(
@@ -59,7 +59,7 @@ def process_workflow(
          resource: Computational resource for running this job.
          profile: Computational profile for running this job.
          user_cache_dir:
-             User-writeable folder (typically a subfolder of `project_dir`).
+             User-writeable folder (typically a subfolder of `project_dirs`).
              Only relevant for `slurm_sudo` and `slurm_ssh` backends.
          fractal_ssh:
              `FractalSSH` object, only relevant for the `slurm_ssh` backend.
@@ -90,7 +90,7 @@ def process_workflow(
          resource=resource,
          profile=profile,
      ) as runner:
-         execute_tasks_v2(
+         execute_tasks(
              wf_task_list=workflow.task_list[
                  first_task_index : (last_task_index + 1)
              ],
@@ -104,4 +104,5 @@ def process_workflow(
              job_attribute_filters=job_attribute_filters,
              job_type_filters=job_type_filters,
              user_id=user_id,
+             resource_id=resource.id,
          )
fractal_server/runner/v2/_slurm_ssh.py CHANGED
@@ -33,7 +33,7 @@ from fractal_server.runner.set_start_and_last_task_index import (
  from fractal_server.ssh._fabric import FractalSSH
  from fractal_server.types import AttributeFilters
 
- from .runner import execute_tasks_v2
+ from .runner import execute_tasks
 
  logger = set_logger(__name__)
 
@@ -80,7 +80,7 @@ def process_workflow(
          resource: Computational resource for running this job.
          profile: Computational profile for running this job.
          user_cache_dir:
-             User-writeable folder (typically a subfolder of `project_dir`).
+             User-writeable folder (typically a subfolder of `project_dirs`).
              Only relevant for `slurm_sudo` and `slurm_ssh` backends.
          fractal_ssh:
              `FractalSSH` object, only relevant for the `slurm_ssh` backend.
@@ -113,7 +113,7 @@ def process_workflow(
          common_script_lines=worker_init,
          user_cache_dir=user_cache_dir,
      ) as runner:
-         execute_tasks_v2(
+         execute_tasks(
              wf_task_list=workflow.task_list[
                  first_task_index : (last_task_index + 1)
              ],
@@ -127,4 +127,5 @@ def process_workflow(
              job_attribute_filters=job_attribute_filters,
              job_type_filters=job_type_filters,
              user_id=user_id,
+             resource_id=resource.id,
          )
fractal_server/runner/v2/_slurm_sudo.py CHANGED
@@ -32,7 +32,7 @@ from fractal_server.runner.set_start_and_last_task_index import (
  from fractal_server.ssh._fabric import FractalSSH
  from fractal_server.types import AttributeFilters
 
- from .runner import execute_tasks_v2
+ from .runner import execute_tasks
 
 
  def process_workflow(
@@ -77,7 +77,7 @@ def process_workflow(
          resource: Computational resource for running this job.
          profile: Computational profile for running this job.
          user_cache_dir:
-             User-writeable folder (typically a subfolder of `project_dir`).
+             User-writeable folder (typically a subfolder of `project_dirs`).
              Only relevant for `slurm_sudo` and `slurm_ssh` backends.
          fractal_ssh:
              `FractalSSH` object, only relevant for the `slurm_ssh` backend.
@@ -109,7 +109,7 @@ def process_workflow(
          user_cache_dir=user_cache_dir,
          slurm_account=slurm_account,
      ) as runner:
-         execute_tasks_v2(
+         execute_tasks(
              wf_task_list=workflow.task_list[
                  first_task_index : (last_task_index + 1)
              ],
@@ -123,4 +123,5 @@ def process_workflow(
              job_attribute_filters=job_attribute_filters,
              job_type_filters=job_type_filters,
              user_id=user_id,
+             resource_id=resource.id,
          )
fractal_server/runner/v2/runner.py CHANGED
@@ -14,11 +14,12 @@ from fractal_server.app.models.v2 import HistoryImageCache
  from fractal_server.app.models.v2 import HistoryRun
  from fractal_server.app.models.v2 import HistoryUnit
  from fractal_server.app.models.v2 import JobV2
+ from fractal_server.app.models.v2 import Resource
  from fractal_server.app.models.v2 import TaskGroupV2
  from fractal_server.app.models.v2 import WorkflowTaskV2
  from fractal_server.app.schemas.v2 import HistoryUnitStatus
- from fractal_server.app.schemas.v2 import TaskDumpV2
- from fractal_server.app.schemas.v2 import TaskGroupDumpV2
+ from fractal_server.app.schemas.v2 import TaskDump
+ from fractal_server.app.schemas.v2 import TaskGroupDump
  from fractal_server.app.schemas.v2 import TaskType
  from fractal_server.images import SingleImage
  from fractal_server.images.status_tools import IMAGE_STATUS_KEY
@@ -35,9 +36,9 @@ from fractal_server.types import AttributeFilters
  from .merge_outputs import merge_outputs
  from .runner_functions import GetRunnerConfigType
  from .runner_functions import SubmissionOutcome
- from .runner_functions import run_v2_task_compound
- from .runner_functions import run_v2_task_non_parallel
- from .runner_functions import run_v2_task_parallel
+ from .runner_functions import run_task_compound
+ from .runner_functions import run_task_non_parallel
+ from .runner_functions import run_task_parallel
  from .task_interface import TaskOutput
 
 
@@ -82,7 +83,7 @@ def get_origin_attribute_and_types(
      return updated_attributes, updated_types
 
 
- def execute_tasks_v2(
+ def execute_tasks(
      *,
      wf_task_list: list[WorkflowTaskV2],
      dataset: DatasetV2,
@@ -95,6 +96,7 @@ def execute_tasks_v2(
      get_runner_config: GetRunnerConfigType,
      job_type_filters: dict[str, bool],
      job_attribute_filters: AttributeFilters,
+     resource_id: int,
  ) -> None:
      logger = get_logger(logger_name=logger_name)
 
@@ -165,10 +167,10 @@ def execute_tasks_v2(
          # Create dumps for workflowtask and taskgroup
          workflowtask_dump = dict(
              **wftask.model_dump(exclude={"task"}),
-             task=TaskDumpV2(**wftask.task.model_dump()).model_dump(),
+             task=TaskDump(**wftask.task.model_dump()).model_dump(),
          )
          task_group = db.get(TaskGroupV2, wftask.task.taskgroupv2_id)
-         task_group_dump = TaskGroupDumpV2(
+         task_group_dump = TaskGroupDump(
              **task_group.model_dump()
          ).model_dump()
          # Create HistoryRun
@@ -211,20 +213,37 @@ def execute_tasks_v2(
                  f"attribute_filters={job_attribute_filters})."
              )
              logger.info(error_msg)
-             update_status_of_history_run(
-                 history_run_id=history_run_id,
-                 status=HistoryUnitStatus.FAILED,
-                 db_sync=db,
-             )
+             with next(get_sync_db()) as db:
+                 update_status_of_history_run(
+                     history_run_id=history_run_id,
+                     status=HistoryUnitStatus.FAILED,
+                     db_sync=db,
+                 )
              raise JobExecutionError(error_msg)
 
-         # TASK EXECUTION (V2)
+         # Fail if the resource is not open for new submissions
+         with next(get_sync_db()) as db:
+             resource = db.get(Resource, resource_id)
+             if resource.prevent_new_submissions:
+                 error_msg = (
+                     f"Cannot run '{task.name}', since the '{resource.name}' "
+                     "resource is not currently active."
+                 )
+                 logger.info(error_msg)
+                 update_status_of_history_run(
+                     history_run_id=history_run_id,
+                     status=HistoryUnitStatus.FAILED,
+                     db_sync=db,
+                 )
+                 raise JobExecutionError(error_msg)
+
+         # TASK EXECUTION
          try:
              if task.type in [
                  TaskType.NON_PARALLEL,
                  TaskType.CONVERTER_NON_PARALLEL,
              ]:
-                 outcomes_dict, num_tasks = run_v2_task_non_parallel(
+                 outcomes_dict, num_tasks = run_task_non_parallel(
                      images=filtered_images,
                      zarr_dir=zarr_dir,
                      wftask=wftask,
@@ -239,7 +258,7 @@ def execute_tasks_v2(
                      user_id=user_id,
                  )
              elif task.type == TaskType.PARALLEL:
-                 outcomes_dict, num_tasks = run_v2_task_parallel(
+                 outcomes_dict, num_tasks = run_task_parallel(
                      images=filtered_images,
                      wftask=wftask,
                      task=task,
@@ -255,7 +274,7 @@ def execute_tasks_v2(
                  TaskType.COMPOUND,
                  TaskType.CONVERTER_COMPOUND,
              ]:
-                 outcomes_dict, num_tasks = run_v2_task_compound(
+                 outcomes_dict, num_tasks = run_task_compound(
                      images=filtered_images,
                      zarr_dir=zarr_dir,
                      wftask=wftask,