fractal-server 2.18.0a4__py3-none-any.whl → 2.18.0a6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/v2/job.py +13 -2
- fractal_server/app/models/v2/resource.py +13 -0
- fractal_server/app/routes/admin/v2/__init__.py +10 -12
- fractal_server/app/routes/admin/v2/job.py +15 -15
- fractal_server/app/routes/admin/v2/task.py +7 -7
- fractal_server/app/routes/admin/v2/task_group.py +11 -11
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +20 -20
- fractal_server/app/routes/api/v2/__init__.py +47 -49
- fractal_server/app/routes/api/v2/_aux_functions.py +22 -47
- fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py +4 -4
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +2 -2
- fractal_server/app/routes/api/v2/dataset.py +62 -82
- fractal_server/app/routes/api/v2/history.py +7 -5
- fractal_server/app/routes/api/v2/job.py +12 -12
- fractal_server/app/routes/api/v2/project.py +11 -11
- fractal_server/app/routes/api/v2/status_legacy.py +15 -29
- fractal_server/app/routes/api/v2/submit.py +65 -66
- fractal_server/app/routes/api/v2/task.py +15 -17
- fractal_server/app/routes/api/v2/task_collection.py +18 -18
- fractal_server/app/routes/api/v2/task_collection_custom.py +11 -13
- fractal_server/app/routes/api/v2/task_collection_pixi.py +9 -9
- fractal_server/app/routes/api/v2/task_group.py +18 -18
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -26
- fractal_server/app/routes/api/v2/task_version_update.py +5 -5
- fractal_server/app/routes/api/v2/workflow.py +18 -18
- fractal_server/app/routes/api/v2/workflow_import.py +11 -11
- fractal_server/app/routes/api/v2/workflowtask.py +10 -10
- fractal_server/app/routes/auth/_aux_auth.py +99 -0
- fractal_server/app/routes/auth/users.py +9 -0
- fractal_server/app/schemas/user.py +1 -1
- fractal_server/app/schemas/v2/__init__.py +48 -48
- fractal_server/app/schemas/v2/dataset.py +35 -13
- fractal_server/app/schemas/v2/dumps.py +9 -9
- fractal_server/app/schemas/v2/job.py +11 -11
- fractal_server/app/schemas/v2/project.py +3 -3
- fractal_server/app/schemas/v2/resource.py +13 -4
- fractal_server/app/schemas/v2/status_legacy.py +3 -3
- fractal_server/app/schemas/v2/task.py +6 -6
- fractal_server/app/schemas/v2/task_collection.py +4 -4
- fractal_server/app/schemas/v2/task_group.py +16 -16
- fractal_server/app/schemas/v2/workflow.py +16 -16
- fractal_server/app/schemas/v2/workflowtask.py +14 -14
- fractal_server/app/shutdown.py +6 -6
- fractal_server/config/_main.py +1 -1
- fractal_server/data_migrations/2_18_0.py +2 -1
- fractal_server/images/models.py +1 -2
- fractal_server/main.py +20 -13
- fractal_server/migrations/versions/88270f589c9b_add_prevent_new_submissions.py +39 -0
- fractal_server/migrations/versions/f0702066b007_one_submitted_job_per_dataset.py +40 -0
- fractal_server/runner/v2/_local.py +3 -2
- fractal_server/runner/v2/_slurm_ssh.py +3 -2
- fractal_server/runner/v2/_slurm_sudo.py +3 -2
- fractal_server/runner/v2/runner.py +36 -17
- fractal_server/runner/v2/runner_functions.py +11 -14
- fractal_server/runner/v2/submit_workflow.py +22 -9
- fractal_server/tasks/v2/local/_utils.py +2 -2
- fractal_server/tasks/v2/local/collect.py +5 -6
- fractal_server/tasks/v2/local/collect_pixi.py +5 -6
- fractal_server/tasks/v2/local/deactivate.py +7 -7
- fractal_server/tasks/v2/local/deactivate_pixi.py +3 -3
- fractal_server/tasks/v2/local/delete.py +5 -5
- fractal_server/tasks/v2/local/reactivate.py +5 -5
- fractal_server/tasks/v2/local/reactivate_pixi.py +5 -5
- fractal_server/tasks/v2/ssh/collect.py +5 -5
- fractal_server/tasks/v2/ssh/collect_pixi.py +5 -5
- fractal_server/tasks/v2/ssh/deactivate.py +7 -7
- fractal_server/tasks/v2/ssh/deactivate_pixi.py +2 -2
- fractal_server/tasks/v2/ssh/delete.py +5 -5
- fractal_server/tasks/v2/ssh/reactivate.py +5 -5
- fractal_server/tasks/v2/ssh/reactivate_pixi.py +5 -5
- fractal_server/tasks/v2/utils_background.py +7 -7
- fractal_server/tasks/v2/utils_database.py +5 -5
- fractal_server/types/__init__.py +13 -4
- fractal_server/types/validators/__init__.py +3 -1
- fractal_server/types/validators/_common_validators.py +23 -1
- {fractal_server-2.18.0a4.dist-info → fractal_server-2.18.0a6.dist-info}/METADATA +1 -1
- {fractal_server-2.18.0a4.dist-info → fractal_server-2.18.0a6.dist-info}/RECORD +81 -79
- {fractal_server-2.18.0a4.dist-info → fractal_server-2.18.0a6.dist-info}/WHEEL +0 -0
- {fractal_server-2.18.0a4.dist-info → fractal_server-2.18.0a6.dist-info}/entry_points.txt +0 -0
- {fractal_server-2.18.0a4.dist-info → fractal_server-2.18.0a6.dist-info}/licenses/LICENSE +0 -0
fractal_server/app/schemas/v2/workflow.py CHANGED

@@ -5,29 +5,29 @@ from pydantic import ConfigDict
 from pydantic import field_serializer
 from pydantic.types import AwareDatetime
 
-from fractal_server.app.schemas.v2.project import ProjectReadV2
-from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskExportV2
-from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskImportV2
-from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskReadV2
+from fractal_server.app.schemas.v2.project import ProjectRead
+from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskExport
+from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskImport
+from fractal_server.app.schemas.v2.workflowtask import WorkflowTaskRead
 from fractal_server.app.schemas.v2.workflowtask import (
-    WorkflowTaskReadV2WithWarning,
+    WorkflowTaskReadWithWarning,
 )
 from fractal_server.types import ListUniqueNonNegativeInt
 from fractal_server.types import NonEmptyStr
 
 
-class WorkflowCreateV2(BaseModel):
+class WorkflowCreate(BaseModel):
     model_config = ConfigDict(extra="forbid")
 
     name: NonEmptyStr
 
 
-class WorkflowReadV2(BaseModel):
+class WorkflowRead(BaseModel):
     id: int
     name: str
     project_id: int
-    task_list: list[WorkflowTaskReadV2]
-    project: ProjectReadV2
+    task_list: list[WorkflowTaskRead]
+    project: ProjectRead
     timestamp_created: AwareDatetime
 
     @field_serializer("timestamp_created")

@@ -35,18 +35,18 @@ class WorkflowReadV2(BaseModel):
         return v.isoformat()
 
 
-class WorkflowReadV2WithWarnings(WorkflowReadV2):
-    task_list: list[WorkflowTaskReadV2WithWarning]
+class WorkflowReadWithWarnings(WorkflowRead):
+    task_list: list[WorkflowTaskReadWithWarning]
 
 
-class WorkflowUpdateV2(BaseModel):
+class WorkflowUpdate(BaseModel):
     model_config = ConfigDict(extra="forbid")
 
     name: NonEmptyStr = None
     reordered_workflowtask_ids: ListUniqueNonNegativeInt | None = None
 
 
-class WorkflowImportV2(BaseModel):
+class WorkflowImport(BaseModel):
     """
     Class for `Workflow` import.
 

@@ -56,10 +56,10 @@ class WorkflowImportV2(BaseModel):
 
     model_config = ConfigDict(extra="forbid")
     name: NonEmptyStr
-    task_list: list[WorkflowTaskImportV2]
+    task_list: list[WorkflowTaskImport]
 
 
-class WorkflowExportV2(BaseModel):
+class WorkflowExport(BaseModel):
     """
     Class for `Workflow` export.
 

@@ -68,4 +68,4 @@ class WorkflowExportV2(BaseModel):
     """
 
     name: str
-    task_list: list[WorkflowTaskExportV2]
+    task_list: list[WorkflowTaskExport]
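These hunks are a systematic rename that drops the `V2` suffix from the workflow schemas; the field definitions themselves are unchanged. A quick, hypothetical usage sketch of one renamed model (the workflow name is invented):

```python
from fractal_server.app.schemas.v2.workflow import WorkflowCreate

# Same validation behavior as the old WorkflowCreateV2, under the new name.
wf = WorkflowCreate(name="my-workflow")
print(wf.model_dump())  # {'name': 'my-workflow'}
```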
fractal_server/app/schemas/v2/workflowtask.py CHANGED

@@ -9,14 +9,14 @@ from fractal_server.types import DictStrAny
 from fractal_server.types import TypeFilters
 from fractal_server.types import WorkflowTaskArgument
 
-from .task import TaskExportV2
-from .task import TaskImportV2
-from .task import TaskImportV2Legacy
-from .task import TaskReadV2
+from .task import TaskExport
+from .task import TaskImport
+from .task import TaskImportLegacy
+from .task import TaskRead
 from .task import TaskType
 
 
-class WorkflowTaskCreateV2(BaseModel):
+class WorkflowTaskCreate(BaseModel):
     model_config = ConfigDict(extra="forbid")
 
     meta_non_parallel: DictStrAny | None = None

@@ -26,14 +26,14 @@ class WorkflowTaskCreateV2(BaseModel):
     type_filters: TypeFilters = Field(default_factory=dict)
 
 
-class WorkflowTaskReplaceV2(BaseModel):
+class WorkflowTaskReplace(BaseModel):
     """Used by 'replace-task' endpoint"""
 
     args_non_parallel: dict[str, Any] | None = None
     args_parallel: dict[str, Any] | None = None
 
 
-class WorkflowTaskReadV2(BaseModel):
+class WorkflowTaskRead(BaseModel):
     id: int
 
     workflow_id: int

@@ -48,14 +48,14 @@ class WorkflowTaskReadV2(BaseModel):
 
     task_type: TaskType
     task_id: int
-    task: TaskReadV2
+    task: TaskRead
 
 
-class WorkflowTaskReadV2WithWarning(WorkflowTaskReadV2):
+class WorkflowTaskReadWithWarning(WorkflowTaskRead):
     warning: str | None = None
 
 
-class WorkflowTaskUpdateV2(BaseModel):
+class WorkflowTaskUpdate(BaseModel):
     model_config = ConfigDict(extra="forbid")
 
     meta_non_parallel: DictStrAny | None = None

@@ -65,7 +65,7 @@ class WorkflowTaskUpdateV2(BaseModel):
     type_filters: TypeFilters = None
 
 
-class WorkflowTaskImportV2(BaseModel):
+class WorkflowTaskImport(BaseModel):
     model_config = ConfigDict(extra="forbid")
 
     meta_non_parallel: DictStrAny | None = None

@@ -75,7 +75,7 @@ class WorkflowTaskImportV2(BaseModel):
     type_filters: TypeFilters | None = None
     input_filters: dict[str, Any] | None = None
 
-    task: TaskImportV2 | TaskImportV2Legacy
+    task: TaskImport | TaskImportLegacy
 
     @model_validator(mode="before")
     @classmethod

@@ -106,11 +106,11 @@ class WorkflowTaskImportV2(BaseModel):
         return values
 
 
-class WorkflowTaskExportV2(BaseModel):
+class WorkflowTaskExport(BaseModel):
     meta_non_parallel: dict[str, Any] | None = None
     meta_parallel: dict[str, Any] | None = None
     args_non_parallel: dict[str, Any] | None = None
     args_parallel: dict[str, Any] | None = None
     type_filters: dict[str, bool] = Field(default_factory=dict)
 
-    task: TaskExportV2
+    task: TaskExport
fractal_server/app/shutdown.py CHANGED

@@ -4,26 +4,26 @@ from sqlmodel import select
 
 from fractal_server.app.db import get_async_db
 from fractal_server.app.models.v2 import JobV2
-from fractal_server.app.models.v2.job import JobStatusTypeV2
+from fractal_server.app.models.v2.job import JobStatusType
 from fractal_server.app.routes.aux._job import _write_shutdown_file
 from fractal_server.config import get_settings
 from fractal_server.logger import get_logger
 from fractal_server.syringe import Inject
 
 
-async def cleanup_after_shutdown(*, jobsV2: list[int], logger_name: str):
+async def cleanup_after_shutdown(*, jobs: list[int], logger_name: str):
     settings = Inject(get_settings)
     logger = get_logger(logger_name)
     logger.info("Cleanup function after shutdown")
     stm_objects = (
         select(JobV2)
-        .where(JobV2.id.in_(jobsV2))
-        .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
+        .where(JobV2.id.in_(jobs))
+        .where(JobV2.status == JobStatusType.SUBMITTED)
     )
     stm_ids = (
         select(JobV2.id)
-        .where(JobV2.id.in_(jobsV2))
-        .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
+        .where(JobV2.id.in_(jobs))
+        .where(JobV2.status == JobStatusType.SUBMITTED)
     )
 
     async for session in get_async_db():
fractal_server/config/_main.py CHANGED

@@ -28,7 +28,7 @@ class Settings(BaseSettings):
         Only logs of with this level (or higher) will appear in the console
         logs.
     FRACTAL_API_MAX_JOB_LIST_LENGTH:
-        Number of ids that can be stored in the `jobsV2` attribute of
+        Number of ids that can be stored in the `jobs` attribute of
         `app.state`.
     FRACTAL_GRACEFUL_SHUTDOWN_TIME:
         Waiting time for the shutdown phase of executors, in seconds.
fractal_server/data_migrations/2_18_0.py CHANGED

@@ -1,5 +1,6 @@
 import logging
 import sys
+from os.path import normpath
 
 from sqlalchemy.orm.attributes import flag_modified
 from sqlmodel import select

@@ -21,7 +22,7 @@ def fix_db():
         logging.info(f"Now handling user {user.email}.")
         if user.project_dirs != []:
             sys.exit(f"Non empty `project_dirs` for User[{user.id}]")
-        user.project_dirs.append(user.project_dir)
+        user.project_dirs.append(normpath(user.project_dir))
         flag_modified(user, "project_dirs")
 
     db.commit()
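The only functional change here is the `normpath` wrapper, which normalizes each `project_dir` before it is copied into `project_dirs`. A minimal illustration with invented POSIX paths:

```python
from os.path import normpath

# normpath collapses redundant separators and "." components and strips
# trailing slashes, so equivalent spellings map to one canonical form.
assert normpath("/home/user/project/") == "/home/user/project"
assert normpath("/home//user/./project") == "/home/user/project"
```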
fractal_server/images/models.py CHANGED

@@ -5,7 +5,6 @@ from fractal_server.types import DictStrAny
 from fractal_server.types import ImageAttributes
 from fractal_server.types import ImageAttributesWithNone
 from fractal_server.types import ImageTypes
-from fractal_server.types import ZarrDirStr
 from fractal_server.types import ZarrUrlStr
 
 

@@ -21,7 +20,7 @@ class SingleImageBase(BaseModel):
     """
 
     zarr_url: ZarrUrlStr
-    origin: ZarrDirStr | None = None
+    origin: ZarrUrlStr | None = None
 
     attributes: DictStrAny = Field(default_factory=dict)
     types: ImageTypes = Field(default_factory=dict)
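With `ZarrDirStr` dropped, `origin` is now validated with the same `ZarrUrlStr` type as `zarr_url`. A hypothetical construction (both paths are invented):

```python
from fractal_server.images import SingleImage

# `origin` now goes through the same ZarrUrlStr validation as `zarr_url`.
image = SingleImage(
    zarr_url="/data/plate.zarr/A/01/0_registered",
    origin="/data/plate.zarr/A/01/0",
)
```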
fractal_server/main.py CHANGED

@@ -33,16 +33,14 @@ def collect_routers(app: FastAPI) -> None:
     app:
         The application to register the routers to.
     """
-    from .app.routes.admin.v2 import router_admin_v2
+    from .app.routes.admin.v2 import router_admin
     from .app.routes.api import router_api
-    from .app.routes.api.v2 import router_api_v2
+    from .app.routes.api.v2 import router_api as router_api_v2
     from .app.routes.auth.router import router_auth
 
     app.include_router(router_api, prefix="/api")
     app.include_router(router_api_v2, prefix="/api/v2")
-    app.include_router(
-        router_admin_v2, prefix="/admin/v2", tags=["V2 Admin area"]
-    )
+    app.include_router(router_admin, prefix="/admin/v2", tags=["Admin area"])
     app.include_router(router_auth, prefix="/auth", tags=["Authentication"])
 
 

@@ -74,7 +72,7 @@ def check_settings() -> None:
 
 @asynccontextmanager
 async def lifespan(app: FastAPI):
-    app.state.jobsV2 = []
+    app.state.jobs = []
     logger = set_logger("fractal_server.lifespan")
     logger.info(f"[startup] START (fractal-server {__VERSION__})")
     check_settings()

@@ -111,12 +109,12 @@
 
     logger.info(
         f"[teardown] Current worker with pid {os.getpid()} is shutting down. "
-        f"Current jobs: {app.state.jobsV2=}"
+        f"Current jobs: {app.state.jobs=}"
     )
     if _backend_supports_shutdown(settings.FRACTAL_RUNNER_BACKEND):
         try:
             await cleanup_after_shutdown(
-                jobsV2=app.state.jobsV2,
+                jobs=app.state.jobs,
                 logger_name="fractal_server.lifespan",
             )
         except Exception as e:

@@ -136,7 +134,14 @@
 
 slow_response_logger = set_logger("slow-response")
 
 
+def _endpoint_has_background_task(method: str, path: str) -> bool:
+    has_background_task = (method == "POST") and (
+        "/job/submit/" in path
+        or "/task/collect/pi" in path  # "/pip" and "/pixi"
+        or "/task-group/" in path
+    )
+    return has_background_task
 
 
 class SlowResponseMiddleware:

@@ -145,8 +150,10 @@
         self.time_threshold = time_threshold
 
     async def __call__(self, scope: Scope, receive: Receive, send: Send):
-        if scope["type"] != "http":
-            # e.g. `scope["type"] == "lifespan"`
+        if (
+            scope["type"] != "http"  # e.g. `scope["type"] == "lifespan"`
+            or _endpoint_has_background_task(scope["method"], scope["path"])
+        ):
             await self.app(scope, receive, send)
             return
 

@@ -173,8 +180,8 @@
                 f"?{scope['query_string'].decode('utf-8')}, "
                 f"{context['status_code']}, "
                 f"{request_time:.2f}, "
-                f"{start_timestamp.isoformat()}, "
-                f"{end_timestamp.isoformat()}"
+                f"{start_timestamp.isoformat(timespec='milliseconds')}, "
+                f"{end_timestamp.isoformat(timespec='milliseconds')}"
             )
 
 
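The new `_endpoint_has_background_task` helper exempts POST endpoints that spawn background work from the slow-response middleware, which now passes such requests straight through instead of timing them. A self-contained sketch of its behavior (the predicate is copied from the hunk above; the example paths are illustrative, not taken from the actual routing table):

```python
def _endpoint_has_background_task(method: str, path: str) -> bool:
    # Same predicate as in the diff above.
    return (method == "POST") and (
        "/job/submit/" in path
        or "/task/collect/pi" in path  # matches both "/pip" and "/pixi"
        or "/task-group/" in path
    )

assert _endpoint_has_background_task("POST", "/api/v2/project/1/job/submit/")
assert _endpoint_has_background_task("POST", "/api/v2/task/collect/pixi/")
assert not _endpoint_has_background_task("GET", "/api/v2/project/1/job/submit/")
assert not _endpoint_has_background_task("POST", "/api/v2/project/1/dataset/")
```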
fractal_server/migrations/versions/88270f589c9b_add_prevent_new_submissions.py ADDED

@@ -0,0 +1,39 @@
+"""add_prevent_new_submissions
+
+Revision ID: 88270f589c9b
+Revises: f0702066b007
+Create Date: 2025-12-02 12:34:11.028259
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "88270f589c9b"
+down_revision = "f0702066b007"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("resource", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "prevent_new_submissions",
+                sa.BOOLEAN(),
+                server_default="false",
+                nullable=False,
+            )
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("resource", schema=None) as batch_op:
+        batch_op.drop_column("prevent_new_submissions")
+
+    # ### end Alembic commands ###
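The model-side counterpart of this column lives in `fractal_server/app/models/v2/resource.py` (listed above with +13 -0 but not shown in this diff). A minimal sketch of what the new field plausibly looks like there (column name and default are taken from the migration; everything else is an assumption):

```python
from sqlmodel import Field, SQLModel


class Resource(SQLModel, table=True):
    # Sketch only: the real model has more columns than shown here.
    id: int | None = Field(default=None, primary_key=True)
    name: str
    # Mirrors the Alembic column: BOOLEAN NOT NULL with server default "false".
    prevent_new_submissions: bool = Field(default=False)
```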
fractal_server/migrations/versions/f0702066b007_one_submitted_job_per_dataset.py ADDED

@@ -0,0 +1,40 @@
+"""One submitted Job per Dataset
+
+Revision ID: f0702066b007
+Revises: 7910eed4cf97
+Create Date: 2025-12-01 20:54:03.137093
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "f0702066b007"
+down_revision = "7910eed4cf97"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.create_index(
+            "ix_jobv2_one_submitted_job_per_dataset",
+            ["dataset_id"],
+            unique=True,
+            postgresql_where=sa.text("status = 'submitted'"),
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.drop_index(
+            "ix_jobv2_one_submitted_job_per_dataset",
+            postgresql_where=sa.text("status = 'submitted'"),
+        )
+
+    # ### end Alembic commands ###
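Combining `unique=True` with `postgresql_where` yields a partial unique index: uniqueness of `dataset_id` is enforced only across rows with `status = 'submitted'`, which is what guarantees at most one submitted job per dataset while leaving any number of finished jobs unconstrained. An equivalent standalone SQLAlchemy declaration (the `jobv2` table here is a simplified stand-in for the real one):

```python
import sqlalchemy as sa

metadata = sa.MetaData()
jobv2 = sa.Table(  # simplified stand-in for the real jobv2 table
    "jobv2",
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("dataset_id", sa.Integer),
    sa.Column("status", sa.String),
)

# On PostgreSQL this emits:
#   CREATE UNIQUE INDEX ix_jobv2_one_submitted_job_per_dataset
#       ON jobv2 (dataset_id) WHERE status = 'submitted'
sa.Index(
    "ix_jobv2_one_submitted_job_per_dataset",
    jobv2.c.dataset_id,
    unique=True,
    postgresql_where=sa.text("status = 'submitted'"),
)
```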
fractal_server/runner/v2/_local.py CHANGED

@@ -14,7 +14,7 @@ from fractal_server.runner.set_start_and_last_task_index import (
 from fractal_server.ssh._fabric import FractalSSH
 from fractal_server.types import AttributeFilters
 
-from .runner import execute_tasks_v2
+from .runner import execute_tasks
 
 
 def process_workflow(

@@ -90,7 +90,7 @@ def process_workflow(
         resource=resource,
         profile=profile,
     ) as runner:
-        execute_tasks_v2(
+        execute_tasks(
             wf_task_list=workflow.task_list[
                 first_task_index : (last_task_index + 1)
             ],

@@ -104,4 +104,5 @@
             job_attribute_filters=job_attribute_filters,
             job_type_filters=job_type_filters,
             user_id=user_id,
+            resource_id=resource.id,
         )
fractal_server/runner/v2/_slurm_ssh.py CHANGED

@@ -33,7 +33,7 @@ from fractal_server.runner.set_start_and_last_task_index import (
 from fractal_server.ssh._fabric import FractalSSH
 from fractal_server.types import AttributeFilters
 
-from .runner import execute_tasks_v2
+from .runner import execute_tasks
 
 logger = set_logger(__name__)
 

@@ -113,7 +113,7 @@ def process_workflow(
         common_script_lines=worker_init,
         user_cache_dir=user_cache_dir,
     ) as runner:
-        execute_tasks_v2(
+        execute_tasks(
             wf_task_list=workflow.task_list[
                 first_task_index : (last_task_index + 1)
             ],

@@ -127,4 +127,5 @@
             job_attribute_filters=job_attribute_filters,
             job_type_filters=job_type_filters,
             user_id=user_id,
+            resource_id=resource.id,
         )
fractal_server/runner/v2/_slurm_sudo.py CHANGED

@@ -32,7 +32,7 @@ from fractal_server.runner.set_start_and_last_task_index import (
 from fractal_server.ssh._fabric import FractalSSH
 from fractal_server.types import AttributeFilters
 
-from .runner import execute_tasks_v2
+from .runner import execute_tasks
 
 
 def process_workflow(

@@ -109,7 +109,7 @@ def process_workflow(
         user_cache_dir=user_cache_dir,
         slurm_account=slurm_account,
     ) as runner:
-        execute_tasks_v2(
+        execute_tasks(
             wf_task_list=workflow.task_list[
                 first_task_index : (last_task_index + 1)
             ],

@@ -123,4 +123,5 @@
             job_attribute_filters=job_attribute_filters,
             job_type_filters=job_type_filters,
             user_id=user_id,
+            resource_id=resource.id,
         )
fractal_server/runner/v2/runner.py CHANGED

@@ -14,11 +14,12 @@ from fractal_server.app.models.v2 import HistoryImageCache
 from fractal_server.app.models.v2 import HistoryRun
 from fractal_server.app.models.v2 import HistoryUnit
 from fractal_server.app.models.v2 import JobV2
+from fractal_server.app.models.v2 import Resource
 from fractal_server.app.models.v2 import TaskGroupV2
 from fractal_server.app.models.v2 import WorkflowTaskV2
 from fractal_server.app.schemas.v2 import HistoryUnitStatus
-from fractal_server.app.schemas.v2 import TaskDumpV2
-from fractal_server.app.schemas.v2 import TaskGroupDumpV2
+from fractal_server.app.schemas.v2 import TaskDump
+from fractal_server.app.schemas.v2 import TaskGroupDump
 from fractal_server.app.schemas.v2 import TaskType
 from fractal_server.images import SingleImage
 from fractal_server.images.status_tools import IMAGE_STATUS_KEY

@@ -35,9 +36,9 @@ from fractal_server.types import AttributeFilters
 from .merge_outputs import merge_outputs
 from .runner_functions import GetRunnerConfigType
 from .runner_functions import SubmissionOutcome
-from .runner_functions import run_v2_task_compound
-from .runner_functions import run_v2_task_non_parallel
-from .runner_functions import run_v2_task_parallel
+from .runner_functions import run_task_compound
+from .runner_functions import run_task_non_parallel
+from .runner_functions import run_task_parallel
 from .task_interface import TaskOutput
 
 

@@ -82,7 +83,7 @@ def get_origin_attribute_and_types(
     return updated_attributes, updated_types
 
 
-def execute_tasks_v2(
+def execute_tasks(
     *,
     wf_task_list: list[WorkflowTaskV2],
     dataset: DatasetV2,

@@ -95,6 +96,7 @@ def execute_tasks_v2(
     get_runner_config: GetRunnerConfigType,
     job_type_filters: dict[str, bool],
     job_attribute_filters: AttributeFilters,
+    resource_id: int,
 ) -> None:
     logger = get_logger(logger_name=logger_name)
 

@@ -165,10 +167,10 @@ def execute_tasks_v2(
         # Create dumps for workflowtask and taskgroup
         workflowtask_dump = dict(
             **wftask.model_dump(exclude={"task"}),
-            task=TaskDumpV2(**wftask.task.model_dump()).model_dump(),
+            task=TaskDump(**wftask.task.model_dump()).model_dump(),
         )
         task_group = db.get(TaskGroupV2, wftask.task.taskgroupv2_id)
-        task_group_dump = TaskGroupDumpV2(
+        task_group_dump = TaskGroupDump(
             **task_group.model_dump()
         ).model_dump()
         # Create HistoryRun

@@ -211,20 +213,37 @@ def execute_tasks_v2(
                 f"attribute_filters={job_attribute_filters})."
             )
             logger.info(error_msg)
-            update_status_of_history_run(
-                history_run_id=history_run_id,
-                status=HistoryUnitStatus.FAILED,
-                db_sync=db,
-            )
+            with next(get_sync_db()) as db:
+                update_status_of_history_run(
+                    history_run_id=history_run_id,
+                    status=HistoryUnitStatus.FAILED,
+                    db_sync=db,
+                )
             raise JobExecutionError(error_msg)
 
-        # TASK EXECUTION
+        # Fail if the resource is not open for new submissions
+        with next(get_sync_db()) as db:
+            resource = db.get(Resource, resource_id)
+            if resource.prevent_new_submissions:
+                error_msg = (
+                    f"Cannot run '{task.name}', since the '{resource.name}' "
+                    "resource is not currently active."
+                )
+                logger.info(error_msg)
+                update_status_of_history_run(
+                    history_run_id=history_run_id,
+                    status=HistoryUnitStatus.FAILED,
+                    db_sync=db,
+                )
+                raise JobExecutionError(error_msg)
+
+        # TASK EXECUTION
         try:
             if task.type in [
                 TaskType.NON_PARALLEL,
                 TaskType.CONVERTER_NON_PARALLEL,
             ]:
-                outcomes_dict, num_tasks = run_v2_task_non_parallel(
+                outcomes_dict, num_tasks = run_task_non_parallel(
                     images=filtered_images,
                     zarr_dir=zarr_dir,
                     wftask=wftask,

@@ -239,7 +258,7 @@ def execute_tasks_v2(
                     user_id=user_id,
                 )
             elif task.type == TaskType.PARALLEL:
-                outcomes_dict, num_tasks = run_v2_task_parallel(
+                outcomes_dict, num_tasks = run_task_parallel(
                     images=filtered_images,
                     wftask=wftask,
                     task=task,

@@ -255,7 +274,7 @@
                 TaskType.COMPOUND,
                 TaskType.CONVERTER_COMPOUND,
             ]:
-                outcomes_dict, num_tasks = run_v2_task_compound(
+                outcomes_dict, num_tasks = run_task_compound(
                     images=filtered_images,
                     zarr_dir=zarr_dir,
                     wftask=wftask,
|