fractal-server 2.19.0a2__py3-none-any.whl → 2.19.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/v2/dataset.py +0 -4
- fractal_server/app/models/v2/job.py +4 -0
- fractal_server/app/models/v2/task_group.py +4 -0
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +6 -0
- fractal_server/app/routes/api/v2/__init__.py +0 -2
- fractal_server/app/routes/api/v2/_aux_functions.py +6 -0
- fractal_server/app/routes/api/v2/submit.py +2 -0
- fractal_server/app/routes/api/v2/task_collection.py +2 -0
- fractal_server/app/routes/api/v2/task_collection_pixi.py +2 -0
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +6 -0
- fractal_server/app/routes/api/v2/workflowtask.py +2 -0
- fractal_server/app/schemas/v2/workflow.py +4 -1
- fractal_server/app/schemas/v2/workflowtask.py +9 -2
- fractal_server/app/security/__init__.py +11 -6
- fractal_server/migrations/versions/18a26fcdea5d_drop_dataset_history.py +41 -0
- fractal_server/migrations/versions/cfd13f7954e7_add_fractal_server_version_to_jobv2_and_.py +52 -0
- fractal_server/runner/config/_slurm.py +2 -0
- fractal_server/runner/executors/slurm_common/base_slurm_runner.py +1 -0
- fractal_server/runner/executors/slurm_common/get_slurm_config.py +1 -0
- fractal_server/runner/executors/slurm_common/slurm_config.py +3 -0
- fractal_server/runner/v2/submit_workflow.py +0 -2
- {fractal_server-2.19.0a2.dist-info → fractal_server-2.19.1.dist-info}/METADATA +1 -1
- {fractal_server-2.19.0a2.dist-info → fractal_server-2.19.1.dist-info}/RECORD +27 -26
- fractal_server/app/routes/api/v2/status_legacy.py +0 -156
- {fractal_server-2.19.0a2.dist-info → fractal_server-2.19.1.dist-info}/WHEEL +0 -0
- {fractal_server-2.19.0a2.dist-info → fractal_server-2.19.1.dist-info}/entry_points.txt +0 -0
- {fractal_server-2.19.0a2.dist-info → fractal_server-2.19.1.dist-info}/licenses/LICENSE +0 -0
fractal_server/__init__.py
CHANGED
```diff
@@ -1 +1 @@
-__VERSION__ = "2.19.0a2"
+__VERSION__ = "2.19.1"
```
fractal_server/app/models/v2/dataset.py
CHANGED
```diff
@@ -27,10 +27,6 @@ class DatasetV2(SQLModel, table=True):
         sa_relationship_kwargs=dict(lazy="selectin"),
     )
 
-    history: list[dict[str, Any]] = Field(
-        sa_column=Column(JSONB, server_default="[]", nullable=False)
-    )
-
     timestamp_created: datetime = Field(
         default_factory=get_timestamp,
         sa_column=Column(DateTime(timezone=True), nullable=False),
```
fractal_server/app/models/v2/job.py
CHANGED
```diff
@@ -5,6 +5,7 @@ from pydantic import ConfigDict
 from sqlalchemy import Column
 from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.types import DateTime
+from sqlalchemy.types import String
 from sqlmodel import Field
 from sqlmodel import Index
 from sqlmodel import SQLModel
@@ -44,6 +45,9 @@ class JobV2(SQLModel, table=True):
     project_dump: dict[str, Any] = Field(
         sa_column=Column(JSONB, nullable=False)
     )
+    fractal_server_version: str = Field(
+        sa_column=Column(String, server_default="pre-2.19.0", nullable=False)
+    )
 
     worker_init: str | None = None
     working_dir: str | None = None
```
fractal_server/app/models/v2/task_group.py
CHANGED
```diff
@@ -4,6 +4,7 @@ from datetime import timezone
 from sqlalchemy import Column
 from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.types import DateTime
+from sqlalchemy.types import String
 from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
@@ -151,3 +152,6 @@ class TaskGroupActivityV2(SQLModel, table=True):
         default=None,
         sa_column=Column(DateTime(timezone=True)),
     )
+    fractal_server_version: str = Field(
+        sa_column=Column(String, server_default="pre-2.19.0", nullable=False)
+    )
```
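Both new `fractal_server_version` columns rely on `server_default="pre-2.19.0"`: rows that already exist when the column is added are backfilled with a sentinel value by the database, while rows written by the 2.19.1 code paths carry the running version explicitly. The following is a minimal, self-contained sketch of that behavior; the `ActivityExample` table is invented for illustration and is not fractal-server code.

```python
# Sketch (not fractal-server code): database-side default vs. explicit value.
from sqlalchemy import Column, String, text
from sqlmodel import Field, Session, SQLModel, create_engine


class ActivityExample(SQLModel, table=True):
    id: int | None = Field(default=None, primary_key=True)
    fractal_server_version: str = Field(
        sa_column=Column(String, server_default="pre-2.19.0", nullable=False)
    )


engine = create_engine("sqlite://")
SQLModel.metadata.create_all(engine)

with Session(engine) as session:
    # An INSERT that omits the column falls back to the database default,
    # mirroring how pre-existing rows are backfilled by the migration.
    session.execute(text("INSERT INTO activityexample DEFAULT VALUES"))
    # New rows written by the 2.19.1 endpoints pass __VERSION__ explicitly.
    session.add(ActivityExample(fractal_server_version="2.19.1"))
    session.commit()
    rows = session.execute(
        text("SELECT fractal_server_version FROM activityexample ORDER BY id")
    ).all()
    print(rows)  # [('pre-2.19.0',), ('2.19.1',)]
```

Using a server-side default keeps the new column `NOT NULL` without requiring a data migration for historical rows.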
fractal_server/app/routes/admin/v2/task_group_lifecycle.py
CHANGED
```diff
@@ -5,6 +5,7 @@ from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
 
+from fractal_server import __VERSION__
 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
 from fractal_server.app.models import UserOAuth
@@ -93,6 +94,7 @@ async def deactivate_task_group(
         ),
         timestamp_started=get_timestamp(),
         timestamp_ended=get_timestamp(),
+        fractal_server_version=__VERSION__,
     )
     db.add(task_group)
     db.add(task_group_activity)
@@ -108,6 +110,7 @@ async def deactivate_task_group(
         pkg_name=task_group.pkg_name,
         version=task_group.version,
         timestamp_started=get_timestamp(),
+        fractal_server_version=__VERSION__,
     )
     db.add(task_group_activity)
     await db.commit()
@@ -188,6 +191,7 @@ async def reactivate_task_group(
         ),
         timestamp_started=get_timestamp(),
         timestamp_ended=get_timestamp(),
+        fractal_server_version=__VERSION__,
     )
     db.add(task_group)
     db.add(task_group_activity)
@@ -211,6 +215,7 @@ async def reactivate_task_group(
         pkg_name=task_group.pkg_name,
         version=task_group.version,
         timestamp_started=get_timestamp(),
+        fractal_server_version=__VERSION__,
     )
     db.add(task_group_activity)
     await db.commit()
@@ -264,6 +269,7 @@ async def delete_task_group(
         pkg_name=task_group.pkg_name,
         version=(task_group.version or "N/A"),
         timestamp_started=get_timestamp(),
+        fractal_server_version=__VERSION__,
     )
     db.add(task_group_activity)
     await db.commit()
```
fractal_server/app/routes/api/v2/__init__.py
CHANGED
```diff
@@ -14,7 +14,6 @@ from .job import router as job_router
 from .pre_submission_checks import router as pre_submission_checks_router
 from .project import router as project_router
 from .sharing import router as sharing_router
-from .status_legacy import router as status_legacy_router
 from .submit import router as submit_job_router
 from .task import router as task_router
 from .task_collection import router as task_collection_router
@@ -37,7 +36,6 @@ router_api.include_router(sharing_router, tags=["Project Sharing"])
 router_api.include_router(project_router, tags=["Project"])
 router_api.include_router(submit_job_router, tags=["Job"])
 router_api.include_router(history_router, tags=["History"])
-router_api.include_router(status_legacy_router, tags=["Status Legacy"])
 
 
 settings = Inject(get_settings)
```
fractal_server/app/routes/api/v2/_aux_functions.py
CHANGED
```diff
@@ -400,6 +400,8 @@ async def _workflow_insert_task(
     args_non_parallel: dict[str, Any] | None = None,
     args_parallel: dict[str, Any] | None = None,
     type_filters: dict[str, bool] | None = None,
+    description: str | None = None,
+    alias: str | None = None,
     db: AsyncSession,
 ) -> WorkflowTaskV2:
     """
@@ -414,6 +416,8 @@ async def _workflow_insert_task(
         args_non_parallel:
         args_parallel:
         type_filters:
+        description:
+        alias:
         db:
     """
     db_workflow = await db.get(WorkflowV2, workflow_id)
@@ -448,6 +452,8 @@ async def _workflow_insert_task(
         meta_parallel=final_meta_parallel,
         meta_non_parallel=final_meta_non_parallel,
         type_filters=(type_filters or dict()),
+        description=description,
+        alias=alias,
     )
     db_workflow.task_list.append(wf_task)
     flag_modified(db_workflow, "task_list")
```
fractal_server/app/routes/api/v2/submit.py
CHANGED
```diff
@@ -11,6 +11,7 @@ from fastapi import status
 from sqlmodel import select
 from sqlmodel import update
 
+from fractal_server import __VERSION__
 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
 from fractal_server.app.models import Profile
@@ -221,6 +222,7 @@ async def submit_job(
         project_dump=json.loads(
             project.model_dump_json(exclude={"resource_id"})
         ),
+        fractal_server_version=__VERSION__,
         **job_create.model_dump(),
     )
 
```
fractal_server/app/routes/api/v2/task_collection.py
CHANGED
```diff
@@ -14,6 +14,7 @@ from pydantic import BaseModel
 from pydantic import ValidationError
 from pydantic import model_validator
 
+from fractal_server import __VERSION__
 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
 from fractal_server.app.models import UserOAuth
@@ -332,6 +333,7 @@ async def collect_tasks_pip(
         action=TaskGroupActivityAction.COLLECT,
         pkg_name=task_group.pkg_name,
         version=task_group.version,
+        fractal_server_version=__VERSION__,
     )
     db.add(task_group_activity)
     await db.commit()
```
fractal_server/app/routes/api/v2/task_collection_pixi.py
CHANGED
```diff
@@ -10,6 +10,7 @@ from fastapi import Response
 from fastapi import UploadFile
 from fastapi import status
 
+from fractal_server import __VERSION__
 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
 from fractal_server.app.models import UserOAuth
@@ -182,6 +183,7 @@ async def collect_task_pixi(
         action=TaskGroupActivityAction.COLLECT,
         pkg_name=task_group.pkg_name,
         version=task_group.version,
+        fractal_server_version=__VERSION__,
     )
     db.add(task_group_activity)
     await db.commit()
```
fractal_server/app/routes/api/v2/task_group_lifecycle.py
CHANGED
```diff
@@ -5,6 +5,7 @@ from fastapi import HTTPException
 from fastapi import Response
 from fastapi import status
 
+from fractal_server import __VERSION__
 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
 from fractal_server.app.models import UserOAuth
@@ -99,6 +100,7 @@ async def deactivate_task_group(
         ),
         timestamp_started=get_timestamp(),
         timestamp_ended=get_timestamp(),
+        fractal_server_version=__VERSION__,
     )
     db.add(task_group)
     db.add(task_group_activity)
@@ -114,6 +116,7 @@ async def deactivate_task_group(
         pkg_name=task_group.pkg_name,
         version=task_group.version,
         timestamp_started=get_timestamp(),
+        fractal_server_version=__VERSION__,
     )
     task_group.active = False
     db.add(task_group)
@@ -202,6 +205,7 @@ async def reactivate_task_group(
         ),
         timestamp_started=get_timestamp(),
         timestamp_ended=get_timestamp(),
+        fractal_server_version=__VERSION__,
     )
     db.add(task_group)
     db.add(task_group_activity)
@@ -225,6 +229,7 @@ async def reactivate_task_group(
         pkg_name=task_group.pkg_name,
         version=task_group.version,
         timestamp_started=get_timestamp(),
+        fractal_server_version=__VERSION__,
     )
     db.add(task_group_activity)
     await db.commit()
@@ -288,6 +293,7 @@ async def delete_task_group(
         pkg_name=task_group.pkg_name,
         version=(task_group.version or "N/A"),
         timestamp_started=get_timestamp(),
+        fractal_server_version=__VERSION__,
     )
     db.add(task_group_activity)
     await db.commit()
```
fractal_server/app/schemas/v2/workflow.py
CHANGED
```diff
@@ -20,6 +20,7 @@ class WorkflowCreate(BaseModel):
     model_config = ConfigDict(extra="forbid")
 
     name: NonEmptyStr
+    description: NonEmptyStr | None = None
 
 
 class WorkflowRead(BaseModel):
@@ -45,7 +46,7 @@ class WorkflowUpdate(BaseModel):
 
     name: NonEmptyStr = None
     reordered_workflowtask_ids: ListUniqueNonNegativeInt | None = None
-    description:
+    description: NonEmptyStr | None = None
 
 
 class WorkflowImport(BaseModel):
@@ -58,6 +59,7 @@ class WorkflowImport(BaseModel):
 
     model_config = ConfigDict(extra="forbid")
     name: NonEmptyStr
+    description: NonEmptyStr | None = None
     task_list: list[WorkflowTaskImport]
 
 
@@ -70,4 +72,5 @@ class WorkflowExport(BaseModel):
     """
 
     name: str
+    description: str | None
     task_list: list[WorkflowTaskExport]
```
fractal_server/app/schemas/v2/workflowtask.py
CHANGED
```diff
@@ -6,6 +6,7 @@ from pydantic import Field
 from pydantic import model_validator
 
 from fractal_server.types import DictStrAny
+from fractal_server.types import NonEmptyStr
 from fractal_server.types import TypeFilters
 from fractal_server.types import WorkflowTaskArgument
 
@@ -23,6 +24,8 @@ class WorkflowTaskCreate(BaseModel):
     args_non_parallel: WorkflowTaskArgument | None = None
     args_parallel: WorkflowTaskArgument | None = None
     type_filters: TypeFilters = Field(default_factory=dict)
+    description: NonEmptyStr | None = None
+    alias: NonEmptyStr | None = None
 
 
 class WorkflowTaskReplace(BaseModel):
@@ -65,8 +68,8 @@ class WorkflowTaskUpdate(BaseModel):
     args_non_parallel: WorkflowTaskArgument | None = None
     args_parallel: WorkflowTaskArgument | None = None
     type_filters: TypeFilters = None
-    description:
-    alias:
+    description: NonEmptyStr | None = None
+    alias: NonEmptyStr | None = None
 
 
 class WorkflowTaskImport(BaseModel):
@@ -78,6 +81,8 @@ class WorkflowTaskImport(BaseModel):
     args_parallel: DictStrAny | None = None
     type_filters: TypeFilters | None = None
    input_filters: dict[str, Any] | None = None
+    description: NonEmptyStr | None = None
+    alias: NonEmptyStr | None = None
 
     task: TaskImport
 
@@ -116,5 +121,7 @@ class WorkflowTaskExport(BaseModel):
     args_non_parallel: dict[str, Any] | None = None
     args_parallel: dict[str, Any] | None = None
     type_filters: dict[str, bool] = Field(default_factory=dict)
+    description: str | None = None
+    alias: str | None = None
 
     task: TaskExport
```
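The schema changes above make `description` and `alias` optional, non-empty strings on workflow and workflow-task payloads. A rough sketch of what this allows in a create payload, assuming `fractal_server.types.NonEmptyStr` behaves like a pydantic-constrained non-empty string; the real `WorkflowTaskCreate` schema has more fields than shown here.

```python
# Hypothetical, trimmed-down stand-in for the new optional fields.
from typing import Annotated

from pydantic import BaseModel, StringConstraints, ValidationError

# Assumption: NonEmptyStr is essentially a string with min_length=1.
NonEmptyStr = Annotated[str, StringConstraints(min_length=1)]


class WorkflowTaskCreateSketch(BaseModel):
    description: NonEmptyStr | None = None
    alias: NonEmptyStr | None = None


print(WorkflowTaskCreateSketch(description="Segment nuclei", alias="segmentation"))
try:
    WorkflowTaskCreateSketch(alias="")  # empty strings are rejected
except ValidationError as e:
    print(e.errors()[0]["type"])  # string_too_short
```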
fractal_server/app/security/__init__.py
CHANGED
```diff
@@ -191,17 +191,22 @@ class UserManager(IntegerIDMixin, BaseUserManager[UserOAuth, int]):
 
     @override
     async def validate_password(self, password: str, user: UserOAuth) -> None:
-        # check password length
         min_length = 4
-
-        if
+        len_password = len(password)
+        if len_password < min_length:
             raise InvalidPasswordException(
-
+                "The password is too short "
+                f"(length = {len_password}, minimum length = {min_length})."
             )
-
+        max_length_in_bytes = 72
+        len_password_in_bytes = len(password.encode("utf-8"))
+        if len_password_in_bytes > max_length_in_bytes:
+            # See:
+            # https://github.com/pyca/bcrypt/blob/f0451e42e3ab6f6e1b9ac8b09bf04104bf8bdef8/src/_bcrypt/src/lib.rs#L85-L89
             raise InvalidPasswordException(
                 "The password is too long "
-                f"(
+                f"(length = {len_password_in_bytes} bytes, "
+                f"maximum length = {max_length_in_bytes} bytes)."
             )
 
     @override
```
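The new upper bound counts UTF-8 bytes rather than characters, matching the 72-byte bcrypt limit referenced in the inline comment: multi-byte characters make the encoded password longer than `len(password)` suggests. A standalone sketch of the same two checks (not the fractal-server implementation itself):

```python
# Sketch of the length checks introduced above: the lower bound counts
# characters, the upper bound counts UTF-8 bytes.
MIN_LENGTH = 4
MAX_LENGTH_IN_BYTES = 72


def validate_password_length(password: str) -> None:
    if len(password) < MIN_LENGTH:
        raise ValueError(
            f"The password is too short (length = {len(password)}, "
            f"minimum length = {MIN_LENGTH})."
        )
    n_bytes = len(password.encode("utf-8"))
    if n_bytes > MAX_LENGTH_IN_BYTES:
        raise ValueError(
            f"The password is too long (length = {n_bytes} bytes, "
            f"maximum length = {MAX_LENGTH_IN_BYTES} bytes)."
        )


# 24 characters of 3 UTF-8 bytes each -> exactly 72 bytes: accepted.
validate_password_length("€" * 24)
# 25 such characters -> 75 bytes: rejected despite being only 25 characters.
try:
    validate_password_length("€" * 25)
except ValueError as e:
    print(e)
```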
fractal_server/migrations/versions/18a26fcdea5d_drop_dataset_history.py
ADDED
```diff
@@ -0,0 +1,41 @@
+"""drop dataset.history
+
+Revision ID: 18a26fcdea5d
+Revises: 1bf8785755f9
+Create Date: 2026-01-29 10:15:18.467384
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "18a26fcdea5d"
+down_revision = "1bf8785755f9"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.drop_column("history")
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "history",
+                postgresql.JSONB(astext_type=sa.Text()),
+                server_default=sa.text("'[]'::json"),
+                autoincrement=False,
+                nullable=False,
+            )
+        )
+
+    # ### end Alembic commands ###
```
fractal_server/migrations/versions/cfd13f7954e7_add_fractal_server_version_to_jobv2_and_.py
ADDED
```diff
@@ -0,0 +1,52 @@
+"""add fractal_server_version to jobv2 and taskgroupactivityv2
+
+Revision ID: cfd13f7954e7
+Revises: 18a26fcdea5d
+Create Date: 2026-01-29 12:33:00.064562
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "cfd13f7954e7"
+down_revision = "18a26fcdea5d"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "fractal_server_version",
+                sa.String(),
+                server_default="pre-2.19.0",
+                nullable=False,
+            )
+        )
+
+    with op.batch_alter_table("taskgroupactivityv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "fractal_server_version",
+                sa.String(),
+                server_default="pre-2.19.0",
+                nullable=False,
+            )
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("taskgroupactivityv2", schema=None) as batch_op:
+        batch_op.drop_column("fractal_server_version")
+
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.drop_column("fractal_server_version")
+
+    # ### end Alembic commands ###
```
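Both new revisions chain onto `1bf8785755f9`. With a standard Alembic setup they can be applied or rolled back through Alembic's Python API; the `alembic.ini` path below is an assumption for this example, and fractal-server deployments may instead drive migrations through their own tooling.

```python
# Hedged sketch: applying the new revisions with the generic Alembic API.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # path is an assumption for this example

# Upgrades through 18a26fcdea5d (drop dataset.history) and then
# cfd13f7954e7 (add the fractal_server_version columns).
command.upgrade(cfg, "head")

# To undo only the fractal_server_version columns, step back one revision:
# command.downgrade(cfg, "18a26fcdea5d")
```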
fractal_server/runner/config/_slurm.py
CHANGED
```diff
@@ -31,6 +31,7 @@ class SlurmConfigSet(BaseModel):
         time:
         exclude:
         nodelist:
+        nodes:
         account:
         extra_lines:
         gpus:
@@ -47,6 +48,7 @@ class SlurmConfigSet(BaseModel):
     gres: NonEmptyStr | None = None
     exclude: NonEmptyStr | None = None
     nodelist: NonEmptyStr | None = None
+    nodes: int | None = None
     time: NonEmptyStr | None = None
     account: NonEmptyStr | None = None
     extra_lines: list[NonEmptyStr] = Field(default_factory=list)
```
fractal_server/runner/executors/slurm_common/slurm_config.py
CHANGED
```diff
@@ -39,6 +39,7 @@ class SlurmConfig(BaseModel):
         gpus: Corresponds to SLURM option.
         time: Corresponds to SLURM option (WARNING: not fully supported).
         nodelist: Corresponds to SLURM option.
+        nodes: Corresponds to SLURM option.
         exclude: Corresponds to SLURM option.
         prefix: Prefix of configuration lines in SLURM submission scripts.
         shebang_line: Shebang line for SLURM submission scripts.
@@ -87,6 +88,7 @@ class SlurmConfig(BaseModel):
     time: str | None = None
     account: str | None = None
     nodelist: str | None = None
+    nodes: int | None = None
     exclude: str | None = None
 
     # Free-field attribute for extra lines to be added to the SLURM job
@@ -188,6 +190,7 @@ class SlurmConfig(BaseModel):
             "account",
             "exclude",
             "nodelist",
+            "nodes",
         ]:
             value = getattr(self, key)
             if value is not None:
```
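The new `nodes` field is handled like `account`, `exclude`, and `nodelist`: when it is not `None`, it becomes one more SLURM directive in the generated submission script. The exact rendering lives in `SlurmConfig` methods that are not part of this diff; the standalone sketch below only mirrors that apparent pattern, with invented helper and option names.

```python
# Sketch (not the SlurmConfig implementation): options that are set are
# rendered as one "#SBATCH --<key>=<value>" line each.
from typing import Any


def sbatch_preamble(options: dict[str, Any]) -> list[str]:
    lines = ["#!/bin/bash"]
    for key in ["partition", "account", "exclude", "nodelist", "nodes"]:
        value = options.get(key)
        if value is not None:
            lines.append(f"#SBATCH --{key}={value}")
    return lines


print("\n".join(sbatch_preamble({"partition": "main", "nodes": 1})))
# #!/bin/bash
# #SBATCH --partition=main
# #SBATCH --nodes=1
```

SLURM's `sbatch` accepts `--nodes=<N>` to fix the number of allocated nodes, which is what this option ultimately controls.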
fractal_server/runner/v2/submit_workflow.py
CHANGED
```diff
@@ -13,7 +13,6 @@ from typing import Protocol
 
 from sqlalchemy.orm import Session as DBSyncSession
 
-from fractal_server import __VERSION__
 from fractal_server.app.db import DB
 from fractal_server.app.models.v2 import DatasetV2
 from fractal_server.app.models.v2 import JobV2
@@ -224,7 +223,6 @@ def submit_workflow(
         f'Start execution of workflow "{workflow.name}"; '
         f"more logs at {str(log_file_path)}"
     )
-    logger.debug(f"fractal_server.__VERSION__: {__VERSION__}")
     logger.debug(f"Resource name: {resource.name}")
     logger.debug(f"Profile name: {profile.name}")
     logger.debug(f"Username: {profile.username}")
```
{fractal_server-2.19.0a2.dist-info → fractal_server-2.19.1.dist-info}/METADATA
CHANGED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fractal-server
-Version: 2.19.0a2
+Version: 2.19.1
 Summary: Backend component of the Fractal analytics platform
 Author: Tommaso Comparin, Marco Franzon, Yuri Chiucconi, Jacopo Nespolo
 Author-email: Tommaso Comparin <tommaso.comparin@exact-lab.it>, Marco Franzon <marco.franzon@exact-lab.it>, Yuri Chiucconi <yuri.chiucconi@exact-lab.it>, Jacopo Nespolo <jacopo.nespolo@exact-lab.it>
```
{fractal_server-2.19.0a2.dist-info → fractal_server-2.19.1.dist-info}/RECORD
CHANGED
```diff
@@ -1,4 +1,4 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=0QLxDPQmcNVnbI5SScAigbtFe33XibtMSGSFEDzzp8o,23
 fractal_server/__main__.py,sha256=QeKoAgqoiozLJDa8kSVe-Aso1WWgrk1yLUYWS8RxZVM,11405
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -9,14 +9,14 @@ fractal_server/app/models/linkuserproject.py,sha256=Bk0VzjzG7RbnVnOwrztqxKIPxz_A
 fractal_server/app/models/security.py,sha256=2gKF2JQX2Xpb58fW-s7UgkgUneEy5SspgawawWoju3M,4970
 fractal_server/app/models/v2/__init__.py,sha256=xL05Mvdx0dqUFhJf694oPfuqkUQxZbxOkoUgRuNIXl4,949
 fractal_server/app/models/v2/accounting.py,sha256=VNweFARrvY3mj5LI0834Ku061S2aGC61kuVHzi_tZhc,1187
-fractal_server/app/models/v2/dataset.py,sha256=
+fractal_server/app/models/v2/dataset.py,sha256=5UEV75LZTYAOq5d8UqRAuIIVBh3rAer3D5qvWmGbw8s,1154
 fractal_server/app/models/v2/history.py,sha256=869RQzBssHFYnBLBvCGlKrLV_HAXwsdO5DgwG9U8D-U,2339
-fractal_server/app/models/v2/job.py,sha256
+fractal_server/app/models/v2/job.py,sha256=-9PKE5Gh9UCBStYE0DZj3I8XTPDa5qd7g2eVufp-9hw,2521
 fractal_server/app/models/v2/profile.py,sha256=YajSmV4J_-zC4RX917s-A_lJt4mxYdPRVS3RH_uvJ48,1006
 fractal_server/app/models/v2/project.py,sha256=VvLXrgzKYLH585mYg_txrO8q3JoSoSEy4XkWjex4sDU,585
 fractal_server/app/models/v2/resource.py,sha256=XaHlJj9CladIahkrpywWXn8JBSx7_qEHp_wnkFuQ0rU,3896
 fractal_server/app/models/v2/task.py,sha256=v0brBHnAbcHrgDiuRHMWYahklq0fgyezZ0c9krKSA10,1463
-fractal_server/app/models/v2/task_group.py,sha256=
+fractal_server/app/models/v2/task_group.py,sha256=8UcyFMG-6J-_hkgSkrLIxFLSwVCyrWB0Fl-LCgkeo54,4756
 fractal_server/app/models/v2/workflow.py,sha256=VKXIX3L3jP8eCdIQhVf5AhhwKGO_LTxn7uB4_sCirLs,1135
 fractal_server/app/models/v2/workflowtask.py,sha256=jEt81PGHuXSY8MYid0t15CntIIg0Lzq39uvG4X4Mx6s,1391
 fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -31,12 +31,12 @@ fractal_server/app/routes/admin/v2/resource.py,sha256=c2z6b_D_W6_dqVnxNF8F8OdlI5
 fractal_server/app/routes/admin/v2/sharing.py,sha256=I2BoyO0rZNWxFKyq3Em9sjz_0n3kfRynC-WmUZxSzzU,5455
 fractal_server/app/routes/admin/v2/task.py,sha256=9OglWWYjGbA1k2wdE8k8vr2ynLn6zqdp_BxwPGfjj9A,5873
 fractal_server/app/routes/admin/v2/task_group.py,sha256=3LxH2LEstj7L9qGNN3kkLo4JdFw4GXDlnlM6s4ulc_0,9377
-fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=
+fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=mC7zKuoyDZSOOuOiXiQeGfFv78JYHFBQ9RNtNAdAQfQ,10202
 fractal_server/app/routes/api/__init__.py,sha256=ZQtqy-HGJZsehAL8UjnTvRhWd9MI9Noi2NvwD5hE_xA,231
 fractal_server/app/routes/api/alive.py,sha256=hE42dfh1naix9EKvpyKCoLMA6WqThObgA3jIro2rx-M,206
 fractal_server/app/routes/api/settings.py,sha256=9wV4To1FjW3jCcRtFp-QGGu3235s1oUg4HwKQIw-TWc,1210
-fractal_server/app/routes/api/v2/__init__.py,sha256=
-fractal_server/app/routes/api/v2/_aux_functions.py,sha256=
+fractal_server/app/routes/api/v2/__init__.py,sha256=_pNH8odiUYpv2Okiqbqe0VNjbvK2jWqIoaImlgYzI84,2580
+fractal_server/app/routes/api/v2/_aux_functions.py,sha256=G5yhxkSVO7YTWNwcewljK2ql9lo0I-9EekBYSeOy7Oc,15287
 fractal_server/app/routes/api/v2/_aux_functions_history.py,sha256=vB8AwSBf3Dp1sxpTAYtWj4s9kgAp_D5Hd1BX6Z8JTxc,6057
 fractal_server/app/routes/api/v2/_aux_functions_sharing.py,sha256=IvDII3Sl00eypdD3QRELQ4SLyC3gq6-HsXhuCx5Bp5I,2995
 fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=EffjYdQCHoUc5i59c7KmyH8hoLYxQsPh4RPo8qY8CYA,7796
@@ -50,18 +50,17 @@ fractal_server/app/routes/api/v2/job.py,sha256=gulXJyP0Kc2_dNkPHPpc_XZlWoBEA1lrr
 fractal_server/app/routes/api/v2/pre_submission_checks.py,sha256=wqZ44rCJ8f2WcpyFOQ0FZN3g-66EzDnuYCrJJXawSdY,5179
 fractal_server/app/routes/api/v2/project.py,sha256=dirGShqcuEnK1sVh-Bl6kemj_z6A32s8ARNqSLIFhM8,5729
 fractal_server/app/routes/api/v2/sharing.py,sha256=W8Zf_rmTJwbFYz3XCrL9IFN6KXph5c97YS6Ok8oHTHM,9658
-fractal_server/app/routes/api/v2/
-fractal_server/app/routes/api/v2/submit.py,sha256=-RtAqHDTJ9fV82sn0Rwhg0LWgznhN3t_qgZ6hycfu1I,9559
+fractal_server/app/routes/api/v2/submit.py,sha256=fhWLAsD-XgEunUfZpoSDsTpuYl3wReKAPg811iAmFRU,9642
 fractal_server/app/routes/api/v2/task.py,sha256=xKeGon7aRBOu1gnYd9EnWW1V-pnIqSVpXkN3dnubH3A,7418
-fractal_server/app/routes/api/v2/task_collection.py,sha256=
+fractal_server/app/routes/api/v2/task_collection.py,sha256=VkxPwkKxQxvKJJL8ShkGVT4YolX5KjP8yLBaNS7WkP8,12405
 fractal_server/app/routes/api/v2/task_collection_custom.py,sha256=yZ0c3hWkq3nR5CKYP63yU6D1--xWjS2aimqoYWrQT5I,6894
-fractal_server/app/routes/api/v2/task_collection_pixi.py,sha256=
+fractal_server/app/routes/api/v2/task_collection_pixi.py,sha256=IT-vhTFPUpMA2kY4z8i5us03vQCTquEvjjH7k7bgYVg,7229
 fractal_server/app/routes/api/v2/task_group.py,sha256=Jemgrc_1qA6b8at48BePMLJennterLIpJwoa077jMmc,7632
-fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=
+fractal_server/app/routes/api/v2/task_group_lifecycle.py,sha256=0iv4MOCesCngujiMUU9i3EdPADnoGIilXrR2Rc1yY04,10738
 fractal_server/app/routes/api/v2/task_version_update.py,sha256=dFzUetvIkd0RXw-cgOGGQGu8843y62AG30RbCQL15Tc,8506
 fractal_server/app/routes/api/v2/workflow.py,sha256=lHssUNN3ekqvXp4wWX8a33-5m2ZwLuLPkoL3keCsFhA,10741
 fractal_server/app/routes/api/v2/workflow_import.py,sha256=UND3U9zE-2o_85BW0uWNsm3_9125PgDtnUlki13jDT4,8740
-fractal_server/app/routes/api/v2/workflowtask.py,sha256=
+fractal_server/app/routes/api/v2/workflowtask.py,sha256=vzh-NVcGFHKoIv5bENfuZK5IJfACJzUu-BYkyOuBWj4,8030
 fractal_server/app/routes/auth/__init__.py,sha256=JL4MUBjPiNsNJLlb0lbn6_LbIzdRLosquQNqpn6niFw,2836
 fractal_server/app/routes/auth/_aux_auth.py,sha256=gKdYTWUzxcU44Iep787zReWwdAs4kW5baNDXCPmiKn8,9195
 fractal_server/app/routes/auth/current_user.py,sha256=gOLk-jUnkXTrBj8aN_yRlUcvCoJMHxxoWz13M8DBCbg,3502
@@ -96,9 +95,9 @@ fractal_server/app/schemas/v2/status_legacy.py,sha256=ajLm2p0wNfJ_lQX9Oq3NJn0jxQ
 fractal_server/app/schemas/v2/task.py,sha256=le62bHu4nRrXlN-cCOdpkStLQNLtkR_myqK1j8fLoNs,4260
 fractal_server/app/schemas/v2/task_collection.py,sha256=ljGnZOmYg9pQ9PbYnNxLJDf4O2BDym-BQ_cXr-NWSd4,4590
 fractal_server/app/schemas/v2/task_group.py,sha256=sbg6AkvonU7F_-QC4G9kDxO6YVcz7wUPY3k3n9jYkRY,3392
-fractal_server/app/schemas/v2/workflow.py,sha256=
-fractal_server/app/schemas/v2/workflowtask.py,sha256=
-fractal_server/app/security/__init__.py,sha256=
+fractal_server/app/schemas/v2/workflow.py,sha256=RQLlVGeGjClsorI6KmChJNYTLdg-DcmTMieojhchoyE,1923
+fractal_server/app/schemas/v2/workflowtask.py,sha256=v9NCP2JbXPPJOaE7oamm5G0-_-8NQaks9OXwaxxJJrw,3968
+fractal_server/app/security/__init__.py,sha256=FTpkrDE9yL8mpAWAR_A9bXtVUoDVZPhzIxAQ5E1z3pE,18803
 fractal_server/app/security/signup_email.py,sha256=R69U5eTi9X7gZHSTfZ26SaHMQAeqReYEpGnB8r3AVig,1992
 fractal_server/app/shutdown.py,sha256=bfEmf6Xdc906ES0zDDWsihmd6neQpGFyIc7qnadnNu8,2283
 fractal_server/config/__init__.py,sha256=WvcoE3qiY1qnkumv3qspcemCFw5iFG5NkSFR78vN4ks,562
@@ -123,6 +122,7 @@ fractal_server/migrations/versions/034a469ec2eb_task_groups.py,sha256=uuf0sJibC4
 fractal_server/migrations/versions/068496367952_drop_taskgroup_venv_size_and_files_.py,sha256=rVFmB7eO7LtOfJivNnfnniQecD8DebAWnSvYEE2yq7k,1239
 fractal_server/migrations/versions/091b01f51f88_add_usergroup_and_linkusergroup_table.py,sha256=cSz3Jc2X79dN7I-rh0OSefOd5WOJU65wNWFNMnX2LR4,1450
 fractal_server/migrations/versions/0f5f85bb2ae7_add_pre_pinned_packages.py,sha256=XH6msE3On7P7S2gz-Xec20bWAI6vR29zRT1ZafFwStI,1056
+fractal_server/migrations/versions/18a26fcdea5d_drop_dataset_history.py,sha256=AzCnz-PJCzKw8Sm28zN1cIrHexgv-5Bcw5dGIxkzDOU,1064
 fractal_server/migrations/versions/19eca0dd47a9_user_settings_project_dir.py,sha256=5OzcIQjTfwiqh9vK_yLQHJlNtIHxKiEPr-IjZ9iir-Y,961
 fractal_server/migrations/versions/1a83a5260664_rename.py,sha256=BGZrM2UfccWc0s0kNlpACEw1LB1K6AemHPR5irkJ1gA,790
 fractal_server/migrations/versions/1bf8785755f9_add_description_to_workflow_and_.py,sha256=OVugy2ftELqAE8_6zTGiwwpDxj1STmf8XNHOa8TBr6o,1507
@@ -165,6 +165,7 @@ fractal_server/migrations/versions/b7477cc98f45_2_18_1.py,sha256=clKGCwh95sWFTeU
 fractal_server/migrations/versions/bc0e8b3327a7_project_sharing.py,sha256=5h8ogjfQPbKbVwN0-pfh5ixPQSCCYsiVnQoOveUKKUA,2145
 fractal_server/migrations/versions/c90a7c76e996_job_id_in_history_run.py,sha256=CPQNKHqsx22wSY4ylqM8UMhDOWkQeC9eLAHlQQJYSfQ,1102
 fractal_server/migrations/versions/caba9fb1ea5e_drop_useroauth_user_settings_id.py,sha256=7MpunfOBk0LM6u-xrwca8GUHIjinAJZrS9AUT3l62qU,1320
+fractal_server/migrations/versions/cfd13f7954e7_add_fractal_server_version_to_jobv2_and_.py,sha256=rqF1f3j5exatdKyeEvccX-TMC_bcZEFcHmixguOQRqw,1447
 fractal_server/migrations/versions/d256a7379ab8_taskgroup_activity_and_venv_info_to_.py,sha256=bFMJUFJAnOaHYyYYKISHpbQWKBoQopiEKRT0PSidqhk,3796
 fractal_server/migrations/versions/d4fe3708d309_make_applyworkflow_workflow_dump_non_.py,sha256=zLHqar9iduiLs5Ib50B9RKrdLLbWSffahWxXDDePnI8,950
 fractal_server/migrations/versions/da2cb2ac4255_user_group_viewer_paths.py,sha256=-ihE-KJEVemb8ZhRejg6xdC5TPTW8GkKWnzcl1ReAHQ,901
@@ -183,7 +184,7 @@ fractal_server/runner/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3h
 fractal_server/runner/components.py,sha256=-Ii5l8d_V6f5DFOd-Zsr8VYmOsyqw0Hox9fEFQiuqxY,66
 fractal_server/runner/config/__init__.py,sha256=a-vSrvWBeMVnxTtYoy-f5Ibt_mM8MM3F7jqnPvvjHSY,108
 fractal_server/runner/config/_local.py,sha256=IHWtxpKuJDdsQNpk8Q5bNL4DEJunNkNJkLfetfnwmQM,788
-fractal_server/runner/config/_slurm.py,sha256=
+fractal_server/runner/config/_slurm.py,sha256=XcgB6iFKDmpHygLn4pmegTTmLlKSvJsHuR9LuKXughw,3838
 fractal_server/runner/config/slurm_mem_to_MB.py,sha256=6KmrIC-NymQjb9-bIQjNYQx6mE0OoKoZxdi6WQnWOHw,2003
 fractal_server/runner/exceptions.py,sha256=N8DLn7tuV8zMSdr8xdJN0aIdytPveSCeQ1Y5IoxXW-8,1778
 fractal_server/runner/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -195,10 +196,10 @@ fractal_server/runner/executors/local/runner.py,sha256=aad4Q0mLFh5Sb0Naqj0zgpNHd
 fractal_server/runner/executors/slurm_common/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/runner/executors/slurm_common/_batching.py,sha256=YQjWRTDI1e6NeLe1R-1QWlt49i42M9ILeExEjdjgy48,8348
 fractal_server/runner/executors/slurm_common/_job_states.py,sha256=nuV-Zba38kDrRESOVB3gaGbrSPZc4q7YGichQaeqTW0,238
-fractal_server/runner/executors/slurm_common/base_slurm_runner.py,sha256=
-fractal_server/runner/executors/slurm_common/get_slurm_config.py,sha256=
+fractal_server/runner/executors/slurm_common/base_slurm_runner.py,sha256=cc6-qfGFXCFS48PhiqJXT5iGG2uaS9prHuf7k4UixqM,42130
+fractal_server/runner/executors/slurm_common/get_slurm_config.py,sha256=8H2qVHq3mD-97Yz1U-i05EGnRaxXb-eKr2bXF7b4ve4,7136
 fractal_server/runner/executors/slurm_common/remote.py,sha256=8pTMTRp_LjzoUr3FtFTfdvDhuLnqzY6HT-T9pzrVLw4,3845
-fractal_server/runner/executors/slurm_common/slurm_config.py,sha256=
+fractal_server/runner/executors/slurm_common/slurm_config.py,sha256=DXzyabDLyu1auQh5SQRIm4pKu0JuuF3BTn_fkGBCvmk,8592
 fractal_server/runner/executors/slurm_common/slurm_job_task_models.py,sha256=VeX40CvU5fckUpSyXlzb3EDE9xxPXkT2sZKLXq_6Ooc,3493
 fractal_server/runner/executors/slurm_ssh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/runner/executors/slurm_ssh/run_subprocess.py,sha256=SyW6t4egvbiARph2YkFjc88Hj94fCamZVi50L7ph8VM,996
@@ -219,7 +220,7 @@ fractal_server/runner/v2/deduplicate_list.py,sha256=TWxHDucal0VZPswy_H7IFaEb4ddG
 fractal_server/runner/v2/merge_outputs.py,sha256=0ahaSwdMFAoEhxVaEaO9nSJuKIcWg9pDZ356ktSHcC0,897
 fractal_server/runner/v2/runner.py,sha256=aKz5ocgsMcUUsvaz00db8cWbBHMBA_g_PJhwV973pdY,20884
 fractal_server/runner/v2/runner_functions.py,sha256=1wW2ByskwPtx_mhyJiCpKMXDnDyZ_y5fDWv8hktFZXI,19564
-fractal_server/runner/v2/submit_workflow.py,sha256=
+fractal_server/runner/v2/submit_workflow.py,sha256=MNUyBlXKfPjSbJgB5HlpAhXPvDeuMtPXc2h6HHmJkU0,11761
 fractal_server/runner/v2/task_interface.py,sha256=ftPPpOU16rbJD8q-QV7o_3ey8W7MQTFuWJiYUr4OmF4,2532
 fractal_server/runner/versions.py,sha256=uz59Dxj7BphnFnr-p0kyaZRH0h4w5Xkd0UJNVGtt4ds,474
 fractal_server/ssh/__init__.py,sha256=sVUmzxf7_DuXG1xoLQ1_00fo5NPhi2LJipSmU5EAkPs,124
@@ -273,8 +274,8 @@ fractal_server/types/validators/_workflow_task_arguments_validators.py,sha256=zt
 fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
 fractal_server/utils.py,sha256=-rjg8QTXQcKweXjn0NcmETFs1_uM9PGnbl0Q7c4ERPM,2181
 fractal_server/zip_tools.py,sha256=Uhn-ax4_9g1PJ32BdyaX30hFpAeVOv2tZYTUK-zVn1E,5719
-fractal_server-2.19.
-fractal_server-2.19.
-fractal_server-2.19.
-fractal_server-2.19.
-fractal_server-2.19.
+fractal_server-2.19.1.dist-info/licenses/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.19.1.dist-info/WHEEL,sha256=eycQt0QpYmJMLKpE3X9iDk8R04v2ZF0x82ogq-zP6bQ,79
+fractal_server-2.19.1.dist-info/entry_points.txt,sha256=3TpdcjmETRYWJxFyAh3z-9955EWua9jdkSnBwxES1uE,60
+fractal_server-2.19.1.dist-info/METADATA,sha256=w54iWXrIoFD5aa9NKFtlWdq1XFAF40PzGNYaojG_nBE,4181
+fractal_server-2.19.1.dist-info/RECORD,,
```
fractal_server/app/routes/api/v2/status_legacy.py
DELETED
```diff
@@ -1,156 +0,0 @@
-from fastapi import APIRouter
-from fastapi import Depends
-
-from fractal_server.app.db import AsyncSession
-from fractal_server.app.db import get_async_db
-from fractal_server.app.models import UserOAuth
-from fractal_server.app.models.v2 import JobV2
-from fractal_server.app.routes.auth import get_api_guest
-from fractal_server.app.schemas.v2.sharing import ProjectPermissions
-from fractal_server.app.schemas.v2.status_legacy import LegacyStatusRead
-from fractal_server.app.schemas.v2.status_legacy import WorkflowTaskStatusType
-from fractal_server.logger import set_logger
-
-from ._aux_functions import _get_dataset_check_access
-from ._aux_functions import _get_submitted_jobs_statement
-from ._aux_functions import _get_workflow_check_access
-
-router = APIRouter()
-
-logger = set_logger(__name__)
-
-
-@router.get(
-    "/project/{project_id}/status-legacy/",
-    response_model=LegacyStatusRead,
-)
-async def get_workflowtask_status(
-    project_id: int,
-    dataset_id: int,
-    workflow_id: int,
-    user: UserOAuth = Depends(get_api_guest),
-    db: AsyncSession = Depends(get_async_db),
-) -> LegacyStatusRead | None:
-    """
-    Extract the status of all `WorkflowTaskV2` of a given `WorkflowV2` that ran
-    on a given `DatasetV2`.
-
-    *NOTE*: the current endpoint is not guaranteed to provide consistent
-    results if the workflow task list is modified in a non-trivial way
-    (that is, by adding intermediate tasks, removing tasks, or changing their
-    order). See fractal-server GitHub issues: 793, 1083.
-    """
-    # Get the dataset DB entry
-    output = await _get_dataset_check_access(
-        project_id=project_id,
-        dataset_id=dataset_id,
-        user_id=user.id,
-        required_permissions=ProjectPermissions.READ,
-        db=db,
-    )
-    dataset = output["dataset"]
-
-    # Get the workflow DB entry
-    workflow = await _get_workflow_check_access(
-        project_id=project_id,
-        workflow_id=workflow_id,
-        user_id=user.id,
-        required_permissions=ProjectPermissions.READ,
-        db=db,
-    )
-
-    # Check whether there exists a submitted job associated to this
-    # workflow/dataset pair. If it does exist, it will be used later.
-    # If there are multiple jobs, raise an error.
-    res = await db.execute(
-        _get_submitted_jobs_statement()
-        .where(JobV2.dataset_id == dataset_id)
-        .where(JobV2.workflow_id == workflow_id)
-    )
-    running_job = res.scalars().one_or_none()
-
-    # Initialize empty dictionary for WorkflowTaskV2 status
-    workflow_tasks_status_dict: dict = {}
-
-    # Lowest priority: read status from DB, which corresponds to jobs that are
-    # not running
-    history = dataset.history
-    for history_item in history:
-        wftask_id = history_item["workflowtask"]["id"]
-        wftask_status = history_item["status"]
-        workflow_tasks_status_dict[wftask_id] = wftask_status
-
-    if running_job is None:
-        # If no job is running, the chronological-last history item is also the
-        # positional-last workflow task to be included in the response.
-        if len(history) > 0:
-            last_valid_wftask_id = history[-1]["workflowtask"]["id"]
-        else:
-            last_valid_wftask_id = None
-    else:
-        # If a job is running, then gather more up-to-date information
-
-        # Mid priority: Set all WorkflowTask's that are part of the running job
-        # as "submitted"
-        start = running_job.first_task_index
-        end = running_job.last_task_index + 1
-
-        running_job_wftasks = workflow.task_list[start:end]
-        running_job_statuses = [
-            workflow_tasks_status_dict.get(wft.id, None)
-            for wft in running_job_wftasks
-        ]
-        try:
-            first_submitted_index = running_job_statuses.index(
-                WorkflowTaskStatusType.SUBMITTED
-            )
-        except ValueError:
-            logger.warning(
-                f"Job {running_job.id} is submitted but its task list does not"
-                f" contain a {WorkflowTaskStatusType.SUBMITTED} task."
-            )
-            first_submitted_index = 0
-
-        for wftask in running_job_wftasks[first_submitted_index:]:
-            workflow_tasks_status_dict[wftask.id] = (
-                WorkflowTaskStatusType.SUBMITTED
-            )
-
-        # The last workflow task that is included in the submitted job is also
-        # the positional-last workflow task to be included in the response.
-        try:
-            last_valid_wftask_id = workflow.task_list[end - 1].id
-        except IndexError as e:
-            logger.warning(
-                f"Handled IndexError in `get_workflowtask_status` ({str(e)})."
-            )
-            logger.warning(
-                "Additional information: "
-                f"{running_job.first_task_index=}; "
-                f"{running_job.last_task_index=}; "
-                f"{len(workflow.task_list)=}; "
-                f"{dataset_id=}; "
-                f"{workflow_id=}."
-            )
-            last_valid_wftask_id = None
-            logger.warning(f"Now setting {last_valid_wftask_id=}.")
-
-    # Based on previously-gathered information, clean up the response body
-    clean_workflow_tasks_status_dict = {}
-    for wf_task in workflow.task_list:
-        wf_task_status = workflow_tasks_status_dict.get(wf_task.id)
-        if wf_task_status is None:
-            # If a wftask ID was not found, ignore it and continue
-            continue
-        clean_workflow_tasks_status_dict[str(wf_task.id)] = wf_task_status
-        if wf_task_status == WorkflowTaskStatusType.FAILED:
-            # Starting from the beginning of `workflow.task_list`, stop the
-            # first time that you hit a failed job
-            break
-        if wf_task.id == last_valid_wftask_id:
-            # Starting from the beginning of `workflow.task_list`, stop the
-            # first time that you hit `last_valid_wftask_id``
-            break
-
-    response_body = LegacyStatusRead(status=clean_workflow_tasks_status_dict)
-    return response_body
```
|
|
File without changes
|
|
File without changes
|