fractal-server 2.0.4__py3-none-any.whl → 2.0.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/__init__.py +0 -4
- fractal_server/app/models/security.py +0 -13
- fractal_server/app/models/v1/__init__.py +10 -9
- fractal_server/app/models/v1/project.py +1 -2
- fractal_server/app/models/v2/project.py +1 -2
- fractal_server/app/routes/admin/v2.py +131 -0
- fractal_server/app/routes/api/v1/_aux_functions.py +1 -1
- fractal_server/app/routes/api/v1/project.py +1 -1
- fractal_server/app/routes/api/v2/submit.py +30 -11
- fractal_server/app/routes/api/v2/task.py +37 -7
- fractal_server/app/runner/executors/slurm/_subprocess_run_as_user.py +1 -1
- fractal_server/app/runner/v2/__init__.py +10 -26
- fractal_server/config.py +5 -0
- fractal_server/{logger.py → logger/__init__.py} +28 -2
- fractal_server/logger/gunicorn_logger.py +19 -0
- fractal_server/main.py +9 -4
- {fractal_server-2.0.4.dist-info → fractal_server-2.0.6.dist-info}/METADATA +1 -1
- {fractal_server-2.0.4.dist-info → fractal_server-2.0.6.dist-info}/RECORD +22 -22
- fractal_server/data_migrations/2_0_3.py +0 -79
- {fractal_server-2.0.4.dist-info → fractal_server-2.0.6.dist-info}/LICENSE +0 -0
- {fractal_server-2.0.4.dist-info → fractal_server-2.0.6.dist-info}/WHEEL +0 -0
- {fractal_server-2.0.4.dist-info → fractal_server-2.0.6.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.0.4"
+__VERSION__ = "2.0.6"

fractal_server/app/models/security.py
CHANGED

@@ -18,9 +18,6 @@ from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
 
-from .linkuserproject import LinkUserProject
-from .linkuserproject import LinkUserProjectV2
-
 
 class OAuthAccount(SQLModel, table=True):
     """
@@ -103,16 +100,6 @@ class UserOAuth(SQLModel, table=True):
         back_populates="user",
         sa_relationship_kwargs={"lazy": "joined", "cascade": "all, delete"},
     )
-    project_list: list["Project"] = Relationship(  # noqa
-        back_populates="user_list",
-        link_model=LinkUserProject,
-        sa_relationship_kwargs={"lazy": "selectin"},
-    )
-    project_list_v2: list["ProjectV2"] = Relationship(  # noqa
-        back_populates="user_list",
-        link_model=LinkUserProjectV2,
-        sa_relationship_kwargs={"lazy": "selectin"},
-    )
 
     class Config:
         orm_mode = True

fractal_server/app/models/v1/__init__.py
CHANGED

@@ -1,12 +1,13 @@
 """
 `models` module
 """
-from
-from .dataset import
-from .
-from .job import
-from .
-from .
-from .
-from .
-from .workflow import
+from ..linkuserproject import LinkUserProject  # noqa F401
+from .dataset import Dataset  # noqa F401
+from .dataset import Resource  # noqa F401
+from .job import ApplyWorkflow  # noqa F401
+from .job import JobStatusTypeV1  # noqa F401
+from .project import Project  # noqa F401
+from .state import State  # noqa F401
+from .task import Task  # noqa F401
+from .workflow import Workflow  # noqa F401
+from .workflow import WorkflowTask  # noqa F401

fractal_server/app/models/v1/project.py
CHANGED

@@ -7,9 +7,9 @@ from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
 
+from . import LinkUserProject
 from ....utils import get_timestamp
 from ...schemas.v1.project import _ProjectBaseV1
-from ..linkuserproject import LinkUserProject
 from ..security import UserOAuth
 
 
@@ -23,7 +23,6 @@ class Project(_ProjectBaseV1, SQLModel, table=True):
 
     user_list: list[UserOAuth] = Relationship(
         link_model=LinkUserProject,
-        back_populates="project_list",
         sa_relationship_kwargs={
             "lazy": "selectin",
         },

fractal_server/app/models/v2/project.py
CHANGED

@@ -7,8 +7,8 @@ from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel
 
+from . import LinkUserProjectV2
 from ....utils import get_timestamp
-from ..linkuserproject import LinkUserProjectV2
 from ..security import UserOAuth
 
 
@@ -23,7 +23,6 @@ class ProjectV2(SQLModel, table=True):
 
     user_list: list[UserOAuth] = Relationship(
         link_model=LinkUserProjectV2,
-        back_populates="project_list_v2",
         sa_relationship_kwargs={
             "lazy": "selectin",
         },

fractal_server/app/routes/admin/v2.py
CHANGED

@@ -4,6 +4,7 @@ Definition of `/admin` routes.
 from datetime import datetime
 from datetime import timezone
 from pathlib import Path
+from typing import Literal
 from typing import Optional
 
 from fastapi import APIRouter
@@ -13,6 +14,8 @@ from fastapi import Response
 from fastapi import status
 from fastapi.responses import StreamingResponse
 from pydantic import BaseModel
+from pydantic import EmailStr
+from pydantic import Field
 from sqlmodel import select
 
 from ....config import get_settings
@@ -24,6 +27,9 @@ from ...models.security import UserOAuth as User
 from ...models.v1 import Task
 from ...models.v2 import JobV2
 from ...models.v2 import ProjectV2
+from ...models.v2 import TaskV2
+from ...models.v2 import WorkflowTaskV2
+from ...models.v2 import WorkflowV2
 from ...runner.filenames import WORKFLOW_LOG_FILENAME
 from ...schemas.v2 import JobReadV2
 from ...schemas.v2 import JobStatusTypeV2
@@ -307,3 +313,128 @@ async def flag_task_v1_as_v2_compatible(
     await db.close()
 
     return Response(status_code=status.HTTP_200_OK)
+
+
+class TaskV2Minimal(BaseModel):
+
+    id: int
+    name: str
+    type: str
+    command_non_parallel: Optional[str]
+    command_parallel: Optional[str]
+    source: str
+    owner: Optional[str]
+    version: Optional[str]
+
+
+class ProjectUser(BaseModel):
+
+    id: int
+    email: EmailStr
+
+
+class TaskV2Relationship(BaseModel):
+
+    workflow_id: int
+    workflow_name: str
+    project_id: int
+    project_name: str
+    project_users: list[ProjectUser] = Field(default_factory=list)
+
+
+class TaskV2Info(BaseModel):
+
+    task: TaskV2Minimal
+    relationships: list[TaskV2Relationship]
+
+
+@router_admin_v2.get("/task/", response_model=list[TaskV2Info])
+async def query_tasks(
+    id: Optional[int] = None,
+    source: Optional[str] = None,
+    version: Optional[str] = None,
+    name: Optional[str] = None,
+    owner: Optional[str] = None,
+    kind: Optional[Literal["common", "users"]] = None,
+    max_number_of_results: int = 25,
+    user: User = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+) -> list[TaskV2Info]:
+    """
+    Query `TaskV2` table and get informations about related items
+    (WorkflowV2s and ProjectV2s)
+
+    Args:
+        id: If not `None`, query for matching `task.id`.
+        source: If not `None`, query for contained case insensitive
+            `task.source`.
+        version: If not `None`, query for matching `task.version`.
+        name: If not `None`, query for contained case insensitive `task.name`.
+        owner: If not `None`, query for matching `task.owner`.
+        kind: If not `None`, query for TaskV2s that have (`users`) or don't
+            have (`common`) a `task.owner`.
+        max_number_of_results: The maximum length of the response.
+    """
+
+    stm = select(TaskV2)
+
+    if id is not None:
+        stm = stm.where(TaskV2.id == id)
+    if source is not None:
+        stm = stm.where(TaskV2.source.icontains(source))
+    if version is not None:
+        stm = stm.where(TaskV2.version == version)
+    if name is not None:
+        stm = stm.where(TaskV2.name.icontains(name))
+    if owner is not None:
+        stm = stm.where(TaskV2.owner == owner)
+
+    if kind == "common":
+        stm = stm.where(TaskV2.owner == None)  # noqa E711
+    elif kind == "users":
+        stm = stm.where(TaskV2.owner != None)  # noqa E711
+
+    res = await db.execute(stm)
+    task_list = res.scalars().all()
+    if len(task_list) > max_number_of_results:
+        await db.close()
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                f"Too many Tasks ({len(task_list)} > {max_number_of_results})."
+                " Please add more query filters."
+            ),
+        )
+
+    task_info_list = []
+
+    for task in task_list:
+        stm = (
+            select(WorkflowV2)
+            .join(WorkflowTaskV2)
+            .where(WorkflowTaskV2.workflow_id == WorkflowV2.id)
+            .where(WorkflowTaskV2.task_id == task.id)
+        )
+        res = await db.execute(stm)
+        wf_list = res.scalars().all()
+
+        task_info_list.append(
+            dict(
+                task=task.model_dump(),
+                relationships=[
+                    dict(
+                        workflow_id=workflow.id,
+                        workflow_name=workflow.name,
+                        project_id=workflow.project.id,
+                        project_name=workflow.project.name,
+                        project_users=[
+                            dict(id=user.id, email=user.email)
+                            for user in workflow.project.user_list
+                        ],
+                    )
+                    for workflow in wf_list
+                ],
+            )
+        )
+
+    return task_info_list
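
For orientation, a minimal sketch of how the new `GET /admin/v2/task/` endpoint could be called from a client is shown below; the host, the bearer token, and the filter values are assumptions for illustration, not part of the diff.

```python
# Hypothetical client call to the new admin endpoint (requires a superuser
# token; host, token, and filter values are invented for illustration).
import httpx

response = httpx.get(
    "http://localhost:8000/admin/v2/task/",
    params={"kind": "users", "name": "cellpose", "max_number_of_results": 5},
    headers={"Authorization": "Bearer <superuser-token>"},
)
response.raise_for_status()
for task_info in response.json():
    task = task_info["task"]
    print(task["id"], task["name"], len(task_info["relationships"]))
```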

fractal_server/app/routes/api/v1/_aux_functions.py
CHANGED

@@ -12,9 +12,9 @@ from sqlmodel import select
 from sqlmodel.sql.expression import SelectOfScalar
 
 from ....db import AsyncSession
-from ....models import LinkUserProject
 from ....models.v1 import ApplyWorkflow
 from ....models.v1 import Dataset
+from ....models.v1 import LinkUserProject
 from ....models.v1 import Project
 from ....models.v1 import Task
 from ....models.v1 import Workflow

fractal_server/app/routes/api/v1/project.py
CHANGED

@@ -18,9 +18,9 @@ from .....logger import set_logger
 from .....syringe import Inject
 from ....db import AsyncSession
 from ....db import get_async_db
-from ....models import LinkUserProject
 from ....models.v1 import ApplyWorkflow
 from ....models.v1 import Dataset
+from ....models.v1 import LinkUserProject
 from ....models.v1 import Project
 from ....models.v1 import Workflow
 from ....runner.set_start_and_last_task_index import (

fractal_server/app/routes/api/v2/submit.py
CHANGED

@@ -1,6 +1,7 @@
 from datetime import datetime
 from datetime import timedelta
 from datetime import timezone
+from pathlib import Path
 from typing import Optional
 
 from fastapi import APIRouter
@@ -12,6 +13,7 @@ from sqlmodel import select
 
 from .....config import get_settings
 from .....syringe import Inject
+from .....utils import get_timestamp
 from ....db import AsyncSession
 from ....db import get_async_db
 from ....models.v2 import JobV2
@@ -91,23 +93,17 @@ async def apply_workflow(
 
     # If backend is SLURM, check that the user has required attributes
     settings = Inject(get_settings)
-
-    if
+    FRACTAL_RUNNER_BACKEND = settings.FRACTAL_RUNNER_BACKEND
+    if FRACTAL_RUNNER_BACKEND == "slurm":
         if not user.slurm_user:
             raise HTTPException(
                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail=
-                    f"FRACTAL_RUNNER_BACKEND={backend}, "
-                    f"but {user.slurm_user=}."
-                ),
+                detail=f"{FRACTAL_RUNNER_BACKEND=}, but {user.slurm_user=}.",
             )
         if not user.cache_dir:
             raise HTTPException(
                 status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail=
-                    f"FRACTAL_RUNNER_BACKEND={backend}, "
-                    f"but {user.cache_dir=}."
-                ),
+                detail=f"{FRACTAL_RUNNER_BACKEND=}, but {user.cache_dir=}.",
             )
 
     # Check that no other job with the same dataset_id is SUBMITTED
@@ -197,6 +193,30 @@ async def apply_workflow(
     await db.commit()
     await db.refresh(job)
 
+    # Define server-side job directory
+    timestamp_string = get_timestamp().strftime("%Y%m%d_%H%M%S")
+    WORKFLOW_DIR = (
+        settings.FRACTAL_RUNNER_WORKING_BASE_DIR
+        / (
+            f"proj_v2_{project_id:07d}_wf_{workflow_id:07d}_job_{job.id:07d}"
+            f"_{timestamp_string}"
+        )
+    ).resolve()
+
+    # Define user-side job directory
+    if FRACTAL_RUNNER_BACKEND == "local":
+        WORKFLOW_DIR_USER = WORKFLOW_DIR
+    elif FRACTAL_RUNNER_BACKEND == "slurm":
+        WORKFLOW_DIR_USER = (
+            Path(user.cache_dir) / f"{WORKFLOW_DIR.name}"
+        ).resolve()
+
+    # Update job folders in the db
+    job.working_dir = WORKFLOW_DIR.as_posix()
+    job.working_dir_user = WORKFLOW_DIR_USER.as_posix()
+    await db.merge(job)
+    await db.commit()
+
     background_tasks.add_task(
         submit_workflow,
         workflow_id=workflow.id,
@@ -208,5 +228,4 @@ async def apply_workflow(
     )
 
     await db.close()
-
     return job
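
As a small illustration of the server-side job-folder naming that `apply_workflow` now writes into `job.working_dir`, the sketch below reproduces the format string from the diff with made-up IDs and timestamp.

```python
# Standalone sketch of the job-folder name built in the endpoint above
# (IDs and timestamp are invented; only the format string comes from the diff).
from datetime import datetime

project_id, workflow_id, job_id = 1, 2, 3
timestamp_string = datetime(2024, 5, 1, 12, 30, 0).strftime("%Y%m%d_%H%M%S")
folder_name = (
    f"proj_v2_{project_id:07d}_wf_{workflow_id:07d}_job_{job_id:07d}"
    f"_{timestamp_string}"
)
print(folder_name)  # proj_v2_0000001_wf_0000002_job_0000003_20240501_123000
```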

fractal_server/app/routes/api/v2/task.py
CHANGED

@@ -14,6 +14,7 @@ from ....db import get_async_db
 from ....models.v1 import Task as TaskV1
 from ....models.v2 import TaskV2
 from ....models.v2 import WorkflowTaskV2
+from ....models.v2 import WorkflowV2
 from ....schemas.v2 import TaskCreateV2
 from ....schemas.v2 import TaskReadV2
 from ....schemas.v2 import TaskUpdateV2
@@ -204,16 +205,45 @@ async def delete_task(
     # Check that the TaskV2 is not in relationship with some WorkflowTaskV2
     stm = select(WorkflowTaskV2).filter(WorkflowTaskV2.task_id == task_id)
     res = await db.execute(stm)
-
-
+    workflow_tasks = res.scalars().all()
+
+    if workflow_tasks:
+        # Find IDs of all affected workflows
+        workflow_ids = set(wftask.workflow_id for wftask in workflow_tasks)
+        # Fetch all affected workflows from DB
+        stm = select(WorkflowV2).where(WorkflowV2.id.in_(workflow_ids))
+        res = await db.execute(stm)
+        workflows = res.scalars().all()
+
+        # Find which workflows are associated to the current user
+        workflows_current_user = [
+            wf for wf in workflows if user in wf.project.user_list
+        ]
+        if workflows_current_user:
+            current_user_msg = (
+                "For the current-user workflows (listed below),"
+                " you can update the task or remove the workflows.\n"
+            )
+            current_user_msg += "\n".join(
+                [
+                    f"* '{wf.name}' (id={wf.id})"
+                    for wf in workflows_current_user
+                ]
+            )
+        else:
+            current_user_msg = ""
+
+        # Count workflows of current users or other users
+        num_workflows_current_user = len(workflows_current_user)
+        num_workflows_other_users = len(workflows) - num_workflows_current_user
+
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
             detail=(
-                f"Cannot remove
-                "
-                f"
-                "
-                " the workflows.",
+                f"Cannot remove Task with id={task_id}: it is currently in "
+                f"use in {num_workflows_current_user} current-user workflows "
+                f"and in {num_workflows_other_users} other-users workflows.\n"
+                f"{current_user_msg}"
             ),
         )
 

fractal_server/app/runner/executors/slurm/_subprocess_run_as_user.py
CHANGED

@@ -141,7 +141,7 @@ def _glob_as_user_strict(
     new_output = []
     known_filenames = [
         f"{startswith}{suffix}"
-        for suffix in [".args.json", ".metadiff.json", ".err", ".out"]
+        for suffix in [".args.json", ".metadiff.json", ".err", ".out", ".log"]
     ]
     for filename in output:
         if filename in known_filenames:

fractal_server/app/runner/v2/__init__.py
CHANGED

@@ -108,18 +108,15 @@ async def submit_workflow(
         return
 
     # Define and create server-side working folder
-
-    timestamp_string = get_timestamp().strftime("%Y%m%d_%H%M%S")
-    WORKFLOW_DIR = (
-        settings.FRACTAL_RUNNER_WORKING_BASE_DIR
-        / (
-            f"proj_{project_id:07d}_wf_{workflow_id:07d}_job_{job_id:07d}"
-            f"_{timestamp_string}"
-        )
-    ).resolve()
-
+    WORKFLOW_DIR = Path(job.working_dir)
     if WORKFLOW_DIR.exists():
-
+        job.status = JobStatusTypeV2.FAILED
+        job.end_timestamp = get_timestamp()
+        job.log = f"Workflow dir {WORKFLOW_DIR} already exists."
+        db_sync.merge(job)
+        db_sync.commit()
+        db_sync.close()
+        return
 
     # Create WORKFLOW_DIR with 755 permissions
     original_umask = os.umask(0)
@@ -127,26 +124,13 @@ async def submit_workflow(
     os.umask(original_umask)
 
     # Define and create user-side working folder, if needed
-
-
-    elif FRACTAL_RUNNER_BACKEND == "slurm":
-
+    WORKFLOW_DIR_USER = Path(job.working_dir_user)
+    if FRACTAL_RUNNER_BACKEND == "slurm":
         from ..executors.slurm._subprocess_run_as_user import (
             _mkdir_as_user,
         )
 
-        WORKFLOW_DIR_USER = (
-            Path(user_cache_dir) / f"{WORKFLOW_DIR.name}"
-        ).resolve()
         _mkdir_as_user(folder=str(WORKFLOW_DIR_USER), user=slurm_user)
-    else:
-        raise ValueError(f"{FRACTAL_RUNNER_BACKEND=} not supported")
-
-    # Update db
-    job.working_dir = WORKFLOW_DIR.as_posix()
-    job.working_dir_user = WORKFLOW_DIR_USER.as_posix()
-    db_sync.merge(job)
-    db_sync.commit()
 
     # After Session.commit() is called, either explicitly or when using a
     # context manager, all objects associated with the Session are expired.
fractal_server/config.py
CHANGED
@@ -361,6 +361,11 @@ class Settings(BaseSettings):
     attribute in their input-arguments JSON file.
     """
 
+    FRACTAL_API_V1_MODE: Literal["include", "exclude"] = "include"
+    """
+    Whether to include the v1 API.
+    """
+
     ###########################################################################
     # BUSINESS LOGIC
     ###########################################################################
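
A hedged usage note: `Settings` is a pydantic `BaseSettings` class, so the new flag is normally driven by an environment variable of the same name (assuming no custom environment prefix is configured), for example:

```python
# Sketch only: set the variable in the environment of the server process so
# that collect_routers() (see main.py below) skips /api/v1 and /admin/v1.
import os

os.environ["FRACTAL_API_V1_MODE"] = "exclude"
```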

fractal_server/{logger.py → logger/__init__.py}
RENAMED

@@ -17,8 +17,8 @@ from pathlib import Path
 from typing import Optional
 from typing import Union
 
-from
-from
+from ..config import get_settings
+from ..syringe import Inject
 
 
 LOG_FORMAT = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
@@ -136,3 +136,29 @@ def reset_logger_handlers(logger: logging.Logger) -> None:
     """
     close_logger(logger)
     logger.handlers.clear()
+
+
+def config_uvicorn_loggers():
+    """
+    Change the formatter for the uvicorn access/error loggers.
+
+    This is similar to https://stackoverflow.com/a/68864979/19085332. See also
+    https://github.com/tiangolo/fastapi/issues/1508.
+
+    This function is meant to work in two scenarios:
+
+    1. The most relevant case is for a `gunicorn` startup command, with
+        `--access-logfile` and `--error-logfile` options set.
+    2. The case of `fractalctl start` (directly calling `uvicorn`).
+
+    Because of the second use case, we need to check whether uvicorn loggers
+    already have a handler. If not, we skip the formatting.
+    """
+
+    access_logger = logging.getLogger("uvicorn.access")
+    if len(access_logger.handlers) > 0:
+        access_logger.handlers[0].setFormatter(LOG_FORMATTER)
+
+    error_logger = logging.getLogger("uvicorn.error")
+    if len(error_logger.handlers) > 0:
+        error_logger.handlers[0].setFormatter(LOG_FORMATTER)

fractal_server/logger/gunicorn_logger.py
ADDED

@@ -0,0 +1,19 @@
+"""
+This module (which is only executed if `gunicorn` can be imported) subclasses
+the gunicorn `Logger` class in order to slightly change its log formats.
+
+This class can be used by including this `gunicorn` command-line option:
+```
+--logger-class fractal_server.logger.gunicorn_logger.FractalGunicornLogger
+```
+"""
+
+try:
+    from gunicorn.glogging import Logger as GunicornLogger
+
+    class FractalGunicornLogger(GunicornLogger):
+        error_fmt = r"%(asctime)s - gunicorn.error - %(levelname)s - [pid %(process)d] - %(message)s"  # noqa: E501
+        datefmt = r"%Y-%m-%d %H:%M:%S,%u"
+
+except (ModuleNotFoundError, ImportError):
+    pass
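
A possible `gunicorn` startup command that wires this logger class in, along the lines of the module docstring (worker count, log paths, and the `fractal_server.main:app` entrypoint are assumptions, not prescribed by the diff):

```
gunicorn fractal_server.main:app \
    --workers 2 \
    --worker-class uvicorn.workers.UvicornWorker \
    --access-logfile logs/fractal-access.log \
    --error-logfile logs/fractal-error.log \
    --logger-class fractal_server.logger.gunicorn_logger.FractalGunicornLogger
```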
fractal_server/main.py
CHANGED
@@ -19,6 +19,7 @@ from fastapi import FastAPI
 
 from .app.security import _create_first_user
 from .config import get_settings
+from .logger import config_uvicorn_loggers
 from .logger import reset_logger_handlers
 from .logger import set_logger
 from .syringe import Inject
@@ -39,12 +40,15 @@ def collect_routers(app: FastAPI) -> None:
     from .app.routes.admin.v2 import router_admin_v2
     from .app.routes.auth import router_auth
 
+    settings = Inject(get_settings)
+
     app.include_router(router_api, prefix="/api")
-
+    if settings.FRACTAL_API_V1_MODE == "include":
+        app.include_router(router_api_v1, prefix="/api/v1")
+        app.include_router(
+            router_admin_v1, prefix="/admin/v1", tags=["V1 Admin area"]
+        )
     app.include_router(router_api_v2, prefix="/api/v2")
-    app.include_router(
-        router_admin_v1, prefix="/admin/v1", tags=["V1 Admin area"]
-    )
     app.include_router(
         router_admin_v2, prefix="/admin/v2", tags=["V2 Admin area"]
     )
@@ -81,6 +85,7 @@ async def __on_startup() -> None:
     callable.
     """
     check_settings()
+    config_uvicorn_loggers()
 
 
 def start_application() -> FastAPI:

{fractal_server-2.0.4.dist-info → fractal_server-2.0.6.dist-info}/RECORD
CHANGED

@@ -1,15 +1,15 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=S5hrGn9zHoCPoN-w6jqLkqipFDSl7JkGHq3amd7eoVE,22
 fractal_server/__main__.py,sha256=CocbzZooX1UtGqPi55GcHGNxnrJXFg5tUU5b3wyFCyo,4958
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/db/__init__.py,sha256=WZEVfdJAX7ZyBM1ngfEGeqWWcjK_NygtCbawpmbwGpU,4042
-fractal_server/app/models/__init__.py,sha256=
+fractal_server/app/models/__init__.py,sha256=QGRjxBgk6GzHyyXh_7RuHvpLoe5PTl1g5KLkGqhFYMQ,199
 fractal_server/app/models/linkuserproject.py,sha256=eQaourbGRshvlMVlKzLYJKHEjfsW1CbWws9yW4eHXhA,567
-fractal_server/app/models/security.py,sha256=
-fractal_server/app/models/v1/__init__.py,sha256=
+fractal_server/app/models/security.py,sha256=0oYj_cqPcQFsPFDyN4OTsqbXsLlXRcweawjP_iSiRI0,2900
+fractal_server/app/models/v1/__init__.py,sha256=hUI7dEbPaiZGN0IbHW4RSmSicyvtn_xeuevoX7zvUwI,466
 fractal_server/app/models/v1/dataset.py,sha256=99GDgt7njx8yYQApkImqp_7bHA5HH3ElvbR6Oyj9kVI,2017
 fractal_server/app/models/v1/job.py,sha256=QLGXcWdVRHaUHQNDapYYlLpEfw4K7QyD8TmcwhrWw2o,3304
-fractal_server/app/models/v1/project.py,sha256=
+fractal_server/app/models/v1/project.py,sha256=tf6fniyBH-sb6rBvGiqNl2wgN9ipR4hDEE3OKvxKaoo,804
 fractal_server/app/models/v1/state.py,sha256=ew7xw3iPzBwUnPlzmsOEMiPbPEMsJn_TyZ5cK93jBRQ,1095
 fractal_server/app/models/v1/task.py,sha256=3xZqNeFYUqslh8ddMSXF2nO4nIiOD8T5Ij37wY20kss,2782
 fractal_server/app/models/v1/workflow.py,sha256=dnY5eMaOe3oZv8arn00RNX9qVkBtTLG-vYdWXcQuyo4,3950
@@ -17,20 +17,20 @@ fractal_server/app/models/v2/__init__.py,sha256=uLzdInqATSwi0bS_V4vKB-TqFrOFaXux
 fractal_server/app/models/v2/collection_state.py,sha256=nxb042i8tt8rCpmgbFJoBCYWU-34m0HdUfO9YurTp8k,588
 fractal_server/app/models/v2/dataset.py,sha256=-7sxHEw4IIAvF_uSan7tA3o8hvoakBkQ0SRvqS2iOQU,1455
 fractal_server/app/models/v2/job.py,sha256=ypJmN-qspkKBGhBG7Mt-HypSQqcQ2EmB4Bzzb2-y550,1535
-fractal_server/app/models/v2/project.py,sha256=
+fractal_server/app/models/v2/project.py,sha256=CRBnZ8QITNp6u1f5bMxvi1_mcvEfXpWyitsWB5f7gn8,759
 fractal_server/app/models/v2/task.py,sha256=9ZPhug3VWyeqgT8wQ9_8ZXQ2crSiiicRipxrxTslOso,3257
 fractal_server/app/models/v2/workflow.py,sha256=YBgFGCziUgU0aJ5EM3Svu9W2c46AewZO9VBlFCHiSps,1069
 fractal_server/app/models/v2/workflowtask.py,sha256=3jEkObsSnlI05Pur_dSsXYdJxRqPL60Z7tK5-EJLOks,1532
 fractal_server/app/routes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/routes/admin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fractal_server/app/routes/admin/v1.py,sha256=uMupmRkicaoWazX8qSX5fgh00O3MbuSfim8QayP6NkE,13996
-fractal_server/app/routes/admin/v2.py,sha256=
+fractal_server/app/routes/admin/v2.py,sha256=e0A6RMWfmTO5dVL95XO-P2EljdEwR00FErxahEPzODQ,13757
 fractal_server/app/routes/api/__init__.py,sha256=EVyZrEq3I_1643QGTPCC5lgCp4xH_auYbrFfogTm4pc,315
 fractal_server/app/routes/api/v1/__init__.py,sha256=Y2HQdG197J0a7DyQEE2jn53IfxD0EHGhzK1I2JZuEck,958
-fractal_server/app/routes/api/v1/_aux_functions.py,sha256=
+fractal_server/app/routes/api/v1/_aux_functions.py,sha256=lFfGJxbJc-Ryah_pPkiTH2SBncWQ15n0iqVFolzIOCM,11976
 fractal_server/app/routes/api/v1/dataset.py,sha256=HRE-8vPmVkeXf7WFYkI19mDtbY-iJZeJ7PmMiV0LMgY,16923
 fractal_server/app/routes/api/v1/job.py,sha256=NwXyhvvzdPDor0ts8Im__9-I0P1H943s4NXIRgaz7PM,5436
-fractal_server/app/routes/api/v1/project.py,sha256=
+fractal_server/app/routes/api/v1/project.py,sha256=DKQ6n1CPmHqsKieBaJlKnOhKaHWgQTZIA_asJTT9Uxo,15802
 fractal_server/app/routes/api/v1/task.py,sha256=udbKnenzc-Q10elYCVB9JmOPWATraa9tZi0AaByvWo0,6129
 fractal_server/app/routes/api/v1/task_collection.py,sha256=mFaYyCWtCPRqvs3j6zx_zaiDXn31Uzoa7UHZS-Lu_L0,8882
 fractal_server/app/routes/api/v1/workflow.py,sha256=7r9IoIevg_rvYCrerMOsIsUabSOQatxdPCfLdkP0dRs,10942
@@ -42,8 +42,8 @@ fractal_server/app/routes/api/v2/images.py,sha256=4r_HblPWyuKSZSJZfn8mbDaLv1ncwZ
 fractal_server/app/routes/api/v2/job.py,sha256=BtaxErBDbLwjY2zgGD1I6eRpsffoMonifcS1CMEXmLU,5325
 fractal_server/app/routes/api/v2/project.py,sha256=qyvizYZ4aUFgF3tGdfp4z8AwWgfo19N_KbFEljfUaC8,5594
 fractal_server/app/routes/api/v2/status.py,sha256=osLexiMOSqmYcEV-41tlrwt9ofyFbtRm5HmPS5BU0t4,6394
-fractal_server/app/routes/api/v2/submit.py,sha256=
-fractal_server/app/routes/api/v2/task.py,sha256=
+fractal_server/app/routes/api/v2/submit.py,sha256=su512HjafuD_lyiLswyxN4T8xwKaMpWOhGsYVPh94sI,7672
+fractal_server/app/routes/api/v2/task.py,sha256=bRTtGgL8BBGbT7csVeRB-a54clgU2xHydi5XpcByDxg,8297
 fractal_server/app/routes/api/v2/task_collection.py,sha256=eN3NkZaZHkrqnLGRKE7Xd5mo0cHc8aK2lojCt26ErOQ,8988
 fractal_server/app/routes/api/v2/task_legacy.py,sha256=P_VJv9v0yzFUBuS-DQHhMVSOe20ecGJJcFBqiiFciOM,1628
 fractal_server/app/routes/api/v2/workflow.py,sha256=2GlcYNjpvCdjwC_Kn7y0UP16B3pOLSNXBvIVsVDtDKM,11863
@@ -63,7 +63,7 @@ fractal_server/app/runner/executors/slurm/_batching.py,sha256=1P6CgrAOCK9u_EvNFT
 fractal_server/app/runner/executors/slurm/_check_jobs_status.py,sha256=8d29a7DQ2xoWxoFQCnFfTpHER-qBX8mEatl4Dw5HU_o,1908
 fractal_server/app/runner/executors/slurm/_executor_wait_thread.py,sha256=J3tjAx33nBgW4eHAXDte7hDs7Oe9FLEZaElEt8inrbg,4421
 fractal_server/app/runner/executors/slurm/_slurm_config.py,sha256=rF37XDImX1QoWx37MC5hSM9AuY_KfHU5gaWwN4vl4Zk,15552
-fractal_server/app/runner/executors/slurm/_subprocess_run_as_user.py,sha256=
+fractal_server/app/runner/executors/slurm/_subprocess_run_as_user.py,sha256=YwfJzZr_y4FL_hirHJdWK0vWzrldjoZZhXVFlO2AOMU,5131
 fractal_server/app/runner/executors/slurm/executor.py,sha256=267YTDvyeA0yX2n2HffxP8OAu_CQF5uB9K-_AaUG3iU,44655
 fractal_server/app/runner/executors/slurm/remote.py,sha256=wLziIsGdSMiO-jIXM8x77JRK82g_2hx0iBKTiMghuIo,5852
 fractal_server/app/runner/filenames.py,sha256=9lwu3yB4C67yiijYw8XIKaLFn3mJUt6_TCyVFM_aZUQ,206
@@ -80,7 +80,7 @@ fractal_server/app/runner/v1/_slurm/_submit_setup.py,sha256=llTgSOCnCVMvm7Q0SoVp
 fractal_server/app/runner/v1/_slurm/get_slurm_config.py,sha256=6TLWQon8hSicsD7c3yXK4P9xeId0s_H3HOOeMUVGVss,5977
 fractal_server/app/runner/v1/common.py,sha256=_L-vjLnWato80VdlB_BFN4G8P4jSM07u-5cnl1T3S34,3294
 fractal_server/app/runner/v1/handle_failed_job.py,sha256=bHzScC_aIlU3q-bQxGW6rfWV4xbZ2tho_sktjsAs1no,4684
-fractal_server/app/runner/v2/__init__.py,sha256=
+fractal_server/app/runner/v2/__init__.py,sha256=U6FwKhiIF1INKU8LCoGHBCb04S2xOa3rpbltcodWXLw,11952
 fractal_server/app/runner/v2/_local/__init__.py,sha256=Q1s-DwXleUq6w1ZNv6tlh3tZv6cyBqxB_hMvZlqVYaM,5881
 fractal_server/app/runner/v2/_local/_local_config.py,sha256=lR0Js-l63mQUzN9hK0HkfdLsrTf-W6GHvPvbPC64amY,3630
 fractal_server/app/runner/v2/_local/_submit_setup.py,sha256=deagsLSy6A3ZHKaSDcQqrdvbQVM3i4kgyTcbVc0tC5U,1614
@@ -121,14 +121,14 @@ fractal_server/app/schemas/v2/task_collection.py,sha256=sY29NQfJrbjiidmVkVjSIH-2
 fractal_server/app/schemas/v2/workflow.py,sha256=Zzx3e-qgkH8le0FUmAx9UrV5PWd7bj14PPXUh_zgZXM,1827
 fractal_server/app/schemas/v2/workflowtask.py,sha256=atVuVN4aXsVEOmSd-vyg-8_8OnPmqx-gT75rXcn_AlQ,6552
 fractal_server/app/security/__init__.py,sha256=wxosoHc3mJYPCdPMyWnRD8w_2OgnKYp2aDkdmwrZh5k,11203
-fractal_server/config.py,sha256=
-fractal_server/data_migrations/2_0_3.py,sha256=7EhwLCZTk1yHD_dlU-HIf2uvx6jUIgfDaA5np27QEEM,2918
+fractal_server/config.py,sha256=19CnX1Jyw_zYjEvfXJTaypB7Ape7qfFzKX4B1LZmOe0,15167
 fractal_server/data_migrations/README.md,sha256=_3AEFvDg9YkybDqCLlFPdDmGJvr6Tw7HRI14aZ3LOIw,398
 fractal_server/images/__init__.py,sha256=xO6jTLE4EZKO6cTDdJsBmK9cdeh9hFTaSbSuWgQg7y4,196
 fractal_server/images/models.py,sha256=9ipU5h4N6ogBChoB-2vHoqtL0TXOHCv6kRR-fER3mkM,4167
 fractal_server/images/tools.py,sha256=gxeniYy4Z-cp_ToK2LHPJUTVVUUrdpogYdcBUvBuLiY,2209
-fractal_server/logger.py,sha256=
-fractal_server/
+fractal_server/logger/__init__.py,sha256=Q_e03Lj30VWdCqGBJrKw9A2QEeDbCKC_OOkDQVW9Dyw,5132
+fractal_server/logger/gunicorn_logger.py,sha256=vb5s7mruCHPkKWGrTTOPyrB_658Y2Z05ECdHhCCBhp0,644
+fractal_server/main.py,sha256=OostfB8nyFKCySDTnIR64WlHg82eHkoykJgQg6se9xc,3556
 fractal_server/migrations/README,sha256=4rQvyDfqodGhpJw74VYijRmgFP49ji5chyEemWGHsuw,59
 fractal_server/migrations/env.py,sha256=bsl0HGZpjhommztgcs7wQ94sJzI1Orgnij97K8P_uyo,2630
 fractal_server/migrations/script.py.mako,sha256=oMXw9LC3zRbinWWPPDgeZ4z9FJrV2zhRWiYdS5YgNbI,526
@@ -163,8 +163,8 @@ fractal_server/tasks/v2/background_operations.py,sha256=MAMBn6W2bhkdK59kfUGiD7a1
 fractal_server/tasks/v2/get_collection_data.py,sha256=Qhf2T_aaqAfqu9_KpUSlXsS7EJoZQbEPEreHHa2jco8,502
 fractal_server/urls.py,sha256=5o_qq7PzKKbwq12NHSQZDmDitn5RAOeQ4xufu-2v9Zk,448
 fractal_server/utils.py,sha256=b7WwFdcFZ8unyT65mloFToYuEDXpQoHRcmRNqrhd_dQ,2115
-fractal_server-2.0.
-fractal_server-2.0.
-fractal_server-2.0.
-fractal_server-2.0.
-fractal_server-2.0.
+fractal_server-2.0.6.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.0.6.dist-info/METADATA,sha256=id7WLlUPHEi6TAgIVSDxZJacFhTvriBOdX86QLLbqPM,4222
+fractal_server-2.0.6.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+fractal_server-2.0.6.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.0.6.dist-info/RECORD,,

fractal_server/data_migrations/2_0_3.py
DELETED

@@ -1,79 +0,0 @@
-import logging
-
-from packaging.version import parse
-from sqlalchemy import select
-from sqlalchemy.orm.attributes import flag_modified
-
-import fractal_server
-from fractal_server.app.db import get_sync_db
-from fractal_server.app.models.v1 import ApplyWorkflow
-from fractal_server.app.models.v2 import JobV2
-from fractal_server.app.schemas.v1 import ApplyWorkflowReadV1
-from fractal_server.app.schemas.v2 import JobReadV2
-
-
-def fix_db():
-    logger = logging.getLogger("fix_db")
-    logger.warning("START execution of fix_db function")
-
-    # Check that this module matches with the current version
-    module_version = parse("2.0.3")
-    current_version = parse(fractal_server.__VERSION__)
-    if (
-        current_version.major != module_version.major
-        or current_version.minor != module_version.minor
-        or current_version.micro != module_version.micro
-    ):
-        raise RuntimeError(
-            f"{fractal_server.__VERSION__=} not matching with {__file__=}"
-        )
-
-    with next(get_sync_db()) as db:
-
-        # V1 jobs
-        stm = select(ApplyWorkflow)
-        jobs_v1 = db.execute(stm).scalars().all()
-        for job_v1 in sorted(jobs_v1, key=lambda x: x.id):
-            for KEY in ["history"]:
-                logger.warning(
-                    f"Now removing {KEY} from `input/output_dataset_dump`, "
-                    f"for appplyworkflow.id={job_v1.id}."
-                )
-                if KEY in job_v1.input_dataset_dump.keys():
-                    job_v1.input_dataset_dump.pop(KEY)
-                if KEY in job_v1.output_dataset_dump.keys():
-                    job_v1.output_dataset_dump.pop(KEY)
-            flag_modified(job_v1, "input_dataset_dump")
-            flag_modified(job_v1, "output_dataset_dump")
-            db.add(job_v1)
-            db.commit()
-            db.refresh(job_v1)
-            db.expunge(job_v1)
-            logger.warning(
-                f"Now validating applyworkflow.id={job_v1.id} with "
-                "ApplyWorkflowReadV1."
-            )
-            ApplyWorkflowReadV1(**job_v1.model_dump())
-
-        # V2 jobs
-        stm = select(JobV2)
-        jobs_v2 = db.execute(stm).scalars().all()
-        for job_v2 in sorted(jobs_v2, key=lambda x: x.id):
-            for KEY in ["history", "images"]:
-                logger.warning(
-                    f"Now removing {KEY} from `dataset_dump`, "
-                    f"for jobv2.id={job_v2.id}."
-                )
-                if KEY in job_v2.dataset_dump.keys():
-                    job_v2.dataset_dump.pop(KEY)
-            flag_modified(job_v2, "dataset_dump")
-            db.add(job_v2)
-            db.commit()
-            db.refresh(job_v2)
-            db.expunge(job_v2)
-            logger.warning(
-                f"Now validating jobv2.id={job_v2.id} with JobReadV2."
-            )
-            JobReadV2(**job_v2.model_dump())
-
-    logger.warning("END of execution of fix_db function")

{fractal_server-2.0.4.dist-info → fractal_server-2.0.6.dist-info}/LICENSE
File without changes

{fractal_server-2.0.4.dist-info → fractal_server-2.0.6.dist-info}/WHEEL
File without changes

{fractal_server-2.0.4.dist-info → fractal_server-2.0.6.dist-info}/entry_points.txt
File without changes