fractal-server 1.4.3a0__py3-none-any.whl → 1.4.3a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/db/__init__.py +36 -25
- fractal_server/app/routes/admin.py +8 -8
- fractal_server/app/routes/api/v1/_aux_functions.py +3 -5
- fractal_server/app/routes/api/v1/dataset.py +24 -23
- fractal_server/app/routes/api/v1/job.py +7 -7
- fractal_server/app/routes/api/v1/project.py +14 -19
- fractal_server/app/routes/api/v1/task.py +6 -6
- fractal_server/app/routes/api/v1/task_collection.py +12 -126
- fractal_server/app/routes/api/v1/workflow.py +13 -13
- fractal_server/app/routes/api/v1/workflowtask.py +5 -5
- fractal_server/app/routes/auth.py +2 -2
- fractal_server/app/runner/__init__.py +0 -1
- fractal_server/app/schemas/__init__.py +1 -0
- fractal_server/app/schemas/applyworkflow.py +5 -9
- fractal_server/app/schemas/task_collection.py +2 -10
- fractal_server/app/security/__init__.py +3 -3
- fractal_server/config.py +14 -0
- fractal_server/tasks/_TaskCollectPip.py +103 -0
- fractal_server/tasks/__init__.py +3 -1
- fractal_server/tasks/background_operations.py +384 -0
- fractal_server/tasks/endpoint_operations.py +167 -0
- fractal_server/tasks/utils.py +86 -0
- {fractal_server-1.4.3a0.dist-info → fractal_server-1.4.3a1.dist-info}/METADATA +1 -1
- {fractal_server-1.4.3a0.dist-info → fractal_server-1.4.3a1.dist-info}/RECORD +28 -25
- fractal_server/tasks/collection.py +0 -556
- {fractal_server-1.4.3a0.dist-info → fractal_server-1.4.3a1.dist-info}/LICENSE +0 -0
- {fractal_server-1.4.3a0.dist-info → fractal_server-1.4.3a1.dist-info}/WHEEL +0 -0
- {fractal_server-1.4.3a0.dist-info → fractal_server-1.4.3a1.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
```diff
@@ -1 +1 @@
-__VERSION__ = "1.4.3a0"
+__VERSION__ = "1.4.3a1"
```
fractal_server/app/db/__init__.py
CHANGED
```diff
@@ -21,6 +21,12 @@ from ...syringe import Inject
 print(__name__)
 logger = set_logger(__name__)
 
+SQLITE_WARNING_MESSAGE = (
+    "SQLite is supported (for version >=3.37) but discouraged in production. "
+    "Given its partial support for ForeignKey constraints, "
+    "database consistency cannot be guaranteed."
+)
+
 
 class DB:
     """
@@ -32,7 +38,7 @@ class DB:
         try:
             return cls._engine_async
         except AttributeError:
-            cls.
+            cls.set_async_db()
             return cls._engine_async
 
     @classmethod
@@ -40,32 +46,22 @@
         try:
             return cls._engine_sync
         except AttributeError:
-            cls.
+            cls.set_sync_db()
             return cls._engine_sync
 
     @classmethod
-    def
+    def set_async_db(cls):
         settings = Inject(get_settings)
         settings.check_db()
 
         if settings.DB_ENGINE == "sqlite":
-            logger.warning(
-                "SQLite is supported (for version >=3.37) but discouraged "
-                "in production. Given its partial support for ForeignKey "
-                "constraints, database consistency cannot be guaranteed."
-            )
-
+            logger.warning(SQLITE_WARNING_MESSAGE)
             # Set some sqlite-specific options
             engine_kwargs_async = dict(poolclass=StaticPool)
-            engine_kwargs_sync = dict(
-                poolclass=StaticPool,
-                connect_args={"check_same_thread": False},
-            )
         else:
             engine_kwargs_async = {
                 "pool_pre_ping": True,
             }
-            engine_kwargs_sync = {}
 
         cls._engine_async = create_async_engine(
             settings.DATABASE_URL,
@@ -73,13 +69,6 @@ class DB:
             future=True,
             **engine_kwargs_async,
         )
-        cls._engine_sync = create_engine(
-            settings.DATABASE_SYNC_URL,
-            echo=settings.DB_ECHO,
-            future=True,
-            **engine_kwargs_sync,
-        )
-
         cls._async_session_maker = sessionmaker(
             cls._engine_async,
             class_=AsyncSession,
@@ -87,6 +76,28 @@ class DB:
             future=True,
         )
 
+    @classmethod
+    def set_sync_db(cls):
+        settings = Inject(get_settings)
+        settings.check_db()
+
+        if settings.DB_ENGINE == "sqlite":
+            logger.warning(SQLITE_WARNING_MESSAGE)
+            # Set some sqlite-specific options
+            engine_kwargs_sync = dict(
+                poolclass=StaticPool,
+                connect_args={"check_same_thread": False},
+            )
+        else:
+            engine_kwargs_sync = {}
+
+        cls._engine_sync = create_engine(
+            settings.DATABASE_SYNC_URL,
+            echo=settings.DB_ECHO,
+            future=True,
+            **engine_kwargs_sync,
+        )
+
         cls._sync_session_maker = sessionmaker(
             bind=cls._engine_sync,
             autocommit=False,
@@ -102,14 +113,14 @@ class DB:
             cursor.close()
 
     @classmethod
-    async def
+    async def get_async_db(cls) -> AsyncGenerator[AsyncSession, None]:
         """
         Get async database session
         """
         try:
             session_maker = cls._async_session_maker()
         except AttributeError:
-            cls.
+            cls.set_async_db()
             session_maker = cls._async_session_maker()
         async with session_maker as async_session:
             yield async_session
@@ -122,11 +133,11 @@ class DB:
         try:
             session_maker = cls._sync_session_maker()
         except AttributeError:
-            cls.
+            cls.set_sync_db()
             session_maker = cls._sync_session_maker()
         with session_maker as sync_session:
             yield sync_session
 
 
-
+get_async_db = DB.get_async_db
 get_sync_db = DB.get_sync_db
```
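The net effect of the changes above is that the async and sync engines are now configured independently, each behind its own lazy setter, with `SQLITE_WARNING_MESSAGE` shared between them. A minimal usage sketch follows; the `/ping-db/` route and the standalone sync helper are illustrative and not part of the package, while the import path and the `Depends(get_async_db)` pattern mirror the v1 routes in the rest of this diff.

```python
# Minimal sketch (not part of the diff) of how the two helpers are consumed.
# The async route mirrors the `Depends(get_async_db)` pattern used by the v1
# routes below; the sync helper usage is illustrative only.
from fastapi import APIRouter, Depends
from sqlalchemy import text

from fractal_server.app.db import AsyncSession, get_async_db, get_sync_db

router = APIRouter()


@router.get("/ping-db/")
async def ping_db(db: AsyncSession = Depends(get_async_db)) -> dict:
    # FastAPI drives the async generator: one AsyncSession per request,
    # closed when the generator is exhausted.
    await db.execute(text("SELECT 1"))
    return {"db": "ok"}


def ping_db_sync() -> None:
    # Outside a request (e.g. in a background task) the sync generator can be
    # iterated directly; it lazily calls DB.set_sync_db() on first use.
    for session in get_sync_db():
        session.execute(text("SELECT 1"))
```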
fractal_server/app/routes/admin.py
CHANGED
```diff
@@ -16,7 +16,7 @@ from sqlmodel import select
 
 from ...utils import get_timestamp
 from ..db import AsyncSession
-from ..db import
+from ..db import get_async_db
 from ..models import ApplyWorkflow
 from ..models import Dataset
 from ..models import JobStatusType
@@ -41,7 +41,7 @@ async def view_project(
     id: Optional[int] = None,
     user_id: Optional[int] = None,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[ProjectRead]:
     """
     Query `project` table.
@@ -73,7 +73,7 @@ async def view_workflow(
     project_id: Optional[int] = None,
     name_contains: Optional[str] = None,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[WorkflowRead]:
     """
     Query `workflow` table.
@@ -115,7 +115,7 @@ async def view_dataset(
     name_contains: Optional[str] = None,
     type: Optional[str] = None,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[DatasetRead]:
     """
     Query `dataset` table.
@@ -166,7 +166,7 @@ async def view_job(
     end_timestamp_min: Optional[datetime] = None,
     end_timestamp_max: Optional[datetime] = None,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[ApplyWorkflowRead]:
     """
     Query `ApplyWorkflow` table.
@@ -231,7 +231,7 @@ async def update_job(
     job_update: ApplyWorkflowUpdate,
     job_id: int,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ApplyWorkflowRead]:
     """
     Change the status of an existing job.
@@ -264,7 +264,7 @@ async def update_job(
 async def stop_job(
     job_id: int,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Response:
     """
     Stop execution of a workflow job.
@@ -293,7 +293,7 @@ async def stop_job(
 async def download_job_logs(
     job_id: int,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> StreamingResponse:
     """
     Download job folder
```
fractal_server/app/routes/api/v1/_aux_functions.py
CHANGED
```diff
@@ -368,16 +368,14 @@ async def _get_task_check_owner(
     return task
 
 
-def
+def _get_submitted_jobs_statement() -> SelectOfScalar:
     """
     Returns:
         A sqlmodel statement that selects all `ApplyWorkflow`s with
-        `ApplyWorkflow.status` equal to `submitted
+        `ApplyWorkflow.status` equal to `submitted`.
     """
     stm = select(ApplyWorkflow).where(
-        ApplyWorkflow.status.
-            [JobStatusType.SUBMITTED, JobStatusType.RUNNING]
-        )
+        ApplyWorkflow.status == JobStatusType.SUBMITTED
     )
     return stm
 
```
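`_get_submitted_jobs_statement()` (the replacement for `_get_active_jobs_statement`) returns an un-executed `SelectOfScalar`, so each caller narrows it with its own `.where(...)` clause and runs it against the request-scoped session. Below is a hedged sketch of that caller-side pattern, modeled on the `delete_project` guard further down in this diff; the helper function name and the error message are illustrative only.

```python
# Sketch of the caller-side pattern (illustrative helper; the model and
# statement imports follow the paths used by the v1 routes in this diff).
from fastapi import HTTPException, status

from fractal_server.app.db import AsyncSession
from fractal_server.app.models import ApplyWorkflow
from fractal_server.app.routes.api.v1._aux_functions import (
    _get_submitted_jobs_statement,
)


async def _fail_if_project_has_submitted_jobs(
    project_id: int, db: AsyncSession
) -> None:
    # Narrow the generic "submitted jobs" statement to a single project,
    # then execute it with the request-scoped async session.
    stm = _get_submitted_jobs_statement().where(
        ApplyWorkflow.project_id == project_id
    )
    res = await db.execute(stm)
    if res.scalars().all():
        # Same guard style as `delete_project`; message text is illustrative.
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=f"Cannot proceed: project {project_id} has submitted jobs.",
        )
```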
fractal_server/app/routes/api/v1/dataset.py
CHANGED
```diff
@@ -11,7 +11,7 @@ from sqlmodel import or_
 from sqlmodel import select
 
 from ....db import AsyncSession
-from ....db import
+from ....db import get_async_db
 from ....models import ApplyWorkflow
 from ....models import Dataset
 from ....models import Project
@@ -28,9 +28,9 @@ from ....schemas import WorkflowExport
 from ....schemas import WorkflowTaskExport
 from ....security import current_active_user
 from ....security import User
-from ._aux_functions import _get_active_jobs_statement
 from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_project_check_owner
+from ._aux_functions import _get_submitted_jobs_statement
 from ._aux_functions import _get_workflow_check_owner
 
 
@@ -46,7 +46,7 @@ async def create_dataset(
     project_id: int,
     dataset: DatasetCreate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[DatasetRead]:
     """
     Add new dataset to current project
@@ -70,7 +70,7 @@ async def create_dataset(
 async def read_dataset_list(
     project_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[list[DatasetRead]]:
     """
     Get dataset list for given project
@@ -96,7 +96,7 @@ async def read_dataset(
     project_id: int,
     dataset_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[DatasetRead]:
     """
     Get info on a dataset associated to the current project
@@ -121,7 +121,7 @@ async def update_dataset(
     dataset_id: int,
     dataset_update: DatasetUpdate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[DatasetRead]:
     """
     Edit a dataset associated to the current project
@@ -158,7 +158,7 @@ async def delete_dataset(
     project_id: int,
     dataset_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Response:
     """
     Delete a dataset associated to the current project
@@ -171,9 +171,9 @@ async def delete_dataset(
     )
     dataset = output["dataset"]
 
-    # Fail if there exist jobs that are
-    #
-    stm =
+    # Fail if there exist jobs that are submitted and in relation with the
+    # current dataset.
+    stm = _get_submitted_jobs_statement().where(
         or_(
             ApplyWorkflow.input_dataset_id == dataset_id,
             ApplyWorkflow.output_dataset_id == dataset_id,
@@ -231,7 +231,7 @@ async def create_resource(
     dataset_id: int,
     resource: ResourceCreate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ResourceRead]:
     """
     Add resource to an existing dataset
@@ -259,7 +259,7 @@ async def get_resource_list(
     project_id: int,
     dataset_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[list[ResourceRead]]:
     """
     Get resources from a dataset
@@ -287,7 +287,7 @@ async def update_resource(
     resource_id: int,
     resource_update: ResourceUpdate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ResourceRead]:
     """
     Edit a resource of a dataset
@@ -327,7 +327,7 @@ async def delete_resource(
     dataset_id: int,
     resource_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Response:
     """
     Delete a resource of a dataset
@@ -360,7 +360,7 @@ async def export_history_as_workflow(
     project_id: int,
     dataset_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[WorkflowExport]:
     """
     Extract a reproducible workflow from the dataset history.
@@ -374,11 +374,12 @@ async def export_history_as_workflow(
     )
     dataset = output["dataset"]
 
-    # Check whether there exists
-    # ==
-    #
-    #
-
+    # Check whether there exists a submitted job such that
+    # `job.output_dataset_id==dataset_id`.
+    # If at least one such job exists, then this endpoint will fail.
+    # We do not support the use case of exporting a reproducible workflow when
+    # job execution is in progress; this may change in the future.
+    stm = _get_submitted_jobs_statement().where(
         ApplyWorkflow.output_dataset_id == dataset_id
     )
     res = await db.execute(stm)
@@ -427,7 +428,7 @@ async def get_workflowtask_status(
     project_id: int,
     dataset_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[DatasetStatusRead]:
     """
     Extract the status of all `WorkflowTask`s that ran on a given `Dataset`.
@@ -449,7 +450,7 @@ async def get_workflowtask_status(
     # Note: see
     # https://sqlmodel.tiangolo.com/tutorial/where/#type-annotations-and-errors
     # regarding the type-ignore in this code block
-    stm =
+    stm = _get_submitted_jobs_statement().where(
         ApplyWorkflow.output_dataset_id == dataset_id
     )
     res = await db.execute(stm)
@@ -516,7 +517,7 @@ async def get_workflowtask_status(
 @router.get("/dataset/", response_model=list[DatasetRead])
 async def get_user_datasets(
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[DatasetRead]:
     """
     Returns all the datasets of the current user
```
fractal_server/app/routes/api/v1/job.py
CHANGED
```diff
@@ -9,7 +9,7 @@ from fastapi.responses import StreamingResponse
 from sqlmodel import select
 
 from ....db import AsyncSession
-from ....db import
+from ....db import get_async_db
 from ....models import ApplyWorkflow
 from ....models import Project
 from ....schemas import ApplyWorkflowRead
@@ -29,7 +29,7 @@ router = APIRouter()
 @router.get("/job/", response_model=list[ApplyWorkflowRead])
 async def get_user_jobs(
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[ApplyWorkflowRead]:
     """
     Returns all the jobs of the current user
@@ -51,7 +51,7 @@ async def get_workflow_jobs(
     project_id: int,
     workflow_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[list[ApplyWorkflowRead]]:
     """
     Returns all the jobs related to a specific workflow
@@ -73,7 +73,7 @@ async def read_job(
     project_id: int,
     job_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ApplyWorkflowRead]:
     """
     Return info on an existing job
@@ -99,7 +99,7 @@ async def download_job_logs(
     project_id: int,
     job_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> StreamingResponse:
     """
     Download job folder
@@ -132,7 +132,7 @@ async def download_job_logs(
 async def get_job_list(
     project_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[list[ApplyWorkflowRead]]:
     """
     Get job list for given project
@@ -156,7 +156,7 @@ async def stop_job(
     project_id: int,
     job_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Response:
     """
     Stop execution of a workflow job (only available for slurm backend)
```
fractal_server/app/routes/api/v1/project.py
CHANGED
```diff
@@ -14,7 +14,7 @@ from .....logger import close_logger
 from .....logger import set_logger
 from .....syringe import Inject
 from ....db import AsyncSession
-from ....db import
+from ....db import get_async_db
 from ....models import ApplyWorkflow
 from ....models import Dataset
 from ....models import LinkUserProject
@@ -33,9 +33,9 @@ from ....security import current_active_user
 from ....security import current_active_verified_user
 from ....security import User
 from ._aux_functions import _check_project_exists
-from ._aux_functions import _get_active_jobs_statement
 from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_project_check_owner
+from ._aux_functions import _get_submitted_jobs_statement
 from ._aux_functions import _get_workflow_check_owner
 
 
@@ -45,7 +45,7 @@ router = APIRouter()
 @router.get("/", response_model=list[ProjectRead])
 async def get_list_project(
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[Project]:
     """
     Return list of projects user is member of
@@ -65,7 +65,7 @@ async def get_list_project(
 async def create_project(
     project: ProjectCreate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ProjectRead]:
     """
     Create new poject
@@ -100,7 +100,7 @@ async def create_project(
 async def read_project(
     project_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ProjectRead]:
     """
     Return info on an existing project
@@ -117,7 +117,7 @@ async def update_project(
     project_id: int,
     project_update: ProjectUpdate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ):
     project = await _get_project_check_owner(
         project_id=project_id, user_id=user.id, db=db
@@ -142,7 +142,7 @@ async def update_project(
 async def delete_project(
     project_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Response:
     """
     Delete project
@@ -151,9 +151,9 @@ async def delete_project(
         project_id=project_id, user_id=user.id, db=db
     )
 
-    # Fail if there exist jobs that are
-    #
-    stm =
+    # Fail if there exist jobs that are submitted and in relation with the
+    # current project.
+    stm = _get_submitted_jobs_statement().where(
         ApplyWorkflow.project_id == project_id
     )
     res = await db.execute(stm)
@@ -245,7 +245,7 @@ async def apply_workflow(
     input_dataset_id: int,
     output_dataset_id: int,
     user: User = Depends(current_active_verified_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ApplyWorkflowRead]:
 
     output = await _get_dataset_check_owner(
@@ -352,16 +352,11 @@ async def apply_workflow(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, detail=str(e)
         )
 
-    # Check that no other job with the same output_dataset_id is
-    # SUBMITTED or RUNNING
+    # Check that no other job with the same output_dataset_id is SUBMITTED
     stm = (
         select(ApplyWorkflow)
         .where(ApplyWorkflow.output_dataset_id == output_dataset_id)
-        .where(
-            ApplyWorkflow.status.in_(
-                [JobStatusType.SUBMITTED, JobStatusType.RUNNING]
-            )
-        )
+        .where(ApplyWorkflow.status == JobStatusType.SUBMITTED)
     )
     res = await db.execute(stm)
     if res.scalars().all():
@@ -369,7 +364,7 @@ async def apply_workflow(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
             detail=(
                 f"Output dataset {output_dataset_id} is already in use "
-                "in
+                "in submitted job(s)."
             ),
         )
 
```
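The same SUBMITTED-only policy is applied inline in `apply_workflow` above: the old `status.in_([SUBMITTED, RUNNING])` filter collapses to a single equality check. A small sketch of the resulting statement follows (names come from the models imported in `project.py`; whether `JobStatusType.RUNNING` remains an enum member after this release is not shown by this diff):

```python
# Illustrative only: the concurrency guard built before dispatching a new job.
from sqlmodel import select

from fractal_server.app.models import ApplyWorkflow, JobStatusType

output_dataset_id = 1  # example value

# From 1.4.3a1, only jobs still in the SUBMITTED state block reuse of the
# output dataset (previously, RUNNING jobs blocked it as well).
stm = (
    select(ApplyWorkflow)
    .where(ApplyWorkflow.output_dataset_id == output_dataset_id)
    .where(ApplyWorkflow.status == JobStatusType.SUBMITTED)
)
```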
fractal_server/app/routes/api/v1/task.py
CHANGED
```diff
@@ -10,7 +10,7 @@ from sqlmodel import select
 
 from .....logger import set_logger
 from ....db import AsyncSession
-from ....db import
+from ....db import get_async_db
 from ....models import Task
 from ....models import WorkflowTask
 from ....schemas import TaskCreate
@@ -29,7 +29,7 @@ logger = set_logger(__name__)
 @router.get("/", response_model=list[TaskRead])
 async def get_list_task(
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[TaskRead]:
     """
     Get list of available tasks
@@ -45,7 +45,7 @@ async def get_list_task(
 async def get_task(
     task_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> TaskRead:
     """
     Get info on a specific task
@@ -64,7 +64,7 @@ async def patch_task(
     task_id: int,
     task_update: TaskUpdate,
     user: User = Depends(current_active_verified_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[TaskRead]:
     """
     Edit a specific task (restricted to superusers and task owner)
@@ -108,7 +108,7 @@ async def patch_task(
 async def create_task(
     task: TaskCreate,
     user: User = Depends(current_active_verified_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[TaskRead]:
     """
     Create a new task
@@ -154,7 +154,7 @@ async def create_task(
 async def delete_task(
     task_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(
+    db: AsyncSession = Depends(get_async_db),
 ) -> Response:
     """
     Delete a task
```
|