fractal-server 1.4.2a5__py3-none-any.whl → 1.4.3a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/db/__init__.py +36 -25
- fractal_server/app/models/dataset.py +8 -0
- fractal_server/app/models/workflow.py +9 -55
- fractal_server/app/routes/admin.py +8 -8
- fractal_server/app/routes/api/v1/_aux_functions.py +64 -5
- fractal_server/app/routes/api/v1/dataset.py +24 -23
- fractal_server/app/routes/api/v1/job.py +7 -7
- fractal_server/app/routes/api/v1/project.py +29 -26
- fractal_server/app/routes/api/v1/task.py +6 -6
- fractal_server/app/routes/api/v1/task_collection.py +12 -126
- fractal_server/app/routes/api/v1/workflow.py +16 -14
- fractal_server/app/routes/api/v1/workflowtask.py +8 -6
- fractal_server/app/routes/auth.py +2 -2
- fractal_server/app/runner/__init__.py +0 -1
- fractal_server/app/schemas/__init__.py +1 -0
- fractal_server/app/schemas/applyworkflow.py +9 -13
- fractal_server/app/schemas/dataset.py +2 -0
- fractal_server/app/schemas/dumps.py +2 -0
- fractal_server/app/schemas/task_collection.py +2 -10
- fractal_server/app/schemas/user.py +7 -3
- fractal_server/app/schemas/workflow.py +2 -0
- fractal_server/app/security/__init__.py +3 -3
- fractal_server/config.py +14 -0
- fractal_server/migrations/versions/4cedeb448a53_workflowtask_foreign_keys_not_nullables.py +42 -0
- fractal_server/migrations/versions/9fd26a2b0de4_add_workflow_timestamp_created.py +60 -0
- fractal_server/tasks/_TaskCollectPip.py +103 -0
- fractal_server/tasks/__init__.py +3 -1
- fractal_server/tasks/background_operations.py +384 -0
- fractal_server/tasks/endpoint_operations.py +167 -0
- fractal_server/tasks/utils.py +86 -0
- {fractal_server-1.4.2a5.dist-info → fractal_server-1.4.3a1.dist-info}/METADATA +2 -2
- {fractal_server-1.4.2a5.dist-info → fractal_server-1.4.3a1.dist-info}/RECORD +36 -31
- fractal_server/tasks/collection.py +0 -556
- {fractal_server-1.4.2a5.dist-info → fractal_server-1.4.3a1.dist-info}/LICENSE +0 -0
- {fractal_server-1.4.2a5.dist-info → fractal_server-1.4.3a1.dist-info}/WHEEL +0 -0
- {fractal_server-1.4.2a5.dist-info → fractal_server-1.4.3a1.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "1.4.2a5"
+__VERSION__ = "1.4.3a1"
fractal_server/app/db/__init__.py
CHANGED
@@ -21,6 +21,12 @@ from ...syringe import Inject
 print(__name__)
 logger = set_logger(__name__)

+SQLITE_WARNING_MESSAGE = (
+    "SQLite is supported (for version >=3.37) but discouraged in production. "
+    "Given its partial support for ForeignKey constraints, "
+    "database consistency cannot be guaranteed."
+)
+

 class DB:
     """
@@ -32,7 +38,7 @@ class DB:
         try:
             return cls._engine_async
         except AttributeError:
-            cls.set_db()
+            cls.set_async_db()
             return cls._engine_async

     @classmethod
@@ -40,32 +46,22 @@
         try:
             return cls._engine_sync
         except AttributeError:
-            cls.set_db()
+            cls.set_sync_db()
             return cls._engine_sync

     @classmethod
-    def set_db(cls):
+    def set_async_db(cls):
         settings = Inject(get_settings)
         settings.check_db()

         if settings.DB_ENGINE == "sqlite":
-            logger.warning(
-                "SQLite is supported (for version >=3.37) but discouraged "
-                "in production. Given its partial support for ForeignKey "
-                "constraints, database consistency cannot be guaranteed."
-            )
-
+            logger.warning(SQLITE_WARNING_MESSAGE)
             # Set some sqlite-specific options
             engine_kwargs_async = dict(poolclass=StaticPool)
-            engine_kwargs_sync = dict(
-                poolclass=StaticPool,
-                connect_args={"check_same_thread": False},
-            )
         else:
             engine_kwargs_async = {
                 "pool_pre_ping": True,
             }
-            engine_kwargs_sync = {}

         cls._engine_async = create_async_engine(
             settings.DATABASE_URL,
@@ -73,13 +69,6 @@ class DB:
             future=True,
             **engine_kwargs_async,
         )
-        cls._engine_sync = create_engine(
-            settings.DATABASE_SYNC_URL,
-            echo=settings.DB_ECHO,
-            future=True,
-            **engine_kwargs_sync,
-        )
-
         cls._async_session_maker = sessionmaker(
             cls._engine_async,
             class_=AsyncSession,
@@ -87,6 +76,28 @@ class DB:
             future=True,
         )

+    @classmethod
+    def set_sync_db(cls):
+        settings = Inject(get_settings)
+        settings.check_db()
+
+        if settings.DB_ENGINE == "sqlite":
+            logger.warning(SQLITE_WARNING_MESSAGE)
+            # Set some sqlite-specific options
+            engine_kwargs_sync = dict(
+                poolclass=StaticPool,
+                connect_args={"check_same_thread": False},
+            )
+        else:
+            engine_kwargs_sync = {}
+
+        cls._engine_sync = create_engine(
+            settings.DATABASE_SYNC_URL,
+            echo=settings.DB_ECHO,
+            future=True,
+            **engine_kwargs_sync,
+        )
+
         cls._sync_session_maker = sessionmaker(
             bind=cls._engine_sync,
             autocommit=False,
@@ -102,14 +113,14 @@ class DB:
             cursor.close()

     @classmethod
-    async def get_db(cls) -> AsyncGenerator[AsyncSession, None]:
+    async def get_async_db(cls) -> AsyncGenerator[AsyncSession, None]:
         """
         Get async database session
         """
         try:
             session_maker = cls._async_session_maker()
         except AttributeError:
-            cls.set_db()
+            cls.set_async_db()
             session_maker = cls._async_session_maker()
         async with session_maker as async_session:
             yield async_session
@@ -122,11 +133,11 @@ class DB:
         try:
             session_maker = cls._sync_session_maker()
         except AttributeError:
-            cls.set_db()
+            cls.set_sync_db()
             session_maker = cls._sync_session_maker()
         with session_maker as sync_session:
             yield sync_session


-get_db = DB.get_db
+get_async_db = DB.get_async_db
 get_sync_db = DB.get_sync_db
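Note: the module-level `get_async_db` alias above is the dependency that the route modules further down inject via FastAPI's `Depends`. A minimal sketch of the pattern, with a hypothetical endpoint (the route path and function are illustrative, not part of this package):

from fastapi import APIRouter, Depends

from fractal_server.app.db import AsyncSession, get_async_db

router = APIRouter()


@router.get("/ping/")
async def ping(db: AsyncSession = Depends(get_async_db)) -> dict:
    # FastAPI drives the async generator: a fresh session is created for
    # the request and closed once the response has been sent.
    return {"db_session_active": db.is_active}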
fractal_server/app/models/dataset.py
CHANGED
@@ -1,13 +1,16 @@
+from datetime import datetime
 from typing import Any
 from typing import Optional

 from sqlalchemy import Column
 from sqlalchemy.ext.orderinglist import ordering_list
+from sqlalchemy.types import DateTime
 from sqlalchemy.types import JSON
 from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel

+from ...utils import get_timestamp
 from ..schemas.dataset import _DatasetBase
 from ..schemas.dataset import _ResourceBase

@@ -55,6 +58,11 @@ class Dataset(_DatasetBase, SQLModel, table=True):
         sa_column=Column(JSON, server_default="[]", nullable=False)
     )

+    timestamp_created: datetime = Field(
+        default_factory=get_timestamp,
+        sa_column=Column(DateTime(timezone=True), nullable=False),
+    )
+
     class Config:
         arbitrary_types_allowed = True

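The new `timestamp_created` column is populated by `fractal_server.utils.get_timestamp`, whose body is not shown in this diff. A plausible sketch, assuming the helper simply returns a timezone-aware UTC datetime (consistent with the `DateTime(timezone=True)` column above):

from datetime import datetime, timezone


def get_timestamp() -> datetime:
    # Timezone-aware UTC "now", matching DateTime(timezone=True).
    return datetime.now(tz=timezone.utc)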
fractal_server/app/models/workflow.py
CHANGED
@@ -1,3 +1,4 @@
+from datetime import datetime
 from typing import Any
 from typing import Optional
 from typing import Union
@@ -5,12 +6,13 @@ from typing import Union
 from pydantic import validator
 from sqlalchemy import Column
 from sqlalchemy.ext.orderinglist import ordering_list
+from sqlalchemy.types import DateTime
 from sqlalchemy.types import JSON
 from sqlmodel import Field
 from sqlmodel import Relationship
 from sqlmodel import SQLModel

-from ..db import AsyncSession
+from ...utils import get_timestamp
 from ..schemas.workflow import _WorkflowBase
 from ..schemas.workflow import _WorkflowTaskBase
 from .task import Task
@@ -49,8 +51,8 @@ class WorkflowTask(_WorkflowTaskBase, SQLModel, table=True):

     id: Optional[int] = Field(default=None, primary_key=True)

-    workflow_id: Optional[int] = Field(foreign_key="workflow.id")
-    task_id: Optional[int] = Field(foreign_key="task.id")
+    workflow_id: int = Field(foreign_key="workflow.id")
+    task_id: int = Field(foreign_key="task.id")
     order: Optional[int]
     meta: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
     args: Optional[dict[str, Any]] = Field(sa_column=Column(JSON))
@@ -117,58 +119,10 @@ class Workflow(_WorkflowBase, SQLModel, table=True):
             cascade="all, delete-orphan",
         ),
     )
-
-    async def insert_task(
-        self,
-        task_id: int,
-        *,
-        args: Optional[dict[str, Any]] = None,
-        meta: Optional[dict[str, Any]] = None,
-        order: Optional[int] = None,
-        db: AsyncSession,
-        commit: bool = True,
-    ) -> WorkflowTask:
-        """
-        Insert a new WorkflowTask into Workflow.task_list
-
-        Args:
-            task_id: TBD
-            args: TBD
-            meta: TBD
-            order: TBD
-            db: TBD
-            commit: TBD
-        """
-        if order is None:
-            order = len(self.task_list)
-
-        # Get task from db, and extract default arguments via a Task property
-        # method
-        db_task = await db.get(Task, task_id)
-        default_args = db_task.default_args_from_args_schema
-        # Override default_args with args
-        actual_args = default_args.copy()
-        if args is not None:
-            for k, v in args.items():
-                actual_args[k] = v
-        if not actual_args:
-            actual_args = None
-
-        # Combine meta (higher priority) and db_task.meta (lower priority)
-        wt_meta = (db_task.meta or {}).copy()
-        wt_meta.update(meta or {})
-        if not wt_meta:
-            wt_meta = None
-
-        # Create DB entry
-        wf_task = WorkflowTask(task_id=task_id, args=actual_args, meta=wt_meta)
-        db.add(wf_task)
-        self.task_list.insert(order, wf_task)
-        self.task_list.reorder()  # type: ignore
-        if commit:
-            await db.commit()
-            await db.refresh(wf_task)
-        return wf_task
+    timestamp_created: datetime = Field(
+        default_factory=get_timestamp,
+        sa_column=Column(DateTime(timezone=True), nullable=False),
+    )

     @property
     def input_type(self):
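These model changes are paired with the new migrations listed above (`4cedeb448a53_workflowtask_foreign_keys_not_nullables.py` and `9fd26a2b0de4_add_workflow_timestamp_created.py`), whose bodies are not shown in this diff. A sketch of what the `timestamp_created` upgrade step could look like; the column name and type follow the model, while the backfill via `server_default` is an assumption:

import sqlalchemy as sa
from alembic import op


def upgrade() -> None:
    # Add the non-nullable column; existing rows need some backfill value,
    # here assumed to be the current time (the real migration may differ).
    op.add_column(
        "workflow",
        sa.Column(
            "timestamp_created",
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text("CURRENT_TIMESTAMP"),
        ),
    )


def downgrade() -> None:
    op.drop_column("workflow", "timestamp_created")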
fractal_server/app/routes/admin.py
CHANGED
@@ -16,7 +16,7 @@ from sqlmodel import select

 from ...utils import get_timestamp
 from ..db import AsyncSession
-from ..db import get_db
+from ..db import get_async_db
 from ..models import ApplyWorkflow
 from ..models import Dataset
 from ..models import JobStatusType
@@ -41,7 +41,7 @@ async def view_project(
     id: Optional[int] = None,
     user_id: Optional[int] = None,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[ProjectRead]:
     """
     Query `project` table.
@@ -73,7 +73,7 @@ async def view_workflow(
     project_id: Optional[int] = None,
     name_contains: Optional[str] = None,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[WorkflowRead]:
     """
     Query `workflow` table.
@@ -115,7 +115,7 @@ async def view_dataset(
     name_contains: Optional[str] = None,
     type: Optional[str] = None,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[DatasetRead]:
     """
     Query `dataset` table.
@@ -166,7 +166,7 @@ async def view_job(
     end_timestamp_min: Optional[datetime] = None,
     end_timestamp_max: Optional[datetime] = None,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[ApplyWorkflowRead]:
     """
     Query `ApplyWorkflow` table.
@@ -231,7 +231,7 @@ async def update_job(
     job_update: ApplyWorkflowUpdate,
     job_id: int,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ApplyWorkflowRead]:
     """
     Change the status of an existing job.
@@ -264,7 +264,7 @@ async def update_job(
 async def stop_job(
     job_id: int,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Response:
     """
     Stop execution of a workflow job.
@@ -293,7 +293,7 @@ async def stop_job(
 async def download_job_logs(
     job_id: int,
     user: User = Depends(current_active_superuser),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> StreamingResponse:
     """
     Download job folder
fractal_server/app/routes/api/v1/_aux_functions.py
CHANGED
@@ -1,7 +1,9 @@
 """
 Auxiliary functions to get object from the database or perform simple checks
 """
+from typing import Any
 from typing import Literal
+from typing import Optional
 from typing import Union

 from fastapi import HTTPException
@@ -366,15 +368,72 @@ async def _get_task_check_owner(
     return task


-def _get_active_jobs_statement() -> SelectOfScalar:
+def _get_submitted_jobs_statement() -> SelectOfScalar:
     """
     Returns:
         A sqlmodel statement that selects all `ApplyWorkflow`s with
-        `ApplyWorkflow.status` equal to `submitted` or `running`.
+        `ApplyWorkflow.status` equal to `submitted`.
     """
     stm = select(ApplyWorkflow).where(
-        ApplyWorkflow.status.in_(
-            [JobStatusType.SUBMITTED, JobStatusType.RUNNING]
-        )
+        ApplyWorkflow.status == JobStatusType.SUBMITTED
     )
     return stm
+
+
+async def _workflow_insert_task(
+    *,
+    workflow_id: int,
+    task_id: int,
+    args: Optional[dict[str, Any]] = None,
+    meta: Optional[dict[str, Any]] = None,
+    order: Optional[int] = None,
+    db: AsyncSession,
+) -> WorkflowTask:
+    """
+    Insert a new WorkflowTask into Workflow.task_list
+
+    Args:
+        task_id: TBD
+        args: TBD
+        meta: TBD
+        order: TBD
+        db: TBD
+        commit: TBD
+    """
+    db_workflow = await db.get(Workflow, workflow_id)
+    if db_workflow is None:
+        raise ValueError(f"Workflow {workflow_id} does not exist")
+
+    if order is None:
+        order = len(db_workflow.task_list)
+
+    # Get task from db, and extract default arguments via a Task property
+    # method
+    db_task = await db.get(Task, task_id)
+    if db_task is None:
+        raise ValueError(f"Task {task_id} does not exist")
+
+    default_args = db_task.default_args_from_args_schema
+    # Override default_args with args
+    actual_args = default_args.copy()
+    if args is not None:
+        for k, v in args.items():
+            actual_args[k] = v
+    if not actual_args:
+        actual_args = None
+
+    # Combine meta (higher priority) and db_task.meta (lower priority)
+    wt_meta = (db_task.meta or {}).copy()
+    wt_meta.update(meta or {})
+    if not wt_meta:
+        wt_meta = None
+
+    # Create DB entry
+    wf_task = WorkflowTask(task_id=task_id, args=actual_args, meta=wt_meta)
+    db.add(wf_task)
+    db_workflow.task_list.insert(order, wf_task)
+    db_workflow.task_list.reorder()  # type: ignore
+    await db.commit()
+    await db.refresh(wf_task)
+
+    return wf_task
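The helper above replaces the removed `Workflow.insert_task` model method (see the workflow.py diff), always committing rather than taking a `commit` flag. A hedged usage sketch from inside an endpoint; the wrapper function and the overridden argument are illustrative:

from fractal_server.app.db import AsyncSession
from fractal_server.app.routes.api.v1._aux_functions import (
    _workflow_insert_task,
)


async def append_task(workflow_id: int, task_id: int, db: AsyncSession):
    # order=None appends at the end of Workflow.task_list; args entries
    # override the task's defaults from its args schema.
    return await _workflow_insert_task(
        workflow_id=workflow_id,
        task_id=task_id,
        args={"some_parameter": 42},  # illustrative override
        db=db,
    )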
fractal_server/app/routes/api/v1/dataset.py
CHANGED
@@ -11,7 +11,7 @@ from sqlmodel import or_
 from sqlmodel import select

 from ....db import AsyncSession
-from ....db import get_db
+from ....db import get_async_db
 from ....models import ApplyWorkflow
 from ....models import Dataset
 from ....models import Project
@@ -28,9 +28,9 @@ from ....schemas import WorkflowExport
 from ....schemas import WorkflowTaskExport
 from ....security import current_active_user
 from ....security import User
-from ._aux_functions import _get_active_jobs_statement
 from ._aux_functions import _get_dataset_check_owner
 from ._aux_functions import _get_project_check_owner
+from ._aux_functions import _get_submitted_jobs_statement
 from ._aux_functions import _get_workflow_check_owner


@@ -46,7 +46,7 @@ async def create_dataset(
     project_id: int,
     dataset: DatasetCreate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[DatasetRead]:
     """
     Add new dataset to current project
@@ -70,7 +70,7 @@ async def create_dataset(
 async def read_dataset_list(
     project_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[list[DatasetRead]]:
     """
     Get dataset list for given project
@@ -96,7 +96,7 @@ async def read_dataset(
     project_id: int,
     dataset_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[DatasetRead]:
     """
     Get info on a dataset associated to the current project
@@ -121,7 +121,7 @@ async def update_dataset(
     dataset_id: int,
     dataset_update: DatasetUpdate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[DatasetRead]:
     """
     Edit a dataset associated to the current project
@@ -158,7 +158,7 @@ async def delete_dataset(
     project_id: int,
     dataset_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Response:
     """
     Delete a dataset associated to the current project
@@ -171,9 +171,9 @@ async def delete_dataset(
     )
     dataset = output["dataset"]

-    # Fail if there exist jobs that are active and in relation with the
-    # current dataset.
-    stm = _get_active_jobs_statement().where(
+    # Fail if there exist jobs that are submitted and in relation with the
+    # current dataset.
+    stm = _get_submitted_jobs_statement().where(
         or_(
             ApplyWorkflow.input_dataset_id == dataset_id,
             ApplyWorkflow.output_dataset_id == dataset_id,
@@ -231,7 +231,7 @@ async def create_resource(
     dataset_id: int,
     resource: ResourceCreate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ResourceRead]:
     """
     Add resource to an existing dataset
@@ -259,7 +259,7 @@ async def get_resource_list(
     project_id: int,
     dataset_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[list[ResourceRead]]:
     """
     Get resources from a dataset
@@ -287,7 +287,7 @@ async def update_resource(
     resource_id: int,
     resource_update: ResourceUpdate,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ResourceRead]:
     """
     Edit a resource of a dataset
@@ -327,7 +327,7 @@ async def delete_resource(
     dataset_id: int,
     resource_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Response:
     """
     Delete a resource of a dataset
@@ -360,7 +360,7 @@ async def export_history_as_workflow(
     project_id: int,
     dataset_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[WorkflowExport]:
     """
     Extract a reproducible workflow from the dataset history.
@@ -374,11 +374,12 @@ async def export_history_as_workflow(
     )
     dataset = output["dataset"]

-    # Check whether there exists
-    # ==
-    #
-    #
-    stm = _get_active_jobs_statement().where(
+    # Check whether there exists a submitted job such that
+    # `job.output_dataset_id==dataset_id`.
+    # If at least one such job exists, then this endpoint will fail.
+    # We do not support the use case of exporting a reproducible workflow when
+    # job execution is in progress; this may change in the future.
+    stm = _get_submitted_jobs_statement().where(
         ApplyWorkflow.output_dataset_id == dataset_id
     )
     res = await db.execute(stm)
@@ -427,7 +428,7 @@ async def get_workflowtask_status(
     project_id: int,
     dataset_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[DatasetStatusRead]:
     """
     Extract the status of all `WorkflowTask`s that ran on a given `Dataset`.
@@ -449,7 +450,7 @@ async def get_workflowtask_status(
     # Note: see
     # https://sqlmodel.tiangolo.com/tutorial/where/#type-annotations-and-errors
     # regarding the type-ignore in this code block
-    stm = _get_active_jobs_statement().where(
+    stm = _get_submitted_jobs_statement().where(
         ApplyWorkflow.output_dataset_id == dataset_id
     )
     res = await db.execute(stm)
@@ -516,7 +517,7 @@ async def get_workflowtask_status(
 @router.get("/dataset/", response_model=list[DatasetRead])
 async def get_user_datasets(
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[DatasetRead]:
     """
     Returns all the datasets of the current user
fractal_server/app/routes/api/v1/job.py
CHANGED
@@ -9,7 +9,7 @@ from fastapi.responses import StreamingResponse
 from sqlmodel import select

 from ....db import AsyncSession
-from ....db import get_db
+from ....db import get_async_db
 from ....models import ApplyWorkflow
 from ....models import Project
 from ....schemas import ApplyWorkflowRead
@@ -29,7 +29,7 @@ router = APIRouter()
 @router.get("/job/", response_model=list[ApplyWorkflowRead])
 async def get_user_jobs(
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> list[ApplyWorkflowRead]:
     """
     Returns all the jobs of the current user
@@ -51,7 +51,7 @@ async def get_workflow_jobs(
     project_id: int,
     workflow_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[list[ApplyWorkflowRead]]:
     """
     Returns all the jobs related to a specific workflow
@@ -73,7 +73,7 @@ async def read_job(
     project_id: int,
     job_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[ApplyWorkflowRead]:
     """
     Return info on an existing job
@@ -99,7 +99,7 @@ async def download_job_logs(
     project_id: int,
     job_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> StreamingResponse:
     """
     Download job folder
@@ -132,7 +132,7 @@ async def download_job_logs(
 async def get_job_list(
     project_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Optional[list[ApplyWorkflowRead]]:
     """
     Get job list for given project
@@ -156,7 +156,7 @@ async def stop_job(
     project_id: int,
     job_id: int,
     user: User = Depends(current_active_user),
-    db: AsyncSession = Depends(get_db),
+    db: AsyncSession = Depends(get_async_db),
 ) -> Response:
     """
     Stop execution of a workflow job (only available for slurm backend)
|