fractal-server 1.4.10__py3-none-any.whl → 2.0.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +4 -7
  3. fractal_server/app/models/linkuserproject.py +9 -0
  4. fractal_server/app/models/security.py +6 -0
  5. fractal_server/app/models/state.py +1 -1
  6. fractal_server/app/models/v1/__init__.py +10 -0
  7. fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
  8. fractal_server/app/models/{job.py → v1/job.py} +5 -5
  9. fractal_server/app/models/{project.py → v1/project.py} +5 -5
  10. fractal_server/app/models/{task.py → v1/task.py} +7 -2
  11. fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
  12. fractal_server/app/models/v2/__init__.py +20 -0
  13. fractal_server/app/models/v2/dataset.py +55 -0
  14. fractal_server/app/models/v2/job.py +51 -0
  15. fractal_server/app/models/v2/project.py +31 -0
  16. fractal_server/app/models/v2/task.py +93 -0
  17. fractal_server/app/models/v2/workflow.py +43 -0
  18. fractal_server/app/models/v2/workflowtask.py +90 -0
  19. fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
  20. fractal_server/app/routes/admin/v2.py +275 -0
  21. fractal_server/app/routes/api/v1/__init__.py +7 -7
  22. fractal_server/app/routes/api/v1/_aux_functions.py +2 -2
  23. fractal_server/app/routes/api/v1/dataset.py +37 -37
  24. fractal_server/app/routes/api/v1/job.py +12 -12
  25. fractal_server/app/routes/api/v1/project.py +23 -21
  26. fractal_server/app/routes/api/v1/task.py +24 -14
  27. fractal_server/app/routes/api/v1/task_collection.py +16 -14
  28. fractal_server/app/routes/api/v1/workflow.py +24 -24
  29. fractal_server/app/routes/api/v1/workflowtask.py +10 -10
  30. fractal_server/app/routes/api/v2/__init__.py +28 -0
  31. fractal_server/app/routes/api/v2/_aux_functions.py +497 -0
  32. fractal_server/app/routes/api/v2/apply.py +220 -0
  33. fractal_server/app/routes/api/v2/dataset.py +310 -0
  34. fractal_server/app/routes/api/v2/images.py +212 -0
  35. fractal_server/app/routes/api/v2/job.py +200 -0
  36. fractal_server/app/routes/api/v2/project.py +205 -0
  37. fractal_server/app/routes/api/v2/task.py +222 -0
  38. fractal_server/app/routes/api/v2/task_collection.py +229 -0
  39. fractal_server/app/routes/api/v2/workflow.py +398 -0
  40. fractal_server/app/routes/api/v2/workflowtask.py +269 -0
  41. fractal_server/app/routes/aux/_job.py +1 -1
  42. fractal_server/app/runner/async_wrap.py +27 -0
  43. fractal_server/app/runner/exceptions.py +129 -0
  44. fractal_server/app/runner/executors/local/__init__.py +3 -0
  45. fractal_server/app/runner/{_local → executors/local}/executor.py +2 -2
  46. fractal_server/app/runner/executors/slurm/__init__.py +3 -0
  47. fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
  48. fractal_server/app/runner/{_slurm → executors/slurm}/_check_jobs_status.py +1 -1
  49. fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +1 -1
  50. fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
  51. fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
  52. fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +9 -9
  53. fractal_server/app/runner/filenames.py +6 -0
  54. fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
  55. fractal_server/app/runner/task_files.py +105 -0
  56. fractal_server/app/runner/{__init__.py → v1/__init__.py} +24 -22
  57. fractal_server/app/runner/{_common.py → v1/_common.py} +13 -120
  58. fractal_server/app/runner/{_local → v1/_local}/__init__.py +6 -6
  59. fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
  60. fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
  61. fractal_server/app/runner/v1/_slurm/__init__.py +310 -0
  62. fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +3 -9
  63. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
  64. fractal_server/app/runner/v1/common.py +117 -0
  65. fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
  66. fractal_server/app/runner/v2/__init__.py +337 -0
  67. fractal_server/app/runner/v2/_local/__init__.py +169 -0
  68. fractal_server/app/runner/v2/_local/_local_config.py +118 -0
  69. fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
  70. fractal_server/app/runner/v2/_slurm/__init__.py +157 -0
  71. fractal_server/app/runner/v2/_slurm/_submit_setup.py +83 -0
  72. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +179 -0
  73. fractal_server/app/runner/v2/components.py +5 -0
  74. fractal_server/app/runner/v2/deduplicate_list.py +24 -0
  75. fractal_server/app/runner/v2/handle_failed_job.py +156 -0
  76. fractal_server/app/runner/v2/merge_outputs.py +41 -0
  77. fractal_server/app/runner/v2/runner.py +264 -0
  78. fractal_server/app/runner/v2/runner_functions.py +339 -0
  79. fractal_server/app/runner/v2/runner_functions_low_level.py +134 -0
  80. fractal_server/app/runner/v2/task_interface.py +43 -0
  81. fractal_server/app/runner/v2/v1_compat.py +21 -0
  82. fractal_server/app/schemas/__init__.py +4 -42
  83. fractal_server/app/schemas/v1/__init__.py +42 -0
  84. fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
  85. fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
  86. fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
  87. fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
  88. fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
  89. fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
  90. fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
  91. fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
  92. fractal_server/app/schemas/v2/__init__.py +34 -0
  93. fractal_server/app/schemas/v2/dataset.py +88 -0
  94. fractal_server/app/schemas/v2/dumps.py +87 -0
  95. fractal_server/app/schemas/v2/job.py +113 -0
  96. fractal_server/app/schemas/v2/manifest.py +109 -0
  97. fractal_server/app/schemas/v2/project.py +36 -0
  98. fractal_server/app/schemas/v2/task.py +121 -0
  99. fractal_server/app/schemas/v2/task_collection.py +105 -0
  100. fractal_server/app/schemas/v2/workflow.py +78 -0
  101. fractal_server/app/schemas/v2/workflowtask.py +118 -0
  102. fractal_server/config.py +5 -4
  103. fractal_server/images/__init__.py +50 -0
  104. fractal_server/images/tools.py +86 -0
  105. fractal_server/main.py +11 -3
  106. fractal_server/migrations/versions/4b35c5cefbe3_tmp_is_v2_compatible.py +39 -0
  107. fractal_server/migrations/versions/56af171b0159_v2.py +217 -0
  108. fractal_server/migrations/versions/876f28db9d4e_tmp_split_task_and_wftask_meta.py +68 -0
  109. fractal_server/migrations/versions/974c802f0dd0_tmp_workflowtaskv2_type_in_db.py +37 -0
  110. fractal_server/migrations/versions/9cd305cd6023_tmp_workflowtaskv2.py +40 -0
  111. fractal_server/migrations/versions/a6231ed6273c_tmp_args_schemas_in_taskv2.py +42 -0
  112. fractal_server/migrations/versions/b9e9eed9d442_tmp_taskv2_type.py +37 -0
  113. fractal_server/migrations/versions/e3e639454d4b_tmp_make_task_meta_non_optional.py +50 -0
  114. fractal_server/tasks/__init__.py +0 -5
  115. fractal_server/tasks/endpoint_operations.py +13 -19
  116. fractal_server/tasks/utils.py +35 -0
  117. fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
  118. fractal_server/tasks/{background_operations.py → v1/background_operations.py} +18 -50
  119. fractal_server/tasks/v1/get_collection_data.py +14 -0
  120. fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
  121. fractal_server/tasks/v2/background_operations.py +382 -0
  122. fractal_server/tasks/v2/get_collection_data.py +14 -0
  123. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/METADATA +1 -1
  124. fractal_server-2.0.0a0.dist-info/RECORD +166 -0
  125. fractal_server/app/runner/_slurm/.gitignore +0 -2
  126. fractal_server/app/runner/_slurm/__init__.py +0 -150
  127. fractal_server/app/runner/common.py +0 -311
  128. fractal_server-1.4.10.dist-info/RECORD +0 -98
  129. /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
  130. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/LICENSE +0 -0
  131. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/WHEEL +0 -0
  132. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/entry_points.txt +0 -0
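The list above amounts to a split of `fractal_server.app` (models, schemas, routes, runner, tasks) into `v1` and `v2` subpackages, with `V1`/`V2` suffixes on the public names. As a rough orientation only, inferred from the renames above and from the import changes in the hunks below (the absolute paths are assumptions; the package itself uses relative imports), a V1 import moves roughly as follows, while V2 objects live in new modules:

    # 1.4.x flat layout (relative form in the diff: from ..schemas import ApplyWorkflowRead)
    from fractal_server.app.schemas import ApplyWorkflowRead

    # 2.0.0a0 versioned layout (relative form: from ...schemas.v1 import ApplyWorkflowReadV1)
    from fractal_server.app.schemas.v1 import ApplyWorkflowReadV1

    # new V2 models and schemas (relative forms appear in admin/v2.py below)
    from fractal_server.app.models.v2 import JobV2
    from fractal_server.app.schemas.v2 import JobReadV2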
fractal_server/app/routes/{admin.py → admin/v1.py}
@@ -15,29 +15,29 @@ from fastapi.responses import StreamingResponse
 from sqlalchemy import func
 from sqlmodel import select
 
-from ...config import get_settings
-from ...syringe import Inject
-from ...utils import get_timestamp
-from ..db import AsyncSession
-from ..db import get_async_db
-from ..models import ApplyWorkflow
-from ..models import Dataset
-from ..models import JobStatusType
-from ..models import Project
-from ..models import Workflow
-from ..models.security import UserOAuth as User
-from ..runner._common import WORKFLOW_LOG_FILENAME
-from ..schemas import ApplyWorkflowRead
-from ..schemas import ApplyWorkflowUpdate
-from ..schemas import DatasetRead
-from ..schemas import ProjectRead
-from ..schemas import WorkflowRead
-from ..security import current_active_superuser
-from .aux._job import _write_shutdown_file
-from .aux._job import _zip_folder_to_byte_stream
-from .aux._runner import _check_backend_is_slurm
-
-router_admin = APIRouter()
+from ....config import get_settings
+from ....syringe import Inject
+from ....utils import get_timestamp
+from ...db import AsyncSession
+from ...db import get_async_db
+from ...models import ApplyWorkflow
+from ...models import Dataset
+from ...models import JobStatusTypeV1
+from ...models import Project
+from ...models import Workflow
+from ...models.security import UserOAuth as User
+from ...runner.filenames import WORKFLOW_LOG_FILENAME
+from ...schemas.v1 import ApplyWorkflowReadV1
+from ...schemas.v1 import ApplyWorkflowUpdateV1
+from ...schemas.v1 import DatasetReadV1
+from ...schemas.v1 import ProjectReadV1
+from ...schemas.v1 import WorkflowReadV1
+from ...security import current_active_superuser
+from ..aux._job import _write_shutdown_file
+from ..aux._job import _zip_folder_to_byte_stream
+from ..aux._runner import _check_backend_is_slurm
+
+router_admin_v1 = APIRouter()
 
 
 def _convert_to_db_timestamp(dt: datetime) -> datetime:
@@ -57,7 +57,7 @@ def _convert_to_db_timestamp(dt: datetime) -> datetime:
     return _dt
 
 
-@router_admin.get("/project/", response_model=list[ProjectRead])
+@router_admin_v1.get("/project/", response_model=list[ProjectReadV1])
 async def view_project(
     id: Optional[int] = None,
     user_id: Optional[int] = None,
@@ -65,7 +65,7 @@ async def view_project(
     timestamp_created_max: Optional[datetime] = None,
     user: User = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
-) -> list[ProjectRead]:
+) -> list[ProjectReadV1]:
     """
     Query `project` table.
 
@@ -95,7 +95,7 @@ async def view_project(
     return project_list
 
 
-@router_admin.get("/workflow/", response_model=list[WorkflowRead])
+@router_admin_v1.get("/workflow/", response_model=list[WorkflowReadV1])
 async def view_workflow(
     id: Optional[int] = None,
     user_id: Optional[int] = None,
@@ -105,7 +105,7 @@ async def view_workflow(
     timestamp_created_max: Optional[datetime] = None,
     user: User = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
-) -> list[WorkflowRead]:
+) -> list[WorkflowReadV1]:
     """
     Query `workflow` table.
 
@@ -144,7 +144,7 @@ async def view_workflow(
     return workflow_list
 
 
-@router_admin.get("/dataset/", response_model=list[DatasetRead])
+@router_admin_v1.get("/dataset/", response_model=list[DatasetReadV1])
 async def view_dataset(
     id: Optional[int] = None,
     user_id: Optional[int] = None,
@@ -155,7 +155,7 @@ async def view_dataset(
     timestamp_created_max: Optional[datetime] = None,
     user: User = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
-) -> list[DatasetRead]:
+) -> list[DatasetReadV1]:
     """
     Query `dataset` table.
 
@@ -197,7 +197,7 @@ async def view_dataset(
     return dataset_list
 
 
-@router_admin.get("/job/", response_model=list[ApplyWorkflowRead])
+@router_admin_v1.get("/job/", response_model=list[ApplyWorkflowReadV1])
 async def view_job(
     id: Optional[int] = None,
     user_id: Optional[int] = None,
@@ -205,7 +205,7 @@ async def view_job(
     input_dataset_id: Optional[int] = None,
     output_dataset_id: Optional[int] = None,
     workflow_id: Optional[int] = None,
-    status: Optional[JobStatusType] = None,
+    status: Optional[JobStatusTypeV1] = None,
     start_timestamp_min: Optional[datetime] = None,
     start_timestamp_max: Optional[datetime] = None,
     end_timestamp_min: Optional[datetime] = None,
@@ -213,7 +213,7 @@ async def view_job(
     log: bool = True,
     user: User = Depends(current_active_superuser),
    db: AsyncSession = Depends(get_async_db),
-) -> list[ApplyWorkflowRead]:
+) -> list[ApplyWorkflowReadV1]:
     """
     Query `ApplyWorkflow` table.
 
@@ -278,13 +278,13 @@ async def view_job(
     return job_list
 
 
-@router_admin.get("/job/{job_id}/", response_model=ApplyWorkflowRead)
+@router_admin_v1.get("/job/{job_id}/", response_model=ApplyWorkflowReadV1)
 async def view_single_job(
     job_id: int = None,
     show_tmp_logs: bool = False,
     user: User = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
-) -> ApplyWorkflowRead:
+) -> ApplyWorkflowReadV1:
 
     job = await db.get(ApplyWorkflow, job_id)
     if not job:
@@ -294,7 +294,7 @@ async def view_single_job(
         )
     await db.close()
 
-    if show_tmp_logs and (job.status == JobStatusType.SUBMITTED):
+    if show_tmp_logs and (job.status == JobStatusTypeV1.SUBMITTED):
         try:
             with open(f"{job.working_dir}/{WORKFLOW_LOG_FILENAME}", "r") as f:
                 job.log = f.read()
@@ -304,16 +304,16 @@ async def view_single_job(
     return job
 
 
-@router_admin.patch(
+@router_admin_v1.patch(
     "/job/{job_id}/",
-    response_model=ApplyWorkflowRead,
+    response_model=ApplyWorkflowReadV1,
 )
 async def update_job(
-    job_update: ApplyWorkflowUpdate,
+    job_update: ApplyWorkflowUpdateV1,
     job_id: int,
     user: User = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
-) -> Optional[ApplyWorkflowRead]:
+) -> Optional[ApplyWorkflowReadV1]:
     """
     Change the status of an existing job.
 
@@ -327,7 +327,7 @@ async def update_job(
             detail=f"Job {job_id} not found",
         )
 
-    if job_update.status != JobStatusType.FAILED:
+    if job_update.status != JobStatusTypeV1.FAILED:
         raise HTTPException(
             status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
             detail=f"Cannot set job status to {job_update.status}",
@@ -341,7 +341,7 @@ async def update_job(
     return job
 
 
-@router_admin.get("/job/{job_id}/stop/", status_code=202)
+@router_admin_v1.get("/job/{job_id}/stop/", status_code=202)
 async def stop_job(
     job_id: int,
     user: User = Depends(current_active_superuser),
@@ -367,7 +367,7 @@ async def stop_job(
     return Response(status_code=status.HTTP_202_ACCEPTED)
 
 
-@router_admin.get(
+@router_admin_v1.get(
     "/job/{job_id}/download/",
     response_class=StreamingResponse,
 )
fractal_server/app/routes/admin/v2.py
@@ -0,0 +1,275 @@
+"""
+Definition of `/admin` routes.
+"""
+from datetime import datetime
+from datetime import timezone
+from pathlib import Path
+from typing import Optional
+
+from fastapi import APIRouter
+from fastapi import Depends
+from fastapi import HTTPException
+from fastapi import Response
+from fastapi import status
+from fastapi.responses import StreamingResponse
+from sqlmodel import select
+
+from ....config import get_settings
+from ....syringe import Inject
+from ....utils import get_timestamp
+from ...db import AsyncSession
+from ...db import get_async_db
+from ...models import JobStatusTypeV1
+from ...models.security import UserOAuth as User
+from ...models.v1 import Task
+from ...models.v2 import JobV2
+from ...models.v2 import ProjectV2
+from ...runner.filenames import WORKFLOW_LOG_FILENAME
+from ...schemas.v2 import JobReadV2
+from ...schemas.v2 import JobUpdateV2
+from ...security import current_active_superuser
+from ..aux._job import _write_shutdown_file
+from ..aux._job import _zip_folder_to_byte_stream
+from ..aux._runner import _check_backend_is_slurm
+
+router_admin_v2 = APIRouter()
+
+
+def _convert_to_db_timestamp(dt: datetime) -> datetime:
+    """
+    This function takes a timezone-aware datetime and converts it to UTC.
+    If using SQLite, it also removes the timezone information in order to make
+    the datetime comparable with datetimes in the database.
+    """
+    if dt.tzinfo is None:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=f"The timestamp provided has no timezone information: {dt}",
+        )
+    _dt = dt.astimezone(timezone.utc)
+    if Inject(get_settings).DB_ENGINE == "sqlite":
+        return _dt.replace(tzinfo=None)
+    return _dt
+
+
+@router_admin_v2.get("/job/", response_model=list[JobReadV2])
+async def view_job(
+    id: Optional[int] = None,
+    user_id: Optional[int] = None,
+    project_id: Optional[int] = None,
+    dataset_id: Optional[int] = None,
+    workflow_id: Optional[int] = None,
+    status: Optional[JobStatusTypeV1] = None,
+    start_timestamp_min: Optional[datetime] = None,
+    start_timestamp_max: Optional[datetime] = None,
+    end_timestamp_min: Optional[datetime] = None,
+    end_timestamp_max: Optional[datetime] = None,
+    log: bool = True,
+    user: User = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+) -> list[JobReadV2]:
+    """
+    Query `ApplyWorkflow` table.
+
+    Args:
+        id: If not `None`, select a given `applyworkflow.id`.
+        project_id: If not `None`, select a given `applyworkflow.project_id`.
+        dataset_id: If not `None`, select a given
+            `applyworkflow.input_dataset_id`.
+        workflow_id: If not `None`, select a given `applyworkflow.workflow_id`.
+        status: If not `None`, select a given `applyworkflow.status`.
+        start_timestamp_min: If not `None`, select a rows with
+            `start_timestamp` after `start_timestamp_min`.
+        start_timestamp_max: If not `None`, select a rows with
+            `start_timestamp` before `start_timestamp_min`.
+        end_timestamp_min: If not `None`, select a rows with `end_timestamp`
+            after `end_timestamp_min`.
+        end_timestamp_max: If not `None`, select a rows with `end_timestamp`
+            before `end_timestamp_min`.
+        log: If `True`, include `job.log`, if `False`
+            `job.log` is set to `None`.
+    """
+    stm = select(JobV2)
+
+    if id is not None:
+        stm = stm.where(JobV2.id == id)
+    if user_id is not None:
+        stm = stm.join(ProjectV2).where(
+            ProjectV2.user_list.any(User.id == user_id)
+        )
+    if project_id is not None:
+        stm = stm.where(JobV2.project_id == project_id)
+    if dataset_id is not None:
+        stm = stm.where(JobV2.dataset_id == dataset_id)
+    if workflow_id is not None:
+        stm = stm.where(JobV2.workflow_id == workflow_id)
+    if status is not None:
+        stm = stm.where(JobV2.status == status)
+    if start_timestamp_min is not None:
+        start_timestamp_min = _convert_to_db_timestamp(start_timestamp_min)
+        stm = stm.where(JobV2.start_timestamp >= start_timestamp_min)
+    if start_timestamp_max is not None:
+        start_timestamp_max = _convert_to_db_timestamp(start_timestamp_max)
+        stm = stm.where(JobV2.start_timestamp <= start_timestamp_max)
+    if end_timestamp_min is not None:
+        end_timestamp_min = _convert_to_db_timestamp(end_timestamp_min)
+        stm = stm.where(JobV2.end_timestamp >= end_timestamp_min)
+    if end_timestamp_max is not None:
+        end_timestamp_max = _convert_to_db_timestamp(end_timestamp_max)
+        stm = stm.where(JobV2.end_timestamp <= end_timestamp_max)
+
+    res = await db.execute(stm)
+    job_list = res.scalars().all()
+    await db.close()
+    if not log:
+        for job in job_list:
+            setattr(job, "log", None)
+
+    return job_list
+
+
+@router_admin_v2.get("/job/{job_id}/", response_model=JobReadV2)
+async def view_single_job(
+    job_id: int = None,
+    show_tmp_logs: bool = False,
+    user: User = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+) -> JobReadV2:
+
+    job = await db.get(JobV2, job_id)
+    if not job:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"Job {job_id} not found",
+        )
+    await db.close()
+
+    if show_tmp_logs and (job.status == JobStatusTypeV1.SUBMITTED):
+        try:
+            with open(f"{job.working_dir}/{WORKFLOW_LOG_FILENAME}", "r") as f:
+                job.log = f.read()
+        except FileNotFoundError:
+            pass
+
+    return job
+
+
+@router_admin_v2.patch(
+    "/job/{job_id}/",
+    response_model=JobReadV2,
+)
+async def update_job(
+    job_update: JobUpdateV2,
+    job_id: int,
+    user: User = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+) -> Optional[JobReadV2]:
+    """
+    Change the status of an existing job.
+
+    This endpoint is only open to superusers, and it does not apply
+    project-based access-control to jobs.
+    """
+    job = await db.get(JobV2, job_id)
+    if job is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"Job {job_id} not found",
+        )
+
+    if job_update.status != JobStatusTypeV1.FAILED:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=f"Cannot set job status to {job_update.status}",
+        )
+
+    setattr(job, "status", job_update.status)
+    setattr(job, "end_timestamp", get_timestamp())
+    await db.commit()
+    await db.refresh(job)
+    await db.close()
+    return job
+
+
+@router_admin_v2.get("/job/{job_id}/stop/", status_code=202)
+async def stop_job(
+    job_id: int,
+    user: User = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+) -> Response:
+    """
+    Stop execution of a workflow job.
+
+    Only available for slurm backend.
+    """
+
+    _check_backend_is_slurm()
+
+    job = await db.get(JobV2, job_id)
+    if job is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"Job {job_id} not found",
+        )
+
+    _write_shutdown_file(job=job)
+
+    return Response(status_code=status.HTTP_202_ACCEPTED)
+
+
+@router_admin_v2.get(
+    "/job/{job_id}/download/",
+    response_class=StreamingResponse,
+)
+async def download_job_logs(
+    job_id: int,
+    user: User = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+) -> StreamingResponse:
+    """
+    Download job folder
+    """
+    # Get job from DB
+    job = await db.get(JobV2, job_id)
+    if job is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"Job {job_id} not found",
+        )
+    # Create and return byte stream for zipped log folder
+    PREFIX_ZIP = Path(job.working_dir).name
+    zip_filename = f"{PREFIX_ZIP}_archive.zip"
+    byte_stream = _zip_folder_to_byte_stream(
+        folder=job.working_dir, zip_filename=zip_filename
+    )
+    return StreamingResponse(
+        iter([byte_stream.getvalue()]),
+        media_type="application/x-zip-compressed",
+        headers={"Content-Disposition": f"attachment;filename={zip_filename}"},
+    )
+
+
+@router_admin_v2.patch(
+    "/task-v1/{task_id}/",
+    status_code=status.HTTP_200_OK,
+)
+async def flag_task_v1_as_v2_compatible(
+    task_id: int,
+    is_v2_compatible: bool,
+    user: User = Depends(current_active_superuser),
+    db: AsyncSession = Depends(get_async_db),
+) -> Response:
+
+    task = await db.get(Task, task_id)
+    if task is None:
+        raise HTTPException(
+            status_code=status.HTTP_404_NOT_FOUND,
+            detail=f"Task {task_id} not found",
+        )
+
+    task.is_v2_compatible = is_v2_compatible
+    db.add(task)
+    await db.commit()
+    await db.close()
+
+    return Response(status_code=status.HTTP_200_OK)
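The `_convert_to_db_timestamp` helper added above rejects naive datetimes and normalizes aware ones to UTC, dropping the timezone info when `DB_ENGINE` is `sqlite` so values compare with the naive timestamps stored in the database. A minimal standalone sketch of the same normalization, with a hypothetical name and a plain `ValueError` standing in for the `HTTPException`/settings machinery:

    from datetime import datetime, timezone

    def normalize_db_timestamp(dt: datetime, sqlite: bool = False) -> datetime:
        # Require a timezone-aware input, mirroring the 422 error above.
        if dt.tzinfo is None:
            raise ValueError(f"The timestamp provided has no timezone information: {dt}")
        # Convert to UTC; for SQLite, drop tzinfo so it compares with naive DB values.
        dt_utc = dt.astimezone(timezone.utc)
        return dt_utc.replace(tzinfo=None) if sqlite else dt_utc

    # Example: a +01:00 timestamp becomes naive UTC when targeting SQLite.
    print(normalize_db_timestamp(datetime.fromisoformat("2024-03-01T12:00:00+01:00"), sqlite=True))
    # 2024-03-01 11:00:00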
fractal_server/app/routes/api/v1/__init__.py
@@ -14,13 +14,13 @@ from .workflowtask import router as workflowtask_router
 router_api_v1 = APIRouter()
 
 router_api_v1.include_router(
-    project_router, prefix="/project", tags=["Projects"]
+    project_router, prefix="/project", tags=["V1 Project"]
 )
-router_api_v1.include_router(task_router, prefix="/task", tags=["Tasks"])
+router_api_v1.include_router(task_router, prefix="/task", tags=["V1 Task"])
 router_api_v1.include_router(
-    taskcollection_router, prefix="/task", tags=["Task Collection"]
+    taskcollection_router, prefix="/task", tags=["V1 Task Collection"]
 )
-router_api_v1.include_router(dataset_router, tags=["Datasets"])
-router_api_v1.include_router(workflow_router, tags=["Workflows"])
-router_api_v1.include_router(workflowtask_router, tags=["Workflow Tasks"])
-router_api_v1.include_router(job_router, tags=["Jobs"])
+router_api_v1.include_router(dataset_router, tags=["V1 Dataset"])
+router_api_v1.include_router(workflow_router, tags=["V1 Workflow"])
+router_api_v1.include_router(workflowtask_router, tags=["V1 WorkflowTask"])
+router_api_v1.include_router(job_router, tags=["V1 Job"])
fractal_server/app/routes/api/v1/_aux_functions.py
@@ -19,7 +19,7 @@ from ....models import Project
 from ....models import Task
 from ....models import Workflow
 from ....models import WorkflowTask
-from ....schemas import JobStatusType
+from ....schemas.v1 import JobStatusTypeV1
 from ....security import User
 
 
@@ -375,7 +375,7 @@ def _get_submitted_jobs_statement() -> SelectOfScalar:
     `ApplyWorkflow.status` equal to `submitted`.
     """
     stm = select(ApplyWorkflow).where(
-        ApplyWorkflow.status == JobStatusType.SUBMITTED
+        ApplyWorkflow.status == JobStatusTypeV1.SUBMITTED
     )
     return stm
 