fractal-server 1.4.6__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/db/__init__.py +0 -1
  3. fractal_server/app/models/__init__.py +6 -8
  4. fractal_server/app/models/linkuserproject.py +9 -0
  5. fractal_server/app/models/security.py +6 -0
  6. fractal_server/app/models/v1/__init__.py +12 -0
  7. fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
  8. fractal_server/app/models/{job.py → v1/job.py} +5 -5
  9. fractal_server/app/models/{project.py → v1/project.py} +5 -5
  10. fractal_server/app/models/{state.py → v1/state.py} +2 -2
  11. fractal_server/app/models/{task.py → v1/task.py} +7 -2
  12. fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
  13. fractal_server/app/models/v2/__init__.py +22 -0
  14. fractal_server/app/models/v2/collection_state.py +21 -0
  15. fractal_server/app/models/v2/dataset.py +54 -0
  16. fractal_server/app/models/v2/job.py +51 -0
  17. fractal_server/app/models/v2/project.py +30 -0
  18. fractal_server/app/models/v2/task.py +93 -0
  19. fractal_server/app/models/v2/workflow.py +35 -0
  20. fractal_server/app/models/v2/workflowtask.py +49 -0
  21. fractal_server/app/routes/admin/__init__.py +0 -0
  22. fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
  23. fractal_server/app/routes/admin/v2.py +309 -0
  24. fractal_server/app/routes/api/v1/__init__.py +7 -7
  25. fractal_server/app/routes/api/v1/_aux_functions.py +8 -8
  26. fractal_server/app/routes/api/v1/dataset.py +48 -41
  27. fractal_server/app/routes/api/v1/job.py +14 -14
  28. fractal_server/app/routes/api/v1/project.py +30 -27
  29. fractal_server/app/routes/api/v1/task.py +26 -16
  30. fractal_server/app/routes/api/v1/task_collection.py +28 -16
  31. fractal_server/app/routes/api/v1/workflow.py +28 -28
  32. fractal_server/app/routes/api/v1/workflowtask.py +11 -11
  33. fractal_server/app/routes/api/v2/__init__.py +34 -0
  34. fractal_server/app/routes/api/v2/_aux_functions.py +502 -0
  35. fractal_server/app/routes/api/v2/dataset.py +293 -0
  36. fractal_server/app/routes/api/v2/images.py +279 -0
  37. fractal_server/app/routes/api/v2/job.py +200 -0
  38. fractal_server/app/routes/api/v2/project.py +186 -0
  39. fractal_server/app/routes/api/v2/status.py +150 -0
  40. fractal_server/app/routes/api/v2/submit.py +210 -0
  41. fractal_server/app/routes/api/v2/task.py +222 -0
  42. fractal_server/app/routes/api/v2/task_collection.py +239 -0
  43. fractal_server/app/routes/api/v2/task_legacy.py +59 -0
  44. fractal_server/app/routes/api/v2/workflow.py +380 -0
  45. fractal_server/app/routes/api/v2/workflowtask.py +265 -0
  46. fractal_server/app/routes/aux/_job.py +2 -2
  47. fractal_server/app/runner/__init__.py +0 -379
  48. fractal_server/app/runner/async_wrap.py +27 -0
  49. fractal_server/app/runner/components.py +5 -0
  50. fractal_server/app/runner/exceptions.py +129 -0
  51. fractal_server/app/runner/executors/__init__.py +0 -0
  52. fractal_server/app/runner/executors/slurm/__init__.py +3 -0
  53. fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
  54. fractal_server/app/runner/executors/slurm/_check_jobs_status.py +72 -0
  55. fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +3 -4
  56. fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
  57. fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +42 -1
  58. fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +46 -27
  59. fractal_server/app/runner/filenames.py +6 -0
  60. fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
  61. fractal_server/app/runner/task_files.py +103 -0
  62. fractal_server/app/runner/v1/__init__.py +366 -0
  63. fractal_server/app/runner/{_common.py → v1/_common.py} +56 -111
  64. fractal_server/app/runner/{_local → v1/_local}/__init__.py +5 -4
  65. fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
  66. fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
  67. fractal_server/app/runner/v1/_slurm/__init__.py +312 -0
  68. fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +5 -11
  69. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
  70. fractal_server/app/runner/v1/common.py +117 -0
  71. fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
  72. fractal_server/app/runner/v2/__init__.py +336 -0
  73. fractal_server/app/runner/v2/_local/__init__.py +162 -0
  74. fractal_server/app/runner/v2/_local/_local_config.py +118 -0
  75. fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
  76. fractal_server/app/runner/v2/_local/executor.py +100 -0
  77. fractal_server/app/runner/{_slurm → v2/_slurm}/__init__.py +38 -47
  78. fractal_server/app/runner/v2/_slurm/_submit_setup.py +82 -0
  79. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +182 -0
  80. fractal_server/app/runner/v2/deduplicate_list.py +23 -0
  81. fractal_server/app/runner/v2/handle_failed_job.py +165 -0
  82. fractal_server/app/runner/v2/merge_outputs.py +38 -0
  83. fractal_server/app/runner/v2/runner.py +343 -0
  84. fractal_server/app/runner/v2/runner_functions.py +374 -0
  85. fractal_server/app/runner/v2/runner_functions_low_level.py +130 -0
  86. fractal_server/app/runner/v2/task_interface.py +62 -0
  87. fractal_server/app/runner/v2/v1_compat.py +31 -0
  88. fractal_server/app/schemas/__init__.py +1 -42
  89. fractal_server/app/schemas/_validators.py +28 -5
  90. fractal_server/app/schemas/v1/__init__.py +36 -0
  91. fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
  92. fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
  93. fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
  94. fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
  95. fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
  96. fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
  97. fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
  98. fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
  99. fractal_server/app/schemas/v2/__init__.py +37 -0
  100. fractal_server/app/schemas/v2/dataset.py +126 -0
  101. fractal_server/app/schemas/v2/dumps.py +87 -0
  102. fractal_server/app/schemas/v2/job.py +114 -0
  103. fractal_server/app/schemas/v2/manifest.py +159 -0
  104. fractal_server/app/schemas/v2/project.py +34 -0
  105. fractal_server/app/schemas/v2/status.py +16 -0
  106. fractal_server/app/schemas/v2/task.py +151 -0
  107. fractal_server/app/schemas/v2/task_collection.py +109 -0
  108. fractal_server/app/schemas/v2/workflow.py +79 -0
  109. fractal_server/app/schemas/v2/workflowtask.py +208 -0
  110. fractal_server/config.py +13 -10
  111. fractal_server/images/__init__.py +4 -0
  112. fractal_server/images/models.py +136 -0
  113. fractal_server/images/tools.py +84 -0
  114. fractal_server/main.py +11 -3
  115. fractal_server/migrations/env.py +0 -2
  116. fractal_server/migrations/versions/5bf02391cfef_v2.py +245 -0
  117. fractal_server/tasks/__init__.py +0 -5
  118. fractal_server/tasks/endpoint_operations.py +13 -19
  119. fractal_server/tasks/utils.py +35 -0
  120. fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
  121. fractal_server/tasks/v1/__init__.py +0 -0
  122. fractal_server/tasks/{background_operations.py → v1/background_operations.py} +20 -52
  123. fractal_server/tasks/v1/get_collection_data.py +14 -0
  124. fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
  125. fractal_server/tasks/v2/__init__.py +0 -0
  126. fractal_server/tasks/v2/background_operations.py +381 -0
  127. fractal_server/tasks/v2/get_collection_data.py +14 -0
  128. fractal_server/urls.py +13 -0
  129. {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/METADATA +11 -12
  130. fractal_server-2.0.0.dist-info/RECORD +169 -0
  131. fractal_server/app/runner/_slurm/.gitignore +0 -2
  132. fractal_server/app/runner/common.py +0 -307
  133. fractal_server/app/schemas/json_schemas/manifest.json +0 -81
  134. fractal_server-1.4.6.dist-info/RECORD +0 -97
  135. /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
  136. /fractal_server/app/runner/{_local → v1/_local}/executor.py +0 -0
  137. {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/LICENSE +0 -0
  138. {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/WHEEL +0 -0
  139. {fractal_server-1.4.6.dist-info → fractal_server-2.0.0.dist-info}/entry_points.txt +0 -0
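
The headline change in this release is the split of the package into parallel V1 and V2 trees (models, schemas, routes, runner, tasks). For downstream code this mostly shows up as import-path and symbol renames; the sketch below illustrates the pattern using only names visible in the diffs that follow, with the absolute `fractal_server.app.*` paths inferred from the file list above rather than copied verbatim from any single hunk.

# fractal-server 1.4.6: flat modules, unversioned names
# from fractal_server.app.models import ApplyWorkflow, Dataset
# from fractal_server.app.schemas import ApplyWorkflowRead, JobStatusType

# fractal-server 2.0.0: v1/v2 subpackages, V1/V2-suffixed schema names
from fractal_server.app.models.v1 import ApplyWorkflow, Dataset  # legacy ORM models
from fractal_server.app.models.v2 import JobV2, ProjectV2        # new V2 ORM models
from fractal_server.app.schemas.v1 import ApplyWorkflowReadV1, JobStatusTypeV1
from fractal_server.app.schemas.v2 import JobReadV2, JobStatusTypeV2
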
fractal_server/app/routes/{admin.py → admin/v1.py}

@@ -15,29 +15,29 @@ from fastapi.responses import StreamingResponse
  from sqlalchemy import func
  from sqlmodel import select

- from ...config import get_settings
- from ...syringe import Inject
- from ...utils import get_timestamp
- from ..db import AsyncSession
- from ..db import get_async_db
- from ..models import ApplyWorkflow
- from ..models import Dataset
- from ..models import JobStatusType
- from ..models import Project
- from ..models import Workflow
- from ..models.security import UserOAuth as User
- from ..runner._common import WORKFLOW_LOG_FILENAME
- from ..schemas import ApplyWorkflowRead
- from ..schemas import ApplyWorkflowUpdate
- from ..schemas import DatasetRead
- from ..schemas import ProjectRead
- from ..schemas import WorkflowRead
- from ..security import current_active_superuser
- from .aux._job import _write_shutdown_file
- from .aux._job import _zip_folder_to_byte_stream
- from .aux._runner import _check_backend_is_slurm
-
- router_admin = APIRouter()
+ from ....config import get_settings
+ from ....syringe import Inject
+ from ....utils import get_timestamp
+ from ...db import AsyncSession
+ from ...db import get_async_db
+ from ...models.security import UserOAuth as User
+ from ...models.v1 import ApplyWorkflow
+ from ...models.v1 import Dataset
+ from ...models.v1 import JobStatusTypeV1
+ from ...models.v1 import Project
+ from ...models.v1 import Workflow
+ from ...runner.filenames import WORKFLOW_LOG_FILENAME
+ from ...schemas.v1 import ApplyWorkflowReadV1
+ from ...schemas.v1 import ApplyWorkflowUpdateV1
+ from ...schemas.v1 import DatasetReadV1
+ from ...schemas.v1 import ProjectReadV1
+ from ...schemas.v1 import WorkflowReadV1
+ from ...security import current_active_superuser
+ from ..aux._job import _write_shutdown_file
+ from ..aux._job import _zip_folder_to_byte_stream
+ from ..aux._runner import _check_backend_is_slurm
+
+ router_admin_v1 = APIRouter()


  def _convert_to_db_timestamp(dt: datetime) -> datetime:
@@ -57,7 +57,7 @@ def _convert_to_db_timestamp(dt: datetime) -> datetime:
  return _dt


- @router_admin.get("/project/", response_model=list[ProjectRead])
+ @router_admin_v1.get("/project/", response_model=list[ProjectReadV1])
  async def view_project(
  id: Optional[int] = None,
  user_id: Optional[int] = None,
@@ -65,7 +65,7 @@ async def view_project(
  timestamp_created_max: Optional[datetime] = None,
  user: User = Depends(current_active_superuser),
  db: AsyncSession = Depends(get_async_db),
- ) -> list[ProjectRead]:
+ ) -> list[ProjectReadV1]:
  """
  Query `project` table.

@@ -95,7 +95,7 @@ async def view_project(
  return project_list


- @router_admin.get("/workflow/", response_model=list[WorkflowRead])
+ @router_admin_v1.get("/workflow/", response_model=list[WorkflowReadV1])
  async def view_workflow(
  id: Optional[int] = None,
  user_id: Optional[int] = None,
@@ -105,7 +105,7 @@ async def view_workflow(
  timestamp_created_max: Optional[datetime] = None,
  user: User = Depends(current_active_superuser),
  db: AsyncSession = Depends(get_async_db),
- ) -> list[WorkflowRead]:
+ ) -> list[WorkflowReadV1]:
  """
  Query `workflow` table.

@@ -144,7 +144,7 @@ async def view_workflow(
  return workflow_list


- @router_admin.get("/dataset/", response_model=list[DatasetRead])
+ @router_admin_v1.get("/dataset/", response_model=list[DatasetReadV1])
  async def view_dataset(
  id: Optional[int] = None,
  user_id: Optional[int] = None,
@@ -155,7 +155,7 @@ async def view_dataset(
  timestamp_created_max: Optional[datetime] = None,
  user: User = Depends(current_active_superuser),
  db: AsyncSession = Depends(get_async_db),
- ) -> list[DatasetRead]:
+ ) -> list[DatasetReadV1]:
  """
  Query `dataset` table.

@@ -197,7 +197,7 @@ async def view_dataset(
  return dataset_list


- @router_admin.get("/job/", response_model=list[ApplyWorkflowRead])
+ @router_admin_v1.get("/job/", response_model=list[ApplyWorkflowReadV1])
  async def view_job(
  id: Optional[int] = None,
  user_id: Optional[int] = None,
@@ -205,7 +205,7 @@ async def view_job(
  input_dataset_id: Optional[int] = None,
  output_dataset_id: Optional[int] = None,
  workflow_id: Optional[int] = None,
- status: Optional[JobStatusType] = None,
+ status: Optional[JobStatusTypeV1] = None,
  start_timestamp_min: Optional[datetime] = None,
  start_timestamp_max: Optional[datetime] = None,
  end_timestamp_min: Optional[datetime] = None,
@@ -213,7 +213,7 @@ async def view_job(
  log: bool = True,
  user: User = Depends(current_active_superuser),
  db: AsyncSession = Depends(get_async_db),
- ) -> list[ApplyWorkflowRead]:
+ ) -> list[ApplyWorkflowReadV1]:
  """
  Query `ApplyWorkflow` table.

@@ -278,13 +278,13 @@ async def view_job(
  return job_list


- @router_admin.get("/job/{job_id}/", response_model=ApplyWorkflowRead)
+ @router_admin_v1.get("/job/{job_id}/", response_model=ApplyWorkflowReadV1)
  async def view_single_job(
  job_id: int = None,
  show_tmp_logs: bool = False,
  user: User = Depends(current_active_superuser),
  db: AsyncSession = Depends(get_async_db),
- ) -> ApplyWorkflowRead:
+ ) -> ApplyWorkflowReadV1:

  job = await db.get(ApplyWorkflow, job_id)
  if not job:
@@ -294,7 +294,7 @@ async def view_single_job(
  )
  await db.close()

- if show_tmp_logs and (job.status == JobStatusType.SUBMITTED):
+ if show_tmp_logs and (job.status == JobStatusTypeV1.SUBMITTED):
  try:
  with open(f"{job.working_dir}/{WORKFLOW_LOG_FILENAME}", "r") as f:
  job.log = f.read()
@@ -304,16 +304,16 @@ async def view_single_job(
  return job


- @router_admin.patch(
+ @router_admin_v1.patch(
  "/job/{job_id}/",
- response_model=ApplyWorkflowRead,
+ response_model=ApplyWorkflowReadV1,
  )
  async def update_job(
- job_update: ApplyWorkflowUpdate,
+ job_update: ApplyWorkflowUpdateV1,
  job_id: int,
  user: User = Depends(current_active_superuser),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[ApplyWorkflowRead]:
+ ) -> Optional[ApplyWorkflowReadV1]:
  """
  Change the status of an existing job.

@@ -327,7 +327,7 @@ async def update_job(
  detail=f"Job {job_id} not found",
  )

- if job_update.status != JobStatusType.FAILED:
+ if job_update.status != JobStatusTypeV1.FAILED:
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
  detail=f"Cannot set job status to {job_update.status}",
@@ -341,7 +341,7 @@ async def update_job(
  return job


- @router_admin.get("/job/{job_id}/stop/", status_code=202)
+ @router_admin_v1.get("/job/{job_id}/stop/", status_code=202)
  async def stop_job(
  job_id: int,
  user: User = Depends(current_active_superuser),
@@ -367,7 +367,7 @@ async def stop_job(
  return Response(status_code=status.HTTP_202_ACCEPTED)


- @router_admin.get(
+ @router_admin_v1.get(
  "/job/{job_id}/download/",
  response_class=StreamingResponse,
  )
fractal_server/app/routes/admin/v2.py (new file)

@@ -0,0 +1,309 @@
+ """
+ Definition of `/admin` routes.
+ """
+ from datetime import datetime
+ from datetime import timezone
+ from pathlib import Path
+ from typing import Optional
+
+ from fastapi import APIRouter
+ from fastapi import Depends
+ from fastapi import HTTPException
+ from fastapi import Response
+ from fastapi import status
+ from fastapi.responses import StreamingResponse
+ from pydantic import BaseModel
+ from sqlmodel import select
+
+ from ....config import get_settings
+ from ....syringe import Inject
+ from ....utils import get_timestamp
+ from ...db import AsyncSession
+ from ...db import get_async_db
+ from ...models.security import UserOAuth as User
+ from ...models.v1 import Task
+ from ...models.v2 import JobV2
+ from ...models.v2 import ProjectV2
+ from ...runner.filenames import WORKFLOW_LOG_FILENAME
+ from ...schemas.v2 import JobReadV2
+ from ...schemas.v2 import JobStatusTypeV2
+ from ...schemas.v2 import JobUpdateV2
+ from ...schemas.v2 import ProjectReadV2
+ from ...security import current_active_superuser
+ from ..aux._job import _write_shutdown_file
+ from ..aux._job import _zip_folder_to_byte_stream
+ from ..aux._runner import _check_backend_is_slurm
+
+ router_admin_v2 = APIRouter()
+
+
+ def _convert_to_db_timestamp(dt: datetime) -> datetime:
+ """
+ This function takes a timezone-aware datetime and converts it to UTC.
+ If using SQLite, it also removes the timezone information in order to make
+ the datetime comparable with datetimes in the database.
+ """
+ if dt.tzinfo is None:
+ raise HTTPException(
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=f"The timestamp provided has no timezone information: {dt}",
+ )
+ _dt = dt.astimezone(timezone.utc)
+ if Inject(get_settings).DB_ENGINE == "sqlite":
+ return _dt.replace(tzinfo=None)
+ return _dt
+
+
+ @router_admin_v2.get("/project/", response_model=list[ProjectReadV2])
+ async def view_project(
+ id: Optional[int] = None,
+ user_id: Optional[int] = None,
+ user: User = Depends(current_active_superuser),
+ db: AsyncSession = Depends(get_async_db),
+ ) -> list[ProjectReadV2]:
+ """
+ Query `ProjectV2` table.
+
+ Args:
+ id: If not `None`, select a given `project.id`.
+ user_id: If not `None`, select a given `project.user_id`.
+ """
+
+ stm = select(ProjectV2)
+
+ if id is not None:
+ stm = stm.where(ProjectV2.id == id)
+ if user_id is not None:
+ stm = stm.where(ProjectV2.user_list.any(User.id == user_id))
+
+ res = await db.execute(stm)
+ project_list = res.scalars().all()
+ await db.close()
+
+ return project_list
+
+
+ @router_admin_v2.get("/job/", response_model=list[JobReadV2])
+ async def view_job(
+ id: Optional[int] = None,
+ user_id: Optional[int] = None,
+ project_id: Optional[int] = None,
+ dataset_id: Optional[int] = None,
+ workflow_id: Optional[int] = None,
+ status: Optional[JobStatusTypeV2] = None,
+ start_timestamp_min: Optional[datetime] = None,
+ start_timestamp_max: Optional[datetime] = None,
+ end_timestamp_min: Optional[datetime] = None,
+ end_timestamp_max: Optional[datetime] = None,
+ log: bool = True,
+ user: User = Depends(current_active_superuser),
+ db: AsyncSession = Depends(get_async_db),
+ ) -> list[JobReadV2]:
+ """
+ Query `ApplyWorkflow` table.
+
+ Args:
+ id: If not `None`, select a given `applyworkflow.id`.
+ project_id: If not `None`, select a given `applyworkflow.project_id`.
+ dataset_id: If not `None`, select a given
+ `applyworkflow.input_dataset_id`.
+ workflow_id: If not `None`, select a given `applyworkflow.workflow_id`.
+ status: If not `None`, select a given `applyworkflow.status`.
+ start_timestamp_min: If not `None`, select a rows with
+ `start_timestamp` after `start_timestamp_min`.
+ start_timestamp_max: If not `None`, select a rows with
+ `start_timestamp` before `start_timestamp_min`.
+ end_timestamp_min: If not `None`, select a rows with `end_timestamp`
+ after `end_timestamp_min`.
+ end_timestamp_max: If not `None`, select a rows with `end_timestamp`
+ before `end_timestamp_min`.
+ log: If `True`, include `job.log`, if `False`
+ `job.log` is set to `None`.
+ """
+ stm = select(JobV2)
+
+ if id is not None:
+ stm = stm.where(JobV2.id == id)
+ if user_id is not None:
+ stm = stm.join(ProjectV2).where(
+ ProjectV2.user_list.any(User.id == user_id)
+ )
+ if project_id is not None:
+ stm = stm.where(JobV2.project_id == project_id)
+ if dataset_id is not None:
+ stm = stm.where(JobV2.dataset_id == dataset_id)
+ if workflow_id is not None:
+ stm = stm.where(JobV2.workflow_id == workflow_id)
+ if status is not None:
+ stm = stm.where(JobV2.status == status)
+ if start_timestamp_min is not None:
+ start_timestamp_min = _convert_to_db_timestamp(start_timestamp_min)
+ stm = stm.where(JobV2.start_timestamp >= start_timestamp_min)
+ if start_timestamp_max is not None:
+ start_timestamp_max = _convert_to_db_timestamp(start_timestamp_max)
+ stm = stm.where(JobV2.start_timestamp <= start_timestamp_max)
+ if end_timestamp_min is not None:
+ end_timestamp_min = _convert_to_db_timestamp(end_timestamp_min)
+ stm = stm.where(JobV2.end_timestamp >= end_timestamp_min)
+ if end_timestamp_max is not None:
+ end_timestamp_max = _convert_to_db_timestamp(end_timestamp_max)
+ stm = stm.where(JobV2.end_timestamp <= end_timestamp_max)
+
+ res = await db.execute(stm)
+ job_list = res.scalars().all()
+ await db.close()
+ if not log:
+ for job in job_list:
+ setattr(job, "log", None)
+
+ return job_list
+
+
+ @router_admin_v2.get("/job/{job_id}/", response_model=JobReadV2)
+ async def view_single_job(
+ job_id: int = None,
+ show_tmp_logs: bool = False,
+ user: User = Depends(current_active_superuser),
+ db: AsyncSession = Depends(get_async_db),
+ ) -> JobReadV2:
+
+ job = await db.get(JobV2, job_id)
+ if not job:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Job {job_id} not found",
+ )
+ await db.close()
+
+ if show_tmp_logs and (job.status == JobStatusTypeV2.SUBMITTED):
+ try:
+ with open(f"{job.working_dir}/{WORKFLOW_LOG_FILENAME}", "r") as f:
+ job.log = f.read()
+ except FileNotFoundError:
+ pass
+
+ return job
+
+
+ @router_admin_v2.patch(
+ "/job/{job_id}/",
+ response_model=JobReadV2,
+ )
+ async def update_job(
+ job_update: JobUpdateV2,
+ job_id: int,
+ user: User = Depends(current_active_superuser),
+ db: AsyncSession = Depends(get_async_db),
+ ) -> Optional[JobReadV2]:
+ """
+ Change the status of an existing job.
+
+ This endpoint is only open to superusers, and it does not apply
+ project-based access-control to jobs.
+ """
+ job = await db.get(JobV2, job_id)
+ if job is None:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Job {job_id} not found",
+ )
+
+ if job_update.status != JobStatusTypeV2.FAILED:
+ raise HTTPException(
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=f"Cannot set job status to {job_update.status}",
+ )
+
+ setattr(job, "status", job_update.status)
+ setattr(job, "end_timestamp", get_timestamp())
+ await db.commit()
+ await db.refresh(job)
+ await db.close()
+ return job
+
+
+ @router_admin_v2.get("/job/{job_id}/stop/", status_code=202)
+ async def stop_job(
+ job_id: int,
+ user: User = Depends(current_active_superuser),
+ db: AsyncSession = Depends(get_async_db),
+ ) -> Response:
+ """
+ Stop execution of a workflow job.
+
+ Only available for slurm backend.
+ """
+
+ _check_backend_is_slurm()
+
+ job = await db.get(JobV2, job_id)
+ if job is None:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Job {job_id} not found",
+ )
+
+ _write_shutdown_file(job=job)
+
+ return Response(status_code=status.HTTP_202_ACCEPTED)
+
+
+ @router_admin_v2.get(
+ "/job/{job_id}/download/",
+ response_class=StreamingResponse,
+ )
+ async def download_job_logs(
+ job_id: int,
+ user: User = Depends(current_active_superuser),
+ db: AsyncSession = Depends(get_async_db),
+ ) -> StreamingResponse:
+ """
+ Download job folder
+ """
+ # Get job from DB
+ job = await db.get(JobV2, job_id)
+ if job is None:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Job {job_id} not found",
+ )
+ # Create and return byte stream for zipped log folder
+ PREFIX_ZIP = Path(job.working_dir).name
+ zip_filename = f"{PREFIX_ZIP}_archive.zip"
+ byte_stream = _zip_folder_to_byte_stream(
+ folder=job.working_dir, zip_filename=zip_filename
+ )
+ return StreamingResponse(
+ iter([byte_stream.getvalue()]),
+ media_type="application/x-zip-compressed",
+ headers={"Content-Disposition": f"attachment;filename={zip_filename}"},
+ )
+
+
+ class TaskCompatibility(BaseModel):
+ is_v2_compatible: bool
+
+
+ @router_admin_v2.patch(
+ "/task-v1/{task_id}/",
+ status_code=status.HTTP_200_OK,
+ )
+ async def flag_task_v1_as_v2_compatible(
+ task_id: int,
+ compatibility: TaskCompatibility,
+ user: User = Depends(current_active_superuser),
+ db: AsyncSession = Depends(get_async_db),
+ ) -> Response:
+
+ task = await db.get(Task, task_id)
+ if task is None:
+ raise HTTPException(
+ status_code=status.HTTP_404_NOT_FOUND,
+ detail=f"Task {task_id} not found",
+ )
+
+ task.is_v2_compatible = compatibility.is_v2_compatible
+ await db.commit()
+ await db.close()
+
+ return Response(status_code=status.HTTP_200_OK)
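
The new `PATCH /task-v1/{task_id}/` endpoint above lets a superuser flag a legacy V1 task as V2-compatible. Below is a minimal client sketch, assuming the V2 admin router is mounted under an `/admin/v2` prefix (the mounting happens in `main.py`, which is not shown in this excerpt) and that the deployment uses bearer-token authentication; base URL and token are placeholders.

import httpx

BASE_URL = "http://localhost:8000"  # hypothetical deployment
TOKEN = "..."                       # superuser token (placeholder)

def flag_task_v1_as_v2_compatible(task_id: int, compatible: bool = True) -> None:
    # Mirrors the TaskCompatibility payload defined in admin/v2.py above.
    resp = httpx.patch(
        f"{BASE_URL}/admin/v2/task-v1/{task_id}/",
        json={"is_v2_compatible": compatible},
        headers={"Authorization": f"Bearer {TOKEN}"},
    )
    resp.raise_for_status()
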
fractal_server/app/routes/api/v1/__init__.py

@@ -14,13 +14,13 @@ from .workflowtask import router as workflowtask_router
  router_api_v1 = APIRouter()

  router_api_v1.include_router(
- project_router, prefix="/project", tags=["Projects"]
+ project_router, prefix="/project", tags=["V1 Project"]
  )
- router_api_v1.include_router(task_router, prefix="/task", tags=["Tasks"])
+ router_api_v1.include_router(task_router, prefix="/task", tags=["V1 Task"])
  router_api_v1.include_router(
- taskcollection_router, prefix="/task", tags=["Task Collection"]
+ taskcollection_router, prefix="/task", tags=["V1 Task Collection"]
  )
- router_api_v1.include_router(dataset_router, tags=["Datasets"])
- router_api_v1.include_router(workflow_router, tags=["Workflows"])
- router_api_v1.include_router(workflowtask_router, tags=["Workflow Tasks"])
- router_api_v1.include_router(job_router, tags=["Jobs"])
+ router_api_v1.include_router(dataset_router, tags=["V1 Dataset"])
+ router_api_v1.include_router(workflow_router, tags=["V1 Workflow"])
+ router_api_v1.include_router(workflowtask_router, tags=["V1 WorkflowTask"])
+ router_api_v1.include_router(job_router, tags=["V1 Job"])
fractal_server/app/routes/api/v1/_aux_functions.py

@@ -12,14 +12,14 @@ from sqlmodel import select
  from sqlmodel.sql.expression import SelectOfScalar

  from ....db import AsyncSession
- from ....models import ApplyWorkflow
- from ....models import Dataset
  from ....models import LinkUserProject
- from ....models import Project
- from ....models import Task
- from ....models import Workflow
- from ....models import WorkflowTask
- from ....schemas import JobStatusType
+ from ....models.v1 import ApplyWorkflow
+ from ....models.v1 import Dataset
+ from ....models.v1 import Project
+ from ....models.v1 import Task
+ from ....models.v1 import Workflow
+ from ....models.v1 import WorkflowTask
+ from ....schemas.v1 import JobStatusTypeV1
  from ....security import User


@@ -375,7 +375,7 @@ def _get_submitted_jobs_statement() -> SelectOfScalar:
  `ApplyWorkflow.status` equal to `submitted`.
  """
  stm = select(ApplyWorkflow).where(
- ApplyWorkflow.status == JobStatusTypeV1.SUBMITTED
  )
  return stm
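
For context, `_get_submitted_jobs_statement()` keeps its behaviour and only picks up the renamed enum. A minimal usage sketch, assuming an `AsyncSession` is already available; the counting helper itself is illustrative and not part of this diff.

from fractal_server.app.db import AsyncSession
from fractal_server.app.routes.api.v1._aux_functions import (
    _get_submitted_jobs_statement,
)

async def count_submitted_v1_jobs(db: AsyncSession) -> int:
    # Executes the SELECT built above: ApplyWorkflow rows whose status
    # equals JobStatusTypeV1.SUBMITTED.
    res = await db.execute(_get_submitted_jobs_statement())
    return len(res.scalars().all())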