fractal-server 1.4.10__py3-none-any.whl → 2.0.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +4 -7
  3. fractal_server/app/models/linkuserproject.py +9 -0
  4. fractal_server/app/models/security.py +6 -0
  5. fractal_server/app/models/state.py +1 -1
  6. fractal_server/app/models/v1/__init__.py +10 -0
  7. fractal_server/app/models/{dataset.py → v1/dataset.py} +5 -5
  8. fractal_server/app/models/{job.py → v1/job.py} +5 -5
  9. fractal_server/app/models/{project.py → v1/project.py} +5 -5
  10. fractal_server/app/models/{task.py → v1/task.py} +7 -2
  11. fractal_server/app/models/{workflow.py → v1/workflow.py} +5 -5
  12. fractal_server/app/models/v2/__init__.py +20 -0
  13. fractal_server/app/models/v2/dataset.py +55 -0
  14. fractal_server/app/models/v2/job.py +51 -0
  15. fractal_server/app/models/v2/project.py +31 -0
  16. fractal_server/app/models/v2/task.py +93 -0
  17. fractal_server/app/models/v2/workflow.py +43 -0
  18. fractal_server/app/models/v2/workflowtask.py +90 -0
  19. fractal_server/app/routes/{admin.py → admin/v1.py} +42 -42
  20. fractal_server/app/routes/admin/v2.py +275 -0
  21. fractal_server/app/routes/api/v1/__init__.py +7 -7
  22. fractal_server/app/routes/api/v1/_aux_functions.py +2 -2
  23. fractal_server/app/routes/api/v1/dataset.py +37 -37
  24. fractal_server/app/routes/api/v1/job.py +12 -12
  25. fractal_server/app/routes/api/v1/project.py +23 -21
  26. fractal_server/app/routes/api/v1/task.py +24 -14
  27. fractal_server/app/routes/api/v1/task_collection.py +16 -14
  28. fractal_server/app/routes/api/v1/workflow.py +24 -24
  29. fractal_server/app/routes/api/v1/workflowtask.py +10 -10
  30. fractal_server/app/routes/api/v2/__init__.py +28 -0
  31. fractal_server/app/routes/api/v2/_aux_functions.py +497 -0
  32. fractal_server/app/routes/api/v2/apply.py +220 -0
  33. fractal_server/app/routes/api/v2/dataset.py +310 -0
  34. fractal_server/app/routes/api/v2/images.py +212 -0
  35. fractal_server/app/routes/api/v2/job.py +200 -0
  36. fractal_server/app/routes/api/v2/project.py +205 -0
  37. fractal_server/app/routes/api/v2/task.py +222 -0
  38. fractal_server/app/routes/api/v2/task_collection.py +229 -0
  39. fractal_server/app/routes/api/v2/workflow.py +398 -0
  40. fractal_server/app/routes/api/v2/workflowtask.py +269 -0
  41. fractal_server/app/routes/aux/_job.py +1 -1
  42. fractal_server/app/runner/async_wrap.py +27 -0
  43. fractal_server/app/runner/exceptions.py +129 -0
  44. fractal_server/app/runner/executors/local/__init__.py +3 -0
  45. fractal_server/app/runner/{_local → executors/local}/executor.py +2 -2
  46. fractal_server/app/runner/executors/slurm/__init__.py +3 -0
  47. fractal_server/app/runner/{_slurm → executors/slurm}/_batching.py +1 -1
  48. fractal_server/app/runner/{_slurm → executors/slurm}/_check_jobs_status.py +1 -1
  49. fractal_server/app/runner/{_slurm → executors/slurm}/_executor_wait_thread.py +1 -1
  50. fractal_server/app/runner/{_slurm → executors/slurm}/_slurm_config.py +3 -152
  51. fractal_server/app/runner/{_slurm → executors/slurm}/_subprocess_run_as_user.py +1 -1
  52. fractal_server/app/runner/{_slurm → executors/slurm}/executor.py +9 -9
  53. fractal_server/app/runner/filenames.py +6 -0
  54. fractal_server/app/runner/set_start_and_last_task_index.py +39 -0
  55. fractal_server/app/runner/task_files.py +105 -0
  56. fractal_server/app/runner/{__init__.py → v1/__init__.py} +24 -22
  57. fractal_server/app/runner/{_common.py → v1/_common.py} +13 -120
  58. fractal_server/app/runner/{_local → v1/_local}/__init__.py +6 -6
  59. fractal_server/app/runner/{_local → v1/_local}/_local_config.py +6 -7
  60. fractal_server/app/runner/{_local → v1/_local}/_submit_setup.py +1 -5
  61. fractal_server/app/runner/v1/_slurm/__init__.py +310 -0
  62. fractal_server/app/runner/{_slurm → v1/_slurm}/_submit_setup.py +3 -9
  63. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +163 -0
  64. fractal_server/app/runner/v1/common.py +117 -0
  65. fractal_server/app/runner/{handle_failed_job.py → v1/handle_failed_job.py} +8 -8
  66. fractal_server/app/runner/v2/__init__.py +337 -0
  67. fractal_server/app/runner/v2/_local/__init__.py +169 -0
  68. fractal_server/app/runner/v2/_local/_local_config.py +118 -0
  69. fractal_server/app/runner/v2/_local/_submit_setup.py +52 -0
  70. fractal_server/app/runner/v2/_slurm/__init__.py +157 -0
  71. fractal_server/app/runner/v2/_slurm/_submit_setup.py +83 -0
  72. fractal_server/app/runner/v2/_slurm/get_slurm_config.py +179 -0
  73. fractal_server/app/runner/v2/components.py +5 -0
  74. fractal_server/app/runner/v2/deduplicate_list.py +24 -0
  75. fractal_server/app/runner/v2/handle_failed_job.py +156 -0
  76. fractal_server/app/runner/v2/merge_outputs.py +41 -0
  77. fractal_server/app/runner/v2/runner.py +264 -0
  78. fractal_server/app/runner/v2/runner_functions.py +339 -0
  79. fractal_server/app/runner/v2/runner_functions_low_level.py +134 -0
  80. fractal_server/app/runner/v2/task_interface.py +43 -0
  81. fractal_server/app/runner/v2/v1_compat.py +21 -0
  82. fractal_server/app/schemas/__init__.py +4 -42
  83. fractal_server/app/schemas/v1/__init__.py +42 -0
  84. fractal_server/app/schemas/{applyworkflow.py → v1/applyworkflow.py} +18 -18
  85. fractal_server/app/schemas/{dataset.py → v1/dataset.py} +30 -30
  86. fractal_server/app/schemas/{dumps.py → v1/dumps.py} +8 -8
  87. fractal_server/app/schemas/{manifest.py → v1/manifest.py} +5 -5
  88. fractal_server/app/schemas/{project.py → v1/project.py} +9 -9
  89. fractal_server/app/schemas/{task.py → v1/task.py} +12 -12
  90. fractal_server/app/schemas/{task_collection.py → v1/task_collection.py} +7 -7
  91. fractal_server/app/schemas/{workflow.py → v1/workflow.py} +38 -38
  92. fractal_server/app/schemas/v2/__init__.py +34 -0
  93. fractal_server/app/schemas/v2/dataset.py +88 -0
  94. fractal_server/app/schemas/v2/dumps.py +87 -0
  95. fractal_server/app/schemas/v2/job.py +113 -0
  96. fractal_server/app/schemas/v2/manifest.py +109 -0
  97. fractal_server/app/schemas/v2/project.py +36 -0
  98. fractal_server/app/schemas/v2/task.py +121 -0
  99. fractal_server/app/schemas/v2/task_collection.py +105 -0
  100. fractal_server/app/schemas/v2/workflow.py +78 -0
  101. fractal_server/app/schemas/v2/workflowtask.py +118 -0
  102. fractal_server/config.py +5 -4
  103. fractal_server/images/__init__.py +50 -0
  104. fractal_server/images/tools.py +86 -0
  105. fractal_server/main.py +11 -3
  106. fractal_server/migrations/versions/4b35c5cefbe3_tmp_is_v2_compatible.py +39 -0
  107. fractal_server/migrations/versions/56af171b0159_v2.py +217 -0
  108. fractal_server/migrations/versions/876f28db9d4e_tmp_split_task_and_wftask_meta.py +68 -0
  109. fractal_server/migrations/versions/974c802f0dd0_tmp_workflowtaskv2_type_in_db.py +37 -0
  110. fractal_server/migrations/versions/9cd305cd6023_tmp_workflowtaskv2.py +40 -0
  111. fractal_server/migrations/versions/a6231ed6273c_tmp_args_schemas_in_taskv2.py +42 -0
  112. fractal_server/migrations/versions/b9e9eed9d442_tmp_taskv2_type.py +37 -0
  113. fractal_server/migrations/versions/e3e639454d4b_tmp_make_task_meta_non_optional.py +50 -0
  114. fractal_server/tasks/__init__.py +0 -5
  115. fractal_server/tasks/endpoint_operations.py +13 -19
  116. fractal_server/tasks/utils.py +35 -0
  117. fractal_server/tasks/{_TaskCollectPip.py → v1/_TaskCollectPip.py} +3 -3
  118. fractal_server/tasks/{background_operations.py → v1/background_operations.py} +18 -50
  119. fractal_server/tasks/v1/get_collection_data.py +14 -0
  120. fractal_server/tasks/v2/_TaskCollectPip.py +103 -0
  121. fractal_server/tasks/v2/background_operations.py +382 -0
  122. fractal_server/tasks/v2/get_collection_data.py +14 -0
  123. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/METADATA +1 -1
  124. fractal_server-2.0.0a0.dist-info/RECORD +166 -0
  125. fractal_server/app/runner/_slurm/.gitignore +0 -2
  126. fractal_server/app/runner/_slurm/__init__.py +0 -150
  127. fractal_server/app/runner/common.py +0 -311
  128. fractal_server-1.4.10.dist-info/RECORD +0 -98
  129. /fractal_server/app/runner/{_slurm → executors/slurm}/remote.py +0 -0
  130. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/LICENSE +0 -0
  131. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/WHEEL +0 -0
  132. {fractal_server-1.4.10.dist-info → fractal_server-2.0.0a0.dist-info}/entry_points.txt +0 -0
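
The list above amounts to a v1/v2 split of the package: the existing models, schemas, routes, runner and task-collection modules move under v1 subpackages (with V1-suffixed schema names), while parallel v2 subpackages are added alongside them. As a rough, illustrative sketch, here is how import paths move for code written against the 1.4.10 layout; paths and class names are taken only from the renames above and the import changes in the hunks below, and the mapping is not exhaustive.

# Illustrative import-path migration (old paths from 1.4.10, new paths as they
# appear in this diff).

# fractal-server 1.4.10
# from fractal_server.app.schemas import DatasetRead, TaskCollectStatus
# from fractal_server.app.runner._common import HISTORY_FILENAME
# from fractal_server.tasks.background_operations import background_collect_pip

# fractal-server 2.0.0a0
from fractal_server.app.schemas.v1 import DatasetReadV1, TaskCollectStatusV1
from fractal_server.app.runner.filenames import HISTORY_FILENAME
from fractal_server.tasks.v1.background_operations import background_collect_pip
from fractal_server.app.models.v2 import TaskV2  # new V2 ORM models
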
fractal_server/app/routes/api/v1/dataset.py

@@ -17,16 +17,16 @@ from ....models import ApplyWorkflow
  from ....models import Dataset
  from ....models import Project
  from ....models import Resource
- from ....runner._common import HISTORY_FILENAME
- from ....schemas import DatasetCreate
- from ....schemas import DatasetRead
- from ....schemas import DatasetStatusRead
- from ....schemas import DatasetUpdate
- from ....schemas import ResourceCreate
- from ....schemas import ResourceRead
- from ....schemas import ResourceUpdate
- from ....schemas import WorkflowExport
- from ....schemas import WorkflowTaskExport
+ from ....runner.filenames import HISTORY_FILENAME
+ from ....schemas.v1 import DatasetCreateV1
+ from ....schemas.v1 import DatasetReadV1
+ from ....schemas.v1 import DatasetStatusReadV1
+ from ....schemas.v1 import DatasetUpdateV1
+ from ....schemas.v1 import ResourceCreateV1
+ from ....schemas.v1 import ResourceReadV1
+ from ....schemas.v1 import ResourceUpdateV1
+ from ....schemas.v1 import WorkflowExportV1
+ from ....schemas.v1 import WorkflowTaskExportV1
  from ....security import current_active_user
  from ....security import User
  from ._aux_functions import _get_dataset_check_owner
@@ -40,15 +40,15 @@ router = APIRouter()

  @router.post(
  "/project/{project_id}/dataset/",
- response_model=DatasetRead,
+ response_model=DatasetReadV1,
  status_code=status.HTTP_201_CREATED,
  )
  async def create_dataset(
  project_id: int,
- dataset: DatasetCreate,
+ dataset: DatasetCreateV1,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[DatasetRead]:
+ ) -> Optional[DatasetReadV1]:
  """
  Add new dataset to current project
  """
@@ -66,14 +66,14 @@ async def create_dataset(

  @router.get(
  "/project/{project_id}/dataset/",
- response_model=list[DatasetRead],
+ response_model=list[DatasetReadV1],
  )
  async def read_dataset_list(
  project_id: int,
  history: bool = True,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[list[DatasetRead]]:
+ ) -> Optional[list[DatasetReadV1]]:
  """
  Get dataset list for given project
  """
@@ -97,14 +97,14 @@ async def read_dataset_list(

  @router.get(
  "/project/{project_id}/dataset/{dataset_id}/",
- response_model=DatasetRead,
+ response_model=DatasetReadV1,
  )
  async def read_dataset(
  project_id: int,
  dataset_id: int,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[DatasetRead]:
+ ) -> Optional[DatasetReadV1]:
  """
  Get info on a dataset associated to the current project
  """
@@ -121,15 +121,15 @@ async def read_dataset(

  @router.patch(
  "/project/{project_id}/dataset/{dataset_id}/",
- response_model=DatasetRead,
+ response_model=DatasetReadV1,
  )
  async def update_dataset(
  project_id: int,
  dataset_id: int,
- dataset_update: DatasetUpdate,
+ dataset_update: DatasetUpdateV1,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[DatasetRead]:
+ ) -> Optional[DatasetReadV1]:
  """
  Edit a dataset associated to the current project
  """
@@ -230,16 +230,16 @@ async def delete_dataset(

  @router.post(
  "/project/{project_id}/dataset/{dataset_id}/resource/",
- response_model=ResourceRead,
+ response_model=ResourceReadV1,
  status_code=status.HTTP_201_CREATED,
  )
  async def create_resource(
  project_id: int,
  dataset_id: int,
- resource: ResourceCreate,
+ resource: ResourceCreateV1,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[ResourceRead]:
+ ) -> Optional[ResourceReadV1]:
  """
  Add resource to an existing dataset
  """
@@ -260,14 +260,14 @@ async def create_resource(

  @router.get(
  "/project/{project_id}/dataset/{dataset_id}/resource/",
- response_model=list[ResourceRead],
+ response_model=list[ResourceReadV1],
  )
  async def get_resource_list(
  project_id: int,
  dataset_id: int,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[list[ResourceRead]]:
+ ) -> Optional[list[ResourceReadV1]]:
  """
  Get resources from a dataset
  """
@@ -286,16 +286,16 @@ async def get_resource_list(

  @router.patch(
  "/project/{project_id}/dataset/{dataset_id}/resource/{resource_id}/",
- response_model=ResourceRead,
+ response_model=ResourceReadV1,
  )
  async def update_resource(
  project_id: int,
  dataset_id: int,
  resource_id: int,
- resource_update: ResourceUpdate,
+ resource_update: ResourceUpdateV1,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[ResourceRead]:
+ ) -> Optional[ResourceReadV1]:
  """
  Edit a resource of a dataset
  """
@@ -361,14 +361,14 @@ async def delete_resource(

  @router.get(
  "/project/{project_id}/dataset/{dataset_id}/export_history/",
- response_model=WorkflowExport,
+ response_model=WorkflowExportV1,
  )
  async def export_history_as_workflow(
  project_id: int,
  dataset_id: int,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[WorkflowExport]:
+ ) -> Optional[WorkflowExportV1]:
  """
  Extract a reproducible workflow from the dataset history.
  """
@@ -413,7 +413,7 @@ async def export_history_as_workflow(
  wftask = history_item["workflowtask"]
  wftask_status = history_item["status"]
  if wftask_status == "done":
- task_list.append(WorkflowTaskExport(**wftask))
+ task_list.append(WorkflowTaskExportV1(**wftask))

  def _slugify_dataset_name(_name: str) -> str:
  _new_name = _name
@@ -423,20 +423,20 @@ async def export_history_as_workflow(

  name = f"history_{_slugify_dataset_name(dataset.name)}"

- workflow = WorkflowExport(name=name, task_list=task_list)
+ workflow = WorkflowExportV1(name=name, task_list=task_list)
  return workflow


  @router.get(
  "/project/{project_id}/dataset/{dataset_id}/status/",
- response_model=DatasetStatusRead,
+ response_model=DatasetStatusReadV1,
  )
  async def get_workflowtask_status(
  project_id: int,
  dataset_id: int,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[DatasetStatusRead]:
+ ) -> Optional[DatasetStatusReadV1]:
  """
  Extract the status of all `WorkflowTask`s that ran on a given `Dataset`.
  """
@@ -523,16 +523,16 @@ async def get_workflowtask_status(
  wftask_status = history_item["status"]
  workflow_tasks_status_dict[wftask_id] = wftask_status

- response_body = DatasetStatusRead(status=workflow_tasks_status_dict)
+ response_body = DatasetStatusReadV1(status=workflow_tasks_status_dict)
  return response_body


- @router.get("/dataset/", response_model=list[DatasetRead])
+ @router.get("/dataset/", response_model=list[DatasetReadV1])
  async def get_user_datasets(
  history: bool = True,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> list[DatasetRead]:
+ ) -> list[DatasetReadV1]:
  """
  Returns all the datasets of the current user
  """
fractal_server/app/routes/api/v1/job.py

@@ -11,10 +11,10 @@ from sqlmodel import select
  from ....db import AsyncSession
  from ....db import get_async_db
  from ....models import ApplyWorkflow
- from ....models import JobStatusType
+ from ....models import JobStatusTypeV1
  from ....models import Project
- from ....runner._common import WORKFLOW_LOG_FILENAME
- from ....schemas import ApplyWorkflowRead
+ from ....runner.filenames import WORKFLOW_LOG_FILENAME
+ from ....schemas.v1 import ApplyWorkflowReadV1
  from ....security import current_active_user
  from ....security import User
  from ...aux._job import _write_shutdown_file
@@ -27,12 +27,12 @@ from ._aux_functions import _get_workflow_check_owner
  router = APIRouter()


- @router.get("/job/", response_model=list[ApplyWorkflowRead])
+ @router.get("/job/", response_model=list[ApplyWorkflowReadV1])
  async def get_user_jobs(
  user: User = Depends(current_active_user),
  log: bool = True,
  db: AsyncSession = Depends(get_async_db),
- ) -> list[ApplyWorkflowRead]:
+ ) -> list[ApplyWorkflowReadV1]:
  """
  Returns all the jobs of the current user
  """
@@ -50,14 +50,14 @@ async def get_user_jobs(

  @router.get(
  "/project/{project_id}/workflow/{workflow_id}/job/",
- response_model=list[ApplyWorkflowRead],
+ response_model=list[ApplyWorkflowReadV1],
  )
  async def get_workflow_jobs(
  project_id: int,
  workflow_id: int,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[list[ApplyWorkflowRead]]:
+ ) -> Optional[list[ApplyWorkflowReadV1]]:
  """
  Returns all the jobs related to a specific workflow
  """
@@ -72,7 +72,7 @@ async def get_workflow_jobs(

  @router.get(
  "/project/{project_id}/job/{job_id}/",
- response_model=ApplyWorkflowRead,
+ response_model=ApplyWorkflowReadV1,
  )
  async def read_job(
  project_id: int,
@@ -80,7 +80,7 @@ async def read_job(
  show_tmp_logs: bool = False,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[ApplyWorkflowRead]:
+ ) -> Optional[ApplyWorkflowReadV1]:
  """
  Return info on an existing job
  """
@@ -94,7 +94,7 @@ async def read_job(
  job = output["job"]
  await db.close()

- if show_tmp_logs and (job.status == JobStatusType.SUBMITTED):
+ if show_tmp_logs and (job.status == JobStatusTypeV1.SUBMITTED):
  try:
  with open(f"{job.working_dir}/{WORKFLOW_LOG_FILENAME}", "r") as f:
  job.log = f.read()
@@ -140,14 +140,14 @@ async def download_job_logs(

  @router.get(
  "/project/{project_id}/job/",
- response_model=list[ApplyWorkflowRead],
+ response_model=list[ApplyWorkflowReadV1],
  )
  async def get_job_list(
  project_id: int,
  user: User = Depends(current_active_user),
  log: bool = True,
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[list[ApplyWorkflowRead]]:
+ ) -> Optional[list[ApplyWorkflowReadV1]]:
  """
  Get job list for given project
  """
fractal_server/app/routes/api/v1/project.py

@@ -23,15 +23,17 @@ from ....models import Dataset
  from ....models import LinkUserProject
  from ....models import Project
  from ....models import Workflow
- from ....runner import submit_workflow
- from ....runner import validate_workflow_compatibility
- from ....runner.common import set_start_and_last_task_index
- from ....schemas import ApplyWorkflowCreate
- from ....schemas import ApplyWorkflowRead
- from ....schemas import JobStatusType
- from ....schemas import ProjectCreate
- from ....schemas import ProjectRead
- from ....schemas import ProjectUpdate
+ from ....runner.set_start_and_last_task_index import (
+ set_start_and_last_task_index,
+ )
+ from ....runner.v1 import submit_workflow
+ from ....runner.v1 import validate_workflow_compatibility
+ from ....schemas.v1 import ApplyWorkflowCreateV1
+ from ....schemas.v1 import ApplyWorkflowReadV1
+ from ....schemas.v1 import JobStatusTypeV1
+ from ....schemas.v1 import ProjectCreateV1
+ from ....schemas.v1 import ProjectReadV1
+ from ....schemas.v1 import ProjectUpdateV1
  from ....security import current_active_user
  from ....security import current_active_verified_user
  from ....security import User
@@ -48,7 +50,7 @@ def _encode_as_utc(dt: datetime):
  return dt.replace(tzinfo=timezone.utc).isoformat()


- @router.get("/", response_model=list[ProjectRead])
+ @router.get("/", response_model=list[ProjectReadV1])
  async def get_list_project(
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
@@ -67,12 +69,12 @@ async def get_list_project(
  return project_list


- @router.post("/", response_model=ProjectRead, status_code=201)
+ @router.post("/", response_model=ProjectReadV1, status_code=201)
  async def create_project(
- project: ProjectCreate,
+ project: ProjectCreateV1,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[ProjectRead]:
+ ) -> Optional[ProjectReadV1]:
  """
  Create new poject
  """
@@ -102,12 +104,12 @@ async def create_project(
  return db_project


- @router.get("/{project_id}/", response_model=ProjectRead)
+ @router.get("/{project_id}/", response_model=ProjectReadV1)
  async def read_project(
  project_id: int,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[ProjectRead]:
+ ) -> Optional[ProjectReadV1]:
  """
  Return info on an existing project
  """
@@ -118,10 +120,10 @@ async def read_project(
  return project


- @router.patch("/{project_id}/", response_model=ProjectRead)
+ @router.patch("/{project_id}/", response_model=ProjectReadV1)
  async def update_project(
  project_id: int,
- project_update: ProjectUpdate,
+ project_update: ProjectUpdateV1,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
  ):
@@ -241,18 +243,18 @@ async def delete_project(
  @router.post(
  "/{project_id}/workflow/{workflow_id}/apply/",
  status_code=status.HTTP_202_ACCEPTED,
- response_model=ApplyWorkflowRead,
+ response_model=ApplyWorkflowReadV1,
  )
  async def apply_workflow(
  project_id: int,
  workflow_id: int,
- apply_workflow: ApplyWorkflowCreate,
+ apply_workflow: ApplyWorkflowCreateV1,
  background_tasks: BackgroundTasks,
  input_dataset_id: int,
  output_dataset_id: int,
  user: User = Depends(current_active_verified_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[ApplyWorkflowRead]:
+ ) -> Optional[ApplyWorkflowReadV1]:

  output = await _get_dataset_check_owner(
  project_id=project_id,
@@ -362,7 +364,7 @@ async def apply_workflow(
  stm = (
  select(ApplyWorkflow)
  .where(ApplyWorkflow.output_dataset_id == output_dataset_id)
- .where(ApplyWorkflow.status == JobStatusType.SUBMITTED)
+ .where(ApplyWorkflow.status == JobStatusTypeV1.SUBMITTED)
  )
  res = await db.execute(stm)
  if res.scalars().all():
fractal_server/app/routes/api/v1/task.py

@@ -13,9 +13,10 @@ from ....db import AsyncSession
  from ....db import get_async_db
  from ....models import Task
  from ....models import WorkflowTask
- from ....schemas import TaskCreate
- from ....schemas import TaskRead
- from ....schemas import TaskUpdate
+ from ....models.v2 import TaskV2
+ from ....schemas.v1 import TaskCreateV1
+ from ....schemas.v1 import TaskReadV1
+ from ....schemas.v1 import TaskUpdateV1
  from ....security import current_active_user
  from ....security import current_active_verified_user
  from ....security import User
@@ -26,12 +27,12 @@ router = APIRouter()
  logger = set_logger(__name__)


- @router.get("/", response_model=list[TaskRead])
+ @router.get("/", response_model=list[TaskReadV1])
  async def get_list_task(
  user: User = Depends(current_active_user),
  args_schema: bool = True,
  db: AsyncSession = Depends(get_async_db),
- ) -> list[TaskRead]:
+ ) -> list[TaskReadV1]:
  """
  Get list of available tasks
  """
@@ -46,12 +47,12 @@ async def get_list_task(
  return task_list


- @router.get("/{task_id}/", response_model=TaskRead)
+ @router.get("/{task_id}/", response_model=TaskReadV1)
  async def get_task(
  task_id: int,
  user: User = Depends(current_active_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> TaskRead:
+ ) -> TaskReadV1:
  """
  Get info on a specific task
  """
@@ -64,13 +65,13 @@ async def get_task(
  return task


- @router.patch("/{task_id}/", response_model=TaskRead)
+ @router.patch("/{task_id}/", response_model=TaskReadV1)
  async def patch_task(
  task_id: int,
- task_update: TaskUpdate,
+ task_update: TaskUpdateV1,
  user: User = Depends(current_active_verified_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[TaskRead]:
+ ) -> Optional[TaskReadV1]:
  """
  Edit a specific task (restricted to superusers and task owner)
  """
@@ -109,12 +110,14 @@ async def patch_task(
  return db_task


- @router.post("/", response_model=TaskRead, status_code=status.HTTP_201_CREATED)
+ @router.post(
+ "/", response_model=TaskReadV1, status_code=status.HTTP_201_CREATED
+ )
  async def create_task(
- task: TaskCreate,
+ task: TaskCreateV1,
  user: User = Depends(current_active_verified_user),
  db: AsyncSession = Depends(get_async_db),
- ) -> Optional[TaskRead]:
+ ) -> Optional[TaskReadV1]:
  """
  Create a new task
  """
@@ -143,7 +146,14 @@ async def create_task(
  if res.scalars().all():
  raise HTTPException(
  status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
- detail=f'Task source "{task.source}" already in use',
+ detail=f"Source '{task.source}' already used by some TaskV1",
+ )
+ stm = select(TaskV2).where(TaskV2.source == task.source)
+ res = await db.execute(stm)
+ if res.scalars().all():
+ raise HTTPException(
+ status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+ detail=f"Source '{task.source}' already used by some TaskV2",
  )

  # Add task
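
The last hunk above carries the one behavioral change in this file: a task source must now be unique across both the V1 and the V2 task tables. Below is a minimal sketch of that check, factored into a hypothetical helper (the helper itself is not part of the codebase; the models, session type, query pattern and 422 detail strings are taken from the diff).

# Hypothetical helper that mirrors the new create_task check: a task "source"
# must be free in both the V1 and the V2 task tables.
from fastapi import HTTPException, status
from sqlmodel import select

from fractal_server.app.db import AsyncSession
from fractal_server.app.models import Task  # V1 task table
from fractal_server.app.models.v2 import TaskV2  # V2 task table


async def _assert_source_is_free(db: AsyncSession, source: str) -> None:
    # Raise 422 if `source` is already used by a Task (V1) or a TaskV2 row.
    for model, label in ((Task, "TaskV1"), (TaskV2, "TaskV2")):
        res = await db.execute(select(model).where(model.source == source))
        if res.scalars().all():
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=f"Source '{source}' already used by some {label}",
            )
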
fractal_server/app/routes/api/v1/task_collection.py

@@ -15,24 +15,26 @@ from .....config import get_settings
  from .....logger import close_logger
  from .....logger import set_logger
  from .....syringe import Inject
- from .....tasks._TaskCollectPip import _TaskCollectPip
- from .....tasks.background_operations import background_collect_pip
- from .....tasks.endpoint_operations import create_package_dir_pip
- from .....tasks.endpoint_operations import download_package
- from .....tasks.endpoint_operations import get_collection_data
- from .....tasks.endpoint_operations import inspect_package
- from .....tasks.utils import get_collection_log
- from .....tasks.utils import slugify_task_name
  from ....db import AsyncSession
  from ....db import get_async_db
  from ....models import State
  from ....models import Task
- from ....schemas import StateRead
- from ....schemas import TaskCollectPip
- from ....schemas import TaskCollectStatus
+ from ....schemas.v1 import StateRead
+ from ....schemas.v1 import TaskCollectPipV1
+ from ....schemas.v1 import TaskCollectStatusV1
  from ....security import current_active_user
  from ....security import current_active_verified_user
  from ....security import User
+ from fractal_server.tasks.endpoint_operations import create_package_dir_pip
+ from fractal_server.tasks.endpoint_operations import download_package
+ from fractal_server.tasks.endpoint_operations import inspect_package
+ from fractal_server.tasks.utils import get_collection_log
+ from fractal_server.tasks.utils import slugify_task_name
+ from fractal_server.tasks.v1._TaskCollectPip import _TaskCollectPip
+ from fractal_server.tasks.v1.background_operations import (
+ background_collect_pip,
+ )
+ from fractal_server.tasks.v1.get_collection_data import get_collection_data

  router = APIRouter()

@@ -57,7 +59,7 @@ logger = set_logger(__name__)
  },
  )
  async def collect_tasks_pip(
- task_collect: TaskCollectPip,
+ task_collect: TaskCollectPipV1,
  background_tasks: BackgroundTasks,
  response: Response,
  user: User = Depends(current_active_verified_user),
@@ -162,7 +164,7 @@ async def collect_tasks_pip(

  # All checks are OK, proceed with task collection
  full_venv_path = venv_path.relative_to(settings.FRACTAL_TASKS_DIR)
- collection_status = TaskCollectStatus(
+ collection_status = TaskCollectStatusV1(
  status="pending", venv_path=full_venv_path, package=task_pkg.package
  )

@@ -214,7 +216,7 @@ async def check_collection_status(
  status_code=status.HTTP_404_NOT_FOUND,
  detail=f"No task collection info with id={state_id}",
  )
- data = TaskCollectStatus(**state.data)
+ data = TaskCollectStatusV1(**state.data)

  # In some cases (i.e. a successful or ongoing task collection), data.log is
  # not set; if so, we collect the current logs
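
Finally, a rough usage sketch (not taken from the diff) of the renamed task-collection status schema: the constructor arguments mirror the ones collect_tasks_pip passes in the hunk above, the concrete values are made up, and the dict round-trip is only an assumption about how the status ends up in State.data.

# Illustrative only: construct and round-trip the V1-suffixed status schema.
from pathlib import Path

from fractal_server.app.schemas.v1 import TaskCollectStatusV1

# Fields as used in collect_tasks_pip; the values here are hypothetical.
collection_status = TaskCollectStatusV1(
    status="pending",
    venv_path=Path(".fractal/example-package0.1.0"),
    package="example-package",
)
state_data = collection_status.dict()  # assumption: pydantic-v1 style dump
restored = TaskCollectStatusV1(**state_data)  # as in check_collection_status
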