fractal-server 2.15.2__py3-none-any.whl → 2.15.3__py3-none-any.whl
This diff shows the changes between two package versions that were publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/routes/api/v2/_aux_functions.py +40 -0
- fractal_server/app/routes/api/v2/history.py +62 -23
- {fractal_server-2.15.2.dist-info → fractal_server-2.15.3.dist-info}/METADATA +1 -1
- {fractal_server-2.15.2.dist-info → fractal_server-2.15.3.dist-info}/RECORD +8 -8
- {fractal_server-2.15.2.dist-info → fractal_server-2.15.3.dist-info}/LICENSE +0 -0
- {fractal_server-2.15.2.dist-info → fractal_server-2.15.3.dist-info}/WHEEL +0 -0
- {fractal_server-2.15.2.dist-info → fractal_server-2.15.3.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.15.2"
+__VERSION__ = "2.15.3"
fractal_server/app/routes/api/v2/_aux_functions.py
CHANGED
@@ -6,6 +6,7 @@ from typing import Literal
 
 from fastapi import HTTPException
 from fastapi import status
+from sqlalchemy.exc import MultipleResultsFound
 from sqlalchemy.orm.attributes import flag_modified
 from sqlmodel import select
 from sqlmodel.sql.expression import SelectOfScalar
@@ -19,6 +20,9 @@ from ....models.v2 import TaskV2
 from ....models.v2 import WorkflowTaskV2
 from ....models.v2 import WorkflowV2
 from ....schemas.v2 import JobStatusTypeV2
+from fractal_server.logger import set_logger
+
+logger = set_logger(__name__)
 
 
 async def _get_project_check_owner(
@@ -499,3 +503,39 @@ async def _get_workflowtask_or_404(
         )
     else:
         return wftask
+
+
+async def _get_submitted_job_or_none(
+    *,
+    dataset_id: int,
+    workflow_id: int,
+    db: AsyncSession,
+) -> JobV2 | None:
+    """
+    Get the submitted job for given dataset/workflow, if any.
+
+    This function also handles the invalid branch where more than one job
+    is found.
+
+    Args:
+        dataset_id:
+        workflow_id:
+        db:
+    """
+    res = await db.execute(
+        _get_submitted_jobs_statement()
+        .where(JobV2.dataset_id == dataset_id)
+        .where(JobV2.workflow_id == workflow_id)
+    )
+    try:
+        return res.scalars().one_or_none()
+    except MultipleResultsFound as e:
+        error_msg = (
+            "Multiple running jobs found for "
+            f"{dataset_id=} and {workflow_id=}."
+        )
+        logger.error(f"{error_msg} Original error: {str(e)}.")
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=error_msg,
+        )
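For orientation, the sketch below approximates the query that the new `_get_submitted_job_or_none` helper issues. It assumes that the pre-existing `_get_submitted_jobs_statement()` helper (not shown in this diff) restricts `JobV2` rows to jobs in the submitted state; the `JobStatusTypeV2.SUBMITTED` filter and the absolute import paths are assumptions, not part of the diff.

# Illustrative sketch, not part of the release: an approximate standalone
# equivalent of the statement built inside _get_submitted_job_or_none().
from fractal_server.app.models.v2 import JobV2  # assumed absolute import path
from fractal_server.app.schemas.v2 import JobStatusTypeV2  # assumed path
from sqlmodel import select


async def get_single_submitted_job(db, dataset_id: int, workflow_id: int):
    stm = (
        select(JobV2)
        # Assumed filter: _get_submitted_jobs_statement() is expected to
        # restrict JobV2 rows to jobs still in the SUBMITTED state.
        .where(JobV2.status == JobStatusTypeV2.SUBMITTED)
        .where(JobV2.dataset_id == dataset_id)
        .where(JobV2.workflow_id == workflow_id)
    )
    res = await db.execute(stm)
    # scalars().one_or_none() returns the single match or None, and raises
    # sqlalchemy.exc.MultipleResultsFound if two or more submitted jobs exist
    # for the same dataset/workflow pair; the real helper turns that case
    # into an HTTP 422 response.
    return res.scalars().one_or_none()

The history.py change below is the actual consumer of the helper: it uses the returned job to decide which workflow tasks should be reported as submitted.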
fractal_server/app/routes/api/v2/history.py
CHANGED
@@ -1,5 +1,3 @@
-from copy import deepcopy
-
 from fastapi import APIRouter
 from fastapi import Depends
 from fastapi import HTTPException
@@ -9,6 +7,7 @@ from sqlmodel import func
 from sqlmodel import select
 
 from ._aux_functions import _get_dataset_check_owner
+from ._aux_functions import _get_submitted_job_or_none
 from ._aux_functions import _get_workflow_check_owner
 from ._aux_functions_history import _verify_workflow_and_dataset_access
 from ._aux_functions_history import get_history_run_or_404
@@ -72,6 +71,7 @@ async def get_workflow_tasks_statuses(
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> JSONResponse:
+
     # Access control
     workflow = await _get_workflow_check_owner(
         project_id=project_id,
@@ -86,6 +86,19 @@ async def get_workflow_tasks_statuses(
         db=db,
     )
 
+    running_job = await _get_submitted_job_or_none(
+        db=db,
+        dataset_id=dataset_id,
+        workflow_id=workflow_id,
+    )
+    if running_job is not None:
+        running_wftasks = workflow.task_list[
+            running_job.first_task_index : running_job.last_task_index + 1
+        ]
+        running_wftask_ids = [wft.id for wft in running_wftasks]
+    else:
+        running_wftask_ids = []
+
     response: dict[int, dict[str, int | str] | None] = {}
     for wftask in workflow.task_list:
         res = await db.execute(
@@ -95,17 +108,37 @@ async def get_workflow_tasks_statuses(
             .order_by(HistoryRun.timestamp_started.desc())
             .limit(1)
         )
-
-
-
-
-
-
+        latest_run = res.scalar_one_or_none()
+
+        if latest_run is None:
+            if wftask.id in running_wftask_ids:
+                logger.debug(f"A1: No HistoryRun for {wftask.id=}.")
+                response[wftask.id] = dict(status=HistoryUnitStatus.SUBMITTED)
+            else:
+                logger.debug(f"A2: No HistoryRun for {wftask.id=}.")
+                response[wftask.id] = None
             continue
-
-
-
-
+        else:
+            if wftask.id in running_wftask_ids:
+                if latest_run.job_id == running_job.id:
+                    logger.debug(
+                        f"B1 for {wftask.id} and {latest_run.job_id=}."
+                    )
+                    response[wftask.id] = dict(status=latest_run.status)
+                else:
+                    logger.debug(
+                        f"B2 for {wftask.id} and {latest_run.job_id=}."
+                    )
+                    response[wftask.id] = dict(
+                        status=HistoryUnitStatus.SUBMITTED
+                    )
+            else:
+                logger.debug(f"C1: {wftask.id=} not in {running_wftask_ids=}.")
+                response[wftask.id] = dict(status=latest_run.status)
+
+        response[wftask.id][
+            "num_available_images"
+        ] = latest_run.num_available_images
 
         for target_status in HistoryUnitStatus:
             stm = (
@@ -122,18 +155,24 @@ async def get_workflow_tasks_statuses(
             num_images = res.scalar()
             response[wftask.id][f"num_{target_status}_images"] = num_images
 
-
-
-
-
-
-
-
-
-
-
+    # Set `num_available_images=None` for cases where it would be
+    # smaller than `num_total_images`
+    values_to_skip = (None, {"status": HistoryUnitStatus.SUBMITTED})
+    response_update = {}
+    for wftask_id, status_value in response.items():
+        if status_value in values_to_skip:
+            # Skip cases where status has no image counters
+            continue
+        num_total_images = sum(
+            status_value[f"num_{target_status}_images"]
+            for target_status in HistoryUnitStatus
+        )
+        if num_total_images > status_value["num_available_images"]:
+            status_value["num_available_images"] = None
+        response_update[wftask_id] = status_value
+    response.update(response_update)
 
-    return JSONResponse(content=
+    return JSONResponse(content=response, status_code=200)
 
 
 @router.get("/project/{project_id}/status/run/")
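To make the A1/A2/B1/B2/C1 branches above concrete, here is a hypothetical in-memory `response` dict (before JSON serialization) for a three-task workflow in which task 1 completed in an earlier job, task 2 belongs to the currently submitted job but has no `HistoryRun` yet, and task 3 has never run and is not part of that job. The lowercase status strings and the `num_*_images` key names assume `HistoryUnitStatus` has the members `submitted`, `done`, and `failed`; all numbers are made up.

# Hypothetical example of the response built by get_workflow_tasks_statuses().
{
    1: {  # C1: latest HistoryRun exists, task not part of the running job
        "status": "done",
        "num_available_images": 42,
        "num_submitted_images": 0,
        "num_done_images": 42,
        "num_failed_images": 0,
    },
    2: {"status": "submitted"},  # A1: in the running job, no HistoryRun yet
    3: None,  # A2: no HistoryRun and not part of the running job
}

Entry 2 matches the `values_to_skip` tuple, so the final normalization pass leaves it untouched; entry 1 keeps its `num_available_images` because the per-status counters sum to the same value.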
{fractal_server-2.15.2.dist-info → fractal_server-2.15.3.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-fractal_server/__init__.py,sha256=
+fractal_server/__init__.py,sha256=TohihjKSFnz5CZYMVH94PEv_l0OHohZckQFN39DhyqE,23
 fractal_server/__main__.py,sha256=rkM8xjY1KeS3l63irB8yCrlVobR-73uDapC4wvrIlxI,6957
 fractal_server/alembic.ini,sha256=MWwi7GzjzawI9cCAK1LW7NxIBQDUqD12-ptJoq5JpP0,3153
 fractal_server/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -30,14 +30,14 @@ fractal_server/app/routes/admin/v2/task_group.py,sha256=biibAvMPD2w-267eyTm3wH2s
 fractal_server/app/routes/admin/v2/task_group_lifecycle.py,sha256=2J3M9VXWD_0j9jRTZ5APuUXl9E-aVv0qF8K02vvcO3s,9150
 fractal_server/app/routes/api/__init__.py,sha256=B8l6PSAhR10iZqHEiyTat-_0tkeKdrCigIE6DJGP5b8,638
 fractal_server/app/routes/api/v2/__init__.py,sha256=D3sRRsqkmZO6kBxUjg40q0aRDsnuXI4sOOfn0xF9JsM,2820
-fractal_server/app/routes/api/v2/_aux_functions.py,sha256=
+fractal_server/app/routes/api/v2/_aux_functions.py,sha256=YU7yT9L6yc01VMWozXPnRcx0X0063rTylmeU6PKNyKM,14260
 fractal_server/app/routes/api/v2/_aux_functions_history.py,sha256=Z23xwvBaVEEQ5B-JsWZJpjj4_QqoXqHYONztnbAH6gw,4425
 fractal_server/app/routes/api/v2/_aux_functions_task_lifecycle.py,sha256=GpKfw9yj01LmOAuNMTOreU1PFkCKpjK5oCt7_wp35-A,6741
 fractal_server/app/routes/api/v2/_aux_functions_task_version_update.py,sha256=WLDOYCnb6fnS5avKflyx6yN24Vo1n5kJk5ZyiKbzb8Y,1175
 fractal_server/app/routes/api/v2/_aux_functions_tasks.py,sha256=MNty8CBnTMPSAKE5gMT7tCY8QWpCQyhft_shq12hHpA,12208
 fractal_server/app/routes/api/v2/_aux_task_group_disambiguation.py,sha256=8x1_q9FyCzItnPmdSdLQuwUTy4B9xCsXscp97_lJcpM,4635
 fractal_server/app/routes/api/v2/dataset.py,sha256=6u4MFqJ3YZ0Zq6Xx8CRMrTPKW55ZaR63Uno21DqFr4Q,8889
-fractal_server/app/routes/api/v2/history.py,sha256=
+fractal_server/app/routes/api/v2/history.py,sha256=ErLqkJbhx9XzHL4KQvMraVAtD9WOmDNh5tZi5wmNkL0,17114
 fractal_server/app/routes/api/v2/images.py,sha256=TS1ltUhP0_SaViupdHrSh3MLDi5OVk-lOhE1VCVyZj0,7869
 fractal_server/app/routes/api/v2/job.py,sha256=8xRTwh_OCHmK9IfI_zUASa2ozewR0qu0zVBl_a4IvHw,6467
 fractal_server/app/routes/api/v2/pre_submission_checks.py,sha256=2jaaM5WJBTGpOWhm6a42JViT8j4X5hixltxIY1p-188,4936
@@ -230,8 +230,8 @@ fractal_server/types/validators/_workflow_task_arguments_validators.py,sha256=HL
 fractal_server/urls.py,sha256=QjIKAC1a46bCdiPMu3AlpgFbcv6a4l3ABcd5xz190Og,471
 fractal_server/utils.py,sha256=Vn35lApt1T1J8nc09sAVqd10Cy0sa3dLipcljI-hkuk,2185
 fractal_server/zip_tools.py,sha256=tqz_8f-vQ9OBRW-4OQfO6xxY-YInHTyHmZxU7U4PqZo,4885
-fractal_server-2.15.
-fractal_server-2.15.
-fractal_server-2.15.
-fractal_server-2.15.
-fractal_server-2.15.
+fractal_server-2.15.3.dist-info/LICENSE,sha256=QKAharUuhxL58kSoLizKJeZE3mTCBnX6ucmz8W0lxlk,1576
+fractal_server-2.15.3.dist-info/METADATA,sha256=-Z1TLhBsEtPcHcx4oq8Hij5f1mUfDyTiL3ttW1cBmTE,4243
+fractal_server-2.15.3.dist-info/WHEEL,sha256=7dDg4QLnNKTvwIDR9Ac8jJaAmBC_owJrckbC0jjThyA,88
+fractal_server-2.15.3.dist-info/entry_points.txt,sha256=8tV2kynvFkjnhbtDnxAqImL6HMVKsopgGfew0DOp5UY,58
+fractal_server-2.15.3.dist-info/RECORD,,
{fractal_server-2.15.2.dist-info → fractal_server-2.15.3.dist-info}/LICENSE
File without changes
{fractal_server-2.15.2.dist-info → fractal_server-2.15.3.dist-info}/WHEEL
File without changes
{fractal_server-2.15.2.dist-info → fractal_server-2.15.3.dist-info}/entry_points.txt
File without changes