fractal-server 2.7.0a0__py3-none-any.whl → 2.7.0a2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/__main__.py +10 -4
- fractal_server/app/models/linkusergroup.py +11 -0
- fractal_server/app/models/v2/task.py +19 -5
- fractal_server/app/routes/admin/v2/__init__.py +16 -0
- fractal_server/app/routes/admin/{v2.py → v2/job.py} +20 -191
- fractal_server/app/routes/admin/v2/project.py +43 -0
- fractal_server/app/routes/admin/v2/task.py +146 -0
- fractal_server/app/routes/admin/v2/task_group.py +134 -0
- fractal_server/app/routes/api/v2/task.py +13 -0
- fractal_server/app/routes/api/v2/task_collection_custom.py +7 -1
- fractal_server/app/routes/api/v2/task_group.py +11 -3
- fractal_server/app/routes/auth/_aux_auth.py +30 -29
- fractal_server/app/routes/auth/current_user.py +5 -5
- fractal_server/app/routes/auth/router.py +0 -2
- fractal_server/app/routes/auth/users.py +8 -7
- fractal_server/app/schemas/user.py +1 -2
- fractal_server/app/schemas/v2/manifest.py +12 -1
- fractal_server/app/schemas/v2/task.py +73 -25
- fractal_server/app/schemas/v2/task_group.py +28 -1
- fractal_server/data_migrations/2_7_0.py +274 -0
- fractal_server/migrations/versions/742b74e1cc6e_revamp_taskv2_and_taskgroupv2.py +101 -0
- fractal_server/migrations/versions/df7cc3501bf7_linkusergroup_timestamp_created.py +42 -0
- fractal_server/tasks/v2/background_operations.py +12 -1
- fractal_server/tasks/v2/background_operations_ssh.py +11 -1
- fractal_server/tasks/v2/endpoint_operations.py +42 -0
- {fractal_server-2.7.0a0.dist-info → fractal_server-2.7.0a2.dist-info}/METADATA +1 -1
- {fractal_server-2.7.0a0.dist-info → fractal_server-2.7.0a2.dist-info}/RECORD +31 -25
- fractal_server/app/routes/auth/group_names.py +0 -34
- {fractal_server-2.7.0a0.dist-info → fractal_server-2.7.0a2.dist-info}/LICENSE +0 -0
- {fractal_server-2.7.0a0.dist-info → fractal_server-2.7.0a2.dist-info}/WHEEL +0 -0
- {fractal_server-2.7.0a0.dist-info → fractal_server-2.7.0a2.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,274 @@
|
|
1
|
+
import logging
|
2
|
+
import os
|
3
|
+
from pathlib import Path
|
4
|
+
from typing import Any
|
5
|
+
|
6
|
+
from sqlalchemy import select
|
7
|
+
from sqlalchemy.orm import Session
|
8
|
+
|
9
|
+
from fractal_server.app.db import get_sync_db
|
10
|
+
from fractal_server.app.models import TaskGroupV2
|
11
|
+
from fractal_server.app.models import TaskV2
|
12
|
+
from fractal_server.app.models import UserGroup
|
13
|
+
from fractal_server.app.models import UserOAuth
|
14
|
+
from fractal_server.app.models import UserSettings
|
15
|
+
from fractal_server.app.security import FRACTAL_DEFAULT_GROUP_NAME
|
16
|
+
from fractal_server.data_migrations.tools import _check_current_version
|
17
|
+
from fractal_server.utils import get_timestamp
|
18
|
+
|
19
|
+
logger = logging.getLogger("fix_db")
|
20
|
+
|
21
|
+
|
22
|
+
def get_unique_value(list_of_objects: list[dict[str, Any]], key: str):
    """
    Return the single value that `key` takes across all of `list_of_objects`.

    Objects missing `key` contribute the value `None`.

    Raises:
        RuntimeError: If `key` does not map to exactly one distinct value.
    """
    unique_values = {this_obj.get(key, None) for this_obj in list_of_objects}
    if len(unique_values) != 1:
        raise RuntimeError(
            f"There must be a single taskgroup `{key}`, "
            f"but {unique_values=}"
        )
    return unique_values.pop()
|
36
|
+
|
37
|
+
|
38
|
+
def get_users_mapping(db) -> dict[str, int]:
    """
    Map each user's "name" (`username`, falling back to `slurm_user`)
    to the corresponding user ID.

    Raises:
        ValueError: If a user has neither `username` nor `slurm_user`,
            or if two users resolve to the same name.
    """
    logger.warning("START _check_users")
    print()

    query = select(UserOAuth).order_by(UserOAuth.id)
    mapping: dict[str, int] = {}
    for user in db.execute(query).scalars().unique().all():
        logger.warning(f"START handling user {user.id}: '{user.email}'")
        # "name" falls back to the SLURM user when `username` is unset
        settings = db.get(UserSettings, user.user_settings_id)
        name = user.username or settings.slurm_user
        logger.warning(f"{name=}")
        # Reject users with no usable name
        if name is None:
            raise ValueError(
                f"User with {user.id=} and {user.email=} has no "
                "`username` or `slurm_user` set."
                "Please fix this issue manually."
            )
        # Reject name collisions between users
        if mapping.get(name, None) is not None:
            raise ValueError(
                f"User with {user.id=} and {user.email=} has same "
                f"`(username or slurm_user)={name}` as another user. "
                "Please fix this issue manually."
            )
        mapping[name] = user.id
        logger.warning(f"END handling user {user.id}: '{user.email}'")
        print()
    logger.warning("END _check_users")
    print()
    return mapping
|
73
|
+
|
74
|
+
|
75
|
+
def get_default_user_group_id(db):
    """
    Return the ID of the default Fractal user group.

    Raises:
        RuntimeError: If no group named `FRACTAL_DEFAULT_GROUP_NAME` exists.
    """
    query = select(UserGroup.id).where(
        UserGroup.name == FRACTAL_DEFAULT_GROUP_NAME
    )
    group_id = db.execute(query).scalars().one_or_none()
    if group_id is None:
        raise RuntimeError("Default user group is missing.")
    return group_id
|
85
|
+
|
86
|
+
|
87
|
+
def get_default_user_id(db):
    """
    Return the ID of the user designated (via the
    `FRACTAL_V27_DEFAULT_USER_EMAIL` env variable) to own
    previously-global tasks.

    Raises:
        ValueError: If the env variable is not set.
        RuntimeError: If no user with that email exists.
    """
    default_email = os.getenv("FRACTAL_V27_DEFAULT_USER_EMAIL")
    if default_email is None:
        raise ValueError(
            "FRACTAL_V27_DEFAULT_USER_EMAIL env variable is not set. "
            "Please set it to be the email of the user who will own "
            "all previously-global tasks."
        )

    query = select(UserOAuth.id).where(UserOAuth.email == default_email)
    user_id = db.execute(query).scalars().one_or_none()
    if user_id is None:
        raise RuntimeError(
            f"Default user with email {default_email} is missing."
        )
    return user_id
|
106
|
+
|
107
|
+
|
108
|
+
def prepare_task_groups(
    *,
    user_mapping: dict[str, int],
    default_user_group_id: int,
    default_user_id: int,
    dry_run: bool,
    db: Session,
):
    """
    Group all existing `TaskV2` rows into new `TaskGroupV2` rows.

    Tasks with a pip-style `source` (`pip_remote:...` / `pip_local:...`,
    with exactly six colon-separated fields) are grouped by
    package/version/extras/python-version and assigned to the default user;
    all other tasks each get their own single-task group, owned by the user
    resolved from `task.owner` through `user_mapping`.

    Args:
        user_mapping: Map from user "name" to user ID (see `get_users_mapping`).
        default_user_group_id: Group ID assigned to every new task group.
        default_user_id: Owner of task groups created from pip-style tasks.
        dry_run: If `True`, only print what would be created.
        db: Synchronous database session.

    Raises:
        RuntimeError: On invalid sources, missing owners/users, or
            duplicated non-pip task-group keys.
    """
    stm_tasks = select(TaskV2).order_by(TaskV2.id)
    res = db.execute(stm_tasks).scalars().all()
    task_groups = {}
    for task in res:
        if (
            task.source.startswith(("pip_remote", "pip_local"))
            and task.source.count(":") == 5
        ):
            # Pip-style source: decode the six colon-separated fields
            source_fields = task.source.split(":")
            (
                collection_mode,
                pkg_name,
                version,
                extras,
                python_version,
                name,
            ) = source_fields
            # All tasks of the same package/version/extras/python-version
            # end up in the same group
            task_group_key = ":".join(
                [pkg_name, version, extras, python_version]
            )
            if collection_mode == "pip_remote":
                origin = "pypi"
            elif collection_mode == "pip_local":
                origin = "wheel-file"
            else:
                raise RuntimeError(
                    f"Invalid {collection_mode=} for {task.source=}."
                )
            new_obj = dict(
                task=task,
                user_id=default_user_id,
                origin=origin,
                pkg_name=pkg_name,
                version=version,
                pip_extras=extras,
                python_version=python_version,
            )

            if task_group_key in task_groups:
                task_groups[task_group_key].append(new_obj)
            else:
                task_groups[task_group_key] = [new_obj]
        else:
            # Non-pip source: one single-task group per task, owned by the
            # user resolved from `task.owner`
            owner = task.owner
            if owner is None:
                raise RuntimeError(
                    "Error: `owner` is `None` for "
                    f"{task.id=}, {task.source=}, {task.owner=}."
                )
            user_id = user_mapping.get(owner, None)
            if user_id is None:
                raise RuntimeError(
                    "Error: `user_id` is `None` for "
                    f"{task.id=}, {task.source=}, {task.owner=}"
                )
            task_group_key = "-".join(
                [
                    "NOT_PIP",
                    str(task.id),
                    str(task.version),
                    task.source,
                    str(task.owner),
                ]
            )
            if task_group_key in task_groups:
                raise RuntimeError(
                    f"ERROR: Duplicated {task_group_key=} for "
                    f"{task.id=}, {task.source=}, {task.owner=}"
                )
            else:
                task_groups[task_group_key] = [
                    dict(
                        task=task,
                        user_id=user_id,
                        origin="other",
                        pkg_name=task.source,
                        version=task.version,
                    )
                ]

    for task_group_key, task_group_objects in task_groups.items():
        print("-" * 80)
        # FIX: closing quote was missing in this message
        print(f"Start handling task group with key '{task_group_key}'")
        task_group_task_list = [item["task"] for item in task_group_objects]
        print("List of tasks to be included")
        for task in task_group_task_list:
            print(f"  {task.id=}, {task.source=}")

        # Attributes must be unique across the group (missing keys, e.g.
        # `pip_extras` for non-pip groups, uniformly resolve to `None`)
        task_group_attributes = dict(
            pkg_name=get_unique_value(task_group_objects, "pkg_name"),
            version=get_unique_value(task_group_objects, "version"),
            origin=get_unique_value(task_group_objects, "origin"),
            user_id=get_unique_value(task_group_objects, "user_id"),
            user_group_id=default_user_group_id,
            python_version=get_unique_value(
                task_group_objects, "python_version"
            ),
            pip_extras=get_unique_value(task_group_objects, "pip_extras"),
            task_list=task_group_task_list,
            active=True,
            timestamp_created=get_timestamp(),
        )

        if not task_group_key.startswith("NOT_PIP"):
            # For pip groups, derive venv/package paths from the python
            # executable used in the first task's command
            cmd = next(
                getattr(task_group_task_list[0], attr_name)
                for attr_name in ["command_non_parallel", "command_parallel"]
                if getattr(task_group_task_list[0], attr_name) is not None
            )
            python_bin = cmd.split()[0]
            venv_path = Path(python_bin).parents[1].as_posix()
            path = Path(python_bin).parents[2].as_posix()
            task_group_attributes["venv_path"] = venv_path
            task_group_attributes["path"] = path

        print()
        print("List of task-group attributes")
        for key, value in task_group_attributes.items():
            if key != "task_list":
                print(f"  {key}: {value}")

        print()

        if dry_run:
            # FIX: closing quote was missing in this message
            print(
                "End dry-run of handling task group with key "
                f"'{task_group_key}'"
            )
            print("-" * 80)
            continue

        task_group = TaskGroupV2(**task_group_attributes)
        db.add(task_group)
        db.commit()
        db.refresh(task_group)
        logger.warning(f"Created task group {task_group.id=}")
        print()

    return
|
255
|
+
|
256
|
+
|
257
|
+
def fix_db(dry_run: bool = False):
    """
    Entry point of the 2.7.0 data migration.

    Args:
        dry_run: If `True`, only report the task groups that would be
            created, without writing anything to the database.
    """
    logger.warning("START execution of fix_db function")
    _check_current_version("2.7.0")

    with next(get_sync_db()) as db:
        # Keyword arguments are evaluated in order, so the user-related
        # lookups run before the task groups are prepared.
        prepare_task_groups(
            user_mapping=get_users_mapping(db),
            default_user_id=get_default_user_id(db),
            default_user_group_id=get_default_user_group_id(db),
            db=db,
            dry_run=dry_run,
        )

    logger.warning("END of execution of fix_db function")
|
@@ -0,0 +1,101 @@
|
|
1
|
+
"""Revamp TaskV2 and TaskGroupV2
|
2
|
+
|
3
|
+
Revision ID: 742b74e1cc6e
|
4
|
+
Revises: df7cc3501bf7
|
5
|
+
Create Date: 2024-10-07 16:56:37.399878
|
6
|
+
|
7
|
+
"""
|
8
|
+
import sqlalchemy as sa
|
9
|
+
import sqlmodel
|
10
|
+
from alembic import op
|
11
|
+
|
12
|
+
|
13
|
+
# revision identifiers, used by Alembic.
|
14
|
+
revision = "742b74e1cc6e"
|
15
|
+
down_revision = "df7cc3501bf7"
|
16
|
+
branch_labels = None
|
17
|
+
depends_on = None
|
18
|
+
|
19
|
+
|
20
|
+
def upgrade() -> None:
    """Add packaging columns to `taskgroupv2` and metadata columns to `taskv2`."""
    # (column name, nullable) pairs — origin and pkg_name are mandatory.
    taskgroup_string_columns = [
        ("origin", False),
        ("pkg_name", False),
        ("version", True),
        ("python_version", True),
        ("path", True),
        ("venv_path", True),
        ("pip_extras", True),
    ]
    with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
        for column_name, is_nullable in taskgroup_string_columns:
            batch_op.add_column(
                sa.Column(
                    column_name,
                    sqlmodel.sql.sqltypes.AutoString(),
                    nullable=is_nullable,
                )
            )

    with op.batch_alter_table("taskv2", schema=None) as batch_op:
        # New optional descriptive attributes for tasks.
        for column_name in ("category", "modality", "authors"):
            batch_op.add_column(
                sa.Column(
                    column_name,
                    sqlmodel.sql.sqltypes.AutoString(),
                    nullable=True,
                )
            )
        # Tags default to an empty JSON list for existing rows.
        batch_op.add_column(
            sa.Column("tags", sa.JSON(), server_default="[]", nullable=False)
        )
|
82
|
+
|
83
|
+
|
84
|
+
def downgrade() -> None:
    """Drop the columns added by `upgrade`, in reverse order of creation."""
    with op.batch_alter_table("taskv2", schema=None) as batch_op:
        for column_name in ("tags", "authors", "modality", "category"):
            batch_op.drop_column(column_name)

    with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
        for column_name in (
            "pip_extras",
            "venv_path",
            "path",
            "python_version",
            "version",
            "pkg_name",
            "origin",
        ):
            batch_op.drop_column(column_name)
|
@@ -0,0 +1,42 @@
|
|
1
|
+
"""LinkUserGroup.timestamp_created
|
2
|
+
|
3
|
+
Revision ID: df7cc3501bf7
|
4
|
+
Revises: 7cf1baae8fb4
|
5
|
+
Create Date: 2024-10-03 13:55:53.272269
|
6
|
+
|
7
|
+
"""
|
8
|
+
from datetime import datetime
|
9
|
+
from datetime import timezone
|
10
|
+
|
11
|
+
import sqlalchemy as sa
|
12
|
+
from alembic import op
|
13
|
+
|
14
|
+
|
15
|
+
# revision identifiers, used by Alembic.
|
16
|
+
revision = "df7cc3501bf7"
|
17
|
+
down_revision = "7cf1baae8fb4"
|
18
|
+
branch_labels = None
|
19
|
+
depends_on = None
|
20
|
+
|
21
|
+
|
22
|
+
def upgrade() -> None:
    """Add non-nullable `linkusergroup.timestamp_created`, backfilled with a sentinel."""
    # Pre-existing link rows get this placeholder creation timestamp.
    sentinel_timestamp = str(datetime(2000, 1, 1, tzinfo=timezone.utc))
    with op.batch_alter_table("linkusergroup", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column(
                "timestamp_created",
                sa.DateTime(timezone=True),
                nullable=False,
                server_default=sentinel_timestamp,
            )
        )

    # Clear the server default on `project.timestamp_created`.
    with op.batch_alter_table("project", schema=None) as batch_op:
        batch_op.alter_column("timestamp_created", server_default=None)
|
35
|
+
|
36
|
+
|
37
|
+
def downgrade() -> None:
    """Drop `linkusergroup.timestamp_created` (reverts `upgrade`)."""
    with op.batch_alter_table("linkusergroup", schema=None) as batch_op:
        batch_op.drop_column("timestamp_created")
|
@@ -172,6 +172,7 @@ def _prepare_tasks_metadata(
|
|
172
172
|
}
|
173
173
|
),
|
174
174
|
**task_attributes,
|
175
|
+
authors=package_manifest.authors,
|
175
176
|
)
|
176
177
|
task_list.append(task_obj)
|
177
178
|
return task_list
|
@@ -274,9 +275,19 @@ async def background_collect_pip(
|
|
274
275
|
)
|
275
276
|
_check_task_files_exist(task_list=task_list)
|
276
277
|
|
278
|
+
# Prepare some task-group attributes
|
279
|
+
task_group_attrs = dict(
|
280
|
+
pkg_name=task_pkg.package_name,
|
281
|
+
version=task_pkg.package_version,
|
282
|
+
)
|
283
|
+
if task_pkg.is_local_package:
|
284
|
+
task_group_attrs["origin"] = "wheel-file"
|
285
|
+
else:
|
286
|
+
task_group_attrs["origin"] = "pypi"
|
287
|
+
|
277
288
|
task_group = create_db_task_group_and_tasks(
|
278
289
|
task_list=task_list,
|
279
|
-
task_group_obj=TaskGroupCreateV2(),
|
290
|
+
task_group_obj=TaskGroupCreateV2(**task_group_attrs),
|
280
291
|
user_id=user_id,
|
281
292
|
user_group_id=user_group_id,
|
282
293
|
db=db,
|
@@ -316,9 +316,19 @@ def background_collect_pip_ssh(
|
|
316
316
|
python_bin=Path(python_bin),
|
317
317
|
)
|
318
318
|
|
319
|
+
# Prepare some task-group attributes
|
320
|
+
task_group_attrs = dict(
|
321
|
+
pkg_name=task_pkg.package_name,
|
322
|
+
version=task_pkg.package_version,
|
323
|
+
)
|
324
|
+
if task_pkg.is_local_package:
|
325
|
+
task_group_attrs["origin"] = "wheel-file"
|
326
|
+
else:
|
327
|
+
task_group_attrs["origin"] = "pypi"
|
328
|
+
|
319
329
|
create_db_task_group_and_tasks(
|
320
330
|
task_list=task_list,
|
321
|
-
task_group_obj=TaskGroupCreateV2(),
|
331
|
+
task_group_obj=TaskGroupCreateV2(**task_group_attrs),
|
322
332
|
user_id=user_id,
|
323
333
|
user_group_id=user_group_id,
|
324
334
|
db=db,
|
@@ -5,17 +5,24 @@ from typing import Optional
|
|
5
5
|
from typing import Union
|
6
6
|
from zipfile import ZipFile
|
7
7
|
|
8
|
+
from fastapi import HTTPException
|
9
|
+
from fastapi import status
|
10
|
+
from httpx import AsyncClient
|
11
|
+
from httpx import TimeoutException
|
12
|
+
|
8
13
|
from ._TaskCollectPip import _TaskCollectPip
|
9
14
|
from .utils import _parse_wheel_filename
|
10
15
|
from .utils import get_python_interpreter_v2
|
11
16
|
from fractal_server.app.schemas.v2 import ManifestV2
|
12
17
|
from fractal_server.config import get_settings
|
13
18
|
from fractal_server.logger import get_logger
|
19
|
+
from fractal_server.logger import set_logger
|
14
20
|
from fractal_server.syringe import Inject
|
15
21
|
from fractal_server.utils import execute_command
|
16
22
|
|
17
23
|
|
18
24
|
FRACTAL_PUBLIC_TASK_SUBDIR = ".fractal"
|
25
|
+
logger = set_logger(__name__)
|
19
26
|
|
20
27
|
|
21
28
|
async def download_package(
|
@@ -134,3 +141,38 @@ def create_package_dir_pip(
|
|
134
141
|
if create:
|
135
142
|
venv_path.mkdir(exist_ok=False, parents=True)
|
136
143
|
return venv_path
|
144
|
+
|
145
|
+
|
146
|
+
async def get_package_version_from_pypi(name: str) -> str:
    """
    Query the PyPI JSON API for the latest released version of a package.

    Ref https://warehouse.pypa.io/api-reference/json.html.

    Arguments:
        name: Package name.

    Raises:
        HTTPException: 422, if the request fails or times out, returns a
            non-200 status, or the payload lacks the expected
            `info.version` field.
    """
    url = f"https://pypi.org/pypi/{name}/json"
    hint = f"Hint: specify the required version for '{name}'."
    try:
        async with AsyncClient(timeout=5.0) as client:
            res = await client.get(url)
            if res.status_code == 200:
                # KeyError (missing `info.version`) is handled below
                return res.json()["info"]["version"]
            raise HTTPException(
                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                detail=(
                    f"Could not get {url} (status_code {res.status_code})."
                    f"\n{hint}"
                ),
            )
    except (KeyError, TimeoutException) as e:
        logger.warning(
            f"An error occurred while getting {url}. Original error: {str(e)}."
        )
        raise HTTPException(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=(f"An error occurred while getting {url}.\n{hint}"),
        )
|