fractal-server: fractal_server-2.7.0a11-py3-none-any.whl → fractal_server-2.8.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/user_settings.py +1 -0
- fractal_server/app/models/v2/task.py +16 -2
- fractal_server/app/routes/admin/v2/task_group.py +7 -0
- fractal_server/app/routes/api/v2/dataset.py +39 -6
- fractal_server/app/routes/api/v2/task.py +4 -6
- fractal_server/app/routes/api/v2/task_collection.py +17 -44
- fractal_server/app/routes/api/v2/task_collection_custom.py +5 -4
- fractal_server/app/schemas/user_settings.py +18 -0
- fractal_server/app/schemas/v2/__init__.py +1 -0
- fractal_server/app/schemas/v2/dataset.py +5 -3
- fractal_server/app/schemas/v2/task_collection.py +20 -4
- fractal_server/app/schemas/v2/task_group.py +8 -1
- fractal_server/app/security/__init__.py +8 -1
- fractal_server/config.py +8 -28
- fractal_server/migrations/versions/19eca0dd47a9_user_settings_project_dir.py +39 -0
- fractal_server/migrations/versions/8e8f227a3e36_update_taskv2_post_2_7_0.py +42 -0
- fractal_server/tasks/utils.py +0 -31
- fractal_server/tasks/v1/background_operations.py +11 -11
- fractal_server/tasks/v1/endpoint_operations.py +5 -5
- fractal_server/tasks/v1/utils.py +2 -2
- fractal_server/tasks/v2/collection_local.py +357 -0
- fractal_server/tasks/v2/{background_operations_ssh.py → collection_ssh.py} +108 -102
- fractal_server/tasks/v2/templates/_1_create_venv.sh +0 -8
- fractal_server/tasks/v2/templates/{_2_upgrade_pip.sh → _2_preliminary_pip_operations.sh} +2 -1
- fractal_server/tasks/v2/templates/_3_pip_install.sh +22 -1
- fractal_server/tasks/v2/templates/_5_pip_show.sh +5 -5
- fractal_server/tasks/v2/utils_background.py +209 -0
- fractal_server/tasks/v2/utils_package_names.py +77 -0
- fractal_server/tasks/v2/{utils.py → utils_python_interpreter.py} +0 -26
- fractal_server/tasks/v2/utils_templates.py +59 -0
- fractal_server/utils.py +48 -3
- {fractal_server-2.7.0a11.dist-info → fractal_server-2.8.0.dist-info}/METADATA +14 -17
- {fractal_server-2.7.0a11.dist-info → fractal_server-2.8.0.dist-info}/RECORD +38 -35
- fractal_server/data_migrations/2_7_0.py +0 -323
- fractal_server/tasks/v2/_venv_pip.py +0 -193
- fractal_server/tasks/v2/background_operations.py +0 -456
- /fractal_server/{tasks/v2/endpoint_operations.py → app/routes/api/v2/_aux_functions_task_collection.py} +0 -0
- {fractal_server-2.7.0a11.dist-info → fractal_server-2.8.0.dist-info}/LICENSE +0 -0
- {fractal_server-2.7.0a11.dist-info → fractal_server-2.8.0.dist-info}/WHEEL +0 -0
- {fractal_server-2.7.0a11.dist-info → fractal_server-2.8.0.dist-info}/entry_points.txt +0 -0
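The file-level summary above can be reproduced locally by comparing the archive listings of the two wheels. Below is a minimal illustrative sketch (not part of fractal-server), assuming both wheel files have already been downloaded into the working directory under their standard filenames:

```python
# Illustrative only: list paths added/removed between the two wheels.
# Assumes the wheels were downloaded locally (e.g. via `pip download`).
import zipfile

OLD_WHEEL = "fractal_server-2.7.0a11-py3-none-any.whl"  # assumed local path
NEW_WHEEL = "fractal_server-2.8.0-py3-none-any.whl"  # assumed local path

with zipfile.ZipFile(OLD_WHEEL) as old, zipfile.ZipFile(NEW_WHEEL) as new:
    old_files = set(old.namelist())
    new_files = set(new.namelist())

for path in sorted(old_files - new_files):
    print(f"removed: {path}")
for path in sorted(new_files - old_files):
    print(f"added:   {path}")
```

This only reports added and removed paths; the per-file `+N -M` counts in the summary additionally come from diffing the extracted file contents.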
fractal_server/data_migrations/2_7_0.py
@@ -1,323 +0,0 @@
-import asyncio
-import logging
-import os
-import sys
-from pathlib import Path
-from typing import Any
-from typing import Optional
-
-from fastapi import HTTPException
-from sqlalchemy import select
-from sqlalchemy.orm import Session
-
-from fractal_server.app.db import get_async_db
-from fractal_server.app.db import get_sync_db
-from fractal_server.app.models import TaskGroupV2
-from fractal_server.app.models import TaskV2
-from fractal_server.app.models import UserGroup
-from fractal_server.app.models import UserOAuth
-from fractal_server.app.models import UserSettings
-from fractal_server.app.routes.api.v2._aux_functions_tasks import (
-    _verify_non_duplication_group_constraint,
-)
-from fractal_server.app.routes.api.v2._aux_functions_tasks import (
-    _verify_non_duplication_user_constraint,
-)
-from fractal_server.app.security import FRACTAL_DEFAULT_GROUP_NAME
-from fractal_server.data_migrations.tools import _check_current_version
-from fractal_server.tasks.utils import _normalize_package_name
-from fractal_server.utils import get_timestamp
-
-logger = logging.getLogger("fix_db")
-
-
-async def check_non_duplication_constraints(
-    *,
-    user_id: int,
-    pkg_name: str,
-    version: Optional[str] = None,
-    user_group_id: Optional[int] = None,
-):
-    try:
-        async for db_async in get_async_db():
-            await _verify_non_duplication_user_constraint(
-                user_id=user_id,
-                pkg_name=pkg_name,
-                version=version,
-                db=db_async,
-            )
-            await _verify_non_duplication_group_constraint(
-                user_group_id=user_group_id,
-                pkg_name=pkg_name,
-                version=version,
-                db=db_async,
-            )
-    except HTTPException as e:
-        logger.error(
-            "Adding a `TaskGroupV2` with "
-            f"{user_id=}, {pkg_name=}, {version=} and {user_group_id=} "
-            "would break the non-duplication constraint."
-        )
-        logger.error(f"Original error: {str(e)}")
-
-        sys.exit("ERROR")
-
-
-def get_unique_value(list_of_objects: list[dict[str, Any]], key: str):
-    """
-    Loop over `list_of_objects` and extract (unique) value for `key`.
-    """
-    unique_values = set()
-    for this_obj in list_of_objects:
-        this_value = this_obj.get(key, None)
-        unique_values.add(this_value)
-    if len(unique_values) != 1:
-        raise RuntimeError(
-            f"There must be a single taskgroup `{key}`, but {unique_values=}"
-        )
-    return unique_values.pop()
-
-
-def get_users_mapping(db) -> dict[str, int]:
-    logger.warning("START _check_users")
-    print()
-
-    stm_users = select(UserOAuth).order_by(UserOAuth.id)
-    users = db.execute(stm_users).scalars().unique().all()
-    name_to_user_id = {}
-    for user in users:
-        logger.warning(f"START handling user {user.id}: '{user.email}'")
-        # Compute "name" attribute
-        user_settings = db.get(UserSettings, user.user_settings_id)
-        name = user.username or user_settings.slurm_user
-        logger.warning(f"{name=}")
-        # Fail for missing values
-        if name is None:
-            raise ValueError(
-                f"User with {user.id=} and {user.email=} has no "
-                "`username` or `slurm_user` set."
-                "Please fix this issue manually."
-            )
-        # Fail for non-unique values
-        existing_user = name_to_user_id.get(name, None)
-        if existing_user is not None:
-            raise ValueError(
-                f"User with {user.id=} and {user.email=} has same "
-                f"`(username or slurm_user)={name}` as another user. "
-                "Please fix this issue manually."
-            )
-        # Update dictionary
-        name_to_user_id[name] = user.id
-        logger.warning(f"END handling user {user.id}: '{user.email}'")
-        print()
-    logger.warning("END _check_users")
-    print()
-    return name_to_user_id
-
-
-def get_default_user_group_id(db):
-    stm = select(UserGroup.id).where(
-        UserGroup.name == FRACTAL_DEFAULT_GROUP_NAME
-    )
-    res = db.execute(stm)
-    default_group_id = res.scalars().one_or_none()
-    if default_group_id is None:
-        raise RuntimeError("Default user group is missing.")
-    else:
-        return default_group_id
-
-
-def get_default_user_id(db):
-    DEFAULT_USER_EMAIL = os.getenv("FRACTAL_V27_DEFAULT_USER_EMAIL")
-    if DEFAULT_USER_EMAIL is None:
-        raise ValueError(
-            "FRACTAL_V27_DEFAULT_USER_EMAIL env variable is not set. "
-            "Please set it to be the email of the user who will own "
-            "all previously-global tasks."
-        )
-
-    stm = select(UserOAuth.id).where(UserOAuth.email == DEFAULT_USER_EMAIL)
-    res = db.execute(stm)
-    default_user_id = res.scalars().one_or_none()
-    if default_user_id is None:
-        raise RuntimeError(
-            f"Default user with email {DEFAULT_USER_EMAIL} is missing."
-        )
-    else:
-        return default_user_id
-
-
-def prepare_task_groups(
-    *,
-    user_mapping: dict[str, int],
-    default_user_group_id: int,
-    default_user_id: int,
-    db: Session,
-):
-    stm_tasks = select(TaskV2).order_by(TaskV2.id)
-    res = db.execute(stm_tasks).scalars().all()
-    task_groups = {}
-    for task in res:
-        if (
-            task.source.startswith(("pip_remote", "pip_local"))
-            and task.source.count(":") == 5
-        ):
-            source_fields = task.source.split(":")
-            (
-                collection_mode,
-                pkg_name,
-                version,
-                extras,
-                python_version,
-                name,
-            ) = source_fields
-            pkg_name = _normalize_package_name(pkg_name)
-            task_group_key = ":".join(
-                [pkg_name, version, extras, python_version]
-            )
-            if collection_mode == "pip_remote":
-                origin = "pypi"
-            elif collection_mode == "pip_local":
-                origin = "wheel-file"
-            else:
-                raise RuntimeError(
-                    f"Invalid {collection_mode=} for {task.source=}."
-                )
-            new_obj = dict(
-                task=task,
-                user_id=default_user_id,
-                origin=origin,
-                pkg_name=pkg_name,
-                version=version,
-                pip_extras=extras,
-                python_version=python_version,
-            )
-
-            if task_group_key in task_groups:
-                task_groups[task_group_key].append(new_obj)
-            else:
-                task_groups[task_group_key] = [new_obj]
-        else:
-            owner = task.owner
-            if owner is None:
-                raise RuntimeError(
-                    "Error: `owner` is `None` for "
-                    f"{task.id=}, {task.source=}, {task.owner=}."
-                )
-            user_id = user_mapping.get(owner, None)
-            if user_id is None:
-                raise RuntimeError(
-                    "Error: `user_id` is `None` for "
-                    f"{task.id=}, {task.source=}, {task.owner=}"
-                )
-            task_group_key = "-".join(
-                [
-                    "NOT_PIP",
-                    str(task.id),
-                    str(task.version),
-                    task.source,
-                    str(task.owner),
-                ]
-            )
-            if task_group_key in task_groups:
-                raise RuntimeError(
-                    f"ERROR: Duplicated {task_group_key=} for "
-                    f"{task.id=}, {task.source=}, {task.owner=}"
-                )
-            else:
-                task_groups[task_group_key] = [
-                    dict(
-                        task=task,
-                        user_id=user_id,
-                        origin="other",
-                        pkg_name=task.source,
-                        version=task.version,
-                    )
-                ]
-
-    for task_group_key, task_group_objects in task_groups.items():
-        print("-" * 80)
-        print(f"Start handling task group with key '{task_group_key}")
-        task_group_task_list = [item["task"] for item in task_group_objects]
-        print("List of tasks to be included")
-        for task in task_group_task_list:
-            print(f" {task.id=}, {task.source=}")
-
-        task_group_attributes = dict(
-            pkg_name=get_unique_value(task_group_objects, "pkg_name"),
-            version=get_unique_value(task_group_objects, "version"),
-            origin=get_unique_value(task_group_objects, "origin"),
-            user_id=get_unique_value(task_group_objects, "user_id"),
-            user_group_id=default_user_group_id,
-            python_version=get_unique_value(
-                task_group_objects, "python_version"
-            ),
-            pip_extras=get_unique_value(task_group_objects, "pip_extras"),
-            task_list=task_group_task_list,
-            active=True,
-            timestamp_created=get_timestamp(),
-        )
-
-        if not task_group_key.startswith("NOT_PIP"):
-            cmd = next(
-                getattr(task_group_task_list[0], attr_name)
-                for attr_name in ["command_non_parallel", "command_parallel"]
-                if getattr(task_group_task_list[0], attr_name) is not None
-            )
-            python_bin = cmd.split()[0]
-            venv_path = Path(python_bin).parents[1].as_posix()
-            path = Path(python_bin).parents[2].as_posix()
-            task_group_attributes["venv_path"] = venv_path
-            task_group_attributes["path"] = path
-
-        print()
-        print("List of task-group attributes")
-        for key, value in task_group_attributes.items():
-            if key != "task_list":
-                print(f" {key}: {value}")
-
-        print()
-
-        # Verify non-duplication constraints
-        asyncio.run(
-            check_non_duplication_constraints(
-                user_id=task_group_attributes["user_id"],
-                user_group_id=task_group_attributes["user_group_id"],
-                pkg_name=task_group_attributes["pkg_name"],
-                version=task_group_attributes["version"],
-            )
-        )
-        logger.warning(
-            "Non-duplication-constraint check is OK, "
-            "proceed and create TaskGroupV2."
-        )
-
-        # Create the TaskGroupV2 object and commit it
-        task_group = TaskGroupV2(**task_group_attributes)
-        db.add(task_group)
-        db.commit()
-        db.refresh(task_group)
-        logger.warning(f"Created task group {task_group.id=}")
-        print()
-
-    return
-
-
-def fix_db():
-    logger.warning("START execution of fix_db function")
-    _check_current_version("2.7.0")
-
-    with next(get_sync_db()) as db:
-        user_mapping = get_users_mapping(db)
-        default_user_id = get_default_user_id(db)
-        default_user_group_id = get_default_user_group_id(db)
-
-        prepare_task_groups(
-            user_mapping=user_mapping,
-            default_user_id=default_user_id,
-            default_user_group_id=default_user_group_id,
-            db=db,
-        )
-
-    logger.warning("END of execution of fix_db function")
-    print()
fractal_server/tasks/v2/_venv_pip.py
@@ -1,193 +0,0 @@
-from pathlib import Path
-from typing import Optional
-
-from ..utils import COLLECTION_FREEZE_FILENAME
-from fractal_server.app.models.v2 import TaskGroupV2
-from fractal_server.config import get_settings
-from fractal_server.logger import get_logger
-from fractal_server.syringe import Inject
-from fractal_server.tasks.v2.utils import get_python_interpreter_v2
-from fractal_server.utils import execute_command
-
-
-async def _init_venv_v2(
-    *,
-    venv_path: Path,
-    python_version: Optional[str] = None,
-    logger_name: str,
-) -> Path:
-    """
-    Set a virtual environment at `path/venv`
-
-    Args:
-        path : Path
-            path to the venv actual directory (not its parent).
-        python_version : default=None
-            Python version the virtual environment will be based upon
-
-    Returns:
-        python_bin : Path
-            path to python interpreter
-    """
-    logger = get_logger(logger_name)
-    logger.debug(f"[_init_venv_v2] {venv_path=}")
-    interpreter = get_python_interpreter_v2(python_version=python_version)
-    logger.debug(f"[_init_venv_v2] {interpreter=}")
-    await execute_command(
-        command=f"{interpreter} -m venv {venv_path}",
-        logger_name=logger_name,
-    )
-    python_bin = venv_path / "bin/python"
-    logger.debug(f"[_init_venv_v2] {python_bin=}")
-    return python_bin
-
-
-async def _pip_install(
-    task_group: TaskGroupV2,
-    logger_name: str,
-) -> Path:
-    """
-    Install package in venv
-
-    Args:
-        venv_path:
-        task_pkg:
-        logger_name:
-
-    Returns:
-        The location of the package.
-    """
-    settings = Inject(get_settings)
-
-    logger = get_logger(logger_name)
-
-    python_bin = Path(task_group.venv_path) / "bin/python"
-    pip_install_str = task_group.pip_install_string
-    logger.info(f"{pip_install_str=}")
-
-    await execute_command(
-        cwd=Path(task_group.venv_path),
-        command=(
-            f"{python_bin} -m pip install --upgrade "
-            f"'pip<={settings.FRACTAL_MAX_PIP_VERSION}'"
-        ),
-        logger_name=logger_name,
-    )
-    await execute_command(
-        cwd=Path(task_group.venv_path),
-        command=f"{python_bin} -m pip install {pip_install_str}",
-        logger_name=logger_name,
-    )
-
-    if task_group.pinned_package_versions:
-        for (
-            pinned_pkg_name,
-            pinned_pkg_version,
-        ) in task_group.pinned_package_versions.items():
-            logger.debug(
-                "Specific version required: "
-                f"{pinned_pkg_name}=={pinned_pkg_version}"
-            )
-            logger.debug(
-                "Preliminary check: verify that "
-                f"{pinned_pkg_name} is already installed"
-            )
-            stdout_show = await execute_command(
-                cwd=Path(task_group.venv_path),
-                command=f"{python_bin} -m pip show {pinned_pkg_name}",
-                logger_name=logger_name,
-            )
-            current_version = next(
-                line.split()[-1]
-                for line in stdout_show.split("\n")
-                if line.startswith("Version:")
-            )
-            if current_version != pinned_pkg_version:
-                logger.debug(
-                    f"Currently installed version of {pinned_pkg_name} "
-                    f"({current_version}) differs from pinned version "
-                    f"({pinned_pkg_version}); "
-                    f"install version {pinned_pkg_version}."
-                )
-                await execute_command(
-                    cwd=Path(task_group.venv_path),
-                    command=(
-                        f"{python_bin} -m pip install "
-                        f"{pinned_pkg_name}=={pinned_pkg_version}"
-                    ),
-                    logger_name=logger_name,
-                )
-            else:
-                logger.debug(
-                    f"Currently installed version of {pinned_pkg_name} "
-                    f"({current_version}) already matches the pinned version."
-                )
-
-    # Extract package installation path from `pip show`
-    stdout_show = await execute_command(
-        cwd=Path(task_group.venv_path),
-        command=f"{python_bin} -m pip show {task_group.pkg_name}",
-        logger_name=logger_name,
-    )
-
-    location = Path(
-        next(
-            line.split()[-1]
-            for line in stdout_show.split("\n")
-            if line.startswith("Location:")
-        )
-    )
-
-    # NOTE
-    # https://packaging.python.org/en/latest/specifications/recording-installed-packages/
-    # This directory is named as {name}-{version}.dist-info, with name and
-    # version fields corresponding to Core metadata specifications. Both
-    # fields must be normalized (see the name normalization specification and
-    # the version normalization specification), and replace dash (-)
-    # characters with underscore (_) characters, so the .dist-info directory
-    # always has exactly one dash (-) character in its stem, separating the
-    # name and version fields.
-    package_root = location / (task_group.pkg_name.replace("-", "_"))
-    logger.debug(f"[_pip install] {location=}")
-    logger.debug(f"[_pip install] {task_group.pkg_name=}")
-    logger.debug(f"[_pip install] {package_root=}")
-
-    # Run `pip freeze --all` and store its output
-    stdout_freeze = await execute_command(
-        cwd=Path(task_group.venv_path),
-        command=f"{python_bin} -m pip freeze --all",
-        logger_name=logger_name,
-    )
-    with (Path(task_group.path) / COLLECTION_FREEZE_FILENAME).open("w") as f:
-        f.write(stdout_freeze)
-
-    return package_root
-
-
-async def _create_venv_install_package_pip(
-    *,
-    task_group: TaskGroupV2,
-    logger_name: str,
-) -> tuple[Path, Path]:
-    """
-    Create venv and install package
-
-    Args:
-        path: the directory in which to create the environment
-        task_pkg: object containing the different metadata required to install
-            the package
-
-    Returns:
-        python_bin: path to venv's python interpreter
-        package_root: the location of the package manifest
-    """
-    python_bin = await _init_venv_v2(
-        venv_path=Path(task_group.venv_path),
-        python_version=task_group.python_version,
-        logger_name=logger_name,
-    )
-    package_root = await _pip_install(
-        task_group=task_group,
-        logger_name=logger_name,
-    )
-    return python_bin, package_root