fractal-server 2.15.0a3__py3-none-any.whl → 2.15.0a5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +0 -3
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +22 -0
- fractal_server/app/routes/api/v2/task_collection.py +5 -15
- fractal_server/app/routes/api/v2/task_collection_pixi.py +7 -24
- fractal_server/app/routes/api/v2/task_group.py +3 -0
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +0 -3
- fractal_server/app/schemas/v2/__init__.py +0 -1
- fractal_server/app/schemas/v2/task_group.py +0 -9
- fractal_server/config.py +29 -1
- fractal_server/tasks/v2/local/_utils.py +1 -5
- fractal_server/tasks/v2/local/collect.py +5 -8
- fractal_server/tasks/v2/local/collect_pixi.py +29 -13
- fractal_server/tasks/v2/local/deactivate.py +5 -9
- fractal_server/tasks/v2/local/deactivate_pixi.py +4 -10
- fractal_server/tasks/v2/local/reactivate.py +5 -9
- fractal_server/tasks/v2/local/reactivate_pixi.py +29 -14
- fractal_server/tasks/v2/ssh/_utils.py +45 -4
- fractal_server/tasks/v2/ssh/collect.py +32 -37
- fractal_server/tasks/v2/ssh/collect_pixi.py +51 -45
- fractal_server/tasks/v2/ssh/deactivate.py +21 -28
- fractal_server/tasks/v2/ssh/deactivate_pixi.py +20 -28
- fractal_server/tasks/v2/ssh/reactivate.py +23 -29
- fractal_server/tasks/v2/ssh/reactivate_pixi.py +158 -38
- fractal_server/tasks/v2/templates/pixi_2_install.sh +12 -8
- fractal_server/tasks/v2/templates/pixi_3_post_install.sh +0 -4
- fractal_server/tasks/v2/utils_background.py +7 -0
- fractal_server/tasks/v2/utils_templates.py +14 -1
- {fractal_server-2.15.0a3.dist-info → fractal_server-2.15.0a5.dist-info}/METADATA +1 -1
- {fractal_server-2.15.0a3.dist-info → fractal_server-2.15.0a5.dist-info}/RECORD +33 -33
- {fractal_server-2.15.0a3.dist-info → fractal_server-2.15.0a5.dist-info}/LICENSE +0 -0
- {fractal_server-2.15.0a3.dist-info → fractal_server-2.15.0a5.dist-info}/WHEEL +0 -0
- {fractal_server-2.15.0a3.dist-info → fractal_server-2.15.0a5.dist-info}/entry_points.txt +0 -0
fractal_server/__init__.py
CHANGED
@@ -1 +1 @@
-__VERSION__ = "2.15.0a3"
+__VERSION__ = "2.15.0a5"
fractal_server/app/routes/admin/v2/task_group_lifecycle.py
CHANGED
@@ -2,7 +2,6 @@ from fastapi import APIRouter
 from fastapi import BackgroundTasks
 from fastapi import Depends
 from fastapi import HTTPException
-from fastapi import Request
 from fastapi import Response
 from fastapi import status

@@ -51,7 +50,6 @@ async def deactivate_task_group(
     task_group_id: int,
     background_tasks: BackgroundTasks,
     response: Response,
-    request: Request,
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupReadV2:
@@ -157,7 +155,6 @@ async def reactivate_task_group(
     task_group_id: int,
     background_tasks: BackgroundTasks,
     response: Response,
-    request: Request,
     superuser: UserOAuth = Depends(current_active_superuser),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupReadV2:
fractal_server/app/routes/api/v2/_aux_functions_tasks.py
CHANGED
@@ -333,6 +333,28 @@ async def _verify_non_duplication_group_constraint(
     )


+async def _verify_non_duplication_group_path(
+    path: str | None,
+    db: AsyncSession,
+) -> None:
+    """
+    Verify uniqueness of non-`None` `TaskGroupV2.path`
+    """
+    if path is None:
+        return
+    stm = select(TaskGroupV2.id).where(TaskGroupV2.path == path)
+    res = await db.execute(stm)
+    duplicate_ids = res.scalars().all()
+    if duplicate_ids:
+        raise HTTPException(
+            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
+            detail=(
+                f"Other TaskGroups already have {path=}: "
+                f"{sorted(duplicate_ids)}."
+            ),
+        )
+
+
 async def _add_warnings_to_workflow_tasks(
     wftask_list: list[WorkflowTaskV2], user_id: int, db: AsyncSession
 ) -> list[dict[str, Any]]:
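For orientation: the new helper centralizes the path-uniqueness check that both collection endpoints previously performed inline (see the task_collection.py and task_collection_pixi.py hunks below). A minimal sketch of how a caller awaits it, with a hypothetical task_group_attrs dict and db session standing in for the objects built in those routes:

async def ensure_unique_task_group_path(task_group_attrs: dict, db) -> None:
    # Raises HTTP 422 if another TaskGroupV2 already uses this path.
    await _verify_non_duplication_group_path(
        path=task_group_attrs["path"],
        db=db,
    )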
fractal_server/app/routes/api/v2/task_collection.py
CHANGED
@@ -7,14 +7,12 @@ from fastapi import Depends
 from fastapi import File
 from fastapi import Form
 from fastapi import HTTPException
-from fastapi import Request
 from fastapi import Response
 from fastapi import status
 from fastapi import UploadFile
 from pydantic import BaseModel
 from pydantic import model_validator
 from pydantic import ValidationError
-from sqlmodel import select

 from .....config import get_settings
 from .....logger import reset_logger_handlers
@@ -32,6 +30,7 @@ from ...aux.validate_user_settings import validate_user_settings
 from ._aux_functions_task_lifecycle import get_package_version_from_pypi
 from ._aux_functions_tasks import _get_valid_user_group_id
 from ._aux_functions_tasks import _verify_non_duplication_group_constraint
+from ._aux_functions_tasks import _verify_non_duplication_group_path
 from ._aux_functions_tasks import _verify_non_duplication_user_constraint
 from fractal_server.app.models import UserOAuth
 from fractal_server.app.models.v2 import TaskGroupActivityV2
@@ -151,7 +150,6 @@ def parse_request_data(
     response_model=TaskGroupActivityV2Read,
 )
 async def collect_tasks_pip(
-    request: Request,
     response: Response,
     background_tasks: BackgroundTasks,
     request_data: CollectionRequestData = Depends(parse_request_data),
@@ -293,18 +291,10 @@ async def collect_tasks_pip(
         version=task_group_attrs["version"],
         db=db,
     )
-
-
-
-
-    for conflicting_task_group in res.scalars().all():
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(
-                f"Another task-group already has path={task_group_path}.\n"
-                f"{conflicting_task_group=}"
-            ),
-        )
+    await _verify_non_duplication_group_path(
+        path=task_group_attrs["path"],
+        db=db,
+    )

     # On-disk checks

fractal_server/app/routes/api/v2/task_collection_pixi.py
CHANGED
@@ -6,12 +6,9 @@ from fastapi import BackgroundTasks
 from fastapi import Depends
 from fastapi import Form
 from fastapi import HTTPException
-from fastapi import Request
 from fastapi import Response
 from fastapi import status
 from fastapi import UploadFile
-from pydantic import ValidationError
-from sqlmodel import select

 from fractal_server.app.db import AsyncSession
 from fractal_server.app.db import get_async_db
@@ -24,6 +21,9 @@ from fractal_server.app.routes.api.v2._aux_functions_tasks import (
 from fractal_server.app.routes.api.v2._aux_functions_tasks import (
     _verify_non_duplication_group_constraint,
 )
+from fractal_server.app.routes.api.v2._aux_functions_tasks import (
+    _verify_non_duplication_group_path,
+)
 from fractal_server.app.routes.api.v2._aux_functions_tasks import (
     _verify_non_duplication_user_constraint,
 )
@@ -35,7 +35,6 @@ from fractal_server.app.schemas.v2 import FractalUploadedFile
 from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
 from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
 from fractal_server.app.schemas.v2 import TaskGroupActivityV2Read
-from fractal_server.app.schemas.v2 import TaskGroupCreateV2StrictPixi
 from fractal_server.app.schemas.v2.task_group import TaskGroupV2OriginEnum
 from fractal_server.config import get_settings
 from fractal_server.logger import set_logger
@@ -81,7 +80,6 @@ def validate_pkgname_and_version(filename: str) -> tuple[str, str]:
     response_model=TaskGroupActivityV2Read,
 )
 async def collect_task_pixi(
-    request: Request,
     response: Response,
     background_tasks: BackgroundTasks,
     file: UploadFile,
@@ -147,13 +145,6 @@ async def collect_task_pixi(
         version=version,
         path=task_group_path,
     )
-    try:
-        TaskGroupCreateV2StrictPixi(**task_group_attrs)
-    except ValidationError as e:
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=f"Invalid task-group object. Original error: {e}",
-        )

     await _verify_non_duplication_user_constraint(
         user_id=user.id,
@@ -167,18 +158,10 @@ async def collect_task_pixi(
         version=task_group_attrs["version"],
         db=db,
     )
-
-
-
-
-    for conflicting_task_group in res.scalars().all():
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=(
-                f"Another task-group already has path={task_group_path}.\n"
-                f"{conflicting_task_group=}"
-            ),
-        )
+    await _verify_non_duplication_group_path(
+        path=task_group_attrs["path"],
+        db=db,
+    )

     if settings.FRACTAL_RUNNER_BACKEND != "slurm_ssh":
         if Path(task_group_path).exists():
fractal_server/app/routes/api/v2/task_group.py
CHANGED
@@ -12,6 +12,7 @@ from pydantic.types import AwareDatetime
 from sqlmodel import or_
 from sqlmodel import select

+from ._aux_functions_task_lifecycle import check_no_ongoing_activity
 from ._aux_functions_tasks import _get_task_group_full_access
 from ._aux_functions_tasks import _get_task_group_read_access
 from ._aux_functions_tasks import _verify_non_duplication_group_constraint
@@ -216,6 +217,8 @@ async def delete_task_group(
         db=db,
     )

+    await check_no_ongoing_activity(task_group_id=task_group_id, db=db)
+
     stm = select(WorkflowTaskV2).where(
         WorkflowTaskV2.task_id.in_({task.id for task in task_group.task_list})
     )
fractal_server/app/routes/api/v2/task_group_lifecycle.py
CHANGED
@@ -2,7 +2,6 @@ from fastapi import APIRouter
 from fastapi import BackgroundTasks
 from fastapi import Depends
 from fastapi import HTTPException
-from fastapi import Request
 from fastapi import Response
 from fastapi import status

@@ -48,7 +47,6 @@ async def deactivate_task_group(
     task_group_id: int,
     background_tasks: BackgroundTasks,
     response: Response,
-    request: Request,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupReadV2:
@@ -168,7 +166,6 @@ async def reactivate_task_group(
     task_group_id: int,
     background_tasks: BackgroundTasks,
     response: Response,
-    request: Request,
     user: UserOAuth = Depends(current_active_user),
     db: AsyncSession = Depends(get_async_db),
 ) -> TaskGroupReadV2:
fractal_server/app/schemas/v2/__init__.py
CHANGED
@@ -41,7 +41,6 @@ from .task_group import TaskGroupActivityStatusV2  # noqa F401
 from .task_group import TaskGroupActivityV2Read  # noqa F401
 from .task_group import TaskGroupCreateV2  # noqa F401
 from .task_group import TaskGroupCreateV2Strict  # noqa F401
-from .task_group import TaskGroupCreateV2StrictPixi  # noqa F401
 from .task_group import TaskGroupReadV2  # noqa F401
 from .task_group import TaskGroupUpdateV2  # noqa F401
 from .task_group import TaskGroupV2OriginEnum  # noqa F401
fractal_server/app/schemas/v2/task_group.py
CHANGED
@@ -62,15 +62,6 @@ class TaskGroupCreateV2Strict(TaskGroupCreateV2):
     python_version: NonEmptyStr


-class TaskGroupCreateV2StrictPixi(TaskGroupCreateV2):
-    """
-    A strict version of TaskGroupCreateV2, to be used for pixi task collection.
-    """
-
-    path: AbsolutePathStr
-    pixi_version: NonEmptyStr
-
-
 class TaskGroupReadV2(BaseModel):
     id: int
     task_list: list[TaskReadV2]
fractal_server/config.py
CHANGED
@@ -65,8 +65,36 @@ class MailSettings(BaseModel):


 class PixiSettings(BaseModel):
-
+    """
+    Configuration for Pixi task collection
+
+    See https://pixi.sh/latest/reference/cli/pixi/install/#config-options for
+    `pixi install` concurrency options.
+    See https://docs.rs/tokio/latest/tokio/#cpu-bound-tasks-and-blocking-code
+    for `tokio` configuration.
+
+    versions:
+        Available `pixi` versions and their `PIXI_HOME` folders.
+    default_version:
+        Default `pixi` version to use for task collection - must be one
+        of `versions` keys.
+    PIXI_CONCURRENT_SOLVES:
+        Value of `--concurrent-solves` for `pixi install`.
+    PIXI_CONCURRENT_DOWNLOADS:
+        Value of `--concurrent-downloads for `pixi install`.
+    TOKIO_WORKER_THREADS:
+        From tokio docs, "The core threads are where all asynchronous code
+        runs, and Tokio will by default spawn one for each CPU core. You can
+        use the environment variable TOKIO_WORKER_THREADS to override the
+        default value."
+    """
+
     versions: DictStrStr
+    default_version: str
+
+    PIXI_CONCURRENT_SOLVES: int = 4
+    PIXI_CONCURRENT_DOWNLOADS: int = 4
+    TOKIO_WORKER_THREADS: int = 2

     @model_validator(mode="after")
     def check_pixi_settings(self):
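As a quick illustration of the new fields and defaults (a sketch, not the actual fractal-server code: DictStrStr is assumed to be a plain dict[str, str] alias, and the real model additionally runs the check_pixi_settings validator shown above):

from pydantic import BaseModel

# Minimal PixiSettings-like model with the fields added in 2.15.0a5.
class PixiSettingsSketch(BaseModel):
    versions: dict[str, str]  # pixi version -> PIXI_HOME folder
    default_version: str  # documented as "must be one of `versions` keys"
    PIXI_CONCURRENT_SOLVES: int = 4
    PIXI_CONCURRENT_DOWNLOADS: int = 4
    TOKIO_WORKER_THREADS: int = 2

# Hypothetical values, for illustration only.
pixi_settings = PixiSettingsSketch(
    versions={"0.47.0": "/common/pixi/0.47.0"},
    default_version="0.47.0",
)
print(pixi_settings.PIXI_CONCURRENT_SOLVES)  # 4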
fractal_server/tasks/v2/local/_utils.py
CHANGED
@@ -31,7 +31,7 @@ def _customize_and_run_template(
         f"Invalid {template_filename=} (it must end with '.sh')."
     )

-    script_filename = f"{prefix}{template_filename}"
+    script_filename = f"{prefix}_{template_filename}"
     script_path_local = Path(script_dir) / script_filename
     # Read template
     customize_template(
@@ -50,10 +50,6 @@ def check_task_files_exist(task_list: list[TaskCreateV2]) -> None:
     """
     Check that the modules listed in task commands point to existing files.

-    Note: commands may be like `/one/python /another/task.py` or
-    `/one/pixi [...] /another/task.py`, and in both cases `split()[-1]`
-    returns `/another/task.py`.
-
     Args:
         task_list:
     """
fractal_server/tasks/v2/local/collect.py
CHANGED
@@ -67,20 +67,17 @@ def collect_local(
         log_file_path=log_file_path,
     )

+    logger.info("START")
     with next(get_sync_db()) as db:
-
+        db_objects_ok, task_group, activity = get_activity_and_task_group(
             task_group_activity_id=task_group_activity_id,
             task_group_id=task_group_id,
             db=db,
+            logger_name=LOGGER_NAME,
         )
-        if not
+        if not db_objects_ok:
             return

-        # Log some info
-        logger.info("START")
-        for key, value in task_group.model_dump().items():
-            logger.debug(f"task_group.{key}: {value}")
-
         # Check that the (local) task_group path does exist
         if Path(task_group.path).exists():
             error_msg = f"{task_group.path} already exists."
@@ -130,7 +127,7 @@ def collect_local(
             ).as_posix(),
             prefix=(
                 f"{int(time.time())}_"
-                f"{TaskGroupActivityActionV2.COLLECT}_"
+                f"{TaskGroupActivityActionV2.COLLECT}"
             ),
             logger_name=LOGGER_NAME,
         )
fractal_server/tasks/v2/local/collect_pixi.py
CHANGED
@@ -27,6 +27,7 @@ from fractal_server.tasks.v2.utils_background import (
 from fractal_server.tasks.v2.utils_background import get_current_log
 from fractal_server.tasks.v2.utils_background import prepare_tasks_metadata
 from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
+from fractal_server.utils import execute_command_sync
 from fractal_server.utils import get_timestamp


@@ -47,22 +48,19 @@ def collect_local_pixi(
         log_file_path=log_file_path,
     )

+    logger.info("START")
     with next(get_sync_db()) as db:
-
+        db_objects_ok, task_group, activity = get_activity_and_task_group(
             task_group_activity_id=task_group_activity_id,
             task_group_id=task_group_id,
             db=db,
+            logger_name=LOGGER_NAME,
         )
-        if not
+        if not db_objects_ok:
             return

-        logger.info("START")
-        for key, value in task_group.model_dump(
-            exclude={"env_info"}
-        ).items():
-            logger.debug(f"task_group.{key}: {value}")
-
         if Path(task_group.path).exists():
+            # We handle this before the try/except to avoid the rmtree
             error_msg = f"{task_group.path} already exists."
             logger.error(error_msg)
             fail_and_cleanup(
@@ -100,13 +98,26 @@ def collect_local_pixi(
                     task_group.pkg_name.replace("-", "_"),
                 ),
                 ("__SOURCE_DIR_NAME__", SOURCE_DIR_NAME),
+                ("__FROZEN_OPTION__", ""),
+                (
+                    "__TOKIO_WORKER_THREADS__",
+                    str(settings.pixi.TOKIO_WORKER_THREADS),
+                ),
+                (
+                    "__PIXI_CONCURRENT_SOLVES__",
+                    str(settings.pixi.PIXI_CONCURRENT_SOLVES),
+                ),
+                (
+                    "__PIXI_CONCURRENT_DOWNLOADS__",
+                    str(settings.pixi.PIXI_CONCURRENT_DOWNLOADS),
+                ),
             },
             script_dir=Path(
                 task_group.path, SCRIPTS_SUBFOLDER
             ).as_posix(),
             prefix=(
                 f"{int(time.time())}_"
-                f"{TaskGroupActivityActionV2.COLLECT}_"
+                f"{TaskGroupActivityActionV2.COLLECT}"
             ),
             logger_name=LOGGER_NAME,
         )
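The placeholder pairs above feed the same template-customization step used for the other pixi_*.sh scripts. The snippet below is a stand-in for that substitution, not the real customize_template helper (whose signature is not shown in this diff), and the template line is invented for illustration:

def apply_replacements(template_text: str, replacements: set[tuple[str, str]]) -> str:
    # Replace each __PLACEHOLDER__ token with its value, verbatim.
    for placeholder, value in replacements:
        template_text = template_text.replace(placeholder, value)
    return template_text

example = apply_replacements(
    "pixi install --concurrent-solves __PIXI_CONCURRENT_SOLVES__ __FROZEN_OPTION__\n",
    {("__PIXI_CONCURRENT_SOLVES__", "4"), ("__FROZEN_OPTION__", "")},
)
# -> "pixi install --concurrent-solves 4 \n"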
@@ -148,6 +159,14 @@ def collect_local_pixi(
                 "project_python_wrapper"
             ]

+            # Make task folder 755
+            source_dir = Path(task_group.path, SOURCE_DIR_NAME).as_posix()
+            command = f"chmod 755 {source_dir} -R"
+            execute_command_sync(
+                command=command,
+                logger_name=LOGGER_NAME,
+            )
+
             # Read and validate manifest
             # NOTE: we are only supporting the manifest path being relative
             # to the top-level folder
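The new chmod step makes the collected source tree readable and executable by all users (rwxr-xr-x); note that placing -R after the operand relies on GNU-style option parsing. A pure-Python sketch with the same effect (a hypothetical helper, not part of fractal-server):

import os

def chmod_755_recursive(top: str) -> None:
    # Apply mode 755 to the top-level folder and everything below it,
    # mirroring `chmod 755 <top> -R`.
    os.chmod(top, 0o755)
    for root, dirs, files in os.walk(top):
        for name in dirs + files:
            os.chmod(os.path.join(root, name), 0o755)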
@@ -215,17 +234,14 @@ def collect_local_pixi(
             reset_logger_handlers(logger)

         except Exception as collection_e:
-            # Delete corrupted package dir
             try:
                 logger.info(f"Now delete folder {task_group.path}")
                 shutil.rmtree(task_group.path)
                 logger.info(f"Deleted folder {task_group.path}")
             except Exception as rm_e:
                 logger.error(
-                    "Removing folder failed.\n"
-                    f"Original error:\n{str(rm_e)}"
+                    f"Removing folder failed. Original error: {str(rm_e)}"
                 )
-
             fail_and_cleanup(
                 task_group=task_group,
                 task_group_activity=activity,
fractal_server/tasks/v2/local/deactivate.py
CHANGED
@@ -46,21 +46,17 @@ def deactivate_local(
         log_file_path=log_file_path,
     )

+    logger.debug("START")
     with next(get_sync_db()) as db:
-
+        db_objects_ok, task_group, activity = get_activity_and_task_group(
             task_group_activity_id=task_group_activity_id,
             task_group_id=task_group_id,
             db=db,
+            logger_name=LOGGER_NAME,
         )
-        if not
+        if not db_objects_ok:
             return

-        # Log some info
-        logger.debug("START")
-
-        for key, value in task_group.model_dump().items():
-            logger.debug(f"task_group.{key}: {value}")
-
         # Check that the (local) task_group venv_path does exist
         if not Path(task_group.venv_path).exists():
             error_msg = f"{task_group.venv_path} does not exist."
@@ -100,7 +96,7 @@ def deactivate_local(
             ).as_posix(),
             prefix=(
                 f"{int(time.time())}_"
-                f"{TaskGroupActivityActionV2.DEACTIVATE}_"
+                f"{TaskGroupActivityActionV2.DEACTIVATE}"
             ),
             logger_name=LOGGER_NAME,
         )
fractal_server/tasks/v2/local/deactivate_pixi.py
CHANGED
@@ -40,23 +40,17 @@ def deactivate_local_pixi(
         log_file_path=log_file_path,
     )

+    logger.debug("START")
     with next(get_sync_db()) as db:
-
+        db_objects_ok, task_group, activity = get_activity_and_task_group(
             task_group_activity_id=task_group_activity_id,
             task_group_id=task_group_id,
             db=db,
+            logger_name=LOGGER_NAME,
         )
-        if not
+        if not db_objects_ok:
             return

-        # Log some info
-        logger.debug("START")
-
-        for key, value in task_group.model_dump(
-            exclude={"env_info"}
-        ).items():
-            logger.debug(f"task_group.{key}: {value}")
-
         source_dir = Path(task_group.path, SOURCE_DIR_NAME)
         if not source_dir.exists():
             error_msg = f"'{source_dir.as_posix()}' does not exist."
fractal_server/tasks/v2/local/reactivate.py
CHANGED
@@ -47,21 +47,17 @@ def reactivate_local(
         log_file_path=log_file_path,
     )

+    logger.debug("START")
     with next(get_sync_db()) as db:
-
+        db_objects_ok, task_group, activity = get_activity_and_task_group(
             task_group_activity_id=task_group_activity_id,
             task_group_id=task_group_id,
             db=db,
+            logger_name=LOGGER_NAME,
         )
-        if not
+        if not db_objects_ok:
             return

-        # Log some info
-        logger.debug("START")
-
-        for key, value in task_group.model_dump().items():
-            logger.debug(f"task_group.{key}: {value}")
-
         # Check that the (local) task_group venv_path does not exist
         if Path(task_group.venv_path).exists():
             error_msg = f"{task_group.venv_path} already exists."
@@ -100,7 +96,7 @@ def reactivate_local(
             ).as_posix(),
             prefix=(
                 f"{int(time.time())}_"
-                f"{TaskGroupActivityActionV2.REACTIVATE}_"
+                f"{TaskGroupActivityActionV2.REACTIVATE}"
             ),
             logger_name=LOGGER_NAME,
         )
fractal_server/tasks/v2/local/reactivate_pixi.py
CHANGED
@@ -18,6 +18,7 @@ from fractal_server.tasks.utils import get_log_path
 from fractal_server.tasks.v2.local._utils import _customize_and_run_template
 from fractal_server.tasks.v2.utils_background import get_current_log
 from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
+from fractal_server.utils import execute_command_sync
 from fractal_server.utils import get_timestamp


@@ -45,23 +46,17 @@ def reactivate_local_pixi(
         logger_name=LOGGER_NAME,
         log_file_path=log_file_path,
     )
+    logger.debug("START")
     with next(get_sync_db()) as db:
-
+        db_objects_ok, task_group, activity = get_activity_and_task_group(
             task_group_activity_id=task_group_activity_id,
             task_group_id=task_group_id,
             db=db,
+            logger_name=LOGGER_NAME,
         )
-        if not
+        if not db_objects_ok:
             return

-        # Log some info
-        logger.debug("START")
-
-        for key, value in task_group.model_dump(
-            exclude={"env_info"}
-        ).items():
-            logger.debug(f"task_group.{key}: {value}")
-
         source_dir = Path(task_group.path, SOURCE_DIR_NAME).as_posix()
         if Path(source_dir).exists():
             error_msg = f"{source_dir} already exists."
@@ -94,7 +89,19 @@ def reactivate_local_pixi(
                     task_group.pkg_name.replace("-", "_"),
                 ),
                 ("__SOURCE_DIR_NAME__", SOURCE_DIR_NAME),
-                ("__FROZEN_OPTION__", "
+                ("__FROZEN_OPTION__", "--frozen"),
+                (
+                    "__TOKIO_WORKER_THREADS__",
+                    str(settings.pixi.TOKIO_WORKER_THREADS),
+                ),
+                (
+                    "__PIXI_CONCURRENT_SOLVES__",
+                    str(settings.pixi.PIXI_CONCURRENT_SOLVES),
+                ),
+                (
+                    "__PIXI_CONCURRENT_DOWNLOADS__",
+                    str(settings.pixi.PIXI_CONCURRENT_DOWNLOADS),
+                ),
             },
             script_dir=Path(
                 task_group.path, SCRIPTS_SUBFOLDER
@@ -128,7 +135,7 @@ def reactivate_local_pixi(
             activity.log = get_current_log(log_file_path)
             activity = add_commit_refresh(obj=activity, db=db)

-            # Run script 3
+            # Run script 3 - post-install
             _customize_and_run_template(
                 template_filename="pixi_3_post_install.sh",
                 **common_args,
@@ -136,6 +143,14 @@ def reactivate_local_pixi(
             activity.log = get_current_log(log_file_path)
             activity = add_commit_refresh(obj=activity, db=db)

+            # Make task folder 755
+            source_dir = Path(task_group.path, SOURCE_DIR_NAME).as_posix()
+            command = f"chmod 755 {source_dir} -R"
+            execute_command_sync(
+                command=command,
+                logger_name=LOGGER_NAME,
+            )
+
             activity.log = get_current_log(log_file_path)
             activity.status = TaskGroupActivityStatusV2.OK
             activity.timestamp_ended = get_timestamp()
@@ -154,8 +169,8 @@ def reactivate_local_pixi(
             logger.info(f"Deleted folder {source_dir}")
         except Exception as rm_e:
             logger.error(
-                "Removing folder failed.\n"
-                f"Original error:\n{str(rm_e)}"
+                "Removing folder failed. "
+                f"Original error: {str(rm_e)}"
             )

         fail_and_cleanup(