fractal-server 2.8.1__py3-none-any.whl → 2.9.0a0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/v2/__init__.py +3 -3
- fractal_server/app/models/v2/task.py +0 -72
- fractal_server/app/models/v2/task_group.py +102 -0
- fractal_server/app/routes/admin/v1.py +1 -20
- fractal_server/app/routes/admin/v2/job.py +1 -20
- fractal_server/app/routes/admin/v2/task_group.py +53 -13
- fractal_server/app/routes/api/v2/__init__.py +11 -2
- fractal_server/app/routes/api/v2/{_aux_functions_task_collection.py → _aux_functions_task_lifecycle.py} +43 -0
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +21 -14
- fractal_server/app/routes/api/v2/task_collection.py +26 -51
- fractal_server/app/routes/api/v2/task_collection_custom.py +3 -3
- fractal_server/app/routes/api/v2/task_group.py +83 -14
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +221 -0
- fractal_server/app/routes/api/v2/workflow.py +1 -1
- fractal_server/app/routes/api/v2/workflow_import.py +2 -2
- fractal_server/app/routes/aux/_timestamp.py +25 -0
- fractal_server/app/schemas/v2/__init__.py +3 -2
- fractal_server/app/schemas/v2/task_collection.py +0 -21
- fractal_server/app/schemas/v2/task_group.py +30 -6
- fractal_server/migrations/versions/3082479ac4ea_taskgroup_activity_and_venv_info_to_.py +105 -0
- fractal_server/ssh/_fabric.py +18 -0
- fractal_server/tasks/utils.py +2 -12
- fractal_server/tasks/v2/local/__init__.py +3 -0
- fractal_server/tasks/v2/local/collect.py +291 -0
- fractal_server/tasks/v2/local/deactivate.py +162 -0
- fractal_server/tasks/v2/local/reactivate.py +159 -0
- fractal_server/tasks/v2/local/utils_local.py +52 -0
- fractal_server/tasks/v2/ssh/__init__.py +0 -0
- fractal_server/tasks/v2/ssh/collect.py +387 -0
- fractal_server/tasks/v2/ssh/deactivate.py +2 -0
- fractal_server/tasks/v2/ssh/reactivate.py +2 -0
- fractal_server/tasks/v2/templates/{_2_preliminary_pip_operations.sh → 1_create_venv.sh} +6 -7
- fractal_server/tasks/v2/templates/{_3_pip_install.sh → 2_pip_install.sh} +8 -1
- fractal_server/tasks/v2/templates/{_4_pip_freeze.sh → 3_pip_freeze.sh} +0 -7
- fractal_server/tasks/v2/templates/{_5_pip_show.sh → 4_pip_show.sh} +5 -6
- fractal_server/tasks/v2/templates/5_get_venv_size_and_file_number.sh +10 -0
- fractal_server/tasks/v2/templates/6_pip_install_from_freeze.sh +35 -0
- fractal_server/tasks/v2/utils_background.py +42 -103
- fractal_server/tasks/v2/utils_templates.py +32 -2
- fractal_server/utils.py +4 -2
- {fractal_server-2.8.1.dist-info → fractal_server-2.9.0a0.dist-info}/METADATA +2 -2
- {fractal_server-2.8.1.dist-info → fractal_server-2.9.0a0.dist-info}/RECORD +47 -36
- fractal_server/app/models/v2/collection_state.py +0 -22
- fractal_server/tasks/v2/collection_local.py +0 -357
- fractal_server/tasks/v2/collection_ssh.py +0 -352
- fractal_server/tasks/v2/templates/_1_create_venv.sh +0 -42
- /fractal_server/tasks/v2/{database_operations.py → utils_database.py} +0 -0
- {fractal_server-2.8.1.dist-info → fractal_server-2.9.0a0.dist-info}/LICENSE +0 -0
- {fractal_server-2.8.1.dist-info → fractal_server-2.9.0a0.dist-info}/WHEEL +0 -0
- {fractal_server-2.8.1.dist-info → fractal_server-2.9.0a0.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,105 @@
|
|
1
|
+
"""TaskGroup Activity and venv-info to TaskGroup
|
2
|
+
|
3
|
+
Revision ID: 3082479ac4ea
|
4
|
+
Revises: 19eca0dd47a9
|
5
|
+
Create Date: 2024-11-12 14:39:34.035859
|
6
|
+
|
7
|
+
"""
|
8
|
+
import sqlalchemy as sa
|
9
|
+
import sqlmodel
|
10
|
+
from alembic import op
|
11
|
+
|
12
|
+
# revision identifiers, used by Alembic.
|
13
|
+
revision = "3082479ac4ea"
|
14
|
+
down_revision = "19eca0dd47a9"
|
15
|
+
branch_labels = None
|
16
|
+
depends_on = None
|
17
|
+
|
18
|
+
|
19
|
+
def upgrade() -> None:
    """
    Apply schema changes for task-group lifecycle tracking.

    Creates the `taskgroupactivityv2` table (with FKs to `taskgroupv2`
    and `user_oauth`), drops the superseded `collectionstatev2` table,
    and adds venv bookkeeping columns (`pip_freeze`, `venv_size_in_kB`,
    `venv_file_number`) to `taskgroupv2`.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "taskgroupactivityv2",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("taskgroupv2_id", sa.Integer(), nullable=True),
        sa.Column(
            "timestamp_started", sa.DateTime(timezone=True), nullable=False
        ),
        sa.Column(
            "pkg_name", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column(
            "version", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column(
            "status", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column(
            "action", sqlmodel.sql.sqltypes.AutoString(), nullable=False
        ),
        sa.Column("log", sqlmodel.sql.sqltypes.AutoString(), nullable=True),
        sa.Column(
            "timestamp_ended", sa.DateTime(timezone=True), nullable=True
        ),
        sa.ForeignKeyConstraint(
            ["taskgroupv2_id"],
            ["taskgroupv2.id"],
            name=op.f("fk_taskgroupactivityv2_taskgroupv2_id_taskgroupv2"),
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["user_oauth.id"],
            name=op.f("fk_taskgroupactivityv2_user_id_user_oauth"),
        ),
        sa.PrimaryKeyConstraint("id", name=op.f("pk_taskgroupactivityv2")),
    )
    op.drop_table("collectionstatev2")
    # batch_alter_table is used for SQLite compatibility when adding columns
    with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column(
                "pip_freeze", sqlmodel.sql.sqltypes.AutoString(), nullable=True
            )
        )
        batch_op.add_column(
            sa.Column("venv_size_in_kB", sa.Integer(), nullable=True)
        )
        batch_op.add_column(
            sa.Column("venv_file_number", sa.Integer(), nullable=True)
        )

    # ### end Alembic commands ###
+
def downgrade() -> None:
    """
    Revert the task-group lifecycle schema changes.

    Drops the venv bookkeeping columns from `taskgroupv2`, re-creates the
    legacy `collectionstatev2` table, and drops `taskgroupactivityv2`.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
        batch_op.drop_column("venv_file_number")
        batch_op.drop_column("venv_size_in_kB")
        batch_op.drop_column("pip_freeze")

    op.create_table(
        "collectionstatev2",
        sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column(
            "data",
            sa.JSON(),
            nullable=True,
        ),
        sa.Column(
            "timestamp",
            sa.DateTime(timezone=True),
            nullable=True,
        ),
        sa.Column(
            "taskgroupv2_id", sa.INTEGER(), autoincrement=False, nullable=True
        ),
        sa.ForeignKeyConstraint(
            ["taskgroupv2_id"],
            ["taskgroupv2.id"],
            name="fk_collectionstatev2_taskgroupv2_id_taskgroupv2",
        ),
        sa.PrimaryKeyConstraint("id", name="pk_collectionstatev2"),
    )
    op.drop_table("taskgroupactivityv2")
    # ### end Alembic commands ###
fractal_server/ssh/_fabric.py
CHANGED
@@ -471,6 +471,24 @@ class FractalSSH(object):
|
|
471
471
|
f.write(content)
|
472
472
|
self.logger.info(f"END writing to remote file {path}.")
|
473
473
|
|
474
|
+
def remote_exists(self, path: str) -> bool:
    """
    Return whether a remote file/folder exists.

    Args:
        path: Remote path to check via SFTP.

    Returns:
        `True` if an SFTP `stat` on `path` succeeds, `False` if it raises
        `FileNotFoundError`.
    """
    # NOTE(review): log labels say "remote_file_exists" while the method is
    # named "remote_exists" — presumably a leftover from a rename; confirm
    # before changing any log text.
    self.logger.info(f"START remote_file_exists {path}")
    # Serialize SFTP access across threads; raises if the lock is not
    # acquired within `default_lock_timeout`.
    with _acquire_lock_with_timeout(
        lock=self._lock,
        label=f"remote_file_exists {path=}",
        timeout=self.default_lock_timeout,
    ):
        try:
            self._sftp_unsafe().stat(path)
            self.logger.info(f"END remote_file_exists {path} / True")
            return True
        except FileNotFoundError:
            # `stat` raises FileNotFoundError when the path is absent
            self.logger.info(f"END remote_file_exists {path} / False")
            return False
|
491
|
+
|
474
492
|
|
475
493
|
class FractalSSHList(object):
|
476
494
|
"""
|
fractal_server/tasks/utils.py
CHANGED
@@ -30,19 +30,9 @@ def get_log_path(base: Path) -> Path:
|
|
30
30
|
return base / COLLECTION_LOG_FILENAME
|
31
31
|
|
32
32
|
|
33
|
-
def get_freeze_path(base: Path) -> Path:
|
34
|
-
return base / COLLECTION_FREEZE_FILENAME
|
35
|
-
|
36
|
-
|
37
33
|
def get_collection_log_v1(path: Path) -> str:
    """
    Return the contents of the v1 collection log for a package.

    Args:
        path: Package path, resolved via `get_absolute_venv_path_v1`.

    Returns:
        The full text of the collection log file.
    """
    venv_dir = get_absolute_venv_path_v1(path)
    return get_log_path(venv_dir).read_text()
|
42
|
-
|
43
|
-
|
44
|
-
def get_collection_freeze_v1(venv_path: Path) -> str:
|
45
|
-
package_path = get_absolute_venv_path_v1(venv_path)
|
46
|
-
freeze_path = get_freeze_path(package_path)
|
47
|
-
freeze = freeze_path.open().read()
|
48
|
-
return freeze
|
@@ -0,0 +1,291 @@
|
|
1
|
+
import json
|
2
|
+
import logging
|
3
|
+
import shutil
|
4
|
+
import time
|
5
|
+
from pathlib import Path
|
6
|
+
from tempfile import TemporaryDirectory
|
7
|
+
|
8
|
+
from ..utils_database import create_db_tasks_and_update_task_group
|
9
|
+
from .utils_local import _customize_and_run_template
|
10
|
+
from fractal_server.app.db import get_sync_db
|
11
|
+
from fractal_server.app.models.v2 import TaskGroupActivityV2
|
12
|
+
from fractal_server.app.models.v2 import TaskGroupV2
|
13
|
+
from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
|
14
|
+
from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
|
15
|
+
from fractal_server.app.schemas.v2.manifest import ManifestV2
|
16
|
+
from fractal_server.logger import get_logger
|
17
|
+
from fractal_server.logger import set_logger
|
18
|
+
from fractal_server.tasks.utils import get_log_path
|
19
|
+
from fractal_server.tasks.v2.utils_background import _prepare_tasks_metadata
|
20
|
+
from fractal_server.tasks.v2.utils_background import add_commit_refresh
|
21
|
+
from fractal_server.tasks.v2.utils_background import check_task_files_exist
|
22
|
+
from fractal_server.tasks.v2.utils_background import fail_and_cleanup
|
23
|
+
from fractal_server.tasks.v2.utils_background import get_current_log
|
24
|
+
from fractal_server.tasks.v2.utils_package_names import compare_package_names
|
25
|
+
from fractal_server.tasks.v2.utils_python_interpreter import (
|
26
|
+
get_python_interpreter_v2,
|
27
|
+
)
|
28
|
+
from fractal_server.tasks.v2.utils_templates import get_collection_replacements
|
29
|
+
from fractal_server.tasks.v2.utils_templates import (
|
30
|
+
parse_script_pip_show_stdout,
|
31
|
+
)
|
32
|
+
from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
|
33
|
+
from fractal_server.utils import get_timestamp
|
34
|
+
|
35
|
+
LOGGER_NAME = __name__
|
36
|
+
|
37
|
+
|
38
|
+
def _copy_wheel_file_local(task_group: TaskGroupV2) -> str:
    """
    Copy the task-group wheel file into the task-group folder.

    Args:
        task_group: Task group whose `wheel_path` file is copied into
            its `path` folder.

    Returns:
        POSIX path of the copied wheel file inside `task_group.path`.
    """
    logger = get_logger(LOGGER_NAME)
    # Keep names `source`/`dest` — they appear in the log f-strings below.
    source = task_group.wheel_path
    wheel_name = Path(source).name
    dest = (Path(task_group.path) / wheel_name).as_posix()
    logger.debug(f"[_copy_wheel_file] START {source=} {dest=}")
    shutil.copy(task_group.wheel_path, task_group.path)
    logger.debug(f"[_copy_wheel_file] END {source=} {dest=}")
    return dest
|
48
|
+
|
49
|
+
|
50
|
+
def collect_package_local(
    *,
    task_group_activity_id: int,
    task_group_id: int,
) -> None:
    """
    Collect a task package.

    This function is run as a background task, therefore exceptions must be
    handled.

    NOTE: by making this function sync, it runs within a thread - due to
    starlette/fastapi handling of background tasks (see
    https://github.com/encode/starlette/blob/master/starlette/background.py).


    Arguments:
        task_group_id: ID of the `TaskGroupV2` row being collected.
        task_group_activity_id: ID of the `TaskGroupActivityV2` row that
            tracks the progress/log of this collection.
    """

    with TemporaryDirectory() as tmpdir:
        # Collection logs are written to a temporary file and periodically
        # copied into `activity.log` in the database.
        log_file_path = get_log_path(Path(tmpdir))
        logger = set_logger(
            logger_name=LOGGER_NAME,
            log_file_path=log_file_path,
        )

        with next(get_sync_db()) as db:

            # Get main objects from db
            activity = db.get(TaskGroupActivityV2, task_group_activity_id)
            task_group = db.get(TaskGroupV2, task_group_id)
            if activity is None or task_group is None:
                # Use `logging` directly
                logging.error(
                    "Cannot find database rows with "
                    f"{task_group_id=} and {task_group_activity_id=}:\n"
                    f"{task_group=}\n{activity=}. Exit."
                )
                return

            # Log some info
            logger.debug("START")
            for key, value in task_group.model_dump().items():
                logger.debug(f"task_group.{key}: {value}")

            # Check that the (local) task_group path does not already exist
            if Path(task_group.path).exists():
                error_msg = f"{task_group.path} already exists."
                logger.error(error_msg)
                fail_and_cleanup(
                    task_group=task_group,
                    task_group_activity=activity,
                    logger_name=LOGGER_NAME,
                    log_file_path=log_file_path,
                    exception=FileExistsError(error_msg),
                    db=db,
                )
                return

            try:

                # Create task_group.path folder
                Path(task_group.path).mkdir(parents=True)
                logger.debug(f"Created {task_group.path}")

                # Copy wheel file into task group path
                if task_group.wheel_path:
                    new_wheel_path = _copy_wheel_file_local(
                        task_group=task_group
                    )
                    task_group.wheel_path = new_wheel_path
                    task_group = add_commit_refresh(obj=task_group, db=db)

                # Prepare replacements for templates
                replacements = get_collection_replacements(
                    task_group=task_group,
                    python_bin=get_python_interpreter_v2(
                        python_version=task_group.python_version
                    ),
                )

                # Prepare common arguments for `_customize_and_run_template``
                common_args = dict(
                    replacements=replacements,
                    script_dir=(
                        Path(task_group.path) / SCRIPTS_SUBFOLDER
                    ).as_posix(),
                    prefix=(
                        f"{int(time.time())}_"
                        f"{TaskGroupActivityActionV2.COLLECT}_"
                    ),
                    logger_name=LOGGER_NAME,
                )

                # Set status to ONGOING and refresh logs
                activity.status = TaskGroupActivityStatusV2.ONGOING
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                # Run script 1 (create the venv)
                stdout = _customize_and_run_template(
                    template_filename="1_create_venv.sh",
                    **common_args,
                )
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                # Run script 2 (pip install the package)
                stdout = _customize_and_run_template(
                    template_filename="2_pip_install.sh",
                    **common_args,
                )
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                # Run script 3 (pip freeze, stored on the task group below)
                pip_freeze_stdout = _customize_and_run_template(
                    template_filename="3_pip_freeze.sh",
                    **common_args,
                )
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                # Run script 4 (pip show, parsed below for package metadata)
                stdout = _customize_and_run_template(
                    template_filename="4_pip_show.sh",
                    **common_args,
                )
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                # Run script 5
                venv_info = _customize_and_run_template(
                    template_filename="5_get_venv_size_and_file_number.sh",
                    **common_args,
                )
                # NOTE(review): assumes the script prints exactly two
                # whitespace-separated tokens (size, file count) — a
                # different output shape raises ValueError here.
                venv_size, venv_file_number = venv_info.split()
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                pkg_attrs = parse_script_pip_show_stdout(stdout)
                for key, value in pkg_attrs.items():
                    logger.debug(f"Parsed from pip-show: {key}={value}")
                # Check package_name match between pip show and task-group
                task_group = db.get(TaskGroupV2, task_group_id)
                package_name_pip_show = pkg_attrs.get("package_name")
                package_name_task_group = task_group.pkg_name
                compare_package_names(
                    pkg_name_pip_show=package_name_pip_show,
                    pkg_name_task_group=package_name_task_group,
                    logger_name=LOGGER_NAME,
                )
                # Extract/drop parsed attributes
                package_name = package_name_task_group
                python_bin = pkg_attrs.pop("python_bin")
                package_root_parent = pkg_attrs.pop("package_root_parent")

                # TODO : Use more robust logic to determine `package_root`.
                # Examples: use `importlib.util.find_spec`, or parse the
                # output of `pip show --files {package_name}`.
                package_name_underscore = package_name.replace("-", "_")
                package_root = (
                    Path(package_root_parent) / package_name_underscore
                ).as_posix()

                # Read and validate manifest file
                manifest_path = pkg_attrs.pop("manifest_path")
                logger.info(f"now loading {manifest_path=}")
                with open(manifest_path) as json_data:
                    pkg_manifest_dict = json.load(json_data)
                logger.info(f"loaded {manifest_path=}")
                logger.info("now validating manifest content")
                pkg_manifest = ManifestV2(**pkg_manifest_dict)
                logger.info("validated manifest content")
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                logger.info("_prepare_tasks_metadata - start")
                task_list = _prepare_tasks_metadata(
                    package_manifest=pkg_manifest,
                    package_version=task_group.version,
                    package_root=Path(package_root),
                    python_bin=Path(python_bin),
                )
                check_task_files_exist(task_list=task_list)
                logger.info("_prepare_tasks_metadata - end")
                activity.log = get_current_log(log_file_path)
                activity = add_commit_refresh(obj=activity, db=db)

                logger.info("create_db_tasks_and_update_task_group - " "start")
                create_db_tasks_and_update_task_group(
                    task_list=task_list,
                    task_group_id=task_group.id,
                    db=db,
                )
                logger.info("create_db_tasks_and_update_task_group - end")

                # Update task_group data
                logger.info(
                    "Add pip_freeze, venv_size and venv_file_number "
                    "to TaskGroupV2 - start"
                )
                task_group.pip_freeze = pip_freeze_stdout
                task_group.venv_size_in_kB = int(venv_size)
                task_group.venv_file_number = int(venv_file_number)
                task_group = add_commit_refresh(obj=task_group, db=db)
                logger.info(
                    "Add pip_freeze, venv_size and venv_file_number "
                    "to TaskGroupV2 - end"
                )

                # Finalize (write metadata to DB)
                logger.debug("finalising - START")
                activity.status = TaskGroupActivityStatusV2.OK
                activity.timestamp_ended = get_timestamp()
                activity = add_commit_refresh(obj=activity, db=db)
                logger.debug("finalising - END")
                logger.debug("END")

            except Exception as collection_e:
                # Delete corrupted package dir
                try:
                    logger.info(f"Now delete folder {task_group.path}")
                    shutil.rmtree(task_group.path)
                    logger.info(f"Deleted folder {task_group.path}")
                except Exception as rm_e:
                    # Best-effort cleanup: log and continue to DB cleanup
                    logger.error(
                        "Removing folder failed.\n"
                        f"Original error:\n{str(rm_e)}"
                    )

                fail_and_cleanup(
                    task_group=task_group,
                    task_group_activity=activity,
                    logger_name=LOGGER_NAME,
                    log_file_path=log_file_path,
                    exception=collection_e,
                    db=db,
                )
                return
|
@@ -0,0 +1,162 @@
|
|
1
|
+
import logging
|
2
|
+
import shutil
|
3
|
+
import time
|
4
|
+
from pathlib import Path
|
5
|
+
from tempfile import TemporaryDirectory
|
6
|
+
|
7
|
+
from ..utils_background import add_commit_refresh
|
8
|
+
from ..utils_background import fail_and_cleanup
|
9
|
+
from ..utils_templates import get_collection_replacements
|
10
|
+
from .utils_local import _customize_and_run_template
|
11
|
+
from fractal_server.app.db import get_sync_db
|
12
|
+
from fractal_server.app.models.v2 import TaskGroupActivityV2
|
13
|
+
from fractal_server.app.models.v2 import TaskGroupV2
|
14
|
+
from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
|
15
|
+
from fractal_server.app.schemas.v2.task_group import TaskGroupActivityStatusV2
|
16
|
+
from fractal_server.logger import set_logger
|
17
|
+
from fractal_server.tasks.utils import get_log_path
|
18
|
+
from fractal_server.tasks.v2.utils_background import get_current_log
|
19
|
+
from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
|
20
|
+
from fractal_server.utils import get_timestamp
|
21
|
+
|
22
|
+
LOGGER_NAME = __name__
|
23
|
+
|
24
|
+
|
25
|
+
def deactivate_local(
    *,
    task_group_activity_id: int,
    task_group_id: int,
) -> None:
    """
    Deactivate a task group venv.

    This function is run as a background task, therefore exceptions must be
    handled.

    Arguments:
        task_group_id: ID of the `TaskGroupV2` row to deactivate.
        task_group_activity_id: ID of the `TaskGroupActivityV2` row that
            tracks the progress/log of this deactivation.
    """

    with TemporaryDirectory() as tmpdir:
        # Logs are written to a temporary file and copied into
        # `activity.log` in the database.
        log_file_path = get_log_path(Path(tmpdir))
        logger = set_logger(
            logger_name=LOGGER_NAME,
            log_file_path=log_file_path,
        )

        with next(get_sync_db()) as db:

            # Get main objects from db
            activity = db.get(TaskGroupActivityV2, task_group_activity_id)
            task_group = db.get(TaskGroupV2, task_group_id)
            if activity is None or task_group is None:
                # Use `logging` directly
                logging.error(
                    "Cannot find database rows with "
                    f"{task_group_id=} and {task_group_activity_id=}:\n"
                    f"{task_group=}\n{activity=}. Exit."
                )
                return

            # Log some info
            logger.debug("START")

            for key, value in task_group.model_dump().items():
                logger.debug(f"task_group.{key}: {value}")

            # Check that the (local) task_group venv_path does exist
            if not Path(task_group.venv_path).exists():
                error_msg = f"{task_group.venv_path} does not exist."
                logger.error(error_msg)
                fail_and_cleanup(
                    task_group=task_group,
                    task_group_activity=activity,
                    logger_name=LOGGER_NAME,
                    log_file_path=log_file_path,
                    exception=FileNotFoundError(error_msg),
                    db=db,
                )
                return

            try:

                activity.status = TaskGroupActivityStatusV2.ONGOING
                activity = add_commit_refresh(obj=activity, db=db)
                if task_group.pip_freeze is None:
                    # Backfill missing pip-freeze data by running the
                    # pip-freeze template before removing the venv.
                    logger.warning(
                        "Recreate pip-freeze information, since "
                        f"{task_group.pip_freeze=}. NOTE: this should only "
                        "happen for task groups created before 2.9.0."
                    )
                    # Prepare replacements for templates
                    replacements = get_collection_replacements(
                        task_group=task_group,
                        python_bin="/not/applicable",
                    )

                    # Prepare common arguments for _customize_and_run_template
                    common_args = dict(
                        replacements=replacements,
                        script_dir=(
                            Path(task_group.path) / SCRIPTS_SUBFOLDER
                        ).as_posix(),
                        prefix=(
                            f"{int(time.time())}_"
                            f"{TaskGroupActivityActionV2.DEACTIVATE}_"
                        ),
                        logger_name=LOGGER_NAME,
                    )
                    pip_freeze_stdout = _customize_and_run_template(
                        template_filename="3_pip_freeze.sh",
                        **common_args,
                    )
                    # Update pip-freeze data
                    logger.info("Add pip freeze stdout to TaskGroupV2 - start")
                    activity.log = get_current_log(log_file_path)
                    activity = add_commit_refresh(obj=activity, db=db)
                    task_group.pip_freeze = pip_freeze_stdout
                    task_group = add_commit_refresh(obj=task_group, db=db)
                    logger.info("Add pip freeze stdout to TaskGroupV2 - end")

                # A wheel-based task group cannot be reactivated without its
                # wheel file, so its absence is a fatal error here.
                if task_group.origin == "wheel" and (
                    task_group.wheel_path is None
                    or not Path(task_group.wheel_path).exists()
                ):

                    logger.error(
                        "Cannot find task_group wheel_path with "
                        f"{task_group_id=} :\n"
                        f"{task_group=}\n. Exit."
                    )
                    error_msg = f"{task_group.wheel_path} does not exist."
                    logger.error(error_msg)
                    fail_and_cleanup(
                        task_group=task_group,
                        task_group_activity=activity,
                        logger_name=LOGGER_NAME,
                        log_file_path=log_file_path,
                        exception=FileNotFoundError(error_msg),
                        db=db,
                    )
                    return

                # At this point we are sure that venv_path
                # wheel_path and pip_freeze exist
                shutil.rmtree(task_group.venv_path)

                activity.log = f"All good, {task_group.venv_path} removed."
                activity.status = TaskGroupActivityStatusV2.OK
                activity.timestamp_ended = get_timestamp()
                activity = add_commit_refresh(obj=activity, db=db)

            except Exception as e:
                fail_and_cleanup(
                    task_group=task_group,
                    task_group_activity=activity,
                    logger_name=LOGGER_NAME,
                    log_file_path=log_file_path,
                    exception=e,
                    db=db,
                )
                return
|