fractal-server 2.11.0a10__py3-none-any.whl → 2.12.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/__init__.py +0 -2
  3. fractal_server/app/models/linkuserproject.py +0 -9
  4. fractal_server/app/models/v2/dataset.py +0 -4
  5. fractal_server/app/models/v2/workflowtask.py +0 -4
  6. fractal_server/app/routes/aux/_job.py +1 -3
  7. fractal_server/app/runner/filenames.py +0 -2
  8. fractal_server/app/runner/shutdown.py +3 -27
  9. fractal_server/config.py +1 -15
  10. fractal_server/main.py +1 -12
  11. fractal_server/migrations/versions/1eac13a26c83_drop_v1_tables.py +67 -0
  12. fractal_server/migrations/versions/af8673379a5c_drop_old_filter_columns.py +54 -0
  13. fractal_server/string_tools.py +0 -21
  14. fractal_server/tasks/utils.py +0 -24
  15. {fractal_server-2.11.0a10.dist-info → fractal_server-2.12.0a0.dist-info}/METADATA +1 -1
  16. {fractal_server-2.11.0a10.dist-info → fractal_server-2.12.0a0.dist-info}/RECORD +19 -63
  17. fractal_server/app/models/v1/__init__.py +0 -13
  18. fractal_server/app/models/v1/dataset.py +0 -71
  19. fractal_server/app/models/v1/job.py +0 -101
  20. fractal_server/app/models/v1/project.py +0 -29
  21. fractal_server/app/models/v1/state.py +0 -34
  22. fractal_server/app/models/v1/task.py +0 -85
  23. fractal_server/app/models/v1/workflow.py +0 -133
  24. fractal_server/app/routes/admin/v1.py +0 -377
  25. fractal_server/app/routes/api/v1/__init__.py +0 -26
  26. fractal_server/app/routes/api/v1/_aux_functions.py +0 -478
  27. fractal_server/app/routes/api/v1/dataset.py +0 -554
  28. fractal_server/app/routes/api/v1/job.py +0 -195
  29. fractal_server/app/routes/api/v1/project.py +0 -475
  30. fractal_server/app/routes/api/v1/task.py +0 -203
  31. fractal_server/app/routes/api/v1/task_collection.py +0 -239
  32. fractal_server/app/routes/api/v1/workflow.py +0 -355
  33. fractal_server/app/routes/api/v1/workflowtask.py +0 -187
  34. fractal_server/app/runner/async_wrap_v1.py +0 -27
  35. fractal_server/app/runner/v1/__init__.py +0 -415
  36. fractal_server/app/runner/v1/_common.py +0 -620
  37. fractal_server/app/runner/v1/_local/__init__.py +0 -186
  38. fractal_server/app/runner/v1/_local/_local_config.py +0 -105
  39. fractal_server/app/runner/v1/_local/_submit_setup.py +0 -48
  40. fractal_server/app/runner/v1/_local/executor.py +0 -100
  41. fractal_server/app/runner/v1/_slurm/__init__.py +0 -312
  42. fractal_server/app/runner/v1/_slurm/_submit_setup.py +0 -81
  43. fractal_server/app/runner/v1/_slurm/get_slurm_config.py +0 -163
  44. fractal_server/app/runner/v1/common.py +0 -117
  45. fractal_server/app/runner/v1/handle_failed_job.py +0 -141
  46. fractal_server/app/schemas/v1/__init__.py +0 -37
  47. fractal_server/app/schemas/v1/applyworkflow.py +0 -161
  48. fractal_server/app/schemas/v1/dataset.py +0 -165
  49. fractal_server/app/schemas/v1/dumps.py +0 -64
  50. fractal_server/app/schemas/v1/manifest.py +0 -126
  51. fractal_server/app/schemas/v1/project.py +0 -66
  52. fractal_server/app/schemas/v1/state.py +0 -18
  53. fractal_server/app/schemas/v1/task.py +0 -167
  54. fractal_server/app/schemas/v1/task_collection.py +0 -110
  55. fractal_server/app/schemas/v1/workflow.py +0 -212
  56. fractal_server/data_migrations/2_11_0.py +0 -168
  57. fractal_server/tasks/v1/_TaskCollectPip.py +0 -103
  58. fractal_server/tasks/v1/__init__.py +0 -0
  59. fractal_server/tasks/v1/background_operations.py +0 -352
  60. fractal_server/tasks/v1/endpoint_operations.py +0 -156
  61. fractal_server/tasks/v1/get_collection_data.py +0 -14
  62. fractal_server/tasks/v1/utils.py +0 -67
  63. {fractal_server-2.11.0a10.dist-info → fractal_server-2.12.0a0.dist-info}/LICENSE +0 -0
  64. {fractal_server-2.11.0a10.dist-info → fractal_server-2.12.0a0.dist-info}/WHEEL +0 -0
  65. {fractal_server-2.11.0a10.dist-info → fractal_server-2.12.0a0.dist-info}/entry_points.txt +0 -0
@@ -1,168 +0,0 @@
1
- import logging
2
- from typing import Union
3
-
4
- from sqlalchemy.orm.attributes import flag_modified
5
- from sqlmodel import select
6
-
7
- from fractal_server.app.db import get_sync_db
8
- from fractal_server.app.models import DatasetV2
9
- from fractal_server.app.models import JobV2
10
- from fractal_server.app.models import ProjectV2
11
- from fractal_server.app.models import WorkflowTaskV2
12
- from fractal_server.app.models import WorkflowV2
13
- from fractal_server.app.schemas.v2 import DatasetReadV2
14
- from fractal_server.app.schemas.v2 import JobReadV2
15
- from fractal_server.app.schemas.v2 import ProjectReadV2
16
- from fractal_server.app.schemas.v2 import TaskReadV2
17
- from fractal_server.app.schemas.v2 import WorkflowTaskReadV2
18
- from fractal_server.images.models import AttributeFiltersType
19
-
20
- logger = logging.getLogger("fix_db")
21
- logger.setLevel(logging.INFO)
22
-
23
-
24
- def dict_values_to_list(
25
- input_dict: dict[str, Union[int, float, bool, str, None]],
26
- identifier: str,
27
- ) -> tuple[AttributeFiltersType, bool]:
28
- was_there_a_warning = False
29
- for k, v in input_dict.items():
30
- if not isinstance(v, (int, float, bool, str, type(None))):
31
- error_msg = (
32
- f"Attribute '{k}' from '{identifier}' "
33
- "has invalid type '{type(v)}'."
34
- )
35
- logger.error(error_msg)
36
- raise RuntimeError(error_msg)
37
- elif v is None:
38
- logger.warning(
39
- f"Attribute '{k}' from '{identifier}' is "
40
- "None and it will be removed."
41
- )
42
- was_there_a_warning = True
43
- else:
44
- input_dict[k] = [v]
45
- return input_dict, was_there_a_warning
46
-
47
-
48
- def fix_db():
49
- logger.info("START execution of fix_db function")
50
-
51
- with next(get_sync_db()) as db:
52
- # DatasetV2.filters
53
- stm = select(DatasetV2).order_by(DatasetV2.id)
54
- datasets = db.execute(stm).scalars().all()
55
- for ds in datasets:
56
- logger.info(f"DatasetV2[{ds.id}] START")
57
- if ds.filters is None:
58
- logger.info(f"DatasetV2[{ds.id}] SKIP")
59
- continue
60
-
61
- ds.attribute_filters, warning = dict_values_to_list(
62
- ds.filters["attributes"],
63
- f"Dataset[{ds.id}].filters.attributes",
64
- )
65
- if warning:
66
- proj = db.get(ProjectV2, ds.project_id)
67
- logger.warning(
68
- "Additional information: "
69
- f"{proj.id=}, "
70
- f"{proj.name=}, "
71
- f"{proj.user_list[0].email=}, "
72
- f"{ds.id=}, "
73
- f"{ds.name=}"
74
- )
75
- ds.type_filters = ds.filters["types"]
76
- ds.filters = None
77
- for i, h in enumerate(ds.history):
78
- ds.history[i]["workflowtask"]["type_filters"] = h[
79
- "workflowtask"
80
- ]["input_filters"]["types"]
81
- ds.history[i]["workflowtask"].pop("input_filters")
82
- flag_modified(ds, "history")
83
- DatasetReadV2(
84
- **ds.model_dump(),
85
- project=ProjectReadV2(**ds.project.model_dump()),
86
- )
87
- db.add(ds)
88
- logger.info(f"DatasetV2[{ds.id}] END - fixed filters")
89
-
90
- logger.info("------ switch from dataset to workflowtasks ------")
91
-
92
- # WorkflowTaskV2.input_filters
93
- stm = select(WorkflowTaskV2).order_by(WorkflowTaskV2.id)
94
- wftasks = db.execute(stm).scalars().all()
95
- for wft in wftasks:
96
- logger.info(f"WorkflowTaskV2[{wft.id}] START")
97
- if wft.input_filters is None:
98
- logger.info(f"WorkflowTaskV2[{wft.id}] SKIP")
99
- continue
100
- wft.type_filters = wft.input_filters["types"]
101
- if wft.input_filters["attributes"]:
102
- logger.warning(
103
- "Removing input_filters['attributes']. "
104
- f"(previous value: {wft.input_filters['attributes']})"
105
- )
106
- wf = db.get(WorkflowV2, wft.workflow_id)
107
- proj = db.get(ProjectV2, wf.project_id)
108
- logger.warning(
109
- "Additional information: "
110
- f"{proj.id=}, "
111
- f"{proj.name=}, "
112
- f"{proj.user_list[0].email=}, "
113
- f"{wf.id=}, "
114
- f"{wf.name=}, "
115
- f"{wft.task.name=}"
116
- )
117
- wft.input_filters = None
118
- flag_modified(wft, "input_filters")
119
- WorkflowTaskReadV2(
120
- **wft.model_dump(),
121
- task=TaskReadV2(**wft.task.model_dump()),
122
- )
123
- db.add(wft)
124
- logger.info(f"WorkflowTaskV2[{wft.id}] END - fixed filters")
125
-
126
- logger.info("------ switch from workflowtasks to jobs ------")
127
-
128
- # JOBS V2
129
- stm = select(JobV2).order_by(JobV2.id)
130
- jobs = db.execute(stm).scalars().all()
131
- for job in jobs:
132
- logger.info(f"JobV2[{job.id}] START")
133
- if "filters" not in job.dataset_dump.keys():
134
- logger.info(f"JobV2[{job.id}] SKIP")
135
- continue
136
- job.dataset_dump["type_filters"] = job.dataset_dump["filters"][
137
- "types"
138
- ]
139
- (
140
- job.dataset_dump["attribute_filters"],
141
- warning,
142
- ) = dict_values_to_list(
143
- job.dataset_dump["filters"]["attributes"],
144
- f"JobV2[{job.id}].dataset_dump.filters.attributes",
145
- )
146
- if warning and job.project_id is not None:
147
- proj = db.get(ProjectV2, job.project_id)
148
- logger.warning(
149
- "Additional information: "
150
- f"{proj.id=}, "
151
- f"{proj.name=}, "
152
- f"{proj.user_list[0].email=}, "
153
- f"{job.id=}, "
154
- f"{job.start_timestamp=}, "
155
- f"{job.end_timestamp=}, "
156
- f"{job.dataset_id=}, "
157
- f"{job.workflow_id=}."
158
- )
159
- job.dataset_dump.pop("filters")
160
- flag_modified(job, "dataset_dump")
161
- JobReadV2(**job.model_dump())
162
- db.add(job)
163
- logger.info(f"JobV2[{job.id}] END - fixed filters")
164
-
165
- db.commit()
166
- logger.info("Changes committed.")
167
-
168
- logger.info("END execution of fix_db function")
@@ -1,103 +0,0 @@
1
- from pathlib import Path
2
- from typing import Optional
3
-
4
- from pydantic import root_validator
5
-
6
- from fractal_server.app.schemas.v1 import ManifestV1
7
- from fractal_server.app.schemas.v1 import TaskCollectPipV1
8
-
9
-
10
- class _TaskCollectPip(TaskCollectPipV1):
11
- """
12
- Internal TaskCollectPip schema
13
-
14
- Differences with its parent class (`TaskCollectPip`):
15
-
16
- 1. We check if the package corresponds to a path in the filesystem, and
17
- whether it exists (via new validator `check_local_package`, new
18
- method `is_local_package` and new attribute `package_path`).
19
- 2. We include an additional `package_manifest` attribute.
20
- 3. We expose an additional attribute `package_name`, which is filled
21
- during task collection.
22
- """
23
-
24
- package_name: Optional[str] = None
25
- package_path: Optional[Path] = None
26
- package_manifest: Optional[ManifestV1] = None
27
-
28
- @property
29
- def is_local_package(self) -> bool:
30
- return bool(self.package_path)
31
-
32
- @root_validator(pre=True)
33
- def check_local_package(cls, values):
34
- """
35
- Checks if package corresponds to an existing path on the filesystem
36
-
37
- In this case, the user is providing directly a package file, rather
38
- than a remote one from PyPI. We set the `package_path` attribute and
39
- get the actual package name and version from the package file name.
40
- """
41
- if "/" in values["package"]:
42
- package_path = Path(values["package"])
43
- if not package_path.is_absolute():
44
- raise ValueError("Package path must be absolute")
45
- if package_path.exists():
46
- values["package_path"] = package_path
47
- (
48
- values["package"],
49
- values["version"],
50
- *_,
51
- ) = package_path.name.split("-")
52
- else:
53
- raise ValueError(f"Package {package_path} does not exist.")
54
- return values
55
-
56
- @property
57
- def package_source(self) -> str:
58
- """
59
- NOTE: As of PR #1188 in `fractal-server`, the attribute
60
- `self.package_name` is normalized; this means e.g. that `_` is
61
- replaced by `-`. To guarantee backwards compatibility with
62
- `Task.source` attributes created before this change, we still replace
63
- `-` with `_` upon generation of the `source` attribute, in this
64
- method.
65
- """
66
- if not self.package_name or not self.package_version:
67
- raise ValueError(
68
- "Cannot construct `package_source` property with "
69
- f"{self.package_name=} and {self.package_version=}."
70
- )
71
- if self.is_local_package:
72
- collection_type = "pip_local"
73
- else:
74
- collection_type = "pip_remote"
75
-
76
- package_extras = self.package_extras or ""
77
- if self.python_version:
78
- python_version = f"py{self.python_version}"
79
- else:
80
- python_version = ""
81
-
82
- source = ":".join(
83
- (
84
- collection_type,
85
- self.package_name.replace("-", "_"), # see method docstring
86
- self.package_version,
87
- package_extras,
88
- python_version,
89
- )
90
- )
91
- return source
92
-
93
- def check(self):
94
- """
95
- Verify that the package has all attributes that are needed to continue
96
- with task collection
97
- """
98
- if not self.package_name:
99
- raise ValueError("`package_name` attribute is not set")
100
- if not self.package_version:
101
- raise ValueError("`package_version` attribute is not set")
102
- if not self.package_manifest:
103
- raise ValueError("`package_manifest` attribute is not set")
File without changes
@@ -1,352 +0,0 @@
1
- """
2
- The main function exported from this module is `background_collect_pip`, which
3
- is used as a background task for the task-collection endpoint.
4
- """
5
- import json
6
- from pathlib import Path
7
- from shutil import rmtree as shell_rmtree
8
-
9
- from ...string_tools import slugify_task_name_for_source_v1
10
- from ..utils import get_collection_log_v1
11
- from ..utils import get_collection_path
12
- from ..utils import get_log_path
13
- from ..v2.utils_package_names import normalize_package_name
14
- from ._TaskCollectPip import _TaskCollectPip
15
- from .utils import _init_venv_v1
16
- from fractal_server.app.db import DBSyncSession
17
- from fractal_server.app.db import get_sync_db
18
- from fractal_server.app.models.v1 import State
19
- from fractal_server.app.models.v1 import Task
20
- from fractal_server.app.schemas.v1 import TaskCollectStatusV1
21
- from fractal_server.app.schemas.v1 import TaskCreateV1
22
- from fractal_server.app.schemas.v1 import TaskReadV1
23
- from fractal_server.logger import close_logger
24
- from fractal_server.logger import get_logger
25
- from fractal_server.logger import set_logger
26
- from fractal_server.utils import execute_command_async
27
-
28
-
29
- async def _pip_install(
30
- venv_path: Path,
31
- task_pkg: _TaskCollectPip,
32
- logger_name: str,
33
- ) -> Path:
34
- """
35
- Install package in venv
36
-
37
- Args:
38
- venv_path:
39
- task_pkg:
40
- logger_name:
41
-
42
- Returns:
43
- The location of the package.
44
- """
45
-
46
- logger = get_logger(logger_name)
47
-
48
- pip = venv_path / "venv/bin/pip"
49
-
50
- extras = f"[{task_pkg.package_extras}]" if task_pkg.package_extras else ""
51
-
52
- if task_pkg.is_local_package:
53
- pip_install_str = f"{task_pkg.package_path.as_posix()}{extras}"
54
- else:
55
- version_string = (
56
- f"=={task_pkg.package_version}" if task_pkg.package_version else ""
57
- )
58
- pip_install_str = f"{task_pkg.package}{extras}{version_string}"
59
-
60
- cmd_install = f"{pip} install {pip_install_str}"
61
- cmd_inspect = f"{pip} show {task_pkg.package}"
62
-
63
- await execute_command_async(
64
- cwd=venv_path,
65
- command=f"{pip} install --upgrade pip",
66
- logger_name=logger_name,
67
- )
68
- await execute_command_async(
69
- cwd=venv_path, command=cmd_install, logger_name=logger_name
70
- )
71
- if task_pkg.pinned_package_versions:
72
- for (
73
- pinned_pkg_name,
74
- pinned_pkg_version,
75
- ) in task_pkg.pinned_package_versions.items():
76
-
77
- logger.debug(
78
- "Specific version required: "
79
- f"{pinned_pkg_name}=={pinned_pkg_version}"
80
- )
81
- logger.debug(
82
- "Preliminary check: verify that "
83
- f"{pinned_pkg_version} is already installed"
84
- )
85
- stdout_inspect = await execute_command_async(
86
- cwd=venv_path,
87
- command=f"{pip} show {pinned_pkg_name}",
88
- logger_name=logger_name,
89
- )
90
- current_version = next(
91
- line.split()[-1]
92
- for line in stdout_inspect.split("\n")
93
- if line.startswith("Version:")
94
- )
95
- if current_version != pinned_pkg_version:
96
- logger.debug(
97
- f"Currently installed version of {pinned_pkg_name} "
98
- f"({current_version}) differs from pinned version "
99
- f"({pinned_pkg_version}); "
100
- f"install version {pinned_pkg_version}."
101
- )
102
- await execute_command_async(
103
- cwd=venv_path,
104
- command=(
105
- f"{pip} install "
106
- f"{pinned_pkg_name}=={pinned_pkg_version}"
107
- ),
108
- logger_name=logger_name,
109
- )
110
- else:
111
- logger.debug(
112
- f"Currently installed version of {pinned_pkg_name} "
113
- f"({current_version}) already matches the pinned version."
114
- )
115
-
116
- # Extract package installation path from `pip show`
117
- stdout_inspect = await execute_command_async(
118
- cwd=venv_path, command=cmd_inspect, logger_name=logger_name
119
- )
120
-
121
- location = Path(
122
- next(
123
- line.split()[-1]
124
- for line in stdout_inspect.split("\n")
125
- if line.startswith("Location:")
126
- )
127
- )
128
-
129
- # NOTE
130
- # https://packaging.python.org/en/latest/specifications/recording-installed-packages/
131
- # This directory is named as {name}-{version}.dist-info, with name and
132
- # version fields corresponding to Core metadata specifications. Both
133
- # fields must be normalized (see the name normalization specification and
134
- # the version normalization specification), and replace dash (-)
135
- # characters with underscore (_) characters, so the .dist-info directory
136
- # always has exactly one dash (-) character in its stem, separating the
137
- # name and version fields.
138
- package_root = location / (task_pkg.package.replace("-", "_"))
139
- logger.debug(f"[_pip install] {location=}")
140
- logger.debug(f"[_pip install] {task_pkg.package=}")
141
- logger.debug(f"[_pip install] {package_root=}")
142
- if not package_root.exists():
143
- raise RuntimeError(
144
- "Could not determine package installation location."
145
- )
146
- return package_root
147
-
148
-
149
- async def _create_venv_install_package(
150
- *,
151
- task_pkg: _TaskCollectPip,
152
- path: Path,
153
- logger_name: str,
154
- ) -> tuple[Path, Path]:
155
- """Create venv and install package
156
-
157
- Args:
158
- path: the directory in which to create the environment
159
- task_pkg: object containing the different metadata required to install
160
- the package
161
-
162
- Returns:
163
- python_bin: path to venv's python interpreter
164
- package_root: the location of the package manifest
165
- """
166
-
167
- # Normalize package name
168
- task_pkg.package_name = normalize_package_name(task_pkg.package_name)
169
- task_pkg.package = normalize_package_name(task_pkg.package)
170
-
171
- python_bin = await _init_venv_v1(
172
- path=path,
173
- python_version=task_pkg.python_version,
174
- logger_name=logger_name,
175
- )
176
- package_root = await _pip_install(
177
- venv_path=path, task_pkg=task_pkg, logger_name=logger_name
178
- )
179
- return python_bin, package_root
180
-
181
-
182
- async def create_package_environment_pip(
183
- *,
184
- task_pkg: _TaskCollectPip,
185
- venv_path: Path,
186
- logger_name: str,
187
- ) -> list[TaskCreateV1]:
188
- """
189
- Create environment, install package, and prepare task list
190
- """
191
-
192
- logger = get_logger(logger_name)
193
-
194
- # Normalize package name
195
- task_pkg.package_name = normalize_package_name(task_pkg.package_name)
196
- task_pkg.package = normalize_package_name(task_pkg.package)
197
-
198
- # Only proceed if package, version and manifest attributes are set
199
- task_pkg.check()
200
-
201
- try:
202
-
203
- logger.debug("Creating venv and installing package")
204
- python_bin, package_root = await _create_venv_install_package(
205
- path=venv_path,
206
- task_pkg=task_pkg,
207
- logger_name=logger_name,
208
- )
209
- logger.debug("Venv creation and package installation ended correctly.")
210
-
211
- # Prepare task_list with appropriate metadata
212
- logger.debug("Creating task list from manifest")
213
- task_list = []
214
- for t in task_pkg.package_manifest.task_list:
215
- # Fill in attributes for TaskCreate
216
- task_executable = package_root / t.executable
217
- cmd = f"{python_bin.as_posix()} {task_executable.as_posix()}"
218
- task_name_slug = slugify_task_name_for_source_v1(t.name)
219
- task_source = f"{task_pkg.package_source}:{task_name_slug}"
220
- if not task_executable.exists():
221
- raise FileNotFoundError(
222
- f"Cannot find executable `{task_executable}` "
223
- f"for task `{t.name}`"
224
- )
225
- manifest = task_pkg.package_manifest
226
- if manifest.has_args_schemas:
227
- additional_attrs = dict(
228
- args_schema_version=manifest.args_schema_version
229
- )
230
- else:
231
- additional_attrs = {}
232
- this_task = TaskCreateV1(
233
- **t.dict(),
234
- command=cmd,
235
- version=task_pkg.package_version,
236
- **additional_attrs,
237
- source=task_source,
238
- )
239
- task_list.append(this_task)
240
- logger.debug("Task list created correctly")
241
- except Exception as e:
242
- logger.error("Task manifest loading failed")
243
- raise e
244
- return task_list
245
-
246
-
247
- async def _insert_tasks(
248
- task_list: list[TaskCreateV1],
249
- db: DBSyncSession,
250
- ) -> list[Task]:
251
- """
252
- Insert tasks into database
253
- """
254
- task_db_list = [Task(**t.dict()) for t in task_list]
255
- db.add_all(task_db_list)
256
- db.commit()
257
- for t in task_db_list:
258
- db.refresh(t)
259
- db.close()
260
- return task_db_list
261
-
262
-
263
- async def background_collect_pip(
264
- state_id: int,
265
- venv_path: Path,
266
- task_pkg: _TaskCollectPip,
267
- ) -> None:
268
- """
269
- Install package and collect tasks
270
-
271
- Install a python package and collect the tasks it provides according to
272
- the manifest.
273
-
274
- In case of error, copy the log into the state and delete the package
275
- directory.
276
- """
277
- logger_name = task_pkg.package.replace("/", "_")
278
- logger = set_logger(
279
- logger_name=logger_name,
280
- log_file_path=get_log_path(venv_path),
281
- )
282
- logger.debug("Start background task collection")
283
- for key, value in task_pkg.dict(exclude={"package_manifest"}).items():
284
- logger.debug(f"{key}: {value}")
285
-
286
- with next(get_sync_db()) as db:
287
- state: State = db.get(State, state_id)
288
- data = TaskCollectStatusV1(**state.data)
289
- data.info = None
290
-
291
- try:
292
- # install
293
- logger.debug("Task-collection status: installing")
294
- data.status = "installing"
295
-
296
- state.data = data.sanitised_dict()
297
- db.merge(state)
298
- db.commit()
299
- task_list = await create_package_environment_pip(
300
- venv_path=venv_path,
301
- task_pkg=task_pkg,
302
- logger_name=logger_name,
303
- )
304
-
305
- # collect
306
- logger.debug("Task-collection status: collecting")
307
- data.status = "collecting"
308
- state.data = data.sanitised_dict()
309
- db.merge(state)
310
- db.commit()
311
- tasks = await _insert_tasks(task_list=task_list, db=db)
312
-
313
- # finalise
314
- logger.debug("Task-collection status: finalising")
315
- collection_path = get_collection_path(venv_path)
316
- data.task_list = [
317
- TaskReadV1(**task.model_dump()) for task in tasks
318
- ]
319
- with collection_path.open("w") as f:
320
- json.dump(data.sanitised_dict(), f, indent=2)
321
-
322
- # Update DB
323
- data.status = "OK"
324
- data.log = get_collection_log_v1(venv_path)
325
- state.data = data.sanitised_dict()
326
- db.add(state)
327
- db.merge(state)
328
- db.commit()
329
-
330
- # Write last logs to file
331
- logger.debug("Task-collection status: OK")
332
- logger.info("Background task collection completed successfully")
333
- close_logger(logger)
334
- db.close()
335
-
336
- except Exception as e:
337
- # Write last logs to file
338
- logger.debug("Task-collection status: fail")
339
- logger.info(f"Background collection failed. Original error: {e}")
340
- close_logger(logger)
341
-
342
- # Update db
343
- data.status = "fail"
344
- data.info = f"Original error: {e}"
345
- data.log = get_collection_log_v1(venv_path)
346
- state.data = data.sanitised_dict()
347
- db.merge(state)
348
- db.commit()
349
- db.close()
350
-
351
- # Delete corrupted package dir
352
- shell_rmtree(venv_path)