fractal-server 2.14.16__py3-none-any.whl → 2.15.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
Files changed (54)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/security.py +2 -2
  3. fractal_server/app/models/user_settings.py +2 -2
  4. fractal_server/app/models/v2/dataset.py +3 -3
  5. fractal_server/app/models/v2/job.py +6 -6
  6. fractal_server/app/models/v2/task.py +12 -8
  7. fractal_server/app/models/v2/task_group.py +19 -7
  8. fractal_server/app/models/v2/workflowtask.py +6 -6
  9. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +2 -5
  10. fractal_server/app/routes/api/v2/__init__.py +6 -0
  11. fractal_server/app/routes/api/v2/_aux_functions_tasks.py +22 -0
  12. fractal_server/app/routes/api/v2/task_collection.py +8 -18
  13. fractal_server/app/routes/api/v2/task_collection_custom.py +2 -2
  14. fractal_server/app/routes/api/v2/task_collection_pixi.py +219 -0
  15. fractal_server/app/routes/api/v2/task_group.py +3 -0
  16. fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -10
  17. fractal_server/app/runner/executors/slurm_common/_slurm_config.py +10 -0
  18. fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +39 -14
  19. fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +8 -1
  20. fractal_server/app/schemas/v2/__init__.py +1 -1
  21. fractal_server/app/schemas/v2/dumps.py +1 -1
  22. fractal_server/app/schemas/v2/task_collection.py +1 -1
  23. fractal_server/app/schemas/v2/task_group.py +7 -5
  24. fractal_server/config.py +70 -0
  25. fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py +53 -0
  26. fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py +340 -0
  27. fractal_server/ssh/_fabric.py +26 -0
  28. fractal_server/tasks/v2/local/__init__.py +3 -0
  29. fractal_server/tasks/v2/local/_utils.py +4 -3
  30. fractal_server/tasks/v2/local/collect.py +26 -30
  31. fractal_server/tasks/v2/local/collect_pixi.py +252 -0
  32. fractal_server/tasks/v2/local/deactivate.py +39 -46
  33. fractal_server/tasks/v2/local/deactivate_pixi.py +98 -0
  34. fractal_server/tasks/v2/local/reactivate.py +12 -23
  35. fractal_server/tasks/v2/local/reactivate_pixi.py +184 -0
  36. fractal_server/tasks/v2/ssh/__init__.py +3 -0
  37. fractal_server/tasks/v2/ssh/_utils.py +50 -9
  38. fractal_server/tasks/v2/ssh/collect.py +46 -56
  39. fractal_server/tasks/v2/ssh/collect_pixi.py +315 -0
  40. fractal_server/tasks/v2/ssh/deactivate.py +54 -67
  41. fractal_server/tasks/v2/ssh/deactivate_pixi.py +122 -0
  42. fractal_server/tasks/v2/ssh/reactivate.py +25 -38
  43. fractal_server/tasks/v2/ssh/reactivate_pixi.py +233 -0
  44. fractal_server/tasks/v2/templates/pixi_1_extract.sh +40 -0
  45. fractal_server/tasks/v2/templates/pixi_2_install.sh +52 -0
  46. fractal_server/tasks/v2/templates/pixi_3_post_install.sh +76 -0
  47. fractal_server/tasks/v2/utils_background.py +50 -8
  48. fractal_server/tasks/v2/utils_pixi.py +38 -0
  49. fractal_server/tasks/v2/utils_templates.py +14 -1
  50. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/METADATA +1 -1
  51. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/RECORD +54 -41
  52. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/LICENSE +0 -0
  53. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/WHEEL +0 -0
  54. {fractal_server-2.14.16.dist-info → fractal_server-2.15.0.dist-info}/entry_points.txt +0 -0
fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py
@@ -0,0 +1,340 @@
+"""JSON to JSONB
+
+Revision ID: b3ffb095f973
+Revises: b1e7f7a1ff71
+Create Date: 2025-06-19 10:12:06.699107
+
+"""
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "b3ffb095f973"
+down_revision = "b1e7f7a1ff71"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "history",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'[]'::json"),
+        )
+        batch_op.alter_column(
+            "images",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'[]'::json"),
+        )
+
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "dataset_dump",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+        )
+        batch_op.alter_column(
+            "workflow_dump",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+        )
+        batch_op.alter_column(
+            "project_dump",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+        )
+        batch_op.alter_column(
+            "attribute_filters",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'{}'::json"),
+        )
+        batch_op.alter_column(
+            "type_filters",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'{}'::json"),
+        )
+
+    with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "pinned_package_versions",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=True,
+            existing_server_default=sa.text("'{}'::json"),
+        )
+
+    with op.batch_alter_table("taskv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "meta_non_parallel",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'{}'::json"),
+        )
+        batch_op.alter_column(
+            "meta_parallel",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'{}'::json"),
+        )
+        batch_op.alter_column(
+            "args_schema_non_parallel",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "args_schema_parallel",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "input_types",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "output_types",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "tags",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'[]'::json"),
+        )
+
+    with op.batch_alter_table("user_settings", schema=None) as batch_op:
+        batch_op.alter_column(
+            "slurm_accounts",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'[]'::json"),
+        )
+
+    with op.batch_alter_table("usergroup", schema=None) as batch_op:
+        batch_op.alter_column(
+            "viewer_paths",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'[]'::json"),
+        )
+
+    with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "meta_parallel",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "meta_non_parallel",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "args_parallel",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "args_non_parallel",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "type_filters",
+            existing_type=postgresql.JSON(astext_type=sa.Text()),
+            type_=postgresql.JSONB(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'{}'::json"),
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("workflowtaskv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "type_filters",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'{}'::json"),
+        )
+        batch_op.alter_column(
+            "args_non_parallel",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "args_parallel",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "meta_non_parallel",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "meta_parallel",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+
+    with op.batch_alter_table("usergroup", schema=None) as batch_op:
+        batch_op.alter_column(
+            "viewer_paths",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'[]'::json"),
+        )
+
+    with op.batch_alter_table("user_settings", schema=None) as batch_op:
+        batch_op.alter_column(
+            "slurm_accounts",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'[]'::json"),
+        )
+
+    with op.batch_alter_table("taskv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "tags",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'[]'::json"),
+        )
+        batch_op.alter_column(
+            "output_types",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "input_types",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "args_schema_parallel",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "args_schema_non_parallel",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=True,
+        )
+        batch_op.alter_column(
+            "meta_parallel",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'{}'::json"),
+        )
+        batch_op.alter_column(
+            "meta_non_parallel",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'{}'::json"),
+        )
+
+    with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "pinned_package_versions",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=True,
+            existing_server_default=sa.text("'{}'::json"),
+        )
+
+    with op.batch_alter_table("jobv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "type_filters",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'{}'::json"),
+        )
+        batch_op.alter_column(
+            "attribute_filters",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'{}'::json"),
+        )
+        batch_op.alter_column(
+            "project_dump",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+        )
+        batch_op.alter_column(
+            "workflow_dump",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+        )
+        batch_op.alter_column(
+            "dataset_dump",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+        )
+
+    with op.batch_alter_table("datasetv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "images",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'[]'::json"),
+        )
+        batch_op.alter_column(
+            "history",
+            existing_type=postgresql.JSONB(astext_type=sa.Text()),
+            type_=postgresql.JSON(astext_type=sa.Text()),
+            existing_nullable=False,
+            existing_server_default=sa.text("'[]'::json"),
+        )
+
+    # ### end Alembic commands ###
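The migration only changes column types in place; the matching model-side change is the small edits to the files under fractal_server/app/models/ listed above (declaring the columns as JSONB). A minimal illustrative sketch of such a declaration, assuming SQLModel as used by fractal-server; the model and field names here are hypothetical, not the package's code:

# Illustrative sketch only: a JSONB column declared the way this
# migration expects, preserving the "'[]'::json" server default.
from sqlalchemy import Column, text
from sqlalchemy.dialects.postgresql import JSONB
from sqlmodel import Field, SQLModel


class ExampleDataset(SQLModel, table=True):
    id: int | None = Field(default=None, primary_key=True)
    # JSONB instead of JSON; nullability and server default are unchanged.
    history: list = Field(
        default_factory=list,
        sa_column=Column(
            JSONB, server_default=text("'[]'::json"), nullable=False
        ),
    )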
fractal_server/ssh/_fabric.py
@@ -204,6 +204,32 @@ class FractalSSH:
         self.logger.info(f"END reading remote JSON file {filepath}.")
         return data

+    def read_remote_text_file(self, filepath: str) -> str:
+        """
+        Read a remote text file into a string.
+
+        Note from paramiko docs:
+        > The Python 'b' flag is ignored, since SSH treats all files as binary.
+        """
+        self.logger.info(f"START reading remote text file {filepath}.")
+        with _acquire_lock_with_timeout(
+            lock=self._lock,
+            label="read_remote_text_file",
+            timeout=self.default_lock_timeout,
+        ):
+            try:
+                with self._sftp_unsafe().open(filepath, "r") as f:
+                    data = f.read().decode()
+            except Exception as e:
+                self.log_and_raise(
+                    e=e,
+                    message=(
+                        f"Error in `read_remote_text_file`, for {filepath=}."
+                    ),
+                )
+        self.logger.info(f"END reading remote text file {filepath}.")
+        return data
+
     def check_connection(self) -> None:
         """
         Open the SSH connection and handle exceptions.
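The new `read_remote_text_file` wraps the SFTP read pattern that paramiko exposes directly, adding the locking and logging shown above. A standalone sketch of that underlying pattern; host, credentials, and path are placeholders:

# Standalone paramiko sketch of the read pattern wrapped above;
# connection details are placeholders, not from the package.
import paramiko

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect("cluster.example.org", username="fractal")
try:
    sftp = client.open_sftp()
    with sftp.open("/remote/path/pyproject.toml", "r") as f:
        # SSH treats all files as binary, hence the explicit decode
        text = f.read().decode()
finally:
    client.close()
print(text)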
fractal_server/tasks/v2/local/__init__.py
@@ -1,3 +1,6 @@
 from .collect import collect_local  # noqa
+from .collect_pixi import collect_local_pixi  # noqa
 from .deactivate import deactivate_local  # noqa
+from .deactivate_pixi import deactivate_local_pixi  # noqa
 from .reactivate import reactivate_local  # noqa
+from .reactivate_pixi import reactivate_local_pixi  # noqa
fractal_server/tasks/v2/local/_utils.py
@@ -31,7 +31,7 @@ def _customize_and_run_template(
             f"Invalid {template_filename=} (it must end with '.sh')."
         )

-    script_filename = f"{prefix}{template_filename}"
+    script_filename = f"{prefix}_{template_filename}"
     script_path_local = Path(script_dir) / script_filename
     # Read template
     customize_template(
@@ -53,16 +53,17 @@ def check_task_files_exist(task_list: list[TaskCreateV2]) -> None:
     Args:
         task_list:
     """
+
     for _task in task_list:
         if _task.command_non_parallel is not None:
-            _task_path = _task.command_non_parallel.split()[1]
+            _task_path = _task.command_non_parallel.split()[-1]
             if not Path(_task_path).exists():
                 raise FileNotFoundError(
                     f"Task `{_task.name}` has `command_non_parallel` "
                     f"pointing to missing file `{_task_path}`."
                 )
         if _task.command_parallel is not None:
-            _task_path = _task.command_parallel.split()[1]
+            _task_path = _task.command_parallel.split()[-1]
             if not Path(_task_path).exists():
                 raise FileNotFoundError(
                     f"Task `{_task.name}` has `command_parallel` "
fractal_server/tasks/v2/local/collect.py
@@ -1,5 +1,4 @@
 import json
-import logging
 import shutil
 import time
 from pathlib import Path
@@ -8,20 +7,22 @@ from tempfile import TemporaryDirectory
 from ..utils_database import create_db_tasks_and_update_task_group_sync
 from ._utils import _customize_and_run_template
 from fractal_server.app.db import get_sync_db
-from fractal_server.app.models.v2 import TaskGroupActivityV2
 from fractal_server.app.models.v2 import TaskGroupV2
+from fractal_server.app.schemas.v2 import FractalUploadedFile
 from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
 from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
-from fractal_server.app.schemas.v2 import WheelFile
 from fractal_server.app.schemas.v2.manifest import ManifestV2
 from fractal_server.logger import reset_logger_handlers
 from fractal_server.logger import set_logger
 from fractal_server.tasks.utils import get_log_path
 from fractal_server.tasks.v2.local._utils import check_task_files_exist
-from fractal_server.tasks.v2.utils_background import _prepare_tasks_metadata
 from fractal_server.tasks.v2.utils_background import add_commit_refresh
 from fractal_server.tasks.v2.utils_background import fail_and_cleanup
+from fractal_server.tasks.v2.utils_background import (
+    get_activity_and_task_group,
+)
 from fractal_server.tasks.v2.utils_background import get_current_log
+from fractal_server.tasks.v2.utils_background import prepare_tasks_metadata
 from fractal_server.tasks.v2.utils_package_names import compare_package_names
 from fractal_server.tasks.v2.utils_python_interpreter import (
     get_python_interpreter_v2,
@@ -38,7 +39,7 @@ def collect_local(
     *,
     task_group_activity_id: int,
     task_group_id: int,
-    wheel_file: WheelFile | None = None,
+    wheel_file: FractalUploadedFile | None = None,
 ) -> None:
     """
     Collect a task package.
@@ -66,24 +67,17 @@
         log_file_path=log_file_path,
     )

+    logger.info("START")
     with next(get_sync_db()) as db:
-        # Get main objects from db
-        activity = db.get(TaskGroupActivityV2, task_group_activity_id)
-        task_group = db.get(TaskGroupV2, task_group_id)
-        if activity is None or task_group is None:
-            # Use `logging` directly
-            logging.error(
-                "Cannot find database rows with "
-                f"{task_group_id=} and {task_group_activity_id=}:\n"
-                f"{task_group=}\n{activity=}. Exit."
-            )
+        db_objects_ok, task_group, activity = get_activity_and_task_group(
+            task_group_activity_id=task_group_activity_id,
+            task_group_id=task_group_id,
+            db=db,
+            logger_name=LOGGER_NAME,
+        )
+        if not db_objects_ok:
             return

-        # Log some info
-        logger.info("START")
-        for key, value in task_group.model_dump().items():
-            logger.debug(f"task_group.{key}: {value}")
-
         # Check that the (local) task_group path does exist
         if Path(task_group.path).exists():
             error_msg = f"{task_group.path} already exists."
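The inline row lookup removed here now lives in `get_activity_and_task_group` (see fractal_server/tasks/v2/utils_background.py in the file list, +50 -8). Its body is not shown in this diff; a hypothetical reconstruction from the call site above, which may differ from the real helper:

# Hypothetical sketch inferred only from the call site; the actual helper
# is in fractal_server.tasks.v2.utils_background.
import logging

from fractal_server.app.models.v2 import TaskGroupActivityV2
from fractal_server.app.models.v2 import TaskGroupV2


def get_activity_and_task_group(
    *,
    task_group_activity_id: int,
    task_group_id: int,
    db,
    logger_name: str,
) -> tuple[bool, TaskGroupV2 | None, TaskGroupActivityV2 | None]:
    """Return (ok, task_group, activity); ok=False if either row is missing."""
    activity = db.get(TaskGroupActivityV2, task_group_activity_id)
    task_group = db.get(TaskGroupV2, task_group_id)
    if activity is None or task_group is None:
        logging.getLogger(logger_name).error(
            f"Cannot find database rows with {task_group_id=} "
            f"and {task_group_activity_id=}. Exit."
        )
        return False, None, None
    return True, task_group, activity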
@@ -103,16 +97,18 @@
         Path(task_group.path).mkdir(parents=True)
         logger.info(f"Created {task_group.path}")

-        # Write wheel file and set task_group.wheel_path
+        # Write wheel file and set task_group.archive_path
         if wheel_file is not None:

-            wheel_path = (
+            archive_path = (
                 Path(task_group.path) / wheel_file.filename
             ).as_posix()
-            logger.info(f"Write wheel-file contents into {wheel_path}")
-            with open(wheel_path, "wb") as f:
+            logger.info(
+                f"Write wheel-file contents into {archive_path}"
+            )
+            with open(archive_path, "wb") as f:
                 f.write(wheel_file.contents)
-            task_group.wheel_path = wheel_path
+            task_group.archive_path = archive_path
             task_group = add_commit_refresh(obj=task_group, db=db)

         # Prepare replacements for templates
@@ -131,7 +127,7 @@
             ).as_posix(),
             prefix=(
                 f"{int(time.time())}_"
-                f"{TaskGroupActivityActionV2.COLLECT}_"
+                f"{TaskGroupActivityActionV2.COLLECT}"
             ),
             logger_name=LOGGER_NAME,
         )
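This change pairs with the `script_filename = f"{prefix}_{template_filename}"` change in local/_utils.py above: the separating underscore moved out of the caller's prefix and into `_customize_and_run_template`, so the generated script names are unchanged. A quick equivalence check; "collect" and the template name stand in for the real enum value and template:

# The trailing "_" moved from the prefix into _customize_and_run_template,
# so the resulting filename is identical. Values below are stand-ins.
import time

ts = int(time.time())
template_filename = "1_create_venv.sh"  # made-up template name

old_prefix = f"{ts}_collect_"   # old: prefix carried the separator
new_prefix = f"{ts}_collect"    # new: separator added by the helper

assert f"{old_prefix}{template_filename}" == f"{new_prefix}_{template_filename}"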
@@ -220,7 +216,7 @@
         activity = add_commit_refresh(obj=activity, db=db)

         logger.info("_prepare_tasks_metadata - start")
-        task_list = _prepare_tasks_metadata(
+        task_list = prepare_tasks_metadata(
             package_manifest=pkg_manifest,
             package_version=task_group.version,
             package_root=Path(package_root),
@@ -241,15 +237,15 @@

         # Update task_group data
         logger.info(
-            "Add pip_freeze, venv_size and venv_file_number "
+            "Add env_info, venv_size and venv_file_number "
            "to TaskGroupV2 - start"
         )
-        task_group.pip_freeze = pip_freeze_stdout
+        task_group.env_info = pip_freeze_stdout
         task_group.venv_size_in_kB = int(venv_size)
         task_group.venv_file_number = int(venv_file_number)
         task_group = add_commit_refresh(obj=task_group, db=db)
         logger.info(
-            "Add pip_freeze, venv_size and venv_file_number "
+            "Add env_info, venv_size and venv_file_number "
             "to TaskGroupV2 - end"
         )