fractal-server 2.14.15__py3-none-any.whl → 2.15.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. fractal_server/__init__.py +1 -1
  2. fractal_server/app/models/v2/history.py +2 -0
  3. fractal_server/app/models/v2/task_group.py +17 -5
  4. fractal_server/app/routes/admin/v2/task_group_lifecycle.py +2 -2
  5. fractal_server/app/routes/api/v2/__init__.py +6 -0
  6. fractal_server/app/routes/api/v2/history.py +2 -2
  7. fractal_server/app/routes/api/v2/pre_submission_checks.py +3 -3
  8. fractal_server/app/routes/api/v2/task_collection.py +3 -3
  9. fractal_server/app/routes/api/v2/task_collection_custom.py +2 -2
  10. fractal_server/app/routes/api/v2/task_collection_pixi.py +236 -0
  11. fractal_server/app/routes/api/v2/task_group_lifecycle.py +8 -3
  12. fractal_server/app/runner/executors/slurm_ssh/runner.py +3 -1
  13. fractal_server/app/runner/v2/runner.py +2 -2
  14. fractal_server/app/schemas/v2/__init__.py +2 -1
  15. fractal_server/app/schemas/v2/dumps.py +1 -1
  16. fractal_server/app/schemas/v2/task_collection.py +1 -1
  17. fractal_server/app/schemas/v2/task_group.py +16 -5
  18. fractal_server/config.py +42 -0
  19. fractal_server/images/status_tools.py +80 -75
  20. fractal_server/migrations/versions/791ce783d3d8_add_indices.py +41 -0
  21. fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py +53 -0
  22. fractal_server/ssh/_fabric.py +3 -0
  23. fractal_server/tasks/v2/local/__init__.py +2 -0
  24. fractal_server/tasks/v2/local/_utils.py +7 -2
  25. fractal_server/tasks/v2/local/collect.py +14 -12
  26. fractal_server/tasks/v2/local/collect_pixi.py +222 -0
  27. fractal_server/tasks/v2/local/deactivate.py +29 -25
  28. fractal_server/tasks/v2/local/deactivate_pixi.py +110 -0
  29. fractal_server/tasks/v2/local/reactivate.py +1 -1
  30. fractal_server/tasks/v2/ssh/__init__.py +1 -0
  31. fractal_server/tasks/v2/ssh/_utils.py +5 -5
  32. fractal_server/tasks/v2/ssh/collect.py +16 -15
  33. fractal_server/tasks/v2/ssh/collect_pixi.py +296 -0
  34. fractal_server/tasks/v2/ssh/deactivate.py +32 -31
  35. fractal_server/tasks/v2/ssh/reactivate.py +1 -1
  36. fractal_server/tasks/v2/templates/pixi_1_collect.sh +70 -0
  37. fractal_server/tasks/v2/utils_background.py +37 -9
  38. fractal_server/tasks/v2/utils_pixi.py +36 -0
  39. {fractal_server-2.14.15.dist-info → fractal_server-2.15.0a0.dist-info}/METADATA +4 -4
  40. {fractal_server-2.14.15.dist-info → fractal_server-2.15.0a0.dist-info}/RECORD +43 -35
  41. {fractal_server-2.14.15.dist-info → fractal_server-2.15.0a0.dist-info}/LICENSE +0 -0
  42. {fractal_server-2.14.15.dist-info → fractal_server-2.15.0a0.dist-info}/WHEEL +0 -0
  43. {fractal_server-2.14.15.dist-info → fractal_server-2.15.0a0.dist-info}/entry_points.txt +0 -0
fractal_server/tasks/v2/ssh/collect_pixi.py

@@ -0,0 +1,296 @@
+ import logging
+ import time
+ from pathlib import Path
+ from tempfile import TemporaryDirectory
+
+ from ....ssh._fabric import SingleUseFractalSSH
+ from ..utils_background import fail_and_cleanup
+ from ..utils_background import prepare_tasks_metadata
+ from ..utils_database import create_db_tasks_and_update_task_group_sync
+ from ..utils_pixi import parse_collect_stdout
+ from ..utils_pixi import SOURCE_DIR_NAME
+ from fractal_server.app.db import get_sync_db
+ from fractal_server.app.models.v2 import TaskGroupActivityV2
+ from fractal_server.app.models.v2 import TaskGroupV2
+ from fractal_server.app.schemas.v2 import FractalUploadedFile
+ from fractal_server.app.schemas.v2 import TaskGroupActivityActionV2
+ from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
+ from fractal_server.app.schemas.v2.manifest import ManifestV2
+ from fractal_server.config import get_settings
+ from fractal_server.logger import reset_logger_handlers
+ from fractal_server.logger import set_logger
+ from fractal_server.ssh._fabric import SSHConfig
+ from fractal_server.syringe import Inject
+ from fractal_server.tasks.v2.ssh._utils import _customize_and_run_template
+ from fractal_server.tasks.v2.utils_background import add_commit_refresh
+ from fractal_server.tasks.v2.utils_background import get_current_log
+ from fractal_server.tasks.v2.utils_templates import SCRIPTS_SUBFOLDER
+ from fractal_server.utils import get_timestamp
+
+
+ def collect_ssh_pixi(
+ *,
+ task_group_id: int,
+ task_group_activity_id: int,
+ ssh_config: SSHConfig,
+ tasks_base_dir: str,
+ tar_gz_file: FractalUploadedFile,
+ ) -> None:
+ """
+ Collect a task package over SSH
+
+ This function runs as a background task, therefore exceptions must be
+ handled.
+
+ NOTE: since this function is sync, it runs within a thread - due to
+ starlette/fastapi handling of background tasks (see
+ https://github.com/encode/starlette/blob/master/starlette/background.py).
+
+
+ Arguments:
+ task_group_id:
+ task_group_activity_id:
+ ssh_config:
+ tasks_base_dir:
+ Only used as a `safe_root` in `remove_dir`, and typically set to
+ `user_settings.ssh_tasks_dir`.
+ tar_gz_file:
+ """
+
+ LOGGER_NAME = f"{__name__}.ID{task_group_activity_id}"
+
+ # Work within a temporary folder, where also logs will be placed
+ with TemporaryDirectory() as tmpdir:
+ log_file_path = Path(tmpdir) / "log"
+ logger = set_logger(
+ logger_name=LOGGER_NAME,
+ log_file_path=log_file_path,
+ )
+ with SingleUseFractalSSH(
+ ssh_config=ssh_config,
+ logger_name=LOGGER_NAME,
+ ) as fractal_ssh:
+
+ with next(get_sync_db()) as db:
+ # Get main objects from db
+ activity = db.get(TaskGroupActivityV2, task_group_activity_id)
+ task_group = db.get(TaskGroupV2, task_group_id)
+ if activity is None or task_group is None:
+ # Use `logging` directly
+ logging.error(
+ "Cannot find database rows with "
+ f"{task_group_id=} and {task_group_activity_id=}:\n"
+ f"{task_group=}\n{activity=}. Exit."
+ )
+ return
+
+ # Log some info
+ logger.info("START")
+ for key, value in task_group.model_dump().items():
+ logger.debug(f"task_group.{key}: {value}")
+
+ # Check that SSH connection works
+ try:
+ fractal_ssh.check_connection()
+ except Exception as e:
+ logger.error("Cannot establish SSH connection.")
+ fail_and_cleanup(
+ task_group=task_group,
+ task_group_activity=activity,
+ logger_name=LOGGER_NAME,
+ log_file_path=log_file_path,
+ exception=e,
+ db=db,
+ )
+ return
+
+ try:
+
+ # Check that the (remote) task_group path does not exist
+ if fractal_ssh.remote_exists(task_group.path):
+ error_msg = f"{task_group.path} already exists."
+ logger.error(error_msg)
+ fail_and_cleanup(
+ task_group=task_group,
+ task_group_activity=activity,
+ logger_name=LOGGER_NAME,
+ log_file_path=log_file_path,
+ exception=FileExistsError(error_msg),
+ db=db,
+ )
+ return
+
+ # Create remote `task_group.path` and `script_dir_remote`
+ # folders (note that because of `parents=True` we are in
+ # the `no error if existing, make parent directories as
+ # needed` scenario for `mkdir`)
+ script_dir_remote = (
+ Path(task_group.path) / SCRIPTS_SUBFOLDER
+ ).as_posix()
+ fractal_ssh.mkdir(folder=task_group.path, parents=True)
+ fractal_ssh.mkdir(folder=script_dir_remote, parents=True)
+
+ # Write tar.gz file locally and send it to remote path,
+ # and set task_group.archive_path
+ tar_gz_filename = tar_gz_file.filename
+ archive_path = (
+ Path(task_group.path) / tar_gz_filename
+ ).as_posix()
+ tmp_archive_path = (
+ Path(tmpdir) / tar_gz_filename
+ ).as_posix()
+ logger.info(
+ f"Write tar.gz-file contents into {tmp_archive_path}"
+ )
+ with open(tmp_archive_path, "wb") as f:
+ f.write(tar_gz_file.contents)
+ fractal_ssh.send_file(
+ local=tmp_archive_path,
+ remote=archive_path,
+ )
+ task_group.archive_path = archive_path
+ task_group = add_commit_refresh(obj=task_group, db=db)
+
+ # Set `pixi_bin`
+ settings = Inject(get_settings)
+ pixi_home = settings.pixi.versions[task_group.pixi_version]
+ pixi_bin = Path(pixi_home, "bin/pixi").as_posix()
+
+ replacements = {
+ ("__PIXI_HOME__", pixi_home),
+ ("__PACKAGE_DIR__", task_group.path),
+ ("__TAR_GZ_PATH__", archive_path),
+ (
+ "__IMPORT_PACKAGE_NAME__",
+ task_group.pkg_name.replace("-", "_"),
+ ),
+ ("__SOURCE_DIR_NAME__", SOURCE_DIR_NAME),
+ }
+
+ logger.info("installing - START")
+
+ # Set status to ONGOING and refresh logs
+ activity.status = TaskGroupActivityStatusV2.ONGOING
+ activity.log = get_current_log(log_file_path)
+ activity = add_commit_refresh(obj=activity, db=db)
+
+ stdout = _customize_and_run_template(
+ template_filename="pixi_1_collect.sh",
+ replacements=replacements,
+ script_dir_local=(
+ Path(tmpdir) / SCRIPTS_SUBFOLDER
+ ).as_posix(),
+ script_dir_remote=script_dir_remote,
+ prefix=(
+ f"{int(time.time())}_"
+ f"{TaskGroupActivityActionV2.COLLECT}_"
+ ),
+ fractal_ssh=fractal_ssh,
+ logger_name=LOGGER_NAME,
+ )
+ activity.log = get_current_log(log_file_path)
+ activity = add_commit_refresh(obj=activity, db=db)
+
+ # Parse stdout
+ parsed_output = parse_collect_stdout(stdout)
+ package_root_remote = parsed_output["package_root"]
+ venv_size = parsed_output["venv_size"]
+ venv_file_number = parsed_output["venv_file_number"]
+
+ # Read and validate remote manifest file
+ manifest_path_remote = (
+ f"{package_root_remote}/__FRACTAL_MANIFEST__.json"
+ )
+ pkg_manifest_dict = fractal_ssh.read_remote_json_file(
+ manifest_path_remote
+ )
+ logger.info(f"Loaded {manifest_path_remote=}")
+ pkg_manifest = ManifestV2(**pkg_manifest_dict)
+ logger.info("Manifest is a valid ManifestV2")
+
+ logger.info("_prepare_tasks_metadata - start")
+ task_list = prepare_tasks_metadata(
+ package_manifest=pkg_manifest,
+ package_version=task_group.version,
+ package_root=Path(package_root_remote),
+ pixi_bin=pixi_bin,
+ pixi_manifest_path=(
+ Path(
+ task_group.path,
+ SOURCE_DIR_NAME,
+ "pyproject.toml",
+ ).as_posix()
+ ),
+ )
+ logger.info("_prepare_tasks_metadata - end")
+
+ logger.info(
+ "create_db_tasks_and_update_task_group - " "start"
+ )
+ create_db_tasks_and_update_task_group_sync(
+ task_list=task_list,
+ task_group_id=task_group.id,
+ db=db,
+ )
+ logger.info("create_db_tasks_and_update_task_group - end")
+
+ # NOTE: see issue 2626 about whether to keep `pixi.lock`
+ # files in the database
+ # FIXME: Read remote file
+ pixi_lock_contents = "FIXME\n"
+ # with Path(
+ # task_group.path,
+ # SOURCE_DIR_NAME,
+ # "pixi.lock",
+ # ).open() as f:
+ # pixi_lock_contents = f.read()
+
+ # Update task_group data
+ logger.info(
+ "Add env_info, venv_size and venv_file_number "
+ "to TaskGroupV2 - start"
+ )
+ task_group.env_info = pixi_lock_contents
+ task_group.venv_size_in_kB = int(venv_size)
+ task_group.venv_file_number = int(venv_file_number)
+ task_group = add_commit_refresh(obj=task_group, db=db)
+ logger.info(
+ "Add env_info, venv_size and venv_file_number "
+ "to TaskGroupV2 - end"
+ )
+
+ # Finalize (write metadata to DB)
+ logger.info("finalising - START")
+ activity.status = TaskGroupActivityStatusV2.OK
+ activity.timestamp_ended = get_timestamp()
+ activity = add_commit_refresh(obj=activity, db=db)
+ logger.info("finalising - END")
+ logger.info("END")
+ reset_logger_handlers(logger)
+
+ except Exception as collection_e:
+ # Delete corrupted package dir
+ try:
+ logger.info(
+ f"Now delete remote folder {task_group.path}"
+ )
+ fractal_ssh.remove_folder(
+ folder=task_group.path,
+ safe_root=tasks_base_dir,
+ )
+ logger.info(
+ f"Deleted remoted folder {task_group.path}"
+ )
+ except Exception as e_rm:
+ logger.error(
+ "Removing folder failed. "
+ f"Original error:\n{str(e_rm)}"
+ )
+ fail_and_cleanup(
+ task_group=task_group,
+ task_group_activity=activity,
+ log_file_path=log_file_path,
+ logger_name=LOGGER_NAME,
+ exception=collection_e,
+ db=db,
+ )
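
The `replacements` pairs defined above are substituted into the `pixi_1_collect.sh` template (included later in this diff) before the script runs on the remote host. Below is an illustrative sketch only, not part of the diff: it assumes that the customization step of `_customize_and_run_template` amounts to plain placeholder substitution, and all concrete values are hypothetical; the real helper also writes the customized script to `script_dir_remote` and executes it over SSH.

# Illustrative sketch only (not part of the diff).
def customize_template(
    template_text: str, replacements: set[tuple[str, str]]
) -> str:
    # Replace every `__PLACEHOLDER__` occurrence with its concrete value.
    script = template_text
    for placeholder, value in replacements:
        script = script.replace(placeholder, value)
    return script

# Two lines excerpted from `pixi_1_collect.sh`, with hypothetical values.
template_text = 'PIXI_HOME="__PIXI_HOME__"\nPACKAGE_DIR="__PACKAGE_DIR__"\n'
replacements = {
    ("__PIXI_HOME__", "/opt/pixi/0.47.0"),
    ("__PACKAGE_DIR__", "/remote/fractal-tasks/my-tasks/1.0.0"),
}
print(customize_template(template_text, replacements))
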
fractal_server/tasks/v2/ssh/deactivate.py

@@ -113,10 +113,10 @@ def deactivate_ssh(
  activity.status = TaskGroupActivityStatusV2.ONGOING
  activity = add_commit_refresh(obj=activity, db=db)

- if task_group.pip_freeze is None:
+ if task_group.env_info is None:
  logger.warning(
  "Recreate pip-freeze information, since "
- f"{task_group.pip_freeze=}. NOTE: this should "
+ f"{task_group.env_info=}. NOTE: this should "
  "only happen for task groups created before 2.9.0."
  )

@@ -161,7 +161,7 @@ def deactivate_ssh(
  )
  activity.log = get_current_log(log_file_path)
  activity = add_commit_refresh(obj=activity, db=db)
- task_group.pip_freeze = pip_freeze_stdout
+ task_group.env_info = pip_freeze_stdout
  task_group = add_commit_refresh(obj=task_group, db=db)
  logger.info(
  "Add pip freeze stdout to TaskGroupV2 - end"
@@ -174,18 +174,19 @@ def deactivate_ssh(
  f"Handle specific cases for {task_group.origin=}."
  )

- # Blocking situation: `wheel_path` is not set or points
- # to a missing path
+ # Blocking situation: `archive_path` is not set or
+ # points to a missing path
  if (
- task_group.wheel_path is None
+ task_group.archive_path is None
  or not fractal_ssh.remote_exists(
- task_group.wheel_path
+ task_group.archive_path
  )
  ):
  error_msg = (
  "Invalid wheel path for task group with "
- f"{task_group_id=}. {task_group.wheel_path=} "
- "is unset or does not exist."
+ f"{task_group_id=}. "
+ f"{task_group.archive_path=} is unset or "
+ "does not exist."
  )
  logger.error(error_msg)
  fail_and_cleanup(
@@ -198,58 +199,58 @@ def deactivate_ssh(
  )
  return

- # Recoverable situation: `wheel_path` was not yet
+ # Recoverable situation: `archive_path` was not yet
  # copied over to the correct server-side folder
- wheel_path_parent_dir = Path(
- task_group.wheel_path
+ archive_path_parent_dir = Path(
+ task_group.archive_path
  ).parent
- if wheel_path_parent_dir != Path(task_group.path):
+ if archive_path_parent_dir != Path(task_group.path):
  logger.warning(
- f"{wheel_path_parent_dir.as_posix()} differs "
- f"from {task_group.path}. NOTE: this should "
- "only happen for task groups created before "
- "2.9.0."
+ f"{archive_path_parent_dir.as_posix()} "
+ f"differs from {task_group.path}. "
+ "NOTE: this should only happen for task "
+ "groups created before 2.9.0."
  )

  if (
- task_group.wheel_path
- not in task_group.pip_freeze
+ task_group.archive_path
+ not in task_group.env_info
  ):
  raise ValueError(
- f"Cannot find {task_group.wheel_path=} in "
- "pip-freeze data. Exit."
+ f"Cannot find {task_group.archive_path=} "
+ "in pip-freeze data. Exit."
  )

  logger.info(
  f"Now copy wheel file into {task_group.path}."
  )
- new_wheel_path = _copy_wheel_file_ssh(
+ new_archive_path = _copy_wheel_file_ssh(
  task_group=task_group,
  fractal_ssh=fractal_ssh,
  logger_name=LOGGER_NAME,
  )
  logger.info(
- f"Copied wheel file to {new_wheel_path}."
+ f"Copied wheel file to {new_archive_path}."
  )

- task_group.wheel_path = new_wheel_path
- new_pip_freeze = task_group.pip_freeze.replace(
- task_group.wheel_path,
- new_wheel_path,
+ task_group.archive_path = new_archive_path
+ new_pip_freeze = task_group.env_info.replace(
+ task_group.archive_path,
+ new_archive_path,
  )
- task_group.pip_freeze = new_pip_freeze
+ task_group.env_info = new_pip_freeze
  task_group = add_commit_refresh(
  obj=task_group, db=db
  )
  logger.info(
- "Updated `wheel_path` and `pip_freeze` "
+ "Updated `archive_path` and `env_info` "
  "task-group attributes."
  )

- # Fail if `pip_freeze` includes "github", see
+ # Fail if `env_info` includes "github", see
  # https://github.com/fractal-analytics-platform/fractal-server/issues/2142
  for forbidden_string in FORBIDDEN_DEPENDENCY_STRINGS:
- if forbidden_string in task_group.pip_freeze:
+ if forbidden_string in task_group.env_info:
  raise ValueError(
  "Deactivation and reactivation of task "
  f"packages with direct {forbidden_string} "
fractal_server/tasks/v2/ssh/reactivate.py

@@ -128,7 +128,7 @@ def reactivate_ssh(
  Path(task_group.path) / "_tmp_pip_freeze.txt"
  ).as_posix()
  with open(pip_freeze_file_local, "w") as f:
- f.write(task_group.pip_freeze)
+ f.write(task_group.env_info)
  fractal_ssh.send_file(
  local=pip_freeze_file_local,
  remote=pip_freeze_file_remote,
fractal_server/tasks/v2/templates/pixi_1_collect.sh

@@ -0,0 +1,70 @@
+ set -e
+
+ write_log(){
+ TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
+ echo "[collect-task-pixi, $TIMESTAMP] $1"
+ }
+
+ # Replacements
+ PIXI_HOME="__PIXI_HOME__"
+ PACKAGE_DIR="__PACKAGE_DIR__"
+ TAR_GZ_PATH="__TAR_GZ_PATH__"
+ IMPORT_PACKAGE_NAME="__IMPORT_PACKAGE_NAME__"
+ SOURCE_DIR_NAME="__SOURCE_DIR_NAME__"
+
+ # Strip trailing `/` from `PACKAGE_DIR`
+ PIXI_HOME=${PIXI_HOME%/}
+ PACKAGE_DIR=${PACKAGE_DIR%/}
+
+ # Known paths
+ PIXI_EXECUTABLE="${PIXI_HOME}/bin/pixi"
+ SOURCE_DIR="${PACKAGE_DIR}/${SOURCE_DIR_NAME}"
+ PYPROJECT_TOML="${SOURCE_DIR}/pyproject.toml"
+ TAR_GZ_BASENAME=$(basename "$TAR_GZ_PATH" ".tar.gz")
+
+ # Pixi env variable
+ export PIXI_HOME="$PIXI_HOME"
+ export PIXI_CACHE_DIR="${PIXI_HOME}/cache"
+ export RATTLER_AUTH_FILE="${PIXI_HOME}/credentials.json"
+
+
+ TIME_START=$(date +%s)
+
+ cd "$PACKAGE_DIR"
+ write_log "Changed working directory to $PACKAGE_DIR"
+
+
+ write_log "START 'tar xz -f $TAR_GZ_PATH $TAR_GZ_BASENAME'"
+ tar xz -f "$TAR_GZ_PATH" "$TAR_GZ_BASENAME"
+ write_log "END 'tar xz -f $TAR_GZ_PATH $TAR_GZ_BASENAME'"
+ echo
+
+ write_log "START 'mv ${PACKAGE_DIR}/${TAR_GZ_BASENAME} $SOURCE_DIR'"
+ mv "${PACKAGE_DIR}/${TAR_GZ_BASENAME}" "$SOURCE_DIR"
+ write_log "END 'mv ${PACKAGE_DIR}/${TAR_GZ_BASENAME} $SOURCE_DIR'"
+ echo
+ write_log "END extract $TAR_GZ_PATH"
+ TIME_END_TAR=$(date +%s)
+ write_log "Elapsed: $((TIME_END_TAR - TIME_START)) seconds"
+ echo
+
+ write_log "START '$PIXI_EXECUTABLE install --manifest-path $PYPROJECT_TOML'"
+ ${PIXI_EXECUTABLE} install --manifest-path "$PYPROJECT_TOML"
+ write_log "END '$PIXI_EXECUTABLE install --manifest-path $PYPROJECT_TOML'"
+ echo
+
+ PACKAGE_FOLDER=$(
+ ${PIXI_EXECUTABLE} run --manifest-path "$PYPROJECT_TOML" python \
+ -c "import $IMPORT_PACKAGE_NAME as p, os; print(os.path.dirname(p.__file__))"
+ )
+ write_log "Package folder: $PACKAGE_FOLDER"
+ echo
+
+ ENV_DISK_USAGE=$(du -sk "${PACKAGE_DIR}" | cut -f1)
+ ENV_FILE_NUMBER=$(find "${PACKAGE_DIR}" -type f | wc -l)
+
+ write_log "Disk usage: $ENV_DISK_USAGE"
+ write_log "Number of files: $ENV_FILE_NUMBER"
+
+ write_log "All ok, exit."
+ echo
fractal_server/tasks/v2/utils_background.py

@@ -9,6 +9,7 @@ from fractal_server.app.schemas.v2 import TaskCreateV2
  from fractal_server.app.schemas.v2 import TaskGroupActivityStatusV2
  from fractal_server.app.schemas.v2.manifest import ManifestV2
  from fractal_server.app.schemas.v2.task_group import TaskGroupActivityActionV2
+ from fractal_server.exceptions import UnreachableBranchError
  from fractal_server.logger import get_logger
  from fractal_server.logger import reset_logger_handlers
  from fractal_server.utils import get_timestamp
@@ -47,11 +48,13 @@ def fail_and_cleanup(
  reset_logger_handlers(logger)


- def _prepare_tasks_metadata(
+ def prepare_tasks_metadata(
  *,
  package_manifest: ManifestV2,
- python_bin: Path,
  package_root: Path,
+ python_bin: Path | None = None,
+ pixi_bin: str | None = None,
+ pixi_manifest_path: str | None = None,
  package_version: str | None = None,
  ) -> list[TaskCreateV2]:
  """
@@ -59,10 +62,20 @@ def _prepare_tasks_metadata(

  Args:
  package_manifest:
- python_bin:
  package_root:
  package_version:
+ python_bin:
+ pixi_bin:
  """
+ if bool(pixi_bin is None) == bool(python_bin is None):
+ raise UnreachableBranchError(
+ f"Either {pixi_bin} or {python_bin} must be set."
+ )
+ if pixi_bin is not None and pixi_manifest_path is None:
+ raise UnreachableBranchError(
+ f"If {pixi_bin} is set, pixi_manifest_path must be set."
+ )
+
  task_list = []
  for _task in package_manifest.task_list:
  # Set non-command attributes
@@ -76,14 +89,29 @@ def _prepare_tasks_metadata(
  # Set command attributes
  if _task.executable_non_parallel is not None:
  non_parallel_path = package_root / _task.executable_non_parallel
- task_attributes["command_non_parallel"] = (
- f"{python_bin.as_posix()} " f"{non_parallel_path.as_posix()}"
- )
+ if python_bin is not None:
+ cmd_non_parallel = (
+ f"{python_bin.as_posix()} {non_parallel_path.as_posix()}"
+ )
+ else:
+ cmd_non_parallel = (
+ f"{pixi_bin} run --manifest-path {pixi_manifest_path} "
+ "--no-lockfile-update python "
+ f"{non_parallel_path.as_posix()}"
+ )
+ task_attributes["command_non_parallel"] = cmd_non_parallel
  if _task.executable_parallel is not None:
  parallel_path = package_root / _task.executable_parallel
- task_attributes[
- "command_parallel"
- ] = f"{python_bin.as_posix()} {parallel_path.as_posix()}"
+ if python_bin is not None:
+ cmd_parallel = (
+ f"{python_bin.as_posix()} {parallel_path.as_posix()}"
+ )
+ else:
+ cmd_parallel = (
+ f"{pixi_bin} run --manifest-path {pixi_manifest_path} "
+ f"--no-lockfile-update python {parallel_path.as_posix()}"
+ )
+ task_attributes["command_parallel"] = cmd_parallel
  # Create object
  task_obj = TaskCreateV2(
  **_task.model_dump(
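
For reference, the two command shapes that the reworked `prepare_tasks_metadata` can emit. This is an illustrative sketch only, not part of the diff, and every path in it is hypothetical: the `python_bin` branch reproduces the pre-existing venv behavior, while the new `pixi_bin` branch wraps the task script in `pixi run`.

# Illustrative sketch only (not part of the diff); all paths are hypothetical.
from pathlib import Path

package_root = Path("/remote/tasks/my-tasks/1.0.0/source_dir/my_tasks")
script = package_root / "convert_task.py"

# Pre-existing branch: `python_bin` is set (venv-based task groups).
python_bin = Path("/remote/tasks/my-tasks/1.0.0/venv/bin/python")
cmd_venv = f"{python_bin.as_posix()} {script.as_posix()}"

# New branch: `pixi_bin` and `pixi_manifest_path` are set (pixi task groups).
pixi_bin = "/opt/pixi/0.47.0/bin/pixi"
pixi_manifest_path = "/remote/tasks/my-tasks/1.0.0/source_dir/pyproject.toml"
cmd_pixi = (
    f"{pixi_bin} run --manifest-path {pixi_manifest_path} "
    f"--no-lockfile-update python {script.as_posix()}"
)

print(cmd_venv)
print(cmd_pixi)
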
fractal_server/tasks/v2/utils_pixi.py

@@ -0,0 +1,36 @@
+ from typing import TypedDict
+
+ SOURCE_DIR_NAME = "source_dir"
+
+
+ class ParsedOutput(TypedDict):
+ package_root: str
+ venv_size: str
+ venv_file_number: str
+
+
+ def parse_collect_stdout(stdout: str) -> ParsedOutput:
+ """
+ Parse standard output of `pixi/1_collect.sh`
+ """
+ searches = [
+ ("Package folder:", "package_root"),
+ ("Disk usage:", "venv_size"),
+ ("Number of files:", "venv_file_number"),
+ ]
+ stdout_lines = stdout.splitlines()
+ attributes = dict()
+ for search, attribute_name in searches:
+ matching_lines = [_line for _line in stdout_lines if search in _line]
+ if len(matching_lines) == 0:
+ raise ValueError(f"String '{search}' not found in stdout.")
+ elif len(matching_lines) > 1:
+ raise ValueError(
+ f"String '{search}' found too many times "
+ f"({len(matching_lines)})."
+ )
+ else:
+ actual_line = matching_lines[0]
+ attribute_value = actual_line.split(search)[-1].strip(" ")
+ attributes[attribute_name] = attribute_value
+ return attributes
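
A usage sketch for `parse_collect_stdout`, not part of the diff: the input mimics the `write_log` lines emitted by `pixi_1_collect.sh` above, with made-up values, and it assumes fractal-server 2.15.0a0 is installed so that the new module is importable.

# Illustrative sketch only (not part of the diff); values are made up.
from fractal_server.tasks.v2.utils_pixi import parse_collect_stdout

stdout = "\n".join(
    [
        "[collect-task-pixi, 2025-01-01T00:00:00Z] "
        "Package folder: /remote/tasks/my-tasks/1.0.0/source_dir/pkg",
        "[collect-task-pixi, 2025-01-01T00:00:00Z] Disk usage: 123456",
        "[collect-task-pixi, 2025-01-01T00:00:00Z] Number of files: 7890",
    ]
)
parsed = parse_collect_stdout(stdout)
print(parsed["package_root"])      # "/remote/tasks/my-tasks/1.0.0/source_dir/pkg"
print(parsed["venv_size"])         # "123456"
print(parsed["venv_file_number"])  # "7890"
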
{fractal_server-2.14.15.dist-info → fractal_server-2.15.0a0.dist-info}/METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: fractal-server
- Version: 2.14.15
+ Version: 2.15.0a0
  Summary: Backend component of the Fractal analytics platform
  License: BSD-3-Clause
  Author: Tommaso Comparin
@@ -11,16 +11,16 @@ Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Requires-Dist: alembic (>=1.13.1,<2.0.0)
- Requires-Dist: cryptography (>=44.0.1,<44.1.0)
+ Requires-Dist: cryptography (>=45.0.3,<45.1.0)
  Requires-Dist: fabric (>=3.2.2,<3.3.0)
  Requires-Dist: fastapi (>=0.115.0,<0.116.0)
  Requires-Dist: fastapi-users[oauth] (>=14,<15)
  Requires-Dist: gunicorn (>=23.0,<24.0)
- Requires-Dist: packaging (>=24.0.0,<25.0.0)
+ Requires-Dist: packaging (>=25.0.0,<26.0.0)
  Requires-Dist: psycopg[binary] (>=3.1.0,<4.0.0)
  Requires-Dist: pydantic (>=2.11.0,<2.12.0)
  Requires-Dist: pydantic-settings (>=2.7.0)
- Requires-Dist: python-dotenv (>=1.0.0,<1.1.0)
+ Requires-Dist: python-dotenv (>=1.1.0,<1.2.0)
  Requires-Dist: sqlalchemy[asyncio] (>=2.0.23,<2.1)
  Requires-Dist: sqlmodel (==0.0.24)
  Requires-Dist: uvicorn (>=0.29.0,<0.35.0)