fractal-server 2.14.15__py3-none-any.whl → 2.15.0__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/security.py +2 -2
- fractal_server/app/models/user_settings.py +2 -2
- fractal_server/app/models/v2/dataset.py +3 -3
- fractal_server/app/models/v2/history.py +2 -0
- fractal_server/app/models/v2/job.py +6 -6
- fractal_server/app/models/v2/task.py +12 -8
- fractal_server/app/models/v2/task_group.py +19 -7
- fractal_server/app/models/v2/workflowtask.py +6 -6
- fractal_server/app/routes/admin/v2/task_group_lifecycle.py +2 -5
- fractal_server/app/routes/api/v2/__init__.py +6 -0
- fractal_server/app/routes/api/v2/_aux_functions_tasks.py +22 -0
- fractal_server/app/routes/api/v2/history.py +2 -2
- fractal_server/app/routes/api/v2/pre_submission_checks.py +3 -3
- fractal_server/app/routes/api/v2/task_collection.py +8 -18
- fractal_server/app/routes/api/v2/task_collection_custom.py +2 -2
- fractal_server/app/routes/api/v2/task_collection_pixi.py +219 -0
- fractal_server/app/routes/api/v2/task_group.py +3 -0
- fractal_server/app/routes/api/v2/task_group_lifecycle.py +26 -10
- fractal_server/app/runner/executors/slurm_common/_slurm_config.py +10 -0
- fractal_server/app/runner/executors/slurm_common/base_slurm_runner.py +39 -14
- fractal_server/app/runner/executors/slurm_common/get_slurm_config.py +8 -1
- fractal_server/app/runner/executors/slurm_ssh/runner.py +3 -1
- fractal_server/app/runner/v2/runner.py +2 -2
- fractal_server/app/schemas/v2/__init__.py +1 -1
- fractal_server/app/schemas/v2/dumps.py +1 -1
- fractal_server/app/schemas/v2/task_collection.py +1 -1
- fractal_server/app/schemas/v2/task_group.py +7 -5
- fractal_server/config.py +70 -0
- fractal_server/images/status_tools.py +80 -75
- fractal_server/migrations/versions/791ce783d3d8_add_indices.py +41 -0
- fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py +53 -0
- fractal_server/migrations/versions/b3ffb095f973_json_to_jsonb.py +340 -0
- fractal_server/ssh/_fabric.py +29 -0
- fractal_server/tasks/v2/local/__init__.py +3 -0
- fractal_server/tasks/v2/local/_utils.py +4 -3
- fractal_server/tasks/v2/local/collect.py +26 -30
- fractal_server/tasks/v2/local/collect_pixi.py +252 -0
- fractal_server/tasks/v2/local/deactivate.py +39 -46
- fractal_server/tasks/v2/local/deactivate_pixi.py +98 -0
- fractal_server/tasks/v2/local/reactivate.py +12 -23
- fractal_server/tasks/v2/local/reactivate_pixi.py +184 -0
- fractal_server/tasks/v2/ssh/__init__.py +3 -0
- fractal_server/tasks/v2/ssh/_utils.py +50 -9
- fractal_server/tasks/v2/ssh/collect.py +46 -56
- fractal_server/tasks/v2/ssh/collect_pixi.py +315 -0
- fractal_server/tasks/v2/ssh/deactivate.py +54 -67
- fractal_server/tasks/v2/ssh/deactivate_pixi.py +122 -0
- fractal_server/tasks/v2/ssh/reactivate.py +25 -38
- fractal_server/tasks/v2/ssh/reactivate_pixi.py +233 -0
- fractal_server/tasks/v2/templates/pixi_1_extract.sh +40 -0
- fractal_server/tasks/v2/templates/pixi_2_install.sh +52 -0
- fractal_server/tasks/v2/templates/pixi_3_post_install.sh +76 -0
- fractal_server/tasks/v2/utils_background.py +50 -8
- fractal_server/tasks/v2/utils_pixi.py +38 -0
- fractal_server/tasks/v2/utils_templates.py +14 -1
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/METADATA +4 -4
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/RECORD +61 -47
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/LICENSE +0 -0
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/WHEEL +0 -0
- {fractal_server-2.14.15.dist-info → fractal_server-2.15.0.dist-info}/entry_points.txt +0 -0
fractal_server/config.py
CHANGED

```diff
@@ -11,6 +11,7 @@
 # <exact-lab.it> under contract with Liberali Lab from the Friedrich Miescher
 # Institute for Biomedical Research and Pelkmans Lab from the University of
 # Zurich.
+import json
 import logging
 import shutil
 import sys
@@ -34,6 +35,7 @@ from sqlalchemy.engine import URL
 
 import fractal_server
 from fractal_server.types import AbsolutePathStr
+from fractal_server.types import DictStrStr
 
 
 class MailSettings(BaseModel):
@@ -62,6 +64,63 @@ class MailSettings(BaseModel):
     use_login: bool
 
 
+class PixiSettings(BaseModel):
+    """
+    Configuration for Pixi task collection
+
+    See https://pixi.sh/latest/reference/cli/pixi/install/#config-options for
+    `pixi install` concurrency options.
+    See https://docs.rs/tokio/latest/tokio/#cpu-bound-tasks-and-blocking-code
+    for `tokio` configuration.
+
+    versions:
+        Available `pixi` versions and their `PIXI_HOME` folders.
+    default_version:
+        Default `pixi` version to use for task collection - must be one
+        of `versions` keys.
+    PIXI_CONCURRENT_SOLVES:
+        Value of `--concurrent-solves` for `pixi install`.
+    PIXI_CONCURRENT_DOWNLOADS:
+        Value of `--concurrent-downloads` for `pixi install`.
+    TOKIO_WORKER_THREADS:
+        From tokio docs, "The core threads are where all asynchronous code
+        runs, and Tokio will by default spawn one for each CPU core. You can
+        use the environment variable TOKIO_WORKER_THREADS to override the
+        default value."
+    """
+
+    versions: DictStrStr
+    default_version: str
+
+    PIXI_CONCURRENT_SOLVES: int = 4
+    PIXI_CONCURRENT_DOWNLOADS: int = 4
+    TOKIO_WORKER_THREADS: int = 2
+
+    @model_validator(mode="after")
+    def check_pixi_settings(self):
+
+        if self.default_version not in self.versions:
+            raise ValueError(
+                f"Default version '{self.default_version}' not in "
+                f"available version {list(self.versions.keys())}."
+            )
+
+        pixi_base_dir = Path(self.versions[self.default_version]).parent
+
+        for key, value in self.versions.items():
+
+            pixi_path = Path(value)
+
+            if pixi_path.parent != pixi_base_dir:
+                raise ValueError(
+                    f"{pixi_path=} is not located within the {pixi_base_dir=}."
+                )
+            if pixi_path.name != key:
+                raise ValueError(f"{pixi_path.name=} is not equal to {key=}")
+
+        return self
+
+
 class FractalConfigurationError(RuntimeError):
     pass
 
@@ -513,6 +572,17 @@ class Settings(BaseSettings):
     FRACTAL_VIEWER_AUTHORIZATION_SCHEME is set to "users-folders".
     """
 
+    FRACTAL_PIXI_CONFIG_FILE: Path | None = None
+
+    pixi: PixiSettings | None = None
+
+    @model_validator(mode="after")
+    def populate_pixi_settings(self):
+        if self.FRACTAL_PIXI_CONFIG_FILE is not None:
+            with self.FRACTAL_PIXI_CONFIG_FILE.open("r") as f:
+                self.pixi = PixiSettings(**json.load(f))
+        return self
+
     ###########################################################################
     # SMTP SERVICE
     ###########################################################################
```
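For orientation, here is a minimal sketch (not part of the diff) of a pixi configuration that satisfies `check_pixi_settings`; the `/common/pixi/...` paths are made up. `populate_pixi_settings` builds the same object from the JSON file referenced by `FRACTAL_PIXI_CONFIG_FILE`.

```python
# Hypothetical example of the pixi configuration validated above; the
# paths are placeholders. The same keys would live in the JSON file
# pointed at by FRACTAL_PIXI_CONFIG_FILE.
from fractal_server.config import PixiSettings

config = {
    "versions": {
        # Each basename must equal its key, and all entries must share
        # the same parent folder (here `/common/pixi`).
        "0.47.0": "/common/pixi/0.47.0",
        "0.41.1": "/common/pixi/0.41.1",
    },
    "default_version": "0.47.0",
}
settings = PixiSettings(**config)  # passes `check_pixi_settings`

# A default version that is not among the `versions` keys is rejected
# (pydantic wraps the validator's ValueError in a ValidationError,
# which is itself a ValueError subclass).
try:
    PixiSettings(versions=config["versions"], default_version="0.50.0")
except ValueError as err:
    print(err)
```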
fractal_server/images/status_tools.py
CHANGED

```diff
@@ -1,5 +1,4 @@
 import time
-from copy import deepcopy
 from typing import Any
 
 from sqlalchemy import Select
@@ -11,7 +10,6 @@ from fractal_server.app.models.v2 import HistoryImageCache
 from fractal_server.app.models.v2 import HistoryUnit
 from fractal_server.app.schemas.v2 import HistoryUnitStatusWithUnset
 from fractal_server.logger import set_logger
-from fractal_server.types import ImageAttributeValue
 
 logger = set_logger(__name__)
 
@@ -19,36 +17,84 @@ logger = set_logger(__name__)
 IMAGE_STATUS_KEY = "__wftask_dataset_image_status__"
 
 
-def _enriched_image(
+def _enriched_image(
+    *,
+    img: dict[str, Any],
+    status: str,
+) -> dict[str, Any]:
+    return img | {
+        "attributes": (img["attributes"] | {IMAGE_STATUS_KEY: status})
+    }
 
 
 def _prepare_query(
     *,
     dataset_id: int,
     workflowtask_id: int,
-    zarr_urls: list[str],
 ) -> Select:
+    """
+    Note: the query does not include `.order_by`.
+    """
     stm = (
         select(HistoryImageCache.zarr_url, HistoryUnit.status)
         .join(HistoryUnit)
         .where(HistoryImageCache.dataset_id == dataset_id)
         .where(HistoryImageCache.workflowtask_id == workflowtask_id)
         .where(HistoryImageCache.latest_history_unit_id == HistoryUnit.id)
-        .where(HistoryImageCache.zarr_url.in_(zarr_urls))
-        .order_by(HistoryImageCache.zarr_url)
     )
     return stm
 
 
-async def enrich_images_async(
+def _postprocess_image_lists(
+    target_images: list[dict[str, Any]],
+    list_query_url_status: list[tuple[str, str]],
+) -> list[dict[str, Any]]:
+    """ """
+    t_1 = time.perf_counter()
+
+    # Select only processed images that are part of the target image set
+    zarr_url_to_image = {img["zarr_url"]: img for img in target_images}
+    target_zarr_urls = zarr_url_to_image.keys()
+    list_processed_url_status = [
+        url_status
+        for url_status in list_query_url_status
+        if url_status[0] in target_zarr_urls
+    ]
+
+    set_processed_urls = set(
+        url_status[0] for url_status in list_processed_url_status
+    )
+    processed_images_with_status = [
+        _enriched_image(
+            img=zarr_url_to_image[item[0]],
+            status=item[1],
+        )
+        for item in list_processed_url_status
+    ]
+
+    non_processed_urls = target_zarr_urls - set_processed_urls
+    non_processed_images_with_status = [
+        _enriched_image(
+            img=zarr_url_to_image[zarr_url],
+            status=HistoryUnitStatusWithUnset.UNSET,
+        )
+        for zarr_url in non_processed_urls
+    ]
+    t_2 = time.perf_counter()
+    logger.debug(
+        f"[enrich_images_async] post-processing, elapsed={t_2 - t_1:.5f} s"
+    )
+
+    return processed_images_with_status + non_processed_images_with_status
+
+
+async def enrich_images_unsorted_async(
     *,
     images: list[dict[str, Any]],
     dataset_id: int,
     workflowtask_id: int,
     db: AsyncSession,
-) -> list[dict[str,
+) -> list[dict[str, Any]]:
     """
     Enrich images with a status-related attribute.
 
@@ -59,116 +105,75 @@ async def enrich_images_async(
         db: An async db session
 
     Returns:
-        The list of enriched images
+        The list of enriched images, not necessarily in the same order as
+        the input.
     """
     t_0 = time.perf_counter()
     logger.info(
         f"[enrich_images_async] START, {dataset_id=}, {workflowtask_id=}"
     )
 
+    # Get `(zarr_url, status)` for _all_ processed images (including those that
+    # are not part of the target image set)
     res = await db.execute(
         _prepare_query(
            dataset_id=dataset_id,
            workflowtask_id=workflowtask_id,
-            zarr_urls=zarr_url_to_image.keys(),
        )
    )
+    list_query_url_status = res.all()
     t_1 = time.perf_counter()
-    logger.debug(f"[enrich_images_async]
+    logger.debug(f"[enrich_images_async] query, elapsed={t_1 - t_0:.5f} s")
 
-            img=zarr_url_to_image[item[0]],
-            status=item[1],
-        )
-        for item in list_processed_url_status
-    ]
-    t_2 = time.perf_counter()
-    logger.debug(
-        "[enrich_images_async] processed-images, " f"elapsed={t_2 - t_1:.3f} s"
-    )
-
-    non_processed_urls = zarr_url_to_image.keys() - set_processed_urls
-    non_processed_images_with_status = [
-        _enriched_image(
-            img=zarr_url_to_image[zarr_url],
-            status=HistoryUnitStatusWithUnset.UNSET,
-        )
-        for zarr_url in non_processed_urls
-    ]
-    t_3 = time.perf_counter()
-    logger.debug(
-        "[enrich_images_async] non-processed-images, "
-        f"elapsed={t_3 - t_2:.3f} s"
+    output = _postprocess_image_lists(
+        target_images=images,
+        list_query_url_status=list_query_url_status,
     )
 
-    return
+    return output
 
 
-def
+def enrich_images_unsorted_sync(
     *,
     images: list[dict[str, Any]],
     dataset_id: int,
     workflowtask_id: int,
-) -> list[dict[str,
+) -> list[dict[str, Any]]:
     """
     Enrich images with a status-related attribute.
 
+
     Args:
         images: The input image list
         dataset_id: The dataset ID
         workflowtask_id: The workflow-task ID
 
     Returns:
-        The list of enriched images
+        The list of enriched images, not necessarily in the same order as
+        the input.
     """
+
     t_0 = time.perf_counter()
     logger.info(
         f"[enrich_images_async] START, {dataset_id=}, {workflowtask_id=}"
     )
 
+    # Get `(zarr_url, status)` for _all_ processed images (including those that
+    # are not part of the target image set)
     with next(get_sync_db()) as db:
         res = db.execute(
             _prepare_query(
                 dataset_id=dataset_id,
                 workflowtask_id=workflowtask_id,
-                zarr_urls=zarr_url_to_image.keys(),
             )
         )
+        list_query_url_status = res.all()
     t_1 = time.perf_counter()
-    logger.debug(f"[enrich_images_async]
-    set_processed_urls = set(item[0] for item in list_processed_url_status)
-    processed_images_with_status = [
-        _enriched_image(
-            img=zarr_url_to_image[item[0]],
-            status=item[1],
-        )
-        for item in list_processed_url_status
-    ]
-    t_2 = time.perf_counter()
-    logger.debug(
-        "[enrich_images_async] processed-images, " f"elapsed={t_2 - t_1:.3f} s"
-    )
+    logger.debug(f"[enrich_images_async] query, elapsed={t_1 - t_0:.5f} s")
 
-            img=zarr_url_to_image[zarr_url],
-            status=HistoryUnitStatusWithUnset.UNSET,
-        )
-        for zarr_url in non_processed_urls
-    ]
-    t_3 = time.perf_counter()
-    logger.debug(
-        "[enrich_images_async] non-processed-images, "
-        f"elapsed={t_3 - t_2:.3f} s"
+    output = _postprocess_image_lists(
+        target_images=images,
+        list_query_url_status=list_query_url_status,
     )
 
-    return
+    return output
```
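A toy, in-memory invocation of the new helper (with made-up zarr URLs and plain-string statuses standing in for the query's status values) illustrates the refactor: rows returned by the query but absent from the target set are dropped, target images with no query row fall back to `UNSET`, and input order is not preserved.

```python
from fractal_server.app.schemas.v2 import HistoryUnitStatusWithUnset
from fractal_server.images.status_tools import (
    IMAGE_STATUS_KEY,
    _postprocess_image_lists,
)

# Hypothetical target images; only `zarr_url` and `attributes` matter here.
target_images = [
    {"zarr_url": "/tmp/a.zarr", "attributes": {}},
    {"zarr_url": "/tmp/b.zarr", "attributes": {}},
]
# `(zarr_url, status)` rows as the query would return them; `/tmp/c.zarr`
# is not part of the target set and gets filtered out.
rows = [("/tmp/a.zarr", "done"), ("/tmp/c.zarr", "failed")]

enriched = _postprocess_image_lists(
    target_images=target_images,
    list_query_url_status=rows,
)
statuses = {
    img["zarr_url"]: img["attributes"][IMAGE_STATUS_KEY] for img in enriched
}
assert statuses["/tmp/a.zarr"] == "done"
assert statuses["/tmp/b.zarr"] == HistoryUnitStatusWithUnset.UNSET
```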
fractal_server/migrations/versions/791ce783d3d8_add_indices.py
ADDED

```diff
@@ -0,0 +1,41 @@
+"""Add indices
+
+Revision ID: 791ce783d3d8
+Revises: 969d84257cac
+Create Date: 2025-06-03 09:32:30.757651
+
+"""
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "791ce783d3d8"
+down_revision = "969d84257cac"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("historyimagecache", schema=None) as batch_op:
+        batch_op.create_index(
+            batch_op.f("ix_historyimagecache_dataset_id"),
+            ["dataset_id"],
+            unique=False,
+        )
+        batch_op.create_index(
+            batch_op.f("ix_historyimagecache_workflowtask_id"),
+            ["workflowtask_id"],
+            unique=False,
+        )
+
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("historyimagecache", schema=None) as batch_op:
+        batch_op.drop_index(batch_op.f("ix_historyimagecache_workflowtask_id"))
+        batch_op.drop_index(batch_op.f("ix_historyimagecache_dataset_id"))
+
+    # ### end Alembic commands ###
```
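As a sketch (not part of the package), the two new indices can be verified through the SQLAlchemy inspector after running `alembic upgrade`; the DSN below is a placeholder.

```python
from sqlalchemy import create_engine, inspect

# Placeholder DSN; point it at the upgraded database.
engine = create_engine("postgresql+psycopg2://user:pw@localhost/fractal")
index_names = {
    ix["name"] for ix in inspect(engine).get_indexes("historyimagecache")
}
assert {
    "ix_historyimagecache_dataset_id",
    "ix_historyimagecache_workflowtask_id",
} <= index_names
```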
fractal_server/migrations/versions/b1e7f7a1ff71_task_group_for_pixi.py
ADDED

```diff
@@ -0,0 +1,53 @@
+"""Task group for pixi
+
+Revision ID: b1e7f7a1ff71
+Revises: 791ce783d3d8
+Create Date: 2025-05-29 16:31:17.565973
+
+"""
+import sqlalchemy as sa
+import sqlmodel
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = "b1e7f7a1ff71"
+down_revision = "791ce783d3d8"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+        batch_op.add_column(
+            sa.Column(
+                "pixi_version",
+                sqlmodel.sql.sqltypes.AutoString(),
+                nullable=True,
+            )
+        )
+        batch_op.alter_column(
+            "wheel_path",
+            nullable=True,
+            new_column_name="archive_path",
+        )
+        batch_op.alter_column(
+            "pip_freeze",
+            nullable=True,
+            new_column_name="env_info",
+        )
+
+
+def downgrade() -> None:
+    with op.batch_alter_table("taskgroupv2", schema=None) as batch_op:
+        batch_op.alter_column(
+            "archive_path",
+            nullable=True,
+            new_column_name="wheel_path",
+        )
+        batch_op.alter_column(
+            "env_info",
+            nullable=True,
+            new_column_name="pip_freeze",
+        )
+        batch_op.drop_column("pixi_version")
```
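Analogously, a hedged sanity check for this revision: after upgrading, `taskgroupv2` should expose the renamed columns plus the new nullable `pixi_version`, and the old names should be gone. Again, the DSN is a placeholder.

```python
from sqlalchemy import create_engine, inspect

# Placeholder DSN; point it at the upgraded database.
engine = create_engine("postgresql+psycopg2://user:pw@localhost/fractal")
columns = {col["name"] for col in inspect(engine).get_columns("taskgroupv2")}
assert {"pixi_version", "archive_path", "env_info"} <= columns
assert not {"wheel_path", "pip_freeze"} & columns
```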