fractal-server 2.7.1__py3-none-any.whl → 2.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fractal_server/__init__.py +1 -1
- fractal_server/app/models/user_settings.py +1 -0
- fractal_server/app/models/v2/task.py +15 -0
- fractal_server/app/routes/api/v2/dataset.py +39 -6
- fractal_server/app/routes/api/v2/task.py +2 -5
- fractal_server/app/routes/api/v2/task_collection.py +14 -42
- fractal_server/app/routes/api/v2/task_collection_custom.py +3 -3
- fractal_server/app/schemas/user_settings.py +18 -0
- fractal_server/app/schemas/v2/dataset.py +5 -3
- fractal_server/app/schemas/v2/task_collection.py +20 -4
- fractal_server/migrations/versions/19eca0dd47a9_user_settings_project_dir.py +39 -0
- fractal_server/tasks/utils.py +0 -31
- fractal_server/tasks/v1/background_operations.py +11 -11
- fractal_server/tasks/v1/endpoint_operations.py +5 -5
- fractal_server/tasks/v1/utils.py +2 -2
- fractal_server/tasks/v2/collection_local.py +357 -0
- fractal_server/tasks/v2/{background_operations_ssh.py → collection_ssh.py} +108 -102
- fractal_server/tasks/v2/templates/_1_create_venv.sh +0 -8
- fractal_server/tasks/v2/templates/_2_preliminary_pip_operations.sh +2 -2
- fractal_server/tasks/v2/templates/_3_pip_install.sh +22 -1
- fractal_server/tasks/v2/templates/_5_pip_show.sh +5 -5
- fractal_server/tasks/v2/utils_background.py +209 -0
- fractal_server/tasks/v2/utils_package_names.py +77 -0
- fractal_server/tasks/v2/{utils.py → utils_python_interpreter.py} +0 -26
- fractal_server/tasks/v2/utils_templates.py +59 -0
- fractal_server/utils.py +48 -3
- {fractal_server-2.7.1.dist-info → fractal_server-2.8.0.dist-info}/METADATA +11 -8
- {fractal_server-2.7.1.dist-info → fractal_server-2.8.0.dist-info}/RECORD +32 -29
- fractal_server/tasks/v2/_venv_pip.py +0 -198
- fractal_server/tasks/v2/background_operations.py +0 -456
- /fractal_server/{tasks/v2/endpoint_operations.py → app/routes/api/v2/_aux_functions_task_collection.py} +0 -0
- {fractal_server-2.7.1.dist-info → fractal_server-2.8.0.dist-info}/LICENSE +0 -0
- {fractal_server-2.7.1.dist-info → fractal_server-2.8.0.dist-info}/WHEEL +0 -0
- {fractal_server-2.7.1.dist-info → fractal_server-2.8.0.dist-info}/entry_points.txt +0 -0
@@ -33,14 +33,6 @@ write_log "START create venv in ${PACKAGE_ENV_DIR}"
|
|
33
33
|
"$PYTHON" -m venv "$PACKAGE_ENV_DIR" --copies
|
34
34
|
write_log "END create venv in ${PACKAGE_ENV_DIR}"
|
35
35
|
echo
|
36
|
-
VENVPYTHON=${PACKAGE_ENV_DIR}/bin/python
|
37
|
-
if [ -f "$VENVPYTHON" ]; then
|
38
|
-
write_log "OK: $VENVPYTHON exists."
|
39
|
-
echo
|
40
|
-
else
|
41
|
-
write_log "ERROR: $VENVPYTHON not found"
|
42
|
-
exit 2
|
43
|
-
fi
|
44
36
|
|
45
37
|
# End
|
46
38
|
TIME_END=$(date +%s)
|
@@ -14,8 +14,8 @@ VENVPYTHON=${PACKAGE_ENV_DIR}/bin/python
|
|
14
14
|
|
15
15
|
# Upgrade pip
|
16
16
|
write_log "START upgrade pip"
|
17
|
-
"$VENVPYTHON" -m pip install "pip<=__FRACTAL_MAX_PIP_VERSION__" --upgrade
|
18
|
-
"$VENVPYTHON" -m pip install setuptools
|
17
|
+
"$VENVPYTHON" -m pip install --no-cache-dir "pip<=__FRACTAL_MAX_PIP_VERSION__" --upgrade
|
18
|
+
"$VENVPYTHON" -m pip install --no-cache-dir setuptools
|
19
19
|
write_log "END upgrade pip"
|
20
20
|
echo
|
21
21
|
|
@@ -9,6 +9,7 @@ write_log(){
|
|
9
9
|
# Variables to be filled within fractal-server
|
10
10
|
PACKAGE_ENV_DIR=__PACKAGE_ENV_DIR__
|
11
11
|
INSTALL_STRING=__INSTALL_STRING__
|
12
|
+
PINNED_PACKAGE_LIST="__PINNED_PACKAGE_LIST__"
|
12
13
|
|
13
14
|
TIME_START=$(date +%s)
|
14
15
|
|
@@ -16,10 +17,30 @@ VENVPYTHON=${PACKAGE_ENV_DIR}/bin/python
|
|
16
17
|
|
17
18
|
# Install package
|
18
19
|
write_log "START install ${INSTALL_STRING}"
|
19
|
-
"$VENVPYTHON" -m pip install "$INSTALL_STRING"
|
20
|
+
"$VENVPYTHON" -m pip install --no-cache-dir "$INSTALL_STRING"
|
20
21
|
write_log "END install ${INSTALL_STRING}"
|
21
22
|
echo
|
22
23
|
|
24
|
+
|
25
|
+
# Optionally install pinned versions
|
26
|
+
if [ "$PINNED_PACKAGE_LIST" != "" ]; then
|
27
|
+
write_log "START installing pinned versions $PINNED_PACKAGE_LIST"
|
28
|
+
for PINNED_PKG_VERSION in $PINNED_PACKAGE_LIST; do
|
29
|
+
|
30
|
+
PKGNAME=$(echo "$PINNED_PKG_VERSION" | cut -d '=' -f 1)
|
31
|
+
write_log "INFO: package name $PKGNAME"
|
32
|
+
"$VENVPYTHON" -m pip show "$PKGNAME"
|
33
|
+
|
34
|
+
done
|
35
|
+
|
36
|
+
write_log "All packages in ${PINNED_PACKAGE_LIST} are already installed, proceed with specific versions."
|
37
|
+
"$VENVPYTHON" -m pip install --no-cache-dir "$PINNED_PACKAGE_LIST"
|
38
|
+
write_log "END installing pinned versions $PINNED_PACKAGE_LIST"
|
39
|
+
else
|
40
|
+
write_log "SKIP installing pinned versions $PINNED_PACKAGE_LIST (empty list)"
|
41
|
+
fi
|
42
|
+
|
43
|
+
|
23
44
|
# End
|
24
45
|
TIME_END=$(date +%s)
|
25
46
|
write_log "All good up to here."
|
@@ -26,16 +26,16 @@ write_log "START pip show"
|
|
26
26
|
$VENVPYTHON -m pip show ${PACKAGE_NAME}
|
27
27
|
write_log "END pip show"
|
28
28
|
echo
|
29
|
-
PACKAGE_NAME=$($VENVPYTHON -m pip show $PACKAGE_NAME | grep "Name:" | cut -d ":" -f 2 | tr -d "[:space:]")
|
29
|
+
PACKAGE_NAME=$($VENVPYTHON -m pip show "$PACKAGE_NAME" | grep "Name:" | cut -d ":" -f 2 | tr -d "[:space:]")
|
30
30
|
write_log "Package name: $PACKAGE_NAME"
|
31
31
|
echo
|
32
|
-
PACKAGE_VERSION=$($VENVPYTHON -m pip show $PACKAGE_NAME | grep "Version:" | cut -d ":" -f 2 | tr -d "[:space:]")
|
32
|
+
PACKAGE_VERSION=$($VENVPYTHON -m pip show "$PACKAGE_NAME" | grep "Version:" | cut -d ":" -f 2 | tr -d "[:space:]")
|
33
33
|
write_log "Package version: $PACKAGE_VERSION"
|
34
34
|
echo
|
35
|
-
PACKAGE_PARENT_FOLDER=$($VENVPYTHON -m pip show $PACKAGE_NAME | grep "Location:" | cut -d ":" -f 2 | tr -d "[:space:]")
|
35
|
+
PACKAGE_PARENT_FOLDER=$($VENVPYTHON -m pip show "$PACKAGE_NAME" | grep "Location:" | cut -d ":" -f 2 | tr -d "[:space:]")
|
36
36
|
write_log "Package parent folder: $PACKAGE_PARENT_FOLDER"
|
37
37
|
echo
|
38
|
-
MANIFEST_RELATIVE_PATH=$($VENVPYTHON -m pip show $PACKAGE_NAME --files | grep "__FRACTAL_MANIFEST__.json" | tr -d "[:space:]")
|
38
|
+
MANIFEST_RELATIVE_PATH=$($VENVPYTHON -m pip show "$PACKAGE_NAME" --files | grep "__FRACTAL_MANIFEST__.json" | tr -d "[:space:]")
|
39
39
|
write_log "Manifest relative path: $MANIFEST_RELATIVE_PATH"
|
40
40
|
echo
|
41
41
|
MANIFEST_ABSOLUTE_PATH="${PACKAGE_PARENT_FOLDER}/${MANIFEST_RELATIVE_PATH}"
|
@@ -46,7 +46,7 @@ if [ -f "$MANIFEST_ABSOLUTE_PATH" ]; then
|
|
46
46
|
echo
|
47
47
|
else
|
48
48
|
write_log "ERROR: manifest path not found at $MANIFEST_ABSOLUTE_PATH"
|
49
|
-
exit
|
49
|
+
exit 2
|
50
50
|
fi
|
51
51
|
|
52
52
|
# End
|
@@ -0,0 +1,209 @@
|
|
1
|
+
from pathlib import Path
|
2
|
+
from typing import Optional
|
3
|
+
|
4
|
+
from sqlalchemy.orm import Session as DBSyncSession
|
5
|
+
from sqlalchemy.orm.attributes import flag_modified
|
6
|
+
from sqlmodel import select
|
7
|
+
|
8
|
+
from fractal_server.app.models.v2 import CollectionStateV2
|
9
|
+
from fractal_server.app.models.v2 import TaskGroupV2
|
10
|
+
from fractal_server.app.schemas.v2 import CollectionStatusV2
|
11
|
+
from fractal_server.app.schemas.v2 import TaskCreateV2
|
12
|
+
from fractal_server.app.schemas.v2.manifest import ManifestV2
|
13
|
+
from fractal_server.logger import get_logger
|
14
|
+
from fractal_server.logger import reset_logger_handlers
|
15
|
+
|
16
|
+
|
17
|
+
def _set_collection_state_data_status(
|
18
|
+
*,
|
19
|
+
state_id: int,
|
20
|
+
new_status: CollectionStatusV2,
|
21
|
+
logger_name: str,
|
22
|
+
db: DBSyncSession,
|
23
|
+
):
|
24
|
+
logger = get_logger(logger_name)
|
25
|
+
logger.debug(f"{state_id=} - set state.data['status'] to {new_status}")
|
26
|
+
collection_state = db.get(CollectionStateV2, state_id)
|
27
|
+
collection_state.data["status"] = CollectionStatusV2(new_status)
|
28
|
+
flag_modified(collection_state, "data")
|
29
|
+
db.commit()
|
30
|
+
|
31
|
+
|
32
|
+
def _set_collection_state_data_log(
|
33
|
+
*,
|
34
|
+
state_id: int,
|
35
|
+
new_log: str,
|
36
|
+
logger_name: str,
|
37
|
+
db: DBSyncSession,
|
38
|
+
):
|
39
|
+
logger = get_logger(logger_name)
|
40
|
+
logger.debug(f"{state_id=} - set state.data['log']")
|
41
|
+
collection_state = db.get(CollectionStateV2, state_id)
|
42
|
+
collection_state.data["log"] = new_log
|
43
|
+
flag_modified(collection_state, "data")
|
44
|
+
db.commit()
|
45
|
+
|
46
|
+
|
47
|
+
def _set_collection_state_data_info(
|
48
|
+
*,
|
49
|
+
state_id: int,
|
50
|
+
new_info: str,
|
51
|
+
logger_name: str,
|
52
|
+
db: DBSyncSession,
|
53
|
+
):
|
54
|
+
logger = get_logger(logger_name)
|
55
|
+
logger.debug(f"{state_id=} - set state.data['info']")
|
56
|
+
collection_state = db.get(CollectionStateV2, state_id)
|
57
|
+
collection_state.data["info"] = new_info
|
58
|
+
flag_modified(collection_state, "data")
|
59
|
+
db.commit()
|
60
|
+
|
61
|
+
|
62
|
+
def _handle_failure(
|
63
|
+
state_id: int,
|
64
|
+
logger_name: str,
|
65
|
+
exception: Exception,
|
66
|
+
db: DBSyncSession,
|
67
|
+
task_group_id: int,
|
68
|
+
log_file_path: Path,
|
69
|
+
):
|
70
|
+
logger = get_logger(logger_name)
|
71
|
+
logger.error(f"Task collection failed. Original error: {str(exception)}")
|
72
|
+
|
73
|
+
_set_collection_state_data_status(
|
74
|
+
state_id=state_id,
|
75
|
+
new_status=CollectionStatusV2.FAIL,
|
76
|
+
logger_name=logger_name,
|
77
|
+
db=db,
|
78
|
+
)
|
79
|
+
|
80
|
+
new_log = log_file_path.open("r").read()
|
81
|
+
|
82
|
+
_set_collection_state_data_log(
|
83
|
+
state_id=state_id,
|
84
|
+
new_log=new_log,
|
85
|
+
logger_name=logger_name,
|
86
|
+
db=db,
|
87
|
+
)
|
88
|
+
# For backwards-compatibility, we also set state.data["info"]
|
89
|
+
_set_collection_state_data_info(
|
90
|
+
state_id=state_id,
|
91
|
+
new_info=f"Original error: {exception}",
|
92
|
+
logger_name=logger_name,
|
93
|
+
db=db,
|
94
|
+
)
|
95
|
+
|
96
|
+
# Delete TaskGroupV2 object / and apply cascade operation to FKs
|
97
|
+
logger.info(f"Now delete TaskGroupV2 with {task_group_id=}")
|
98
|
+
logger.info("Start of CollectionStateV2 cascade operations.")
|
99
|
+
stm = select(CollectionStateV2).where(
|
100
|
+
CollectionStateV2.taskgroupv2_id == task_group_id
|
101
|
+
)
|
102
|
+
res = db.execute(stm)
|
103
|
+
collection_states = res.scalars().all()
|
104
|
+
for collection_state in collection_states:
|
105
|
+
logger.info(
|
106
|
+
f"Setting CollectionStateV2[{collection_state.id}].taskgroupv2_id "
|
107
|
+
"to None."
|
108
|
+
)
|
109
|
+
collection_state.taskgroupv2_id = None
|
110
|
+
db.add(collection_state)
|
111
|
+
logger.info("End of CollectionStateV2 cascade operations.")
|
112
|
+
task_group = db.get(TaskGroupV2, task_group_id)
|
113
|
+
db.delete(task_group)
|
114
|
+
db.commit()
|
115
|
+
logger.info(f"TaskGroupV2 with {task_group_id=} deleted")
|
116
|
+
|
117
|
+
reset_logger_handlers(logger)
|
118
|
+
return
|
119
|
+
|
120
|
+
|
121
|
+
def _prepare_tasks_metadata(
|
122
|
+
*,
|
123
|
+
package_manifest: ManifestV2,
|
124
|
+
python_bin: Path,
|
125
|
+
package_root: Path,
|
126
|
+
package_version: Optional[str] = None,
|
127
|
+
) -> list[TaskCreateV2]:
|
128
|
+
"""
|
129
|
+
Based on the package manifest and additional info, prepare the task list.
|
130
|
+
|
131
|
+
Args:
|
132
|
+
package_manifest:
|
133
|
+
python_bin:
|
134
|
+
package_root:
|
135
|
+
package_version:
|
136
|
+
"""
|
137
|
+
task_list = []
|
138
|
+
for _task in package_manifest.task_list:
|
139
|
+
# Set non-command attributes
|
140
|
+
task_attributes = {}
|
141
|
+
if package_version is not None:
|
142
|
+
task_attributes["version"] = package_version
|
143
|
+
if package_manifest.has_args_schemas:
|
144
|
+
task_attributes[
|
145
|
+
"args_schema_version"
|
146
|
+
] = package_manifest.args_schema_version
|
147
|
+
# Set command attributes
|
148
|
+
if _task.executable_non_parallel is not None:
|
149
|
+
non_parallel_path = package_root / _task.executable_non_parallel
|
150
|
+
task_attributes["command_non_parallel"] = (
|
151
|
+
f"{python_bin.as_posix()} " f"{non_parallel_path.as_posix()}"
|
152
|
+
)
|
153
|
+
if _task.executable_parallel is not None:
|
154
|
+
parallel_path = package_root / _task.executable_parallel
|
155
|
+
task_attributes[
|
156
|
+
"command_parallel"
|
157
|
+
] = f"{python_bin.as_posix()} {parallel_path.as_posix()}"
|
158
|
+
# Create object
|
159
|
+
task_obj = TaskCreateV2(
|
160
|
+
**_task.dict(
|
161
|
+
exclude={
|
162
|
+
"executable_non_parallel",
|
163
|
+
"executable_parallel",
|
164
|
+
}
|
165
|
+
),
|
166
|
+
**task_attributes,
|
167
|
+
authors=package_manifest.authors,
|
168
|
+
)
|
169
|
+
task_list.append(task_obj)
|
170
|
+
return task_list
|
171
|
+
|
172
|
+
|
173
|
+
def check_task_files_exist(task_list: list[TaskCreateV2]) -> None:
|
174
|
+
"""
|
175
|
+
Check that the modules listed in task commands point to existing files.
|
176
|
+
|
177
|
+
Args:
|
178
|
+
task_list:
|
179
|
+
"""
|
180
|
+
for _task in task_list:
|
181
|
+
if _task.command_non_parallel is not None:
|
182
|
+
_task_path = _task.command_non_parallel.split()[1]
|
183
|
+
if not Path(_task_path).exists():
|
184
|
+
raise FileNotFoundError(
|
185
|
+
f"Task `{_task.name}` has `command_non_parallel` "
|
186
|
+
f"pointing to missing file `{_task_path}`."
|
187
|
+
)
|
188
|
+
if _task.command_parallel is not None:
|
189
|
+
_task_path = _task.command_parallel.split()[1]
|
190
|
+
if not Path(_task_path).exists():
|
191
|
+
raise FileNotFoundError(
|
192
|
+
f"Task `{_task.name}` has `command_parallel` "
|
193
|
+
f"pointing to missing file `{_task_path}`."
|
194
|
+
)
|
195
|
+
|
196
|
+
|
197
|
+
def _refresh_logs(
|
198
|
+
*,
|
199
|
+
state_id: int,
|
200
|
+
log_file_path: Path,
|
201
|
+
db: DBSyncSession,
|
202
|
+
) -> None:
|
203
|
+
"""
|
204
|
+
Read logs from file and update them in the db.
|
205
|
+
"""
|
206
|
+
collection_state = db.get(CollectionStateV2, state_id)
|
207
|
+
collection_state.data["log"] = log_file_path.open("r").read()
|
208
|
+
flag_modified(collection_state, "data")
|
209
|
+
db.commit()
|
@@ -0,0 +1,77 @@
|
|
1
|
+
import re
|
2
|
+
|
3
|
+
from fractal_server.logger import get_logger
|
4
|
+
|
5
|
+
|
6
|
+
def _parse_wheel_filename(wheel_filename: str) -> dict[str, str]:
|
7
|
+
"""
|
8
|
+
Extract distribution and version from a wheel filename.
|
9
|
+
|
10
|
+
The structure of a wheel filename is fixed, and it must start with
|
11
|
+
`{distribution}-{version}` (see
|
12
|
+
https://packaging.python.org/en/latest/specifications/binary-distribution-format
|
13
|
+
).
|
14
|
+
|
15
|
+
Note that we transform exceptions in `ValueError`s, since this function is
|
16
|
+
also used within Pydantic validators.
|
17
|
+
"""
|
18
|
+
if "/" in wheel_filename:
|
19
|
+
raise ValueError(
|
20
|
+
"[_parse_wheel_filename] Input must be a filename, not a full "
|
21
|
+
f"path (given: {wheel_filename})."
|
22
|
+
)
|
23
|
+
try:
|
24
|
+
parts = wheel_filename.split("-")
|
25
|
+
return dict(distribution=parts[0], version=parts[1])
|
26
|
+
except Exception as e:
|
27
|
+
raise ValueError(
|
28
|
+
f"Invalid {wheel_filename=}. Original error: {str(e)}."
|
29
|
+
)
|
30
|
+
|
31
|
+
|
32
|
+
def normalize_package_name(name: str) -> str:
|
33
|
+
"""
|
34
|
+
Implement PyPa specifications for package-name normalization
|
35
|
+
|
36
|
+
The name should be lowercased with all runs of the characters `.`, `-`,
|
37
|
+
or `_` replaced with a single `-` character. This can be implemented in
|
38
|
+
Python with the re module.
|
39
|
+
(https://packaging.python.org/en/latest/specifications/name-normalization)
|
40
|
+
|
41
|
+
Args:
|
42
|
+
name: The non-normalized package name.
|
43
|
+
|
44
|
+
Returns:
|
45
|
+
The normalized package name.
|
46
|
+
"""
|
47
|
+
return re.sub(r"[-_.]+", "-", name).lower()
|
48
|
+
|
49
|
+
|
50
|
+
def compare_package_names(
|
51
|
+
*,
|
52
|
+
pkg_name_pip_show: str,
|
53
|
+
pkg_name_task_group: str,
|
54
|
+
logger_name: str,
|
55
|
+
) -> None:
|
56
|
+
"""
|
57
|
+
Compare the package names from `pip show` and from the db.
|
58
|
+
"""
|
59
|
+
logger = get_logger(logger_name)
|
60
|
+
|
61
|
+
if pkg_name_pip_show == pkg_name_task_group:
|
62
|
+
return
|
63
|
+
|
64
|
+
logger.warning(
|
65
|
+
f"Package name mismatch: "
|
66
|
+
f"{pkg_name_task_group=}, {pkg_name_pip_show=}."
|
67
|
+
)
|
68
|
+
normalized_pkg_name_pip = normalize_package_name(pkg_name_pip_show)
|
69
|
+
normalized_pkg_name_taskgroup = normalize_package_name(pkg_name_task_group)
|
70
|
+
if normalized_pkg_name_pip != normalized_pkg_name_taskgroup:
|
71
|
+
error_msg = (
|
72
|
+
f"Package name mismatch persists, after normalization: "
|
73
|
+
f"{pkg_name_task_group=}, "
|
74
|
+
f"{pkg_name_pip_show=}."
|
75
|
+
)
|
76
|
+
logger.error(error_msg)
|
77
|
+
raise ValueError(error_msg)
|
@@ -31,29 +31,3 @@ def get_python_interpreter_v2(
|
|
31
31
|
if value is None:
|
32
32
|
raise ValueError(f"Requested {python_version=}, but {key}={value}.")
|
33
33
|
return value
|
34
|
-
|
35
|
-
|
36
|
-
def _parse_wheel_filename(wheel_filename: str) -> dict[str, str]:
|
37
|
-
"""
|
38
|
-
Extract distribution and version from a wheel filename.
|
39
|
-
|
40
|
-
The structure of a wheel filename is fixed, and it must start with
|
41
|
-
`{distribution}-{version}` (see
|
42
|
-
https://packaging.python.org/en/latest/specifications/binary-distribution-format
|
43
|
-
).
|
44
|
-
|
45
|
-
Note that we transform exceptions in `ValueError`s, since this function is
|
46
|
-
also used within Pydantic validators.
|
47
|
-
"""
|
48
|
-
if "/" in wheel_filename:
|
49
|
-
raise ValueError(
|
50
|
-
"[_parse_wheel_filename] Input must be a filename, not a full "
|
51
|
-
f"path (given: {wheel_filename})."
|
52
|
-
)
|
53
|
-
try:
|
54
|
-
parts = wheel_filename.split("-")
|
55
|
-
return dict(distribution=parts[0], version=parts[1])
|
56
|
-
except Exception as e:
|
57
|
-
raise ValueError(
|
58
|
-
f"Invalid {wheel_filename=}. Original error: {str(e)}."
|
59
|
-
)
|
@@ -0,0 +1,59 @@
|
|
1
|
+
from pathlib import Path
|
2
|
+
|
3
|
+
TEMPLATES_DIR = Path(__file__).parent / "templates"
|
4
|
+
|
5
|
+
|
6
|
+
def customize_template(
|
7
|
+
*,
|
8
|
+
template_name: str,
|
9
|
+
replacements: list[tuple[str, str]],
|
10
|
+
script_path: str,
|
11
|
+
) -> str:
|
12
|
+
"""
|
13
|
+
Customize a bash-script template and write it to disk.
|
14
|
+
|
15
|
+
Args:
|
16
|
+
template_filename:
|
17
|
+
templates_folder:
|
18
|
+
replacements:
|
19
|
+
"""
|
20
|
+
# Read template
|
21
|
+
template_path = TEMPLATES_DIR / template_name
|
22
|
+
with template_path.open("r") as f:
|
23
|
+
template_data = f.read()
|
24
|
+
# Customize template
|
25
|
+
script_data = template_data
|
26
|
+
for old_new in replacements:
|
27
|
+
script_data = script_data.replace(old_new[0], old_new[1])
|
28
|
+
# Write script locally
|
29
|
+
with open(script_path, "w") as f:
|
30
|
+
f.write(script_data)
|
31
|
+
|
32
|
+
|
33
|
+
def parse_script_5_stdout(stdout: str) -> dict[str, str]:
|
34
|
+
"""
|
35
|
+
Parse standard output of template 5.
|
36
|
+
"""
|
37
|
+
searches = [
|
38
|
+
("Python interpreter:", "python_bin"),
|
39
|
+
("Package name:", "package_name"),
|
40
|
+
("Package version:", "package_version"),
|
41
|
+
("Package parent folder:", "package_root_parent"),
|
42
|
+
("Manifest absolute path:", "manifest_path"),
|
43
|
+
]
|
44
|
+
stdout_lines = stdout.splitlines()
|
45
|
+
attributes = dict()
|
46
|
+
for search, attribute_name in searches:
|
47
|
+
matching_lines = [_line for _line in stdout_lines if search in _line]
|
48
|
+
if len(matching_lines) == 0:
|
49
|
+
raise ValueError(f"String '{search}' not found in stdout.")
|
50
|
+
elif len(matching_lines) > 1:
|
51
|
+
raise ValueError(
|
52
|
+
f"String '{search}' found too many times "
|
53
|
+
f"({len(matching_lines)})."
|
54
|
+
)
|
55
|
+
else:
|
56
|
+
actual_line = matching_lines[0]
|
57
|
+
attribute_value = actual_line.split(search)[-1].strip(" ")
|
58
|
+
attributes[attribute_name] = attribute_value
|
59
|
+
return attributes
|
fractal_server/utils.py
CHANGED
@@ -14,13 +14,15 @@ This module provides general purpose utilities that are not specific to any
|
|
14
14
|
subsystem.
|
15
15
|
"""
|
16
16
|
import asyncio
|
17
|
+
import shlex
|
18
|
+
import subprocess # nosec
|
17
19
|
from datetime import datetime
|
18
20
|
from datetime import timezone
|
19
21
|
from pathlib import Path
|
20
|
-
from shlex import split as shlex_split
|
21
22
|
from typing import Optional
|
22
23
|
|
23
24
|
from .logger import get_logger
|
25
|
+
from .string_tools import validate_cmd
|
24
26
|
|
25
27
|
|
26
28
|
def get_timestamp() -> datetime:
|
@@ -30,7 +32,7 @@ def get_timestamp() -> datetime:
|
|
30
32
|
return datetime.now(tz=timezone.utc)
|
31
33
|
|
32
34
|
|
33
|
-
async def execute_command(
|
35
|
+
async def execute_command_async(
|
34
36
|
*,
|
35
37
|
command: str,
|
36
38
|
cwd: Optional[Path] = None,
|
@@ -56,7 +58,7 @@ async def execute_command(
|
|
56
58
|
RuntimeError: if the process exited with non-zero status. The error
|
57
59
|
string is set to the `stderr` of the process.
|
58
60
|
"""
|
59
|
-
command_split = shlex_split(command)
|
61
|
+
command_split = shlex.split(command)
|
60
62
|
cmd, *args = command_split
|
61
63
|
|
62
64
|
logger = get_logger(logger_name)
|
@@ -75,3 +77,46 @@ async def execute_command(
|
|
75
77
|
if proc.returncode != 0:
|
76
78
|
raise RuntimeError(stderr.decode("utf-8"))
|
77
79
|
return stdout.decode("utf-8")
|
80
|
+
|
81
|
+
|
82
|
+
def execute_command_sync(
|
83
|
+
*,
|
84
|
+
command: str,
|
85
|
+
logger_name: Optional[str] = None,
|
86
|
+
allow_char: Optional[str] = None,
|
87
|
+
) -> str:
|
88
|
+
"""
|
89
|
+
Execute arbitrary command
|
90
|
+
|
91
|
+
If the command returns a return code different from zero, a `RuntimeError`
|
92
|
+
is raised.
|
93
|
+
|
94
|
+
Arguments:
|
95
|
+
command: Command to be executed.
|
96
|
+
logger_name: Name of the logger.
|
97
|
+
allow_char: Argument propagated to `validate_cmd`.
|
98
|
+
"""
|
99
|
+
logger = get_logger(logger_name)
|
100
|
+
logger.debug(f"START subprocess call to '{command}'")
|
101
|
+
validate_cmd(command=command, allow_char=allow_char)
|
102
|
+
res = subprocess.run( # nosec
|
103
|
+
shlex.split(command),
|
104
|
+
capture_output=True,
|
105
|
+
encoding="utf-8",
|
106
|
+
)
|
107
|
+
returncode = res.returncode
|
108
|
+
stdout = res.stdout
|
109
|
+
stderr = res.stderr
|
110
|
+
logger.debug(f"{returncode=}")
|
111
|
+
logger.debug(f"{stdout=}")
|
112
|
+
logger.debug(f"{stderr=}")
|
113
|
+
if res.returncode != 0:
|
114
|
+
logger.debug(f"ERROR in subprocess call to '{command}'")
|
115
|
+
raise RuntimeError(
|
116
|
+
f"Command {command} failed.\n"
|
117
|
+
f"returncode={res.returncode}\n"
|
118
|
+
f"{stdout=}\n"
|
119
|
+
f"{stderr=}\n"
|
120
|
+
)
|
121
|
+
logger.debug(f"END subprocess call to '{command}'")
|
122
|
+
return stdout
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: fractal-server
|
3
|
-
Version: 2.7.1
|
3
|
+
Version: 2.8.0
|
4
4
|
Summary: Server component of the Fractal analytics platform
|
5
5
|
Home-page: https://github.com/fractal-analytics-platform/fractal-server
|
6
6
|
License: BSD-3-Clause
|
@@ -37,15 +37,20 @@ Description-Content-Type: text/markdown
|
|
37
37
|
|
38
38
|
# Fractal Server
|
39
39
|
|
40
|
+
<p align="center">
|
41
|
+
<img src="https://github.com/user-attachments/assets/16e9cf11-d47d-4db8-a9b1-f5349e4175b7" alt="Fractal server" width="400">
|
42
|
+
</p>
|
43
|
+
|
40
44
|
[](https://pypi.org/project/fractal-server/)
|
45
|
+
[](https://opensource.org/licenses/BSD-3-Clause)
|
41
46
|
[](https://github.com/fractal-analytics-platform/fractal-server/actions/workflows/ci.yml?query=branch%3Amain)
|
42
47
|
[](https://htmlpreview.github.io/?https://github.com/fractal-analytics-platform/fractal-server/blob/python-coverage-comment-action-data/htmlcov/index.html)
|
43
|
-
[](https://fractal-analytics-platform.github.io/fractal-server)
|
44
49
|
[](https://htmlpreview.github.io/?https://github.com/fractal-analytics-platform/fractal-server/blob/benchmark-api/benchmarks/bench.html)
|
45
50
|
|
46
51
|
[Fractal](https://fractal-analytics-platform.github.io/) is a framework developed at the [BioVisionCenter](https://www.biovisioncenter.uzh.ch/en.html) to process bioimaging data at scale in the OME-Zarr format and prepare the images for interactive visualization.
|
47
52
|
|
48
|
-

|
49
54
|
|
50
55
|
This is the server component of the fractal analytics platform.
|
51
56
|
Find more information about Fractal in general and the other repositories at
|
@@ -58,14 +63,12 @@ See https://fractal-analytics-platform.github.io/fractal-server.
|
|
58
63
|
|
59
64
|
# Contributors and license
|
60
65
|
|
61
|
-
|
62
|
-
|
63
|
-
|
66
|
+
Fractal was conceived in the Liberali Lab at the Friedrich Miescher Institute for Biomedical Research and in the Pelkmans Lab at the University of Zurich by [@jluethi](https://github.com/jluethi) and [@gusqgm](https://github.com/gusqgm). The Fractal project is now developed at the [BioVisionCenter](https://www.biovisioncenter.uzh.ch/en.html) at the University of Zurich and the project lead is with [@jluethi](https://github.com/jluethi). The core development is done under contract by [eXact lab S.r.l.](https://www.exact-lab.it).
|
67
|
+
|
68
|
+
Unless otherwise specified, Fractal components are released under the BSD 3-Clause License, and copyright is with the BioVisionCenter at the University of Zurich.
|
64
69
|
|
65
70
|
The SLURM compatibility layer is based on
|
66
71
|
[`clusterfutures`](https://github.com/sampsyo/clusterfutures), by
|
67
72
|
[@sampsyo](https://github.com/sampsyo) and collaborators, and it is released
|
68
73
|
under the terms of the MIT license.
|
69
74
|
|
70
|
-
Fractal was conceived in the Liberali Lab at the Friedrich Miescher Institute for Biomedical Research and in the Pelkmans Lab at the University of Zurich by [@jluethi](https://github.com/jluethi) and [@gusqgm](https://github.com/gusqgm). The Fractal project is now developed at the [BioVisionCenter](https://www.biovisioncenter.uzh.ch/en.html) at the University of Zurich and the project lead is with [@jluethi](https://github.com/jluethi). The core development is done under contract by [eXact lab S.r.l.](https://www.exact-lab.it/).
|
71
|
-
|