pytme 0.2.0b0__cp311-cp311-macosx_14_0_arm64.whl → 0.2.2__cp311-cp311-macosx_14_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pytme-0.2.2.data/scripts/match_template.py +1187 -0
- {pytme-0.2.0b0.data → pytme-0.2.2.data}/scripts/postprocess.py +170 -71
- {pytme-0.2.0b0.data → pytme-0.2.2.data}/scripts/preprocessor_gui.py +179 -86
- pytme-0.2.2.dist-info/METADATA +91 -0
- pytme-0.2.2.dist-info/RECORD +74 -0
- {pytme-0.2.0b0.dist-info → pytme-0.2.2.dist-info}/WHEEL +1 -1
- scripts/extract_candidates.py +126 -87
- scripts/match_template.py +596 -209
- scripts/match_template_filters.py +571 -223
- scripts/postprocess.py +170 -71
- scripts/preprocessor_gui.py +179 -86
- scripts/refine_matches.py +567 -159
- tme/__init__.py +0 -1
- tme/__version__.py +1 -1
- tme/analyzer.py +627 -855
- tme/backends/__init__.py +41 -11
- tme/backends/_jax_utils.py +185 -0
- tme/backends/cupy_backend.py +120 -225
- tme/backends/jax_backend.py +282 -0
- tme/backends/matching_backend.py +464 -388
- tme/backends/mlx_backend.py +45 -68
- tme/backends/npfftw_backend.py +256 -514
- tme/backends/pytorch_backend.py +41 -154
- tme/density.py +312 -421
- tme/extensions.cpython-311-darwin.so +0 -0
- tme/matching_data.py +366 -303
- tme/matching_exhaustive.py +279 -1521
- tme/matching_optimization.py +234 -129
- tme/matching_scores.py +884 -0
- tme/matching_utils.py +281 -387
- tme/memory.py +377 -0
- tme/orientations.py +226 -66
- tme/parser.py +3 -4
- tme/preprocessing/__init__.py +2 -0
- tme/preprocessing/_utils.py +217 -0
- tme/preprocessing/composable_filter.py +31 -0
- tme/preprocessing/compose.py +55 -0
- tme/preprocessing/frequency_filters.py +388 -0
- tme/preprocessing/tilt_series.py +1011 -0
- tme/preprocessor.py +574 -530
- tme/structure.py +495 -189
- tme/types.py +5 -3
- pytme-0.2.0b0.data/scripts/match_template.py +0 -800
- pytme-0.2.0b0.dist-info/METADATA +0 -73
- pytme-0.2.0b0.dist-info/RECORD +0 -66
- tme/helpers.py +0 -881
- tme/matching_constrained.py +0 -195
- {pytme-0.2.0b0.data → pytme-0.2.2.data}/scripts/estimate_ram_usage.py +0 -0
- {pytme-0.2.0b0.data → pytme-0.2.2.data}/scripts/preprocess.py +0 -0
- {pytme-0.2.0b0.dist-info → pytme-0.2.2.dist-info}/LICENSE +0 -0
- {pytme-0.2.0b0.dist-info → pytme-0.2.2.dist-info}/entry_points.txt +0 -0
- {pytme-0.2.0b0.dist-info → pytme-0.2.2.dist-info}/top_level.txt +0 -0
scripts/extract_candidates.py
CHANGED
@@ -1,5 +1,5 @@
 #!python3
-"""
+""" Prepare orientations stack for refinement.
 
 Copyright (c) 2023 European Molecular Biology Laboratory
 
@@ -12,13 +12,33 @@ import numpy as np
 
 from tme import Density, Orientations
 from tme.matching_utils import (
-
+    load_pickle,
     generate_tempfile_name,
     rotation_aligning_vectors,
     euler_from_rotationmatrix,
+    euler_to_rotationmatrix,
 )
 
 
+class ProgressBar:
+    """
+    ASCII progress bar.
+    """
+
+    def __init__(self, message : str, nchars : int, total : int):
+        self._size = nchars - len(message)
+        self._message = message
+        self._total = total
+
+    def update(self, cur):
+        x = int(cur * self._size / self._total)
+        print(
+            "%s[%s%s] %i/%i\r"
+            % (self._message, "#" * x, "." * (self._size - x), cur, self._total),
+            end=''
+        )
+
+
 def parse_args():
     parser = argparse.ArgumentParser(
         description="Extract matching candidates for further refinement."
@@ -52,57 +72,67 @@ def parse_args():
     io_group.add_argument(
         "-o",
         "--output_file",
-        required=
+        required=True,
         type=str,
         help="Path to output HDF5 file.",
     )
 
-
-
-        "--
+    alignment_group = parser.add_argument_group("Alignment")
+    alignment_group.add_argument(
+        "--align_orientations",
+        action="store_true",
         required=False,
-
-
+        help="Whether to align extracted orientations based on their angles. Allows "
+        "for efficient subsequent sampling of cone angles.",
     )
-
-        "--
+    alignment_group.add_argument(
+        "--angles_are_vector",
+        action="store_true",
         required=False,
-
-
-        "euler_y and euler_x are assumed to be a vector that will be rotated to match "
-        "alignment_vector.",
+        help="Considers euler_z euler_y, euler_x as vector that will be rotated to align "
+        "with the z-axis (1,0,0). Only considered when --align_orientations is set."
     )
-
+    alignment_group.add_argument(
         "--interpolation_order",
         dest="interpolation_order",
         required=False,
         type=int,
         default=1,
-        help="
-
+        help="Interpolation order for alignment, less than zero is no interpolation."
+    )
+
+    extraction_group = parser.add_argument_group("Extraction")
+    extraction_group.add_argument(
+        "--box_size",
+        required=False,
+        type=int,
+        help="Box size for extraction, defaults to two times the template.",
     )
     extraction_group.add_argument(
         "--translation_uncertainty",
-        dest="translation_uncertainty",
         required=False,
         type=int,
-
-        help="Creates a cented spherical target mask with given radius in voxel.",
+        help="Sets box size for extraction to template box plus this value.",
     )
     extraction_group.add_argument(
-        "--
-
+        "--keep_out_of_box",
+        action="store_true",
         required=False,
-
-
-        help="Path to write spherical mask to, defaults to target_mask.h5.",
+        help="Whether to keep orientations that fall outside the box. If the "
+        "orientations are sensible, it is safe to pass this flag.",
     )
+
     args = parser.parse_args()
 
-
-
-
-
+    data_present = args.target is not None and args.template is not None
+    if args.input_file is None and not data_present:
+        raise ValueError(
+            "Either --input_file or --target and --template need to be specified."
+        )
+    elif args.input_file is not None and data_present:
+        raise ValueError(
+            "Please specific either --input_file or --target and --template."
+        )
 
     return args
 
@@ -111,57 +141,62 @@ def main():
     args = parse_args()
     orientations = Orientations.from_file(args.orientations)
 
+    if args.input_file is not None:
+        data = load_pickle(args.input_file)
+        target_origin, _, sampling_rate, cli_args = data[-1]
+        args.target, args.template = cli_args.target, cli_args.template
+
     target = Density.from_file(args.target, use_memmap=True)
-    template = Density.from_file(args.template)
 
-
-
-
-
+    try:
+        template = Density.from_file(args.template)
+    except Exception:
+        template = Density.from_structure(args.template, sampling_rate = target.sampling_rate)
+
+    box_size = args.box_size
+    if box_size is None:
+        box_size = np.multiply(template.shape, 2)
+    if args.translation_uncertainty is not None:
+        box_size = np.add(template.shape, args.translation_uncertainty)
+
+    box_size = np.array(box_size)
+    box_size = np.repeat(box_size, template.data.ndim // box_size.size).astype(int)
+
+    extraction_shape = np.copy(box_size)
+    if args.align_orientations:
+        extraction_shape[:] = int(np.linalg.norm(box_size) + 1)
 
-    subtomo_shape = np.multiply(box_size, 2).astype(int)
-    extraction_scaling = 3 if args.alignment_vector is not None else 2
-    extraction_shape = np.multiply(box_size, extraction_scaling).astype(int)
     orientations, cand_slices, obs_slices = orientations.get_extraction_slices(
         target_shape=target.shape,
         extraction_shape=extraction_shape,
-        drop_out_of_box=
+        drop_out_of_box=not args.keep_out_of_box,
         return_orientations=True,
     )
 
+    if args.align_orientations:
+        orientations.rotations = orientations.rotations.astype(np.float32)
+        for index in range(orientations.rotations.shape[0]):
+            rotation_matrix = euler_to_rotationmatrix(orientations.rotations[index])
+            rotation_matrix = np.linalg.inv(rotation_matrix)
+            if args.angles_are_vector:
+                rotation_matrix = rotation_aligning_vectors(
+                    orientations.rotations[index], target_vector=(1,0,0)
+                )
+            orientations.rotations[index] = euler_from_rotationmatrix(rotation_matrix)
+
+
     filename = generate_tempfile_name()
     output_dtype = target.data.dtype
-    if args.
+    if args.align_orientations is not None:
         output_dtype = np.float32
 
-    if args.translation_uncertainty is not None:
-        dens = Density(
-            np.memmap(
-                args.mask_path,
-                mode="w+",
-                shape=(len(obs_slices), *subtomo_shape),
-                dtype=output_dtype,
-            ),
-            sampling_rate=(1, *target.sampling_rate),
-            origin=(0, *target.origin),
-        )
-        dens.data[:] = 0
-        mask = create_mask(
-            mask_type="ellipse",
-            center=template.shape,
-            radius=args.translation_uncertainty,
-            shape=subtomo_shape,
-        )
-        dens.data[:] = mask
-        dens.to_file(args.mask_path)
-
     target.data = target.data.astype(output_dtype)
 
     dens = Density(
         np.memmap(
             filename,
             mode="w+",
-            shape=(len(obs_slices), *
+            shape=(len(obs_slices), *box_size),
             dtype=output_dtype,
         ),
         sampling_rate=(1, *target.sampling_rate),
@@ -169,50 +204,54 @@ def main():
     )
     dens.data[:] = target.metadata["mean"]
 
-
-
-
-
-    total_slices = len(obs_slices)
-    magnitude = len(str(total_slices))
-    for index, obs_slice in enumerate(obs_slices):
-        print(f"Processing {index:{magnitude}} / {total_slices:{magnitude}}", end="\r")
+    data_subset = np.zeros(extraction_shape, dtype = target.data.dtype)
+    pbar = ProgressBar(message = "Orientation ", nchars = 80, total = len(obs_slices))
+    for index, (obs_slice, cand_slice) in enumerate(zip(obs_slices, cand_slices)):
+        pbar.update(index + 1)
 
+        data_subset.fill(0)
+        data_subset[cand_slice] = target.data[obs_slice]
         target_subset = Density(
-
+            data_subset,
            sampling_rate=target.sampling_rate,
            origin=target.origin,
        )
 
-
-
-        normal = orientations.rotations[index]
-        # To visualize only normals were picked we would need to do the opposite
-        # However, this allows us to align everything to the template axis
-        rotation_matrix = rotation_aligning_vectors(
-            normal, target_vector=args.alignment_vector
-        )
-
+        if args.align_orientations:
+            rotation_matrix = euler_to_rotationmatrix(orientations.rotations[index])
        target_subset = target_subset.rigid_transform(
            rotation_matrix=rotation_matrix,
            use_geometric_center=True,
            order=args.interpolation_order,
        )
-        target_subset.pad(
+        target_subset.pad(box_size, center=True)
+
+        # target_value = target.data[tuple(orientations.translations[index].astype(int))]
+        # center = np.divide(target_subset.data.shape, 2).astype(int ) + np.mod(target_subset.shape, 2)
+        # print(np.where(target_subset.data == target_value), center)
+        # print(target_subset.data[tuple(center.astype(int))],
+        #     target_value,
+        #     target_subset.data[tuple(center.astype(int))] == target_value
+        # )
+
        dens.data[index] = target_subset.data
-
+    print("")
 
+    target_meta = {
+        k: v for k, v in target.metadata.items() if k in ("mean", "max", "min", "std")
+    }
    dens.metadata.update(target_meta)
+    dens.metadata["batch_dimension"] = (0, )
+
    dens.to_file(args.output_file)
    orientations.to_file(
-        f"{splitext(args.output_file)}_aligned.tsv",
+        f"{splitext(args.output_file)[0]}_aligned.tsv",
        file_format="text"
    )
-    orientations.to_file(
-
-
-    )
-
+    # orientations.to_file(
+    #     f"{splitext(args.output_file)[0]}_aligned.star",
+    #     file_format="relion"
+    # )
 
 if __name__ == "__main__":
    main()