pytme-0.2.9-cp311-cp311-macosx_15_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pytme-0.2.9.data/scripts/estimate_ram_usage.py +97 -0
- pytme-0.2.9.data/scripts/match_template.py +1135 -0
- pytme-0.2.9.data/scripts/postprocess.py +622 -0
- pytme-0.2.9.data/scripts/preprocess.py +209 -0
- pytme-0.2.9.data/scripts/preprocessor_gui.py +1227 -0
- pytme-0.2.9.dist-info/METADATA +95 -0
- pytme-0.2.9.dist-info/RECORD +119 -0
- pytme-0.2.9.dist-info/WHEEL +5 -0
- pytme-0.2.9.dist-info/entry_points.txt +6 -0
- pytme-0.2.9.dist-info/licenses/LICENSE +153 -0
- pytme-0.2.9.dist-info/top_level.txt +3 -0
- scripts/__init__.py +0 -0
- scripts/estimate_ram_usage.py +97 -0
- scripts/match_template.py +1135 -0
- scripts/postprocess.py +622 -0
- scripts/preprocess.py +209 -0
- scripts/preprocessor_gui.py +1227 -0
- tests/__init__.py +0 -0
- tests/data/Blurring/blob_width18.npy +0 -0
- tests/data/Blurring/edgegaussian_sigma3.npy +0 -0
- tests/data/Blurring/gaussian_sigma2.npy +0 -0
- tests/data/Blurring/hamming_width6.npy +0 -0
- tests/data/Blurring/kaiserb_width18.npy +0 -0
- tests/data/Blurring/localgaussian_sigma0510.npy +0 -0
- tests/data/Blurring/mean_size5.npy +0 -0
- tests/data/Blurring/ntree_sigma0510.npy +0 -0
- tests/data/Blurring/rank_rank3.npy +0 -0
- tests/data/Maps/.DS_Store +0 -0
- tests/data/Maps/emd_8621.mrc.gz +0 -0
- tests/data/README.md +2 -0
- tests/data/Raw/em_map.map +0 -0
- tests/data/Structures/.DS_Store +0 -0
- tests/data/Structures/1pdj.cif +3339 -0
- tests/data/Structures/1pdj.pdb +1429 -0
- tests/data/Structures/5khe.cif +3685 -0
- tests/data/Structures/5khe.ent +2210 -0
- tests/data/Structures/5khe.pdb +2210 -0
- tests/data/Structures/5uz4.cif +70548 -0
- tests/preprocessing/__init__.py +0 -0
- tests/preprocessing/test_compose.py +76 -0
- tests/preprocessing/test_frequency_filters.py +178 -0
- tests/preprocessing/test_preprocessor.py +136 -0
- tests/preprocessing/test_utils.py +79 -0
- tests/test_analyzer.py +216 -0
- tests/test_backends.py +446 -0
- tests/test_density.py +503 -0
- tests/test_extensions.py +130 -0
- tests/test_matching_cli.py +283 -0
- tests/test_matching_data.py +162 -0
- tests/test_matching_exhaustive.py +124 -0
- tests/test_matching_memory.py +30 -0
- tests/test_matching_optimization.py +226 -0
- tests/test_matching_utils.py +189 -0
- tests/test_orientations.py +175 -0
- tests/test_parser.py +33 -0
- tests/test_rotations.py +153 -0
- tests/test_structure.py +247 -0
- tme/__init__.py +6 -0
- tme/__version__.py +1 -0
- tme/analyzer/__init__.py +2 -0
- tme/analyzer/_utils.py +186 -0
- tme/analyzer/aggregation.py +577 -0
- tme/analyzer/peaks.py +953 -0
- tme/backends/__init__.py +171 -0
- tme/backends/_cupy_utils.py +734 -0
- tme/backends/_jax_utils.py +188 -0
- tme/backends/cupy_backend.py +294 -0
- tme/backends/jax_backend.py +314 -0
- tme/backends/matching_backend.py +1270 -0
- tme/backends/mlx_backend.py +241 -0
- tme/backends/npfftw_backend.py +583 -0
- tme/backends/pytorch_backend.py +430 -0
- tme/data/__init__.py +0 -0
- tme/data/c48n309.npy +0 -0
- tme/data/c48n527.npy +0 -0
- tme/data/c48n9.npy +0 -0
- tme/data/c48u1.npy +0 -0
- tme/data/c48u1153.npy +0 -0
- tme/data/c48u1201.npy +0 -0
- tme/data/c48u1641.npy +0 -0
- tme/data/c48u181.npy +0 -0
- tme/data/c48u2219.npy +0 -0
- tme/data/c48u27.npy +0 -0
- tme/data/c48u2947.npy +0 -0
- tme/data/c48u3733.npy +0 -0
- tme/data/c48u4749.npy +0 -0
- tme/data/c48u5879.npy +0 -0
- tme/data/c48u7111.npy +0 -0
- tme/data/c48u815.npy +0 -0
- tme/data/c48u83.npy +0 -0
- tme/data/c48u8649.npy +0 -0
- tme/data/c600v.npy +0 -0
- tme/data/c600vc.npy +0 -0
- tme/data/metadata.yaml +80 -0
- tme/data/quat_to_numpy.py +42 -0
- tme/data/scattering_factors.pickle +0 -0
- tme/density.py +2263 -0
- tme/extensions.cpython-311-darwin.so +0 -0
- tme/external/bindings.cpp +332 -0
- tme/filters/__init__.py +6 -0
- tme/filters/_utils.py +311 -0
- tme/filters/bandpass.py +230 -0
- tme/filters/compose.py +81 -0
- tme/filters/ctf.py +393 -0
- tme/filters/reconstruction.py +160 -0
- tme/filters/wedge.py +542 -0
- tme/filters/whitening.py +191 -0
- tme/matching_data.py +863 -0
- tme/matching_exhaustive.py +497 -0
- tme/matching_optimization.py +1311 -0
- tme/matching_scores.py +1183 -0
- tme/matching_utils.py +1188 -0
- tme/memory.py +337 -0
- tme/orientations.py +598 -0
- tme/parser.py +685 -0
- tme/preprocessor.py +1329 -0
- tme/rotations.py +350 -0
- tme/structure.py +1864 -0
- tme/types.py +13 -0
tme/orientations.py
ADDED
@@ -0,0 +1,598 @@
#!python3
""" Handle template matching orientations and conversion between formats.

    Copyright (c) 2024 European Molecular Biology Laboratory

    Author: Valentin Maurer <valentin.maurer@embl-hamburg.de>
"""
from typing import List, Tuple
from dataclasses import dataclass
from string import ascii_lowercase, ascii_uppercase

import numpy as np

from .parser import StarParser
from .matching_utils import compute_extraction_box

# Exceeds available numpy dimensions for default installations.
NAMES = ["x", "y", "z", *ascii_lowercase[:-3], *ascii_uppercase]


@dataclass
class Orientations:
    """
    Handle template matching orientations and conversion between formats.

    Examples
    --------
    The following achieves the minimal definition of an :py:class:`Orientations` instance

    >>> import numpy as np
    >>> from tme import Orientations
    >>> translations = np.random.randint(low = 0, high = 100, size = (100,3))
    >>> rotations = np.random.rand(100, 3)
    >>> scores = np.random.rand(100)
    >>> details = np.full((100,), fill_value = -1)
    >>> orientations = Orientations(
    >>>     translations = translations,
    >>>     rotations = rotations,
    >>>     scores = scores,
    >>>     details = details,
    >>> )

    The created ``orientations`` object can be written to disk in a range of formats.
    See :py:meth:`Orientations.to_file` for available formats. The following creates
    a STAR file

    >>> orientations.to_file("test.star")

    :py:meth:`Orientations.from_file` can create :py:class:`Orientations` instances
    from a range of formats, to enable conversion between formats

    >>> orientations_star = Orientations.from_file("test.star")
    >>> np.all(orientations.translations == orientations_star.translations)
    True

    Parameters
    ----------
    translations: np.ndarray
        Array with translations of each orientation (n, d).
    rotations: np.ndarray
        Array with euler angles of each orientation in zyz convention (n, d).
    scores: np.ndarray
        Array with the score of each orientation (n, ).
    details: np.ndarray
        Array with additional orientation details (n, ).
    """

    #: Array with translations of each orientation (n, d).
    translations: np.ndarray

    #: Array with zyz euler angles of each orientation (n, d).
    rotations: np.ndarray

    #: Array with scores of each orientation (n, ).
    scores: np.ndarray

    #: Array with additional details of each orientation (n, ).
    details: np.ndarray

    def __post_init__(self):
        self.translations = np.array(self.translations).astype(np.float32)
        self.rotations = np.array(self.rotations).astype(np.float32)
        self.scores = np.array(self.scores).astype(np.float32)
        self.details = np.array(self.details).astype(np.float32)
        n_orientations = set(
            [
                self.translations.shape[0],
                self.rotations.shape[0],
                self.scores.shape[0],
                self.details.shape[0],
            ]
        )
        if len(n_orientations) != 1:
            raise ValueError(
                "The first dimension of all parameters needs to be of equal length."
            )
        if self.translations.ndim != 2:
            raise ValueError("Expected two dimensional translations parameter.")

        if self.rotations.ndim != 2:
            raise ValueError("Expected two dimensional rotations parameter.")

    def __iter__(self) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
        """
        Iterate over the current class instance. Each iteration returns an orientation
        defined by its translation, rotation, score and additional detail.

        Yields
        ------
        Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]
            A tuple of arrays defining the given orientation.
        """
        yield from zip(self.translations, self.rotations, self.scores, self.details)

    def __getitem__(self, indices: List[int]) -> "Orientations":
        """
        Retrieve a subset of orientations based on the provided indices.

        Parameters
        ----------
        indices : List[int]
            A list of indices specifying the orientations to be retrieved.

        Returns
        -------
        :py:class:`Orientations`
            A new :py:class:`Orientations` instance containing only the selected orientations.
        """
        indices = np.asarray(indices)
        attributes = (
            "translations",
            "rotations",
            "scores",
            "details",
        )
        kwargs = {attr: getattr(self, attr)[indices].copy() for attr in attributes}
        return self.__class__(**kwargs)

    def copy(self) -> "Orientations":
        """
        Create a copy of the current class instance.

        Returns
        -------
        :py:class:`Orientations`
            Copy of the class instance.
        """
        indices = np.arange(self.scores.size)
        return self[indices]

    def to_file(self, filename: str, file_format: type = None, **kwargs) -> None:
        """
        Save the current class instance to a file in the specified format.

        Parameters
        ----------
        filename : str
            The name of the file where the orientations will be saved.
        file_format : type, optional
            The format in which to save the orientations. Defaults to None and infers
            the file format from the file extension. Supported formats are

            +---------------+----------------------------------------------------+
            | text          | pytme's standard tab-separated orientations file   |
            +---------------+----------------------------------------------------+
            | star          | Creates a STAR file of orientations                |
            +---------------+----------------------------------------------------+
            | dynamo        | Creates a dynamo table                             |
            +---------------+----------------------------------------------------+

        **kwargs : dict
            Additional keyword arguments specific to the file format.

        Raises
        ------
        ValueError
            If an unsupported file format is specified.
        """
        mapping = {
            "text": self._to_text,
            "star": self._to_star,
            "dynamo": self._to_dynamo_tbl,
        }
        if file_format is None:
            file_format = "text"
            if filename.lower().endswith(".star"):
                file_format = "star"
            elif filename.lower().endswith(".tbl"):
                file_format = "dynamo"

        func = mapping.get(file_format, None)
        if func is None:
            raise ValueError(
                f"{file_format} not implemented. Supported are {','.join(mapping.keys())}."
            )

        return func(filename=filename, **kwargs)

    def _to_text(self, filename: str, **kwargs) -> None:
        """
        Save orientations in a text file format.

        Parameters
        ----------
        filename : str
            The name of the file to save the orientations.

        Notes
        -----
        The file is saved with a header specifying each column: x, y, z, euler_x,
        euler_y, euler_z, score, detail. Each row in the file corresponds to an
        orientation.
        """
        header = "\t".join(
            [
                *list(NAMES[: self.translations.shape[1]]),
                *[f"euler_{x}" for x in NAMES[: self.rotations.shape[1]]],
                "score",
                "detail",
            ]
        )
        with open(filename, mode="w", encoding="utf-8") as ofile:
            _ = ofile.write(f"{header}\n")
            for translation, angles, score, detail in self:
                out_string = (
                    "\t".join([str(x) for x in (*translation, *angles, score, detail)])
                    + "\n"
                )
                _ = ofile.write(out_string)
        return None

    def _to_dynamo_tbl(
        self,
        filename: str,
        name_prefix: str = None,
        sampling_rate: float = 1.0,
        subtomogram_size: int = 0,
        **kwargs,
    ) -> None:
        """
        Save orientations in Dynamo's tbl file format.

        Parameters
        ----------
        filename : str
            The name of the file to save the orientations.
        sampling_rate : float, optional
            Subtomogram sampling rate in angstrom per voxel.

        Notes
        -----
        The file is saved with a standard header used in Dynamo tbl files
        outlined in [1]_. Each row corresponds to a particular particle.

        References
        ----------
        .. [1] https://wiki.dynamo.biozentrum.unibas.ch/w/index.php/Table
        """
        with open(filename, mode="w", encoding="utf-8") as ofile:
            for index, (translation, rotation, score, detail) in enumerate(self):
                out = [
                    index,
                    1,
                    0,
                    0,
                    0,
                    0,
                    *rotation,
                    self.scores[index],
                    self.scores[index],
                    0,
                    0,
                    # Wedge parameters
                    -90,
                    90,
                    -60,
                    60,
                    0,
                    0,
                    0,
                    0,
                    0,
                    0,
                    # Coordinate in original volume
                    *translation,
                    0,
                    0,
                    0,
                    0,
                    0,
                    0,
                    0,
                    0,
                    sampling_rate,
                    3,
                    0,
                    0,
                ]
                _ = ofile.write(" ".join([str(x) for x in out]) + "\n")

        return None

    def _to_star(
        self, filename: str, source_path: str = None, version: str = None, **kwargs
    ) -> None:
        """
        Save orientations in STAR file format.

        Parameters
        ----------
        filename : str
            The name of the file to save the orientations.
        source_path : str
            Path to image file the orientation is in reference to.
        version : str
            Version indicator.
        """
        header = [
            "data_particles",
            "",
            "loop_",
            "_rlnCoordinateX",
            "_rlnCoordinateY",
            "_rlnCoordinateZ",
            "_rlnAngleRot",
            "_rlnAngleTilt",
            "_rlnAnglePsi",
        ]
        if source_path is not None:
            header.append("_rlnMicrographName")

        header.append("_pytmeScore")

        header = "\n".join(header)
        with open(filename, mode="w", encoding="utf-8") as ofile:
            if version is not None:
                _ = ofile.write(f"{version.strip()}\n\n")

            _ = ofile.write(f"{header}\n")
            for index, (translation, rotation, score, detail) in enumerate(self):
                line = [str(x) for x in translation]
                line.extend([str(x) for x in rotation])

                if source_path is not None:
                    line.append(source_path)
                line.append(score)

                _ = ofile.write("\t".join([str(x) for x in line]) + "\n")

        return None

    @classmethod
    def from_file(
        cls, filename: str, file_format: type = None, **kwargs
    ) -> "Orientations":
        """
        Create an instance of :py:class:`Orientations` from a file.

        Parameters
        ----------
        filename : str
            The name of the file from which to read the orientations.
        file_format : type, optional
            The format of the file. Defaults to None and infers
            the file format from the file extension. Supported formats are

            +---------------+----------------------------------------------------+
            | text          | pytme's standard tab-separated orientations file   |
            +---------------+----------------------------------------------------+
            | star          | Reads a STAR file of orientations                  |
            +---------------+----------------------------------------------------+
            | tbl           | Reads a dynamo table                               |
            +---------------+----------------------------------------------------+

        **kwargs
            Additional keyword arguments specific to the file format.

        Returns
        -------
        :py:class:`Orientations`
            An instance of :py:class:`Orientations` populated with data from the file.

        Raises
        ------
        ValueError
            If an unsupported file format is specified.
        """
        mapping = {
            "text": cls._from_text,
            "star": cls._from_star,
            "tbl": cls._from_tbl,
        }
        if file_format is None:
            file_format = "text"

            if filename.lower().endswith(".star"):
                file_format = "star"
            elif filename.lower().endswith(".tbl"):
                file_format = "tbl"

        func = mapping.get(file_format, None)
        if func is None:
            raise ValueError(
                f"{file_format} not implemented. Supported are {','.join(mapping.keys())}."
            )

        translations, rotations, scores, details, *_ = func(filename=filename, **kwargs)
        return cls(
            translations=translations,
            rotations=rotations,
            scores=scores,
            details=details,
        )

    @staticmethod
    def _from_text(
        filename: str,
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
        """
        Read orientations from a text file.

        Parameters
        ----------
        filename : str
            The name of the file from which to read the orientations.

        Returns
        -------
        Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]
            A tuple containing numpy arrays for translations, rotations, scores,
            and details.

        Notes
        -----
        The text file is expected to have a header and data in columns. Columns whose
        name contains euler are considered to specify rotations. The second-to-last
        and last columns correspond to score and detail. It is possible to only
        specify translations, in which case the remaining columns are filled with
        trivial values.
        """
        with open(filename, mode="r", encoding="utf-8") as infile:
            data = [x.strip().split("\t") for x in infile.read().split("\n")]

        header = data.pop(0)
        translation, rotation, score, detail = [], [], [], []
        for candidate in data:
            if len(candidate) <= 1:
                continue

            translation.append(
                tuple(candidate[i] for i, x in enumerate(header) if x in NAMES)
            )
            rotation.append(
                tuple(candidate[i] for i, x in enumerate(header) if "euler" in x)
            )
            score.append(candidate[-2])
            detail.append(candidate[-1])

        translation = np.vstack(translation)
        rotation = np.vstack(rotation)
        score = np.array(score)
        detail = np.array(detail)

        if translation.shape[1] == len(header):
            rotation = np.zeros(translation.shape, dtype=np.float32)
            score = np.zeros(translation.shape[0], dtype=np.float32)
            detail = np.zeros(translation.shape[0], dtype=np.float32) - 1

        if rotation.size == 0 and translation.shape[0] != 0:
            rotation = np.zeros(translation.shape, dtype=np.float32)

        header_order = tuple(x for x in header if x in NAMES)
        header_order = zip(header_order, range(len(header_order)))
        sort_order = tuple(
            x[1] for x in sorted(header_order, key=lambda x: x[0], reverse=False)
        )
        translation = translation[..., sort_order]

        header_order = tuple(
            x for x in header if "euler" in x and x.replace("euler_", "") in NAMES
        )
        header_order = zip(header_order, range(len(header_order)))
        sort_order = tuple(
            x[1] for x in sorted(header_order, key=lambda x: x[0], reverse=False)
        )
        rotation = rotation[..., sort_order]

        return translation, rotation, score, detail

    @classmethod
    def _from_star(
        cls, filename: str, delimiter: str = "\t"
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
        ret = StarParser(filename, delimiter=delimiter)

        ret = ret.get("data_particles", None)
        if ret is None:
            raise ValueError(f"No data_particles section found in {filename}.")

        translation = np.vstack(
            (ret["_rlnCoordinateX"], ret["_rlnCoordinateY"], ret["_rlnCoordinateZ"])
        )
        translation = translation.astype(np.float32).T

        rotation = np.vstack(
            (ret["_rlnAngleRot"], ret["_rlnAngleTilt"], ret["_rlnAnglePsi"])
        )
        rotation = rotation.astype(np.float32).T

        default = np.zeros(translation.shape[0])
        return translation, rotation, default, default

    @staticmethod
    def _from_tbl(
        filename: str, **kwargs
    ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
        with open(filename, mode="r", encoding="utf-8") as infile:
            data = infile.read().split("\n")
        data = [x.strip().split(" ") for x in data if len(x.strip())]

        if len(data[0]) != 38:
            raise ValueError(
                "Expected tbl file to have 38 columns generated by _to_dynamo_tbl."
            )

        translations, rotations, scores, details = [], [], [], []
        for peak in data:
            rotations.append((peak[6], peak[7], peak[8]))
            scores.append(peak[9])
            details.append(-1)
            translations.append((peak[23], peak[24], peak[25]))

        translations, rotations = np.array(translations), np.array(rotations)
        scores, details = np.array(scores), np.array(details)
        return translations, rotations, scores, details

    def get_extraction_slices(
        self,
        target_shape: Tuple[int],
        extraction_shape: Tuple[int],
        drop_out_of_box: bool = False,
        return_orientations: bool = False,
    ) -> "Orientations":
        """
        Calculate slices for extracting regions of interest within a larger array.

        Parameters
        ----------
        target_shape : Tuple[int]
            The shape of the target array within which regions are to be extracted.
        extraction_shape : Tuple[int]
            The shape of the regions to be extracted.
        drop_out_of_box : bool, optional
            If True, drop regions that extend beyond the target array boundary,
            by default False.
        return_orientations : bool, optional
            If True, return orientations along with slices, by default False.

        Returns
        -------
        Union[Tuple[List[slice]], Tuple["Orientations", List[slice], List[slice]]]
            If return_orientations is False, returns a tuple containing slices for
            candidate regions and observation regions.
            If return_orientations is True, returns a tuple containing orientations
            along with slices for candidate regions and observation regions.

        Raises
        ------
        SystemExit
            If no peak remains after filtering, indicating an error.
        """
        obs_beg, obs_end, cand_beg, cand_end, keep = compute_extraction_box(
            self.translations.astype(int),
            extraction_shape=extraction_shape,
            original_shape=target_shape,
        )

        subset = self
        if drop_out_of_box:
            n_remaining = keep.sum()
            if n_remaining == 0:
                print("No peak remaining after filtering")
            subset = self[keep]
            cand_beg, cand_end = cand_beg[keep,], cand_end[keep,]
            obs_beg, obs_end = obs_beg[keep,], obs_end[keep,]

        cand_beg, cand_end = cand_beg.astype(int), cand_end.astype(int)
        obs_beg, obs_end = obs_beg.astype(int), obs_end.astype(int)
        candidate_slices = [
            tuple(slice(s, e) for s, e in zip(start_row, stop_row))
            for start_row, stop_row in zip(cand_beg, cand_end)
        ]

        observation_slices = [
            tuple(slice(s, e) for s, e in zip(start_row, stop_row))
            for start_row, stop_row in zip(obs_beg, obs_end)
        ]

        if return_orientations:
            return subset, candidate_slices, observation_slices
        return candidate_slices, observation_slices