pytme 0.3.1.post1__tar.gz → 0.3.1.post2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/PKG-INFO +1 -1
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/pyproject.toml +1 -1
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/scripts/match_template.py +2 -2
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/scripts/preprocessor_gui.py +1 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/scripts/refine_matches.py +5 -7
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_orientations.py +0 -12
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/backends/_jax_utils.py +45 -15
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/backends/jax_backend.py +10 -10
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/density.py +58 -1
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/orientations.py +11 -5
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/LICENSE +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/MANIFEST.in +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/README.md +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/pytme.egg-info/SOURCES.txt +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/scripts/__init__.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/scripts/estimate_memory_usage.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/scripts/eval.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/scripts/extract_candidates.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/scripts/match_template_filters.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/scripts/postprocess.py +1 -1
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/scripts/preprocess.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/scripts/pytme_runner.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/setup.cfg +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/setup.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/__init__.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/.DS_Store +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Blurring/.DS_Store +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Blurring/blob_width18.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Blurring/edgegaussian_sigma3.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Blurring/gaussian_sigma2.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Blurring/hamming_width6.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Blurring/kaiserb_width18.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Blurring/localgaussian_sigma0510.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Blurring/mean_size5.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Blurring/ntree_sigma0510.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Blurring/rank_rank3.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Maps/.DS_Store +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Maps/emd_8621.mrc.gz +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/README.md +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Raw/.DS_Store +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Raw/em_map.map +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Structures/.DS_Store +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Structures/1pdj.cif +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Structures/1pdj.pdb +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Structures/5khe.cif +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Structures/5khe.ent +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Structures/5khe.pdb +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/data/Structures/5uz4.cif +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/preprocessing/__init__.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/preprocessing/test_compose.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/preprocessing/test_frequency_filters.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/preprocessing/test_preprocessor.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/preprocessing/test_utils.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_analyzer.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_backends.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_density.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_extensions.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_matching_cli.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_matching_data.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_matching_exhaustive.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_matching_memory.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_matching_optimization.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_matching_utils.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_parser.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_rotations.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tests/test_structure.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/__init__.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/__version__.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/analyzer/__init__.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/analyzer/_utils.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/analyzer/aggregation.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/analyzer/base.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/analyzer/peaks.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/analyzer/proxy.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/backends/__init__.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/backends/_cupy_utils.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/backends/_numpyfftw_utils.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/backends/cupy_backend.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/backends/matching_backend.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/backends/mlx_backend.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/backends/npfftw_backend.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/backends/pytorch_backend.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/cli.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/__init__.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48n309.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48n527.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48n9.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u1.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u1153.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u1201.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u1641.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u181.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u2219.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u27.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u2947.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u3733.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u4749.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u5879.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u7111.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u815.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u83.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c48u8649.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c600v.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/c600vc.npy +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/metadata.yaml +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/quat_to_numpy.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/data/scattering_factors.pickle +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/external/bindings.cpp +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/filters/__init__.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/filters/_utils.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/filters/bandpass.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/filters/compose.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/filters/ctf.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/filters/reconstruction.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/filters/wedge.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/filters/whitening.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/mask.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/matching_data.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/matching_exhaustive.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/matching_optimization.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/matching_scores.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/matching_utils.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/memory.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/parser.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/preprocessor.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/rotations.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/structure.py +0 -0
- {pytme-0.3.1.post1 → pytme-0.3.1.post2}/tme/types.py +0 -0
scripts/match_template.py

@@ -359,8 +359,8 @@ def parse_args():
         "--invert-target-contrast",
         action="store_true",
         default=False,
-        help="Invert the target
-        "
+        help="Invert the target contrast. Useful for matching on tomograms if the "
+        "template has not been inverted.",
     )
     io_group.add_argument(
         "--scramble-phases",
scripts/refine_matches.py

@@ -10,11 +10,9 @@ import subprocess
 from sys import exit
 from os import unlink
 from time import time
-from os.path import join
 from typing import Tuple, List, Dict
 
 import numpy as np
-from scipy import optimize
 from sklearn.metrics import roc_auc_score
 
 from tme import Orientations, Density
@@ -66,7 +64,6 @@ def parse_args():
     matching_group.add_argument(
         "-i",
         "--template",
-        dest="template",
         type=str,
         required=True,
         help="Path to a template in PDB/MMCIF or other supported formats (see target).",
@@ -102,7 +99,7 @@ def parse_args():
     )
     matching_group.add_argument(
         "-s",
-
+        "--score",
     type=str,
     default="batchFLCSphericalMask",
     choices=list(MATCHING_EXHAUSTIVE_REGISTER.keys()),
@@ -197,6 +194,7 @@ def create_matching_argdict(args) -> Dict:
         "-n": args.cores,
         "--ctf-file": args.ctf_file,
         "--invert-target-contrast": args.invert_target_contrast,
+        "--backend" : args.backend,
     }
     return arg_dict
 
@@ -252,7 +250,7 @@ class DeepMatcher:
         if args.lowpass_range:
             self.filter_parameters["--lowpass"] = 0
         if args.highpass_range:
-            self.filter_parameters["--highpass"] =
+            self.filter_parameters["--highpass"] = 0
 
         self.postprocess_args = create_postprocessing_argdict(args)
         self.log_file = f"{args.output_prefix}_optimization_log.txt"
@@ -309,14 +307,14 @@ class DeepMatcher:
 
         match_template = argdict_to_command(
             self.match_template_args,
-            executable="match_template
+            executable="match_template",
         )
         run_command(match_template)
 
         # Assume we get a new peak for each input in the same order
         postprocess = argdict_to_command(
             self.postprocess_args,
-            executable="postprocess
+            executable="postprocess",
         )
         run_command(postprocess)
 
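Side note on the refine_matches.py hunks above: create_matching_argdict() now also forwards --backend to the match_template call that DeepMatcher assembles via argdict_to_command() and run_command(). As a rough illustration of how such a flag dictionary maps onto a command line (the values and the flattening rules below are assumptions made for this sketch, not taken from the diff; the real conversion lives in argdict_to_command()):

# Hypothetical values standing in for the parsed argparse namespace.
match_template_args = {
    "-n": 4,
    "--ctf-file": "ctf_params.txt",
    "--invert-target-contrast": True,
    "--backend": "jax",  # forwarded to match_template since this release
}

# Illustrative flattening only: True becomes a bare flag, other values are
# passed as "<flag> <value>". DeepMatcher executes the resulting command via
# run_command() rather than printing it.
cmd = ["match_template"]
for flag, value in match_template_args.items():
    if value is True:
        cmd.append(flag)
    elif value not in (None, False):
        cmd.extend([flag, str(value)])

print(" ".join(cmd))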
tests/test_orientations.py

@@ -95,18 +95,6 @@ class TestDensity:
             self.orientations.rotations, orientations_new.rotations, atol=1e-3
         )
 
-    @pytest.mark.parametrize("input_format", ("text", "star", "tbl"))
-    @pytest.mark.parametrize("output_format", ("text", "star", "tbl"))
-    def test_file_format_io(self, input_format: str, output_format: str):
-        _, output_file = mkstemp(suffix=f".{input_format}")
-        _, output_file2 = mkstemp(suffix=f".{output_format}")
-
-        self.orientations.to_file(output_file)
-        orientations_new = Orientations.from_file(output_file)
-        orientations_new.to_file(output_file2)
-
-        assert True
-
     @pytest.mark.parametrize("drop_oob", (True, False))
     @pytest.mark.parametrize("shape", (10, 40, 80))
     @pytest.mark.parametrize("odd", (True, False))
tme/backends/_jax_utils.py

@@ -17,7 +17,7 @@ from ..backends import backend as be
 from ..matching_utils import normalize_template as _normalize_template
 
 
-__all__ = ["scan"]
+__all__ = ["scan", "setup_scan"]
 
 
 def _correlate(template: BackendArray, ft_target: BackendArray) -> BackendArray:
@@ -116,12 +116,49 @@ def _mask_scores(arr, mask):
     return arr.at[:].multiply(mask)
 
 
-
-
-
-
-
-
+def _select_config(analyzer_kwargs, device_idx):
+    return analyzer_kwargs[device_idx]
+
+
+def setup_scan(analyzer_kwargs, callback_class, fast_shape, rotate_mask):
+    """Create separate scan function with initialized analyzer for each device"""
+    device_scans = [
+        partial(
+            scan,
+            fast_shape=fast_shape,
+            rotate_mask=rotate_mask,
+            analyzer=callback_class(**device_config),
+        ) for device_config in analyzer_kwargs
+    ]
+
+    @partial(
+        pmap,
+        in_axes=(0,) + (None,) * 6,
+        axis_name="batch",
+    )
+    def scan_combined(
+        target,
+        template,
+        template_mask,
+        rotations,
+        template_filter,
+        target_filter,
+        score_mask,
+    ):
+        return lax.switch(
+            lax.axis_index("batch"),
+            device_scans,
+            target,
+            template,
+            template_mask,
+            rotations,
+            template_filter,
+            target_filter,
+            score_mask,
+        )
+    return scan_combined
+
+
 def scan(
     target: BackendArray,
     template: BackendArray,
@@ -132,17 +169,10 @@ def scan(
     score_mask: BackendArray,
     fast_shape: Tuple[int],
     rotate_mask: bool,
-
-    analyzer_kwargs: Tuple[Tuple],
+    analyzer: object,
 ) -> Tuple[BackendArray, BackendArray]:
     eps = jnp.finfo(template.dtype).resolution
 
-    kwargs = lax.switch(
-        lax.axis_index("batch"),
-        [lambda: analyzer_kwargs[i] for i in range(len(analyzer_kwargs))],
-    )
-    analyzer = analyzer_class(**be._tuple_to_dict(kwargs))
-
     if hasattr(target_filter, "shape"):
         target = _apply_fourier_filter(target, target_filter)
 
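Taken together, the _jax_utils.py hunks replace the old pattern of threading analyzer kwargs through the pmapped scan with setup_scan(): one partially applied scan per device, each holding an already-instantiated analyzer, selected inside pmap via lax.switch on the device's index along the "batch" axis. A minimal, self-contained sketch of that dispatch pattern (toy functions and values, not pyTME's actual scan or analyzers):

from functools import partial

import jax
import jax.numpy as jnp
from jax import lax

n_dev = jax.local_device_count()

# Stand-ins for per-device analyzer configurations: one pre-configured
# function per device, analogous to the partials built in setup_scan().
offsets = [10.0 * i for i in range(n_dev)]
device_fns = [partial(lambda x, off: x + off, off=o) for o in offsets]

@partial(jax.pmap, axis_name="batch")
def run(x):
    # lax.axis_index("batch") is this device's position along the pmapped
    # axis, so each shard is processed by its own pre-configured branch.
    return lax.switch(lax.axis_index("batch"), device_fns, x)

x = jnp.arange(n_dev, dtype=jnp.float32).reshape(n_dev, 1)
print(run(x))  # shard i has offsets[i] added

Baking the configuration into closures also sidesteps converting analyzer settings into tuples just to move them through pmap, which is what the removed _dict_to_tuple/_tuple_to_dict round-trip in jax_backend.py was doing.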
tme/backends/jax_backend.py

@@ -218,7 +218,7 @@ class JaxBackend(NumpyFFTWBackend):
         Emulates output of :py:meth:`tme.matching_exhaustive.scan` using
         :py:class:`tme.analyzer.MaxScoreOverRotations`.
         """
-        from ._jax_utils import
+        from ._jax_utils import setup_scan
         from ..analyzer import MaxScoreOverRotations
 
         pad_target = True if len(splits) > 1 else False
@@ -279,8 +279,7 @@ class JaxBackend(NumpyFFTWBackend):
             cur_args = analyzer_args.copy()
             cur_args["offset"] = translation_offset
             cur_args.update(callback_class_args)
-
-            analyzer_kwargs.append(self._dict_to_tuple(cur_args))
+            analyzer_kwargs.append(cur_args)
 
         if pad_target:
             score_mask = base._score_mask(fast_shape, shift)
@@ -310,7 +309,13 @@ class JaxBackend(NumpyFFTWBackend):
             create_filter, create_template_filter, create_target_filter = (False,) * 3
         base, targets = None, self._array_backend.stack(targets)
 
-
+        scan_inner = setup_scan(
+            analyzer_kwargs=analyzer_kwargs,
+            callback_class=callback_class,
+            fast_shape=fast_shape,
+            rotate_mask=rotate_mask
+        )
+
         states = scan_inner(
             self.astype(targets, self._float_dtype),
             self.astype(matching_data.template, self._float_dtype),
@@ -319,17 +324,12 @@ class JaxBackend(NumpyFFTWBackend):
             template_filter,
             target_filter,
             score_mask,
-            fast_shape,
-            rotate_mask,
-            callback_class,
-            analyzer_kwargs,
         )
 
         ndim = targets.ndim - 1
         for index in range(targets.shape[0]):
-            kwargs =
+            kwargs = analyzer_kwargs[index]
             analyzer = callback_class(**kwargs)
-
             state = [self._unbatch(x, ndim, index) for x in states]
 
             if isinstance(analyzer, MaxScoreOverRotations):
tme/density.py

@@ -2196,7 +2196,7 @@ class Density:
 
         Parameters
         ----------
-        target : Density
+        target : :py:class:`Density`
             The target map for template matching.
         template : Structure
             The template that should be aligned to the target.
@@ -2259,3 +2259,60 @@ class Density:
         coordinates = np.array(np.where(data > 0))
         weights = self.data[tuple(coordinates)]
         return align_to_axis(coordinates.T, weights=weights, axis=axis, flip=flip)
+
+    @staticmethod
+    def fourier_shell_correlation(density1: "Density", density2: "Density") -> NDArray:
+        """
+        Computes the Fourier Shell Correlation (FSC) between two instances of `Density`.
+
+        The Fourier transforms of the input maps are divided into shells
+        based on their spatial frequency. The correlation between corresponding shells
+        in the two maps is computed to give the FSC.
+
+        Parameters
+        ----------
+        density1 : :py:class:`Density`
+            Reference for comparison.
+        density2 : :py:class:`Density`
+            Target for comparison.
+
+        Returns
+        -------
+        NDArray
+            An array of shape (N, 2), where N is the number of shells.
+            The first column represents the spatial frequency for each shell
+            and the second column represents the corresponding FSC.
+
+        References
+        ----------
+        .. [1] https://github.com/tdgrant1/denss/blob/master/saxstats/saxstats.py
+        """
+        side = density1.data.shape[0]
+        df = 1.0 / side
+
+        qx_ = np.fft.fftfreq(side) * side * df
+        qx, qy, qz = np.meshgrid(qx_, qx_, qx_, indexing="ij")
+        qr = np.sqrt(qx**2 + qy**2 + qz**2)
+
+        qmax = np.max(qr)
+        qstep = np.min(qr[qr > 0])
+        nbins = int(qmax / qstep)
+        qbins = np.linspace(0, nbins * qstep, nbins + 1)
+        qbin_labels = np.searchsorted(qbins, qr, "right") - 1
+
+        F1 = np.fft.fftn(density1.data)
+        F2 = np.fft.fftn(density2.data)
+
+        qbin_labels = qbin_labels.reshape(-1)
+        numerator = np.bincount(
+            qbin_labels, weights=np.real(F1 * np.conj(F2)).reshape(-1)
+        )
+        term1 = np.bincount(qbin_labels, weights=np.abs(F1).reshape(-1) ** 2)
+        term2 = np.bincount(qbin_labels, weights=np.abs(F2).reshape(-1) ** 2)
+        np.multiply(term1, term2, out=term1)
+        denominator = np.sqrt(term1)
+        FSC = np.divide(numerator, denominator)
+
+        qidx = np.where(qbins < qx.max())
+
+        return np.vstack((qbins[qidx], FSC[qidx])).T
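A short usage sketch of the new static method (the file names are placeholders; Density.from_file is the package's existing map reader, and since the shell spacing above is derived from the first axis, both maps are expected to share the same cubic shape):

import numpy as np
from tme import Density

# Placeholder paths; any two maps on the same cubic grid work, e.g. a half-map pair.
half_a = Density.from_file("halfmap_A.mrc")
half_b = Density.from_file("halfmap_B.mrc")

fsc = Density.fourier_shell_correlation(half_a, half_b)  # shape (N, 2)

# Column 0: spatial frequency of each shell, column 1: correlation in that shell.
# Report where the curve first drops below the 0.143 half-map criterion.
below = np.where(fsc[:, 1] < 0.143)[0]
if below.size:
    print("FSC=0.143 crossing at spatial frequency", fsc[below[0], 0])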
tme/orientations.py

@@ -494,16 +494,22 @@ class Orientations:
 
     @classmethod
     def _from_star(
-        cls, filename: str, delimiter: str =
+        cls, filename: str, delimiter: str = None
     ) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
         parser = StarParser(filename, delimiter=delimiter)
 
-
-
-        ret = parser.get(
+        keyword_order = ("data_particles", "particles", "data")
+        for keyword in keyword_order:
+            ret = parser.get(keyword, None)
+            if ret is None:
+                ret = parser.get(f"{keyword}_", None)
+            if ret is not None:
+                break
 
         if ret is None:
-            raise ValueError(
+            raise ValueError(
+                f"Could not find either {keyword_order} section found in {filename}."
+            )
 
         translation = np.vstack(
             (ret["_rlnCoordinateX"], ret["_rlnCoordinateY"], ret["_rlnCoordinateZ"])
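With the fallback above, _from_star accepts STAR files whose particle table is named data_particles, particles, or data (including trailing-underscore variants) instead of a single hard-coded section. A brief sketch through the public reader exercised by the removed round-trip test (the path is a placeholder):

from tme import Orientations

# Placeholder path. RELION-style particle files name their table data_particles,
# while other tools emit data or particles; all of these are now recognized.
orientations = Orientations.from_file("particles.star")
print(orientations.rotations[:5])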
scripts/postprocess.py

@@ -375,10 +375,10 @@ def normalize_input(foregrounds: Tuple[str], backgrounds: Tuple[str]) -> Tuple:
     update = tuple(slice(0, int(x)) for x in np.minimum(out_shape, scores.shape))
     scores_out = np.full(out_shape, fill_value=0, dtype=np.float32)
     scores_out[update] = data[0][update] - scores_norm[update]
+    scores_out = np.fmax(scores_out, 0, out=scores_out)
     scores_out[update] += scores_norm[update].mean()
 
     # scores_out[update] = np.divide(scores_out[update], 1 - scores_norm[update])
-    scores_out = np.fmax(scores_out, 0, out=scores_out)
     data[0] = scores_out
 
     fg, bg = simple_stats(data[0]), simple_stats(scores_norm)
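The one-line move in normalize_input() changes when the clamp happens: the background-subtracted scores are now floored at zero before the background mean is added back, rather than after, so entries that fell below the background no longer stay below the re-added mean. A small numeric illustration with toy values (not actual template-matching scores):

import numpy as np

scores = np.array([0.2, 0.5, 0.9], dtype=np.float32)      # foreground scores
background = np.array([0.4, 0.4, 0.4], dtype=np.float32)  # background estimate

diff = scores - background  # [-0.2, 0.1, 0.5]

# Old order: add the background mean first, clamp afterwards.
old = np.fmax(diff + background.mean(), 0)  # [0.2, 0.5, 0.9]

# New order: clamp the difference first, then add the background mean,
# so negative differences are floored at the mean instead of being kept.
new = np.fmax(diff, 0) + background.mean()  # [0.4, 0.5, 0.9]

print(old, new)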