pytme 0.2.9.post1__cp311-cp311-macosx_15_0_arm64.whl → 0.3b0.post1__cp311-cp311-macosx_15_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. pytme-0.3b0.post1.data/scripts/estimate_memory_usage.py +76 -0
  2. pytme-0.3b0.post1.data/scripts/match_template.py +1098 -0
  3. {pytme-0.2.9.post1.data → pytme-0.3b0.post1.data}/scripts/postprocess.py +318 -189
  4. {pytme-0.2.9.post1.data → pytme-0.3b0.post1.data}/scripts/preprocess.py +21 -31
  5. {pytme-0.2.9.post1.data → pytme-0.3b0.post1.data}/scripts/preprocessor_gui.py +12 -12
  6. pytme-0.3b0.post1.data/scripts/pytme_runner.py +769 -0
  7. {pytme-0.2.9.post1.dist-info → pytme-0.3b0.post1.dist-info}/METADATA +21 -20
  8. pytme-0.3b0.post1.dist-info/RECORD +126 -0
  9. {pytme-0.2.9.post1.dist-info → pytme-0.3b0.post1.dist-info}/entry_points.txt +2 -1
  10. pytme-0.3b0.post1.dist-info/licenses/LICENSE +339 -0
  11. scripts/estimate_memory_usage.py +76 -0
  12. scripts/eval.py +93 -0
  13. scripts/extract_candidates.py +224 -0
  14. scripts/match_template.py +341 -378
  15. pytme-0.2.9.post1.data/scripts/match_template.py → scripts/match_template_filters.py +213 -148
  16. scripts/postprocess.py +318 -189
  17. scripts/preprocess.py +21 -31
  18. scripts/preprocessor_gui.py +12 -12
  19. scripts/pytme_runner.py +769 -0
  20. scripts/refine_matches.py +625 -0
  21. tests/preprocessing/test_frequency_filters.py +28 -14
  22. tests/test_analyzer.py +41 -36
  23. tests/test_backends.py +1 -0
  24. tests/test_matching_cli.py +109 -54
  25. tests/test_matching_data.py +5 -5
  26. tests/test_matching_exhaustive.py +1 -2
  27. tests/test_matching_optimization.py +4 -9
  28. tests/test_matching_utils.py +1 -1
  29. tests/test_orientations.py +0 -1
  30. tme/__version__.py +1 -1
  31. tme/analyzer/__init__.py +2 -0
  32. tme/analyzer/_utils.py +26 -21
  33. tme/analyzer/aggregation.py +395 -222
  34. tme/analyzer/base.py +127 -0
  35. tme/analyzer/peaks.py +189 -204
  36. tme/analyzer/proxy.py +123 -0
  37. tme/backends/__init__.py +4 -3
  38. tme/backends/_cupy_utils.py +25 -24
  39. tme/backends/_jax_utils.py +20 -18
  40. tme/backends/cupy_backend.py +13 -26
  41. tme/backends/jax_backend.py +24 -23
  42. tme/backends/matching_backend.py +4 -3
  43. tme/backends/mlx_backend.py +4 -3
  44. tme/backends/npfftw_backend.py +34 -30
  45. tme/backends/pytorch_backend.py +18 -4
  46. tme/cli.py +126 -0
  47. tme/density.py +9 -7
  48. tme/filters/__init__.py +3 -3
  49. tme/filters/_utils.py +36 -10
  50. tme/filters/bandpass.py +229 -188
  51. tme/filters/compose.py +5 -4
  52. tme/filters/ctf.py +516 -254
  53. tme/filters/reconstruction.py +91 -32
  54. tme/filters/wedge.py +196 -135
  55. tme/filters/whitening.py +37 -42
  56. tme/matching_data.py +28 -39
  57. tme/matching_exhaustive.py +31 -27
  58. tme/matching_optimization.py +5 -4
  59. tme/matching_scores.py +25 -15
  60. tme/matching_utils.py +54 -9
  61. tme/memory.py +4 -3
  62. tme/orientations.py +22 -9
  63. tme/parser.py +114 -33
  64. tme/preprocessor.py +6 -5
  65. tme/rotations.py +10 -7
  66. tme/structure.py +4 -3
  67. pytme-0.2.9.post1.data/scripts/estimate_ram_usage.py +0 -97
  68. pytme-0.2.9.post1.dist-info/RECORD +0 -119
  69. pytme-0.2.9.post1.dist-info/licenses/LICENSE +0 -153
  70. scripts/estimate_ram_usage.py +0 -97
  71. tests/data/Maps/.DS_Store +0 -0
  72. tests/data/Structures/.DS_Store +0 -0
  73. {pytme-0.2.9.post1.dist-info → pytme-0.3b0.post1.dist-info}/WHEEL +0 -0
  74. {pytme-0.2.9.post1.dist-info → pytme-0.3b0.post1.dist-info}/top_level.txt +0 -0
scripts/eval.py ADDED
@@ -0,0 +1,93 @@
+ #!python3
+ """ Apply tme.preprocessor.Preprocessor methods to an input file based
+ on a provided YAML configuration obtained from preprocessor_gui.py.
+
+ Copyright (c) 2023 European Molecular Biology Laboratory
+
+ Author: Valentin Maurer <valentin.maurer@embl-hamburg.de>
+ """
+ import yaml
+ import argparse
+ import textwrap
+ from tme import Preprocessor, Density
+
+
+ def parse_args():
+     parser = argparse.ArgumentParser(
+         description=textwrap.dedent(
+             """
+             Apply preprocessing to an input file based on a provided YAML configuration.
+
+             Expected YAML file format:
+             ```yaml
+             <method_name>:
+                 <parameter1>: <value1>
+                 <parameter2>: <value2>
+                 ...
+             ```
+             """
+         ),
+         formatter_class=argparse.RawDescriptionHelpFormatter,
+     )
+     parser.add_argument(
+         "-i",
+         "--input_file",
+         type=str,
+         required=True,
+         help="Path to the input data file in CCP4/MRC format.",
+     )
+     parser.add_argument(
+         "-y",
+         "--yaml_file",
+         type=str,
+         required=True,
+         help="Path to the YAML configuration file.",
+     )
+     parser.add_argument(
+         "-o",
+         "--output_file",
+         type=str,
+         required=True,
+         help="Path to the output file in CCP4/MRC format.",
+     )
+     parser.add_argument(
+         "--compress", action="store_true", help="Compress the output file using gzip."
+     )
+
+     args = parser.parse_args()
+
+     return args
+
+
+ def main():
+     args = parse_args()
+     with open(args.yaml_file, "r") as f:
+         preprocess_settings = yaml.safe_load(f)
+
+     if len(preprocess_settings) > 1:
+         raise NotImplementedError(
+             "Multiple preprocessing methods specified. "
+             "The script currently supports one method at a time."
+         )
+
+     method_name = list(preprocess_settings.keys())[0]
+     if not hasattr(Preprocessor, method_name):
+         raise ValueError(f"Method {method_name} does not exist in Preprocessor.")
+
+     density = Density.from_file(args.input_file)
+     output = density.empty
+
+     method_params = preprocess_settings[method_name]
+     preprocessor = Preprocessor()
+     method = getattr(preprocessor, method_name, None)
+     if not method:
+         raise ValueError(
+             f"{method_name} does not exist in the tme.preprocessor.Preprocessor class."
+         )
+
+     output.data = method(template=density.data, **method_params)
+     output.to_file(args.output_file, gzip=args.compress)
+
+
+ if __name__ == "__main__":
+     main()
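The new eval.py reduces to dispatching a single Preprocessor method by name, with keyword arguments taken from the YAML file. Below is a minimal sketch of the equivalent programmatic call; the method name `gaussian_filter` and its `sigma` keyword are illustrative assumptions rather than a confirmed part of the Preprocessor API, and the file names are placeholders.

```python
# Sketch of a single eval.py run without the CLI wrapper.
# Assumes Preprocessor exposes a "gaussian_filter" method taking a "sigma"
# keyword; substitute whichever method and parameters your YAML file specifies.
from tme import Density, Preprocessor

density = Density.from_file("input.mrc")   # CCP4/MRC input, as in the script
output = density.empty                     # empty container mirroring the input

method = getattr(Preprocessor(), "gaussian_filter")
output.data = method(template=density.data, sigma=2.0)
output.to_file("filtered.mrc", gzip=False)
```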
scripts/extract_candidates.py ADDED
@@ -0,0 +1,224 @@
+ #!python3
+ """ Prepare orientations stack for refinement.
+
+ Copyright (c) 2023 European Molecular Biology Laboratory
+
+ Author: Valentin Maurer <valentin.maurer@embl-hamburg.de>
+ """
+ import argparse
+ from os.path import splitext
+
+ import numpy as np
+
+ from tme import Density, Orientations
+ from tme.matching_utils import (
+     generate_tempfile_name,
+     rotation_aligning_vectors,
+     euler_from_rotationmatrix,
+     euler_to_rotationmatrix,
+ )
+
+
+ class ProgressBar:
+     """
+     ASCII progress bar.
+     """
+
+     def __init__(self, message: str, nchars: int, total: int):
+         self._size = nchars - len(message) - (len(str(total)) + 2) * 2
+         self._message = message
+         self._total = total
+
+     def update(self, cur):
+         x = int(cur * self._size / self._total)
+         print(
+             "%s[%s%s] %i/%i\r"
+             % (self._message, "#" * x, "." * (self._size - x), cur, self._total),
+             end="",
+         )
+
+
+ def parse_args():
+     parser = argparse.ArgumentParser(
+         description="Extract matching candidates for further refinement.",
+         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+     )
+
+     io_group = parser.add_argument_group("Input / Output")
+     io_group.add_argument(
+         "--target",
+         required=True,
+         type=str,
+         help="Extract candidates from this target.",
+     )
+     io_group.add_argument(
+         "--orientations",
+         required=True,
+         type=str,
+         help="Path to file generated by postprocess.py using output_format orientations.",
+     )
+     io_group.add_argument(
+         "--orientations_sampling",
+         required=False,
+         type=float,
+         default=1.0,
+         help="Factor to map candidate coordinates onto the target. Only relevant if "
+         "target sampling rate differs from candidate orientation sampling rate.",
+     )
+     io_group.add_argument(
+         "-o",
+         "--output_file",
+         required=True,
+         type=str,
+         help="Path to write output H5 file.",
+     )
+
+     alignment_group = parser.add_argument_group("Alignment")
+     alignment_group.add_argument(
+         "--align_orientations",
+         action="store_true",
+         required=False,
+         help="Whether to align extracted orientations based on their angles. Allows "
+         "for efficient subsequent sampling of cone angles.",
+     )
+     alignment_group.add_argument(
+         "--angles_are_vector",
+         action="store_true",
+         required=False,
+         help="Considers euler_z, euler_y, euler_x as a vector that will be rotated to align "
+         "with the z-axis (1,0,0). Only considered when --align_orientations is set.",
+     )
+     alignment_group.add_argument(
+         "--interpolation_order",
+         dest="interpolation_order",
+         required=False,
+         type=int,
+         default=1,
+         help="Interpolation order for alignment, less than zero disables interpolation.",
+     )
+
+     extraction_group = parser.add_argument_group("Extraction")
+     extraction_group.add_argument(
+         "--box_size",
+         required=False,
+         type=int,
+         help="Box size for extraction, defaults to two times the template.",
+     )
+     extraction_group.add_argument(
+         "--translation_uncertainty",
+         required=False,
+         type=int,
+         help="Sets box size for extraction to template box plus this value.",
+     )
+     extraction_group.add_argument(
+         "--keep_out_of_box",
+         action="store_true",
+         required=False,
+         help="Whether to keep orientations that fall outside the box. If the "
+         "orientations are sensible, it is safe to pass this flag.",
+     )
+
+     args = parser.parse_args()
+
+     return args
+
+
+ def main():
+     args = parse_args()
+     orientations = Orientations.from_file(args.orientations)
+     orientations.translations = np.divide(
+         orientations.translations, args.orientations_sampling
+     )
+
+     target = Density.from_file(args.target, use_memmap=True)
+
+     box_size = np.array(args.box_size)
+     box_size = np.repeat(box_size, target.data.ndim // box_size.size).astype(int)
+
+     extraction_shape = np.copy(box_size)
+     if args.align_orientations:
+         extraction_shape[:] = int(np.linalg.norm(box_size) + 1)
+
+     orientations, cand_slices, obs_slices = orientations.get_extraction_slices(
+         target_shape=target.shape,
+         extraction_shape=extraction_shape,
+         drop_out_of_box=not args.keep_out_of_box,
+         return_orientations=True,
+     )
+
+     if args.align_orientations:
+         for index in range(orientations.rotations.shape[0]):
+             rotation_matrix = euler_to_rotationmatrix(orientations.rotations[index])
+             rotation_matrix = np.linalg.inv(rotation_matrix)
+             if args.angles_are_vector:
+                 rotation_matrix = rotation_aligning_vectors(
+                     orientations.rotations[index], target_vector=(1, 0, 0)
+                 )
+             orientations.rotations[index] = euler_from_rotationmatrix(rotation_matrix)
+
+     filename = generate_tempfile_name()
+     output_dtype = target.data.dtype
+     if args.align_orientations is not None:
+         output_dtype = np.float32
+
+     target.data = target.data.astype(output_dtype)
+
+     dens = Density(
+         np.memmap(
+             filename,
+             mode="w+",
+             shape=(len(obs_slices), *box_size),
+             dtype=output_dtype,
+         ),
+         sampling_rate=(1, *target.sampling_rate),
+         origin=(0, *target.origin),
+     )
+     dens.data[:] = target.metadata["mean"]
+
+     data_subset = np.zeros(extraction_shape, dtype=target.data.dtype)
+     pbar = ProgressBar(message="Aligning ", nchars=80, total=len(obs_slices))
+     for index, (obs_slice, cand_slice) in enumerate(zip(obs_slices, cand_slices)):
+         pbar.update(index + 1)
+
+         data_subset.fill(0)
+         data_subset[cand_slice] = target.data[obs_slice]
+         target_subset = Density(
+             data_subset,
+             sampling_rate=target.sampling_rate,
+             origin=target.origin,
+         )
+
+         if args.align_orientations:
+             rotation_matrix = euler_to_rotationmatrix(orientations.rotations[index])
+             target_subset = target_subset.rigid_transform(
+                 rotation_matrix=rotation_matrix,
+                 use_geometric_center=True,
+                 order=args.interpolation_order,
+             )
+         target_subset.pad(box_size, center=True)
+
+         # target_value = target.data[tuple(orientations.translations[index].astype(int))]
+         # center = np.divide(target_subset.data.shape, 2).astype(int)
+         # print(np.where(target_subset.data == target_value), center)
+         # print(target_subset.data[tuple(center.astype(int))],
+         #     target_value,
+         #     target_subset.data[tuple(center.astype(int))] == target_value
+         # )
+
+         dens.data[index] = target_subset.data
+     print("")
+
+     target_meta = {
+         k: v for k, v in target.metadata.items() if k in ("mean", "max", "min", "std")
+     }
+     dens.metadata.update(target_meta)
+     dens.metadata["batch_dimension"] = (0,)
+     dens.metadata["normals"] = orientations.rotations
+
+     dens.to_file(args.output_file)
+     orientations.to_file(
+         f"{splitext(args.output_file)[0]}_aligned.tsv", file_format="text"
+     )
+
+ if __name__ == "__main__":
+     main()
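For orientation, the script writes the candidate stack to the H5 path given via -o and the matching orientations to a sibling *_aligned.tsv file. A minimal sketch of reading both back is shown below; the file names are placeholders, and it assumes Orientations.from_file accepts the text format written above, as it does for the postprocess.py output consumed at the top of the script.

```python
# Read back the extracted candidate stack and its aligned orientations.
# "candidates.h5" stands in for the -o argument; the first axis of the stack
# is the batch dimension recorded in the metadata above.
from tme import Density, Orientations

stack = Density.from_file("candidates.h5")
aligned = Orientations.from_file("candidates_aligned.tsv")

print("boxes:", stack.data.shape[0], "box shape:", stack.data.shape[1:])
print("rotations:", aligned.rotations.shape, "translations:", aligned.translations.shape)
```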