pytme 0.3b0-cp311-cp311-macosx_15_0_arm64.whl → 0.3.1-cp311-cp311-macosx_15_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pytme-0.3b0.data → pytme-0.3.1.data}/scripts/estimate_memory_usage.py +1 -5
- {pytme-0.3b0.data → pytme-0.3.1.data}/scripts/match_template.py +177 -226
- {pytme-0.3b0.data → pytme-0.3.1.data}/scripts/postprocess.py +69 -47
- {pytme-0.3b0.data → pytme-0.3.1.data}/scripts/preprocess.py +10 -23
- {pytme-0.3b0.data → pytme-0.3.1.data}/scripts/preprocessor_gui.py +98 -28
- pytme-0.3.1.data/scripts/pytme_runner.py +1223 -0
- {pytme-0.3b0.dist-info → pytme-0.3.1.dist-info}/METADATA +15 -15
- pytme-0.3.1.dist-info/RECORD +133 -0
- {pytme-0.3b0.dist-info → pytme-0.3.1.dist-info}/entry_points.txt +1 -0
- pytme-0.3.1.dist-info/licenses/LICENSE +339 -0
- scripts/estimate_memory_usage.py +1 -5
- scripts/eval.py +93 -0
- scripts/extract_candidates.py +118 -99
- scripts/match_template.py +177 -226
- scripts/match_template_filters.py +1200 -0
- scripts/postprocess.py +69 -47
- scripts/preprocess.py +10 -23
- scripts/preprocessor_gui.py +98 -28
- scripts/pytme_runner.py +1223 -0
- scripts/refine_matches.py +156 -387
- tests/data/.DS_Store +0 -0
- tests/data/Blurring/.DS_Store +0 -0
- tests/data/Maps/.DS_Store +0 -0
- tests/data/Raw/.DS_Store +0 -0
- tests/data/Structures/.DS_Store +0 -0
- tests/preprocessing/test_frequency_filters.py +19 -10
- tests/preprocessing/test_utils.py +18 -0
- tests/test_analyzer.py +122 -122
- tests/test_backends.py +4 -9
- tests/test_density.py +0 -1
- tests/test_matching_cli.py +30 -30
- tests/test_matching_data.py +5 -5
- tests/test_matching_utils.py +11 -61
- tests/test_rotations.py +1 -1
- tme/__version__.py +1 -1
- tme/analyzer/__init__.py +1 -1
- tme/analyzer/_utils.py +5 -8
- tme/analyzer/aggregation.py +28 -9
- tme/analyzer/base.py +25 -36
- tme/analyzer/peaks.py +49 -122
- tme/analyzer/proxy.py +1 -0
- tme/backends/_jax_utils.py +31 -28
- tme/backends/_numpyfftw_utils.py +270 -0
- tme/backends/cupy_backend.py +11 -54
- tme/backends/jax_backend.py +72 -48
- tme/backends/matching_backend.py +6 -51
- tme/backends/mlx_backend.py +1 -27
- tme/backends/npfftw_backend.py +95 -90
- tme/backends/pytorch_backend.py +5 -26
- tme/density.py +7 -10
- tme/extensions.cpython-311-darwin.so +0 -0
- tme/filters/__init__.py +2 -2
- tme/filters/_utils.py +32 -7
- tme/filters/bandpass.py +225 -186
- tme/filters/ctf.py +138 -87
- tme/filters/reconstruction.py +38 -9
- tme/filters/wedge.py +98 -112
- tme/filters/whitening.py +1 -6
- tme/mask.py +341 -0
- tme/matching_data.py +20 -44
- tme/matching_exhaustive.py +46 -56
- tme/matching_optimization.py +2 -1
- tme/matching_scores.py +216 -412
- tme/matching_utils.py +82 -424
- tme/memory.py +1 -1
- tme/orientations.py +16 -8
- tme/parser.py +109 -29
- tme/preprocessor.py +2 -2
- tme/rotations.py +1 -1
- pytme-0.3b0.dist-info/RECORD +0 -122
- pytme-0.3b0.dist-info/licenses/LICENSE +0 -153
- {pytme-0.3b0.dist-info → pytme-0.3.1.dist-info}/WHEEL +0 -0
- {pytme-0.3b0.dist-info → pytme-0.3.1.dist-info}/top_level.txt +0 -0
scripts/refine_matches.py
CHANGED
@@ -1,15 +1,16 @@
 #!python3
-"""
+"""Iterative template matching parameter tuning.
 
-
+Copyright (c) 2024 European Molecular Biology Laboratory
 
-
+Author: Valentin Maurer <valentin.maurer@embl-hamburg.de>
 """
 import argparse
 import subprocess
 from sys import exit
+from os import unlink
 from time import time
-from
+from os.path import join
 from typing import Tuple, List, Dict
 
 import numpy as np
@@ -17,18 +18,27 @@ from scipy import optimize
 from sklearn.metrics import roc_auc_score
 
 from tme import Orientations, Density
-from tme.
+from tme.backends import backend as be
+from tme.matching_utils import generate_tempfile_name, create_mask
 from tme.matching_exhaustive import MATCHING_EXHAUSTIVE_REGISTER
 
-
-
+
+def parse_range(x: str):
+    start, stop, step = x.split(":")
     return range(int(start), int(stop), int(step))
 
+
 def parse_args():
     parser = argparse.ArgumentParser(
         description="Refine template matching candidates using deep matching.",
     )
     io_group = parser.add_argument_group("Input / Output")
+    io_group.add_argument(
+        "--target",
+        required=True,
+        type=str,
+        help="Image stack created using extract_candidates.py.",
+    )
     io_group.add_argument(
         "--orientations",
         required=True,
@@ -38,72 +48,53 @@ def parse_args():
         " for available options.",
     )
     io_group.add_argument(
-        "--
+        "--output-prefix", required=True, type=str, help="Path to write output to."
     )
     io_group.add_argument(
-        "--
-
-        default=
-
-        help="Number of refinement iterations to perform.",
+        "--save-pickles",
+        action="store_true",
+        default=False,
+        help="Save intermediate results as pickle files in output directory.",
     )
     io_group.add_argument(
-        "--
+        "--save-orientations",
         action="store_true",
         default=False,
-        help="
+        help="Save orientation results in output directory.",
     )
     matching_group = parser.add_argument_group("Template Matching")
-    matching_group.add_argument(
-        "--input_file",
-        required=False,
-        type=str,
-        help="Path to the output of match_template.py.",
-    )
-    matching_group.add_argument(
-        "-m",
-        "--target",
-        dest="target",
-        type=str,
-        required=False,
-        help="Path to a target in CCP4/MRC, EM, H5 or another format supported by "
-        "tme.density.Density.from_file "
-        "https://kosinskilab.github.io/pyTME/reference/api/tme.density.Density.from_file.html",
-    )
-    matching_group.add_argument(
-        "--target_mask",
-        dest="target_mask",
-        type=str,
-        required=False,
-        help="Path to a mask for the target in a supported format (see target).",
-    )
     matching_group.add_argument(
         "-i",
         "--template",
         dest="template",
         type=str,
-        required=
+        required=True,
         help="Path to a template in PDB/MMCIF or other supported formats (see target).",
     )
     matching_group.add_argument(
-        "--
-        dest="template_mask",
+        "--template-mask",
         type=str,
         required=False,
         help="Path to a mask for the template in a supported format (see target).",
     )
     matching_group.add_argument(
-        "--
-
+        "--ctf-file",
+        type=str,
+        required=False,
+        default=None,
+        help="Path to a file with CTF parameters. This can be a Warp/M XML file "
+        "a GCTF/Relion STAR file, an MDOC file, or the output of CTFFIND4. If the file "
+        " does not specify tilt angles, --tilt-angles are used.",
+    )
+    matching_group.add_argument(
+        "--invert-target-contrast",
         action="store_true",
         default=False,
-        help="Invert the target's contrast
-        "This option is intended for targets where templates to-be-matched have "
+        help="Invert the target's contrast for cases where templates to-be-matched have "
        "negative values, e.g. tomograms.",
     )
     matching_group.add_argument(
-        "--
-        dest="angular_sampling",
+        "--angular-sampling",
         required=True,
         default=None,
         help="Angular sampling rate using optimized rotational sets."
@@ -113,11 +104,13 @@ def parse_args():
         "-s",
         dest="score",
         type=str,
-        default="
+        default="batchFLCSphericalMask",
         choices=list(MATCHING_EXHAUSTIVE_REGISTER.keys()),
         help="Template matching scoring function.",
     )
-
+
+    computation_group = parser.add_argument_group("Computation")
+    computation_group.add_argument(
         "-n",
         dest="cores",
         required=False,
@@ -125,113 +118,44 @@ def parse_args():
         default=4,
         help="Number of cores used for template matching.",
     )
-
-        "--
-
-
-
-        help="Whether to perform computations on the GPU.",
-    )
-    matching_group.add_argument(
-        "--no_centering",
-        dest="no_centering",
-        action="store_true",
-        help="Assumes the template is already centered and omits centering.",
-    )
-    matching_group.add_argument(
-        "--no_edge_padding",
-        dest="no_edge_padding",
-        action="store_true",
-        default=False,
-        help="Whether to not pad the edges of the target. Can be set if the target"
-        " has a well defined bounding box, e.g. a masked reconstruction.",
-    )
-    matching_group.add_argument(
-        "--no_fourier_padding",
-        dest="no_fourier_padding",
-        action="store_true",
-        default=False,
-        help="Whether input arrays should not be zero-padded to full convolution shape "
-        "for numerical stability. When working with very large targets, e.g. tomograms, "
-        "it is safe to use this flag and benefit from the performance gain.",
-    )
-
-    peak_group = parser.add_argument_group("Peak Calling")
-    peak_group.add_argument(
-        "--number_of_peaks",
-        type=int,
-        default=100,
-        required=False,
-        help="Upper limit of peaks to call, subject to filtering parameters. Default 1000. "
-        "If minimum_score is provided all peaks scoring higher will be reported.",
-    )
-    extraction_group = parser.add_argument_group("Extraction")
-    extraction_group.add_argument(
-        "--keep_out_of_box",
-        action="store_true",
-        required=False,
-        help="Whether to keep orientations that fall outside the box. If the "
-        "orientations are sensible, it is safe to pass this flag.",
+    computation_group.add_argument(
+        "--backend",
+        default=be._backend_name,
+        choices=be.available_backends(),
+        help="Set computation backend.",
     )
 
     optimization_group = parser.add_argument_group("Optimization")
-    optimization_group.add_argument(
-        "--lowpass",
-        dest="lowpass",
-        action="store_true",
-        default=False,
-        help="Optimize template matching lowpass filter cutoff.",
-    )
-    optimization_group.add_argument(
-        "--highpass",
-        dest="highpass",
-        action="store_true",
-        default=False,
-        help="Optimize template matching highpass filter cutoff.",
-    )
     optimization_group.add_argument(
         "--lowpass-range",
-        dest="lowpass_range",
         type=str,
         default="0:50:5",
         help="Optimize template matching lowpass filter cutoff.",
     )
     optimization_group.add_argument(
         "--highpass-range",
-        dest="highpass_range",
         type=str,
         default="0:50:5",
         help="Optimize template matching highpass filter cutoff.",
     )
     optimization_group.add_argument(
         "--translation-uncertainty",
-        dest="translation_uncertainty",
         type=int,
-        default=
-        help="
+        default=8,
+        help="Translational uncertainty for masking, defaults to 8.",
     )
 
-
     args = parser.parse_args()
 
-
-    if args.input_file is None and not data_present:
-        raise ValueError(
-            "Either --input_file or --target and --template need to be specified."
-        )
-    elif args.input_file is not None and data_present:
-        raise ValueError(
-            "Please specific either --input_file or --target and --template."
-        )
-
+    args.target_mask = None
     if args.lowpass_range != "None":
         args.lowpass_range = parse_range(args.lowpass_range)
     else:
-        args.lowpass_range = (None,
+        args.lowpass_range = (None,)
     if args.highpass_range != "None":
         args.highpass_range = parse_range(args.highpass_range)
     else:
-        args.highpass_range = (None,
+        args.highpass_range = (None,)
     return args
 
 
@@ -250,6 +174,7 @@ def argdict_to_command(input_args: Dict, executable: str) -> List:
     ret.insert(0, executable)
     return " ".join(ret)
 
+
 def run_command(command):
     ret = subprocess.run(command, capture_output=True, shell=True)
     if ret.returncode != 0:
@@ -260,52 +185,41 @@ def run_command(command):
 
     return None
 
-def create_stacking_argdict(args) -> Dict:
-    arg_dict = {
-        "--target": args.target,
-        "--template": args.template,
-        "--orientations": args.orientations,
-        "--output_file": args.candidate_stack_path,
-        "--keep_out_of_box": args.keep_out_of_box,
-    }
-    return arg_dict
-
 
 def create_matching_argdict(args) -> Dict:
     arg_dict = {
         "--target": args.target,
         "--template": args.template,
-        "--
-        "
+        "--template-mask": args.template_mask,
+        "--output": args.match_template_path,
         "-a": args.angular_sampling,
         "-s": args.score,
-        "--no_fourier_padding": True,
-        "--no_edge_padding": True,
-        "--no_centering": args.no_centering,
         "-n": args.cores,
-        "--
-        "--
+        "--ctf-file": args.ctf_file,
+        "--invert-target-contrast": args.invert_target_contrast,
     }
     return arg_dict
 
 
 def create_postprocessing_argdict(args) -> Dict:
     arg_dict = {
-        "--
-        "--
-        "--
-        "--
-        "--
-        "--
-        "--
+        "--input-file": args.match_template_path,
+        "--target-mask": args.target_mask,
+        "--output-prefix": args.new_orientations_path,
+        "--peak-caller": "PeakCallerMaximumFilter",
+        "--num-peaks": 1,
+        "--output-format": "orientations",
+        "--mask-edges": True,
     }
     if args.target_mask is not None:
-        arg_dict["--
+        arg_dict["--mask-edges"] = False
     return arg_dict
 
 
-def update_orientations(
-
+def update_orientations(
+    old: Orientations, new: Orientations, args, **kwargs
+) -> Orientations:
+    stack_shape = Density.from_file(args.target, use_memmap=True).shape
     stack_center = np.add(np.divide(stack_shape, 2).astype(int), np.mod(stack_shape, 2))
 
     peak_number = new.translations[:, 0].astype(int)
@@ -315,34 +229,44 @@ def update_orientations(old : Orientations, new : Orientations, args, **kwargs)
     )
     ret = old.copy()
     ret.scores[:] = 0
+
     ret.scores[peak_number] = new.scores
     ret.translations[peak_number] = new_translations
 
-    # The effect of --align_orientations should be handled
+    # The effect of --align_orientations should be handled here
+    ret.rotations[peak_number] = new.rotations
     return ret
 
 
 class DeepMatcher:
-    def __init__(self, args, margin
+    def __init__(self, args, margin: float = 0.5):
         self.args = args
         self.margin = margin
         self.orientations = Orientations.from_file(args.orientations)
 
         match_template_args = create_matching_argdict(args)
-        match_template_args["--target"] = args.
+        match_template_args["--target"] = args.target
         self.match_template_args = match_template_args
 
         self.filter_parameters = {}
-        if args.
+        if args.lowpass_range:
             self.filter_parameters["--lowpass"] = 0
-        if args.
+        if args.highpass_range:
             self.filter_parameters["--highpass"] = 200
-        # self.filter_parameters["--whiten"] = False
-        self.filter_parameters["--no_filter_target"] = False
-
 
         self.postprocess_args = create_postprocessing_argdict(args)
-        self.
+        self.log_file = f"{args.output_prefix}_optimization_log.txt"
+
+        header = [
+            "mean_score_positive",
+            "mean_score_negative",
+            "lowpass",
+            "highpass",
+            "auc_score",
+            "duration",
+        ]
+        with open(self.log_file, mode="w", encoding="utf-8") as f:
+            _ = f.write(",".join(header) + "\n")
 
     def get_initial_values(self) -> Tuple[float]:
         ret = tuple(float(x) for x in self.filter_parameters.values())
@@ -356,270 +280,115 @@ class DeepMatcher:
             ret[key] = value > 0.5
         return ret
 
-    def forward(self, x
-
-
+    def forward(self, x: Tuple[float]):
         # Label 1 -> True positive, label 0 -> false positive
         orientations_new = self(x)
         label, score = orientations_new.details, orientations_new.scores
-
-
-
-
-
-
-
-
-        loss = roc_auc_score(label, score)
-        # print(
-        # np.mean(score[label == 1]), np.mean(score[label == 0]),
-        # *x, loss, time()
-        # )
-
+        loss = np.add(
+            (1 - label) * np.square(score),
+            label * np.square(np.fmax(self.margin - score, 0.0)),
+        )
+        loss = loss.mean()
         return loss
 
     def __call__(self, x: Tuple[float]):
+        start = time()
         filter_parameters = self.format_parameters(x)
         self.match_template_args.update(filter_parameters)
-        match_template = argdict_to_command(
-            self.match_template_args,
-            executable="python3 $HOME/src/pytme/scripts/match_template_filters.py",
-        )
-        run_command(match_template)
 
-
-
-            self.postprocess_args,
-            executable="python3 $HOME/src/pytme/scripts/postprocess.py",
-        )
-        run_command(postprocess)
+        if self.args.save_pickles or self.args.save_orientations:
+            prefix = "_".join([str(y) for y in x])
 
-
-            f"{self.
-
-
-            new=orientations_new,
-            old=self.orientations,
-            args=self.args
-        )
-
-        label, score = orientations_new.details, orientations_new.scores
-        loss = roc_auc_score(label, score)
-        print(
-            np.mean(score[label == 1]), np.mean(score[label == 0]),
-            *x, 0, loss, time()
-        )
+        if self.args.save_pickles:
+            pickle_path = f"{self.args.output_prefix}_{prefix}.pickle"
+            self.match_template_args["--output"] = pickle_path
+            self.postprocess_args["--input-file"] = pickle_path
 
+        if self.args.save_orientations:
+            orientation_path = f"{self.args.output_prefix}_{prefix}"
 
-        # Rerun with noise correction
-        temp_args = self.match_template_args.copy()
-        background_file = generate_tempfile_name(".pickle")
-        temp_args["--scramble_phases"] = True
-        temp_args["-o"] = background_file
         match_template = argdict_to_command(
-
-            executable="
+            self.match_template_args,
+            executable="match_template.py",
         )
         run_command(match_template)
-
-
+
+        # Assume we get a new peak for each input in the same order
         postprocess = argdict_to_command(
             self.postprocess_args,
-            executable="
+            executable="postprocess.py",
         )
         run_command(postprocess)
 
         orientations_new = Orientations.from_file(
-            f"{self.postprocess_args['--
+            f"{self.postprocess_args['--output-prefix']}.tsv"
        )
         orientations_new = update_orientations(
-            new=orientations_new,
-            old=self.orientations,
-            args=self.args
+            new=orientations_new, old=self.orientations, args=self.args
         )
 
+        if self.args.save_orientations:
+            orientations_new.to_file(f"{orientation_path}.tsv")
+
         label, score = orientations_new.details, orientations_new.scores
         loss = roc_auc_score(label, score)
-        print(
-            np.mean(score[label == 1]), np.mean(score[label == 0]),
-            *x, 1, loss, time()
-        )
 
-
+        mean_true = np.mean(score[label == 1])
+        mean_false = np.mean(score[label == 0])
+        params = ",".join([str(y) for y in x])
 
-
-
-
-        # self.match_template_args.update(filter_parameters)
-        # match_template = argdict_to_command(
-        # self.match_template_args,
-        # executable="python3 $HOME/src/pytme/scripts/match_template_filters.py",
-        # )
-        # run_command(match_template)
-
-        # data = load_pickle(self.args.match_template_path)
-        # temp_args = self.match_template_args.copy()
-        # temp_args["--scramble_phases"] = True
-        # # write_pickle(data, "/home/vmaurer/deep_matching/t.pickle")
-
-        # match_template = argdict_to_command(
-        # temp_args,
-        # executable="python3 $HOME/src/pytme/scripts/match_template_filters.py",
-        # )
-        # run_command(match_template)
-        # data_norm = load_pickle(self.args.match_template_path)
-        # # write_pickle(data_norm, "/home/vmaurer/deep_matching/noise.pickle")
-
-        # data[0] = (data[0] - data_norm[0]) / (1 - data_norm[0])
-        # data[0] = np.fmax(data[0], 0)
-        # write_pickle(data, self.args.match_template_path)
-
-        # # Assume we get a new peak for each input in the same order
-        # postprocess = argdict_to_command(
-        # self.postprocess_args,
-        # executable="python3 $HOME/src/pytme/scripts/postprocess.py",
-        # )
-        # run_command(postprocess)
-
-        # orientations_new = Orientations.from_file(
-        # f"{self.postprocess_args['--output_prefix']}.tsv"
-        # )
-        # orientations_new = update_orientations(
-        # new=orientations_new,
-        # old=self.orientations,
-        # args=self.args
-        # )
-
-        # return orientations_new
+        with open(self.log_file, mode="a", encoding="utf-8") as f:
+            _ = f.write(f"{mean_true},{mean_false},{params},{loss},{time()-start}\n")
+        return orientations_new
 
 
 def main():
-    print("Entered")
     args = parse_args()
 
-    if args.input_file is not None:
-        data = load_pickle(args.input_file)
-        target_origin, _, sampling_rate, cli_args = data[-1]
-        args.target, args.template = cli_args.target, cli_args.template
-
-    args.candidate_stack_path = generate_tempfile_name(suffix=".h5")
     args.new_orientations_path = generate_tempfile_name()
     args.match_template_path = generate_tempfile_name()
 
-
-    initial_values = match_deep.get_initial_values()
-
-    # Do a single pass over the data
-    if len(initial_values) == 0:
-        create_image_stack = create_stacking_argdict(args)
-        create_image_stack = argdict_to_command(
-            create_image_stack,
-            executable="python3 $HOME/src/pytme/scripts/extract_candidates.py",
-        )
-        run_command(create_image_stack)
-
-        print("Created image stack")
-        if args.verbose:
-            copyfile(args.candidate_stack_path, f"{args.output_prefix}_stack.h5")
-
-        print("Starting matching")
-        orientations = match_deep(x=())
-
-        if args.verbose:
-            copyfile(args.match_template_path, f"{args.output_prefix}_stack.pickle")
-        print("Completed matching")
-        orientations.to_file(f"{args.output_prefix}.tsv")
-        exit(0)
+    args.box_size = np.max(Density.from_file(args.template, use_memmap=True).shape)
 
+    args.target_mask = None
     if args.translation_uncertainty is not None:
         args.target_mask = generate_tempfile_name(suffix=".h5")
-
-
-
-
-
-
+        dens = Density.from_file(args.target)
+        stack_center = np.add(
+            np.divide(dens.data.shape, 2).astype(int), np.mod(dens.data.shape, 2)
+        ).astype(int)[1:]
+
+        out = dens.empty
+        out.data[..., :] = create_mask(
+            mask_type="ellipse",
+            center=stack_center,
+            radius=args.translation_uncertainty,
+            shape=dens.data.shape[1:],
         )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    # for whiten in (False, True):
-    # loss = match_deep.forward((lowpass, highpass, whiten))
-    # # print((lowpass, highpass), loss)
-    # if min_loss is None:
-    # min_loss = loss
-    # if loss < min_loss:
-    # min_loss = loss
-    # parameters = (lowpass, highpass, whiten),
-
-    # for lowpass in (10, 50, 100, 200):
-    # for highpass in (10, 50, 100, 200):
-    for lowpass in args.lowpass_range:
-        for highpass in args.highpass_range:
-            if lowpass is not None and highpass is not None:
-                if lowpass >= highpass:
-                    continue
-            for no_filter_target in (True, False):
-                loss = match_deep.forward((lowpass, highpass, no_filter_target))
-                if min_loss is None:
-                    min_loss = loss
-                if loss < min_loss:
-                    min_loss = loss
-                    parameters = (lowpass, highpass, no_filter_target)
-
-    # print("Final output", min_loss, parameters)
-    import sys
-    sys.exit(0)
-
-    # parameters = optimize.minimize(
-    # x0=match_deep.get_initial_values(),
-    # fun=match_deep.forward,
-    # method="L-BFGS-B",
-    # options={"maxiter": 100}
-    # )
-    parameter_dict = match_deep.format_parameters(parameters)
-    print("Converged with parameters", parameters)
-
-    match_template = create_matching_argdict(args)
-    match_template.update(parameter_dict)
-    match_template = argdict_to_command(
-        match_template,
-        executable="python3 $HOME/src/pytme/scripts/match_template_filters.py",
-    )
-    _ = subprocess.run(match_template, capture_output=True, shell=True)
-
-    # Some form of labelling is necessary for these matches
-    # 1. All of them are true positives
-    # 2. All of them are true positives up to a certain threshold
-    # 3. Kernel fitting
-    # 4. Perhaps also sensible to include a certain percentage of low scores as true negatives
-    postprocess = create_postprocessing_argdict(args)
-    postprocess = argdict_to_command(postprocess, executable="postprocess.py")
-    _ = subprocess.run(postprocess, capture_output=True, shell=True)
-    args.orientations = f"{args.new_orientations_path}.tsv"
-    orientations = Orientations.from_file(args.orientations)
-    orientations.to_file(f"{args.output_prefix}_{current_iteration}.tsv")
+        out.to_file(args.target_mask)
+
+    # Perhaps we need a different optimizer here to use sensible steps for each parameter
+    parameters, min_loss = (), None
+    match_deep = DeepMatcher(args)
+    for lowpass in args.lowpass_range:
+        for highpass in args.highpass_range:
+            if lowpass is not None and highpass is not None:
+                if lowpass >= highpass:
+                    continue
+            parameters = (lowpass, highpass)
+            loss = match_deep.forward(parameters)
+            if min_loss is None:
+                min_loss, best_params = loss, parameters
+
+            if loss < min_loss:
+                min_loss, best_params = loss, parameters
+
+    unlink(args.target_mask)
+    unlink(args.new_orientations_path)
+
+    if not args.save_pickles:
+        unlink(args.match_template_path)
+    print("Final output", min_loss, best_params)
 
 
 if __name__ == "__main__":