pytme 0.2.9__cp311-cp311-macosx_15_0_arm64.whl → 0.3b0__cp311-cp311-macosx_15_0_arm64.whl

This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (63)
  1. pytme-0.2.9.data/scripts/estimate_ram_usage.py → pytme-0.3b0.data/scripts/estimate_memory_usage.py +16 -33
  2. {pytme-0.2.9.data → pytme-0.3b0.data}/scripts/match_template.py +224 -223
  3. {pytme-0.2.9.data → pytme-0.3b0.data}/scripts/postprocess.py +283 -163
  4. {pytme-0.2.9.data → pytme-0.3b0.data}/scripts/preprocess.py +11 -8
  5. {pytme-0.2.9.data → pytme-0.3b0.data}/scripts/preprocessor_gui.py +10 -9
  6. {pytme-0.2.9.dist-info → pytme-0.3b0.dist-info}/METADATA +11 -9
  7. {pytme-0.2.9.dist-info → pytme-0.3b0.dist-info}/RECORD +61 -58
  8. {pytme-0.2.9.dist-info → pytme-0.3b0.dist-info}/entry_points.txt +1 -1
  9. scripts/{estimate_ram_usage.py → estimate_memory_usage.py} +16 -33
  10. scripts/extract_candidates.py +224 -0
  11. scripts/match_template.py +224 -223
  12. scripts/postprocess.py +283 -163
  13. scripts/preprocess.py +11 -8
  14. scripts/preprocessor_gui.py +10 -9
  15. scripts/refine_matches.py +626 -0
  16. tests/preprocessing/test_frequency_filters.py +9 -4
  17. tests/test_analyzer.py +143 -138
  18. tests/test_matching_cli.py +85 -29
  19. tests/test_matching_exhaustive.py +1 -2
  20. tests/test_matching_optimization.py +4 -9
  21. tests/test_orientations.py +0 -1
  22. tme/__version__.py +1 -1
  23. tme/analyzer/__init__.py +2 -0
  24. tme/analyzer/_utils.py +25 -17
  25. tme/analyzer/aggregation.py +385 -220
  26. tme/analyzer/base.py +138 -0
  27. tme/analyzer/peaks.py +150 -88
  28. tme/analyzer/proxy.py +122 -0
  29. tme/backends/__init__.py +4 -3
  30. tme/backends/_cupy_utils.py +25 -24
  31. tme/backends/_jax_utils.py +4 -3
  32. tme/backends/cupy_backend.py +4 -13
  33. tme/backends/jax_backend.py +6 -8
  34. tme/backends/matching_backend.py +4 -3
  35. tme/backends/mlx_backend.py +4 -3
  36. tme/backends/npfftw_backend.py +7 -5
  37. tme/backends/pytorch_backend.py +14 -4
  38. tme/cli.py +126 -0
  39. tme/density.py +4 -3
  40. tme/filters/__init__.py +1 -1
  41. tme/filters/_utils.py +4 -3
  42. tme/filters/bandpass.py +6 -4
  43. tme/filters/compose.py +5 -4
  44. tme/filters/ctf.py +426 -214
  45. tme/filters/reconstruction.py +58 -28
  46. tme/filters/wedge.py +139 -61
  47. tme/filters/whitening.py +36 -36
  48. tme/matching_data.py +4 -3
  49. tme/matching_exhaustive.py +17 -16
  50. tme/matching_optimization.py +5 -4
  51. tme/matching_scores.py +4 -3
  52. tme/matching_utils.py +6 -4
  53. tme/memory.py +4 -3
  54. tme/orientations.py +9 -6
  55. tme/parser.py +5 -4
  56. tme/preprocessor.py +4 -3
  57. tme/rotations.py +10 -7
  58. tme/structure.py +4 -3
  59. tests/data/Maps/.DS_Store +0 -0
  60. tests/data/Structures/.DS_Store +0 -0
  61. {pytme-0.2.9.dist-info → pytme-0.3b0.dist-info}/WHEEL +0 -0
  62. {pytme-0.2.9.dist-info → pytme-0.3b0.dist-info}/licenses/LICENSE +0 -0
  63. {pytme-0.2.9.dist-info → pytme-0.3b0.dist-info}/top_level.txt +0 -0
preprocessor_gui.py
@@ -1,9 +1,10 @@
  #!python3
- """ GUI for identifying adequate template matching filter and masks.
+ """
+ GUI for identifying suitable masks and analyzing template matching results.

- Copyright (c) 2023 European Molecular Biology Laboratory
+ Copyright (c) 2023 European Molecular Biology Laboratory

- Author: Valentin Maurer <valentin.maurer@embl-hamburg.de>
+ Author: Valentin Maurer <valentin.maurer@embl-hamburg.de>
  """
  import inspect
  import argparse
@@ -23,9 +24,9 @@ from napari.utils.events import EventedList

  from tme.backends import backend
  from tme.rotations import align_vectors
- from tme.filters import BandPassFilter, CTF
  from tme import Preprocessor, Density, Orientations
  from tme.matching_utils import create_mask, load_pickle
+ from tme.filters import BandPassFilter, CTFReconstructed

  preprocessor = Preprocessor()
  SLIDER_MIN, SLIDER_MAX = 0, 25
@@ -71,15 +72,14 @@ def ctf_filter(
      fast_shape = [next_fast_len(x) for x in np.multiply(template.shape, 2)]
      template_pad = backend.topleft_pad(template, fast_shape)
      template_ft = np.fft.rfftn(template_pad, s=template_pad.shape)
-     ctf = CTF(
-         angles=[0],
+     ctf = CTFReconstructed(
          shape=fast_shape,
          defocus_x=[defocus_angstrom],
          acceleration_voltage=acceleration_voltage * 1e3,
          spherical_aberration=spherical_aberration * 1e7,
          amplitude_contrast=amplitude_contrast,
-         phase_shift=[phase_shift],
-         defocus_angle=[defocus_angle],
+         phase_shift=phase_shift,
+         defocus_angle=defocus_angle,
          sampling_rate=np.max(sampling_rate),
          return_real_fourier=True,
          flip_phase=flip_phase,
@@ -1216,7 +1216,8 @@ def main():

  def parse_args():
      parser = argparse.ArgumentParser(
-         description="GUI for preparing and analyzing template matching runs."
+         description="GUI for preparing and analyzing template matching runs.",
+         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
      )
      args = parser.parse_args()
      return args
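The hunks above replace the GUI's CTF call with CTFReconstructed, dropping the angles argument and passing phase_shift and defocus_angle as scalars instead of one-element lists. A minimal sketch of the updated construction follows; it only uses the keyword arguments that appear in the hunk, all numeric values are placeholders, and anything else about the class is assumed rather than taken from the package.

    # Sketch only: mirrors the CTFReconstructed call shown in the hunk above.
    # Keyword names come from the diff; the values below are placeholders.
    from tme.filters import CTFReconstructed

    ctf = CTFReconstructed(
        shape=(64, 64, 64),              # padded filter shape (fast_shape in the GUI)
        defocus_x=[30000],               # still passed as a list, as in the hunk
        acceleration_voltage=300 * 1e3,  # scaled as in the GUI code
        spherical_aberration=2.7 * 1e7,  # scaled as in the GUI code
        amplitude_contrast=0.07,
        phase_shift=0,                   # scalar now, previously [phase_shift]
        defocus_angle=0,                 # scalar now, previously [defocus_angle]
        sampling_rate=4.0,
        return_real_fourier=True,
        flip_phase=False,
    )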
refine_matches.py
@@ -0,0 +1,626 @@
+ #!python3
+ """ Iterative template matching parameter tuning.
+
+ Copyright (c) 2024 European Molecular Biology Laboratory
+
+ Author: Valentin Maurer <valentin.maurer@embl-hamburg.de>
+ """
+ import argparse
+ import subprocess
+ from sys import exit
+ from time import time
+ from shutil import copyfile
+ from typing import Tuple, List, Dict
+
+ import numpy as np
+ from scipy import optimize
+ from sklearn.metrics import roc_auc_score
+
+ from tme import Orientations, Density
+ from tme.matching_utils import generate_tempfile_name, load_pickle, write_pickle, create_mask
+ from tme.matching_exhaustive import MATCHING_EXHAUSTIVE_REGISTER
+
+ def parse_range(x : str):
+     start, stop,step = x.split(":")
+     return range(int(start), int(stop), int(step))
+
+ def parse_args():
+     parser = argparse.ArgumentParser(
+         description="Refine template matching candidates using deep matching.",
+     )
+     io_group = parser.add_argument_group("Input / Output")
+     io_group.add_argument(
+         "--orientations",
+         required=True,
+         type=str,
+         help="Path to an orientations file in a supported format. See "
+         "https://kosinskilab.github.io/pyTME/reference/api/tme.orientations.Orientations.from_file.html"
+         " for available options.",
+     )
+     io_group.add_argument(
+         "--output_prefix", required=True, type=str, help="Path to write output to."
+     )
+     io_group.add_argument(
+         "--iterations",
+         required=False,
+         default=0,
+         type=int,
+         help="Number of refinement iterations to perform.",
+     )
+     io_group.add_argument(
+         "--verbose",
+         action="store_true",
+         default=False,
+         help="More verbose and more files written to disk.",
+     )
+     matching_group = parser.add_argument_group("Template Matching")
+     matching_group.add_argument(
+         "--input_file",
+         required=False,
+         type=str,
+         help="Path to the output of match_template.py.",
+     )
+     matching_group.add_argument(
+         "-m",
+         "--target",
+         dest="target",
+         type=str,
+         required=False,
+         help="Path to a target in CCP4/MRC, EM, H5 or another format supported by "
+         "tme.density.Density.from_file "
+         "https://kosinskilab.github.io/pyTME/reference/api/tme.density.Density.from_file.html",
+     )
+     matching_group.add_argument(
+         "--target_mask",
+         dest="target_mask",
+         type=str,
+         required=False,
+         help="Path to a mask for the target in a supported format (see target).",
+     )
+     matching_group.add_argument(
+         "-i",
+         "--template",
+         dest="template",
+         type=str,
+         required=False,
+         help="Path to a template in PDB/MMCIF or other supported formats (see target).",
+     )
+     matching_group.add_argument(
+         "--template_mask",
+         dest="template_mask",
+         type=str,
+         required=False,
+         help="Path to a mask for the template in a supported format (see target).",
+     )
+     matching_group.add_argument(
+         "--invert_target_contrast",
+         dest="invert_target_contrast",
+         action="store_true",
+         default=False,
+         help="Invert the target's contrast and rescale linearly between zero and one. "
+         "This option is intended for targets where templates to-be-matched have "
+         "negative values, e.g. tomograms.",
+     )
+     matching_group.add_argument(
+         "--angular_sampling",
+         dest="angular_sampling",
+         required=True,
+         default=None,
+         help="Angular sampling rate using optimized rotational sets. "
+         "A lower number yields more rotations. Values >= 180 sample only the identity.",
+     )
+     matching_group.add_argument(
+         "-s",
+         dest="score",
+         type=str,
+         default="FLCSphericalMask",
+         choices=list(MATCHING_EXHAUSTIVE_REGISTER.keys()),
+         help="Template matching scoring function.",
+     )
+     matching_group.add_argument(
+         "-n",
+         dest="cores",
+         required=False,
+         type=int,
+         default=4,
+         help="Number of cores used for template matching.",
+     )
+     matching_group.add_argument(
+         "--use_gpu",
+         dest="use_gpu",
+         action="store_true",
+         default=False,
+         help="Whether to perform computations on the GPU.",
+     )
+     matching_group.add_argument(
+         "--no_centering",
+         dest="no_centering",
+         action="store_true",
+         help="Assumes the template is already centered and omits centering.",
+     )
+     matching_group.add_argument(
+         "--no_edge_padding",
+         dest="no_edge_padding",
+         action="store_true",
+         default=False,
+         help="Whether to not pad the edges of the target. Can be set if the target"
+         " has a well defined bounding box, e.g. a masked reconstruction.",
+     )
+     matching_group.add_argument(
+         "--no_fourier_padding",
+         dest="no_fourier_padding",
+         action="store_true",
+         default=False,
+         help="Whether input arrays should not be zero-padded to full convolution shape "
+         "for numerical stability. When working with very large targets, e.g. tomograms, "
+         "it is safe to use this flag and benefit from the performance gain.",
+     )
+
+     peak_group = parser.add_argument_group("Peak Calling")
+     peak_group.add_argument(
+         "--number_of_peaks",
+         type=int,
+         default=100,
+         required=False,
+         help="Upper limit of peaks to call, subject to filtering parameters. Default 100. "
+         "If minimum_score is provided all peaks scoring higher will be reported.",
+     )
+     extraction_group = parser.add_argument_group("Extraction")
+     extraction_group.add_argument(
+         "--keep_out_of_box",
+         action="store_true",
+         required=False,
+         help="Whether to keep orientations that fall outside the box. If the "
+         "orientations are sensible, it is safe to pass this flag.",
+     )
+
+     optimization_group = parser.add_argument_group("Optimization")
+     optimization_group.add_argument(
+         "--lowpass",
+         dest="lowpass",
+         action="store_true",
+         default=False,
+         help="Optimize template matching lowpass filter cutoff.",
+     )
+     optimization_group.add_argument(
+         "--highpass",
+         dest="highpass",
+         action="store_true",
+         default=False,
+         help="Optimize template matching highpass filter cutoff.",
+     )
+     optimization_group.add_argument(
+         "--lowpass-range",
+         dest="lowpass_range",
+         type=str,
+         default="0:50:5",
+         help="Range of lowpass filter cutoffs to evaluate, given as start:stop:step.",
+     )
+     optimization_group.add_argument(
+         "--highpass-range",
+         dest="highpass_range",
+         type=str,
+         default="0:50:5",
+         help="Range of highpass filter cutoffs to evaluate, given as start:stop:step.",
+     )
+     optimization_group.add_argument(
+         "--translation-uncertainty",
+         dest="translation_uncertainty",
+         type=int,
+         default=None,
+         help="Translation uncertainty in voxels used to build an ellipsoidal target mask.",
+     )
+
+
+     args = parser.parse_args()
+
+     data_present = args.target is not None and args.template is not None
+     if args.input_file is None and not data_present:
+         raise ValueError(
+             "Either --input_file or --target and --template need to be specified."
+         )
+     elif args.input_file is not None and data_present:
+         raise ValueError(
+             "Please specify either --input_file or --target and --template."
+         )
+
+     if args.lowpass_range != "None":
+         args.lowpass_range = parse_range(args.lowpass_range)
+     else:
+         args.lowpass_range = (None, )
+     if args.highpass_range != "None":
+         args.highpass_range = parse_range(args.highpass_range)
+     else:
+         args.highpass_range = (None, )
+     return args
+
+
+ def argdict_to_command(input_args: Dict, executable: str) -> List:
+     ret = []
+     for key, value in input_args.items():
+         if value is None:
+             continue
+         elif isinstance(value, bool):
+             if value:
+                 ret.append(key)
+         else:
+             ret.extend([key, value])
+
+     ret = [str(x) for x in ret]
+     ret.insert(0, executable)
+     return " ".join(ret)
+
+ def run_command(command):
+     ret = subprocess.run(command, capture_output=True, shell=True)
+     if ret.returncode != 0:
+         print(f"Error when executing: {command}.")
+         print(f"Stdout: {ret.stdout.decode('utf-8')}")
+         print(f"Stderr: {ret.stderr.decode('utf-8')}")
+         exit(-1)
+
+     return None
+
+ def create_stacking_argdict(args) -> Dict:
+     arg_dict = {
+         "--target": args.target,
+         "--template": args.template,
+         "--orientations": args.orientations,
+         "--output_file": args.candidate_stack_path,
+         "--keep_out_of_box": args.keep_out_of_box,
+     }
+     return arg_dict
+
+
+ def create_matching_argdict(args) -> Dict:
+     arg_dict = {
+         "--target": args.target,
+         "--template": args.template,
+         "--template_mask": args.template_mask,
+         "-o": args.match_template_path,
+         "-a": args.angular_sampling,
+         "-s": args.score,
+         "--no_fourier_padding": True,
+         "--no_edge_padding": True,
+         "--no_centering": args.no_centering,
+         "-n": args.cores,
+         "--invert_target_contrast": args.invert_target_contrast,
+         "--use_gpu": args.use_gpu,
+     }
+     return arg_dict
+
+
+ def create_postprocessing_argdict(args) -> Dict:
+     arg_dict = {
+         "--input_file": args.match_template_path,
+         "--target_mask": args.target_mask,
+         "--output_prefix": args.new_orientations_path,
+         "--peak_caller": "PeakCallerMaximumFilter",
+         "--number_of_peaks": args.number_of_peaks,
+         "--output_format": "orientations",
+         "--mask_edges": True,
+     }
+     if args.target_mask is not None:
+         arg_dict["--mask_edges"] = False
+     return arg_dict
+
+
+ def update_orientations(old : Orientations, new : Orientations, args, **kwargs) -> Orientations:
+     stack_shape = Density.from_file(args.candidate_stack_path, use_memmap=True).shape
+     stack_center = np.add(np.divide(stack_shape, 2).astype(int), np.mod(stack_shape, 2))
+
+     peak_number = new.translations[:, 0].astype(int)
+     new_translations = np.add(
+         old.translations[peak_number],
+         np.subtract(new.translations, stack_center)[:, 1:],
+     )
+     ret = old.copy()
+     ret.scores[:] = 0
+     ret.scores[peak_number] = new.scores
+     ret.translations[peak_number] = new_translations
+
+     # The effect of --align_orientations should be handled here
+     return ret
+
+
+ class DeepMatcher:
+     def __init__(self, args, margin : float = 0.5):
+         self.args = args
+         self.margin = margin
+         self.orientations = Orientations.from_file(args.orientations)
+
+         match_template_args = create_matching_argdict(args)
+         match_template_args["--target"] = args.candidate_stack_path
+         self.match_template_args = match_template_args
+
+         self.filter_parameters = {}
+         if args.lowpass:
+             self.filter_parameters["--lowpass"] = 0
+         if args.highpass:
+             self.filter_parameters["--highpass"] = 200
+         # self.filter_parameters["--whiten"] = False
+         self.filter_parameters["--no_filter_target"] = False
+
+
+         self.postprocess_args = create_postprocessing_argdict(args)
+         self.postprocess_args["--number_of_peaks"] = 1
+
+     def get_initial_values(self) -> Tuple[float]:
+         ret = tuple(float(x) for x in self.filter_parameters.values())
+         return ret
+
+     def format_parameters(self, parameter_values: Tuple[float]) -> Dict:
+         ret = {}
+         for value, key in zip(parameter_values, self.filter_parameters.keys()):
+             ret[key] = value
+             if isinstance(self.filter_parameters[key], bool):
+                 ret[key] = value > 0.5
+         return ret
+
+     def forward(self, x : Tuple[float]):
+
+
+         # Label 1 -> True positive, label 0 -> false positive
+         orientations_new = self(x)
+         label, score = orientations_new.details, orientations_new.scores
+         # loss = np.add(
+         #     (1 - label) * np.square(score),
+         #     label * np.square(np.fmax(self.margin - score, 0.0))
+         # )
+         # loss = loss.mean()
+
+
+
+         loss = roc_auc_score(label, score)
+         # print(
+         #     np.mean(score[label == 1]), np.mean(score[label == 0]),
+         #     *x, loss, time()
+         # )
+
+         return loss
+
+     def __call__(self, x: Tuple[float]):
+         filter_parameters = self.format_parameters(x)
+         self.match_template_args.update(filter_parameters)
+         match_template = argdict_to_command(
+             self.match_template_args,
+             executable="python3 $HOME/src/pytme/scripts/match_template_filters.py",
+         )
+         run_command(match_template)
+
+         # Assume we get a new peak for each input in the same order
+         postprocess = argdict_to_command(
+             self.postprocess_args,
+             executable="python3 $HOME/src/pytme/scripts/postprocess.py",
+         )
+         run_command(postprocess)
+
+         orientations_new = Orientations.from_file(
+             f"{self.postprocess_args['--output_prefix']}.tsv"
+         )
+         orientations_new = update_orientations(
+             new=orientations_new,
+             old=self.orientations,
+             args=self.args
+         )
+
+         label, score = orientations_new.details, orientations_new.scores
+         loss = roc_auc_score(label, score)
+         print(
+             np.mean(score[label == 1]), np.mean(score[label == 0]),
+             *x, 0, loss, time()
+         )
+
+
+         # Rerun with noise correction
+         temp_args = self.match_template_args.copy()
+         background_file = generate_tempfile_name(".pickle")
+         temp_args["--scramble_phases"] = True
+         temp_args["-o"] = background_file
+         match_template = argdict_to_command(
+             temp_args,
+             executable="python3 $HOME/src/pytme/scripts/match_template_filters.py",
+         )
+         run_command(match_template)
+         temp_args = self.match_template_args.copy()
+         temp_args["--background_file"] = background_file
+         postprocess = argdict_to_command(
+             self.postprocess_args,
+             executable="python3 $HOME/src/pytme/scripts/postprocess.py",
+         )
+         run_command(postprocess)
+
+         orientations_new = Orientations.from_file(
+             f"{self.postprocess_args['--output_prefix']}.tsv"
+         )
+         orientations_new = update_orientations(
+             new=orientations_new,
+             old=self.orientations,
+             args=self.args
+         )
+
+         label, score = orientations_new.details, orientations_new.scores
+         loss = roc_auc_score(label, score)
+         print(
+             np.mean(score[label == 1]), np.mean(score[label == 0]),
+             *x, 1, loss, time()
+         )
+
+         return orientations_new
+
+     # def __call__(self, x: Tuple[float]):
+     #     filter_parameters = self.format_parameters(x)
+     #     # print(filter_parameters)
+     #     self.match_template_args.update(filter_parameters)
+     #     match_template = argdict_to_command(
+     #         self.match_template_args,
+     #         executable="python3 $HOME/src/pytme/scripts/match_template_filters.py",
+     #     )
+     #     run_command(match_template)
+
+     #     data = load_pickle(self.args.match_template_path)
+     #     temp_args = self.match_template_args.copy()
+     #     temp_args["--scramble_phases"] = True
+     #     # write_pickle(data, "/home/vmaurer/deep_matching/t.pickle")
+
+     #     match_template = argdict_to_command(
+     #         temp_args,
+     #         executable="python3 $HOME/src/pytme/scripts/match_template_filters.py",
+     #     )
+     #     run_command(match_template)
+     #     data_norm = load_pickle(self.args.match_template_path)
+     #     # write_pickle(data_norm, "/home/vmaurer/deep_matching/noise.pickle")
+
+     #     data[0] = (data[0] - data_norm[0]) / (1 - data_norm[0])
+     #     data[0] = np.fmax(data[0], 0)
+     #     write_pickle(data, self.args.match_template_path)
+
+     #     # Assume we get a new peak for each input in the same order
+     #     postprocess = argdict_to_command(
+     #         self.postprocess_args,
+     #         executable="python3 $HOME/src/pytme/scripts/postprocess.py",
+     #     )
+     #     run_command(postprocess)
+
+     #     orientations_new = Orientations.from_file(
+     #         f"{self.postprocess_args['--output_prefix']}.tsv"
+     #     )
+     #     orientations_new = update_orientations(
+     #         new=orientations_new,
+     #         old=self.orientations,
+     #         args=self.args
+     #     )
+
+     #     return orientations_new
+
+
+ def main():
+     print("Entered")
+     args = parse_args()
+
+     if args.input_file is not None:
+         data = load_pickle(args.input_file)
+         target_origin, _, sampling_rate, cli_args = data[-1]
+         args.target, args.template = cli_args.target, cli_args.template
+
+     args.candidate_stack_path = generate_tempfile_name(suffix=".h5")
+     args.new_orientations_path = generate_tempfile_name()
+     args.match_template_path = generate_tempfile_name()
+
+     match_deep = DeepMatcher(args)
+     initial_values = match_deep.get_initial_values()
+
+     # Do a single pass over the data
+     if len(initial_values) == 0:
+         create_image_stack = create_stacking_argdict(args)
+         create_image_stack = argdict_to_command(
+             create_image_stack,
+             executable="python3 $HOME/src/pytme/scripts/extract_candidates.py",
+         )
+         run_command(create_image_stack)
+
+         print("Created image stack")
+         if args.verbose:
+             copyfile(args.candidate_stack_path, f"{args.output_prefix}_stack.h5")
+
+         print("Starting matching")
+         orientations = match_deep(x=())
+
+         if args.verbose:
+             copyfile(args.match_template_path, f"{args.output_prefix}_stack.pickle")
+         print("Completed matching")
+         orientations.to_file(f"{args.output_prefix}.tsv")
+         exit(0)
+
+     if args.translation_uncertainty is not None:
+         args.target_mask = generate_tempfile_name(suffix=".h5")
+
+     for current_iteration in range(args.iterations):
+         create_image_stack = create_stacking_argdict(args)
+         create_image_stack = argdict_to_command(
+             create_image_stack,
+             executable="python3 $HOME/src/pytme/scripts/extract_candidates.py",
+         )
+         run_command(create_image_stack)
+
+         if args.translation_uncertainty is not None:
+             dens = Density.from_file(args.candidate_stack_path)
+             stack_center = np.add(
+                 np.divide(dens.data.shape, 2).astype(int), np.mod(dens.data.shape, 2)
+             ).astype(int)[1:]
+
+             out = dens.empty
+             out.data[:,...] = create_mask(
+                 mask_type = "ellipse",
+                 center = stack_center,
+                 radius = args.translation_uncertainty,
+                 shape = dens.data.shape[1:]
+             )
+             out.to_file(args.target_mask)
+
+
+
+         # Perhaps we need a different optimizer here to use sensible steps for each parameter
+         parameters, min_loss = (), None
+         match_deep = DeepMatcher(args)
+         # for lowpass in (0, 10, 20, 50):
+         #     for highpass in (50, 100, 150, 200):
+         #         for whiten in (False, True):
+         #             loss = match_deep.forward((lowpass, highpass, whiten))
+         #             # print((lowpass, highpass), loss)
+         #             if min_loss is None:
+         #                 min_loss = loss
+         #             if loss < min_loss:
+         #                 min_loss = loss
+         #                 parameters = (lowpass, highpass, whiten),
+
+         # for lowpass in (10, 50, 100, 200):
+         #     for highpass in (10, 50, 100, 200):
+         for lowpass in args.lowpass_range:
+             for highpass in args.highpass_range:
+                 if lowpass is not None and highpass is not None:
+                     if lowpass >= highpass:
+                         continue
+                 for no_filter_target in (True, False):
+                     loss = match_deep.forward((lowpass, highpass, no_filter_target))
+                     if min_loss is None:
+                         min_loss = loss
+                     if loss < min_loss:
+                         min_loss = loss
+                         parameters = (lowpass, highpass, no_filter_target)
+
+         # print("Final output", min_loss, parameters)
+         import sys
+         sys.exit(0)
+
+         # parameters = optimize.minimize(
+         #     x0=match_deep.get_initial_values(),
+         #     fun=match_deep.forward,
+         #     method="L-BFGS-B",
+         #     options={"maxiter": 100}
+         # )
+         parameter_dict = match_deep.format_parameters(parameters)
+         print("Converged with parameters", parameters)
+
+         match_template = create_matching_argdict(args)
+         match_template.update(parameter_dict)
+         match_template = argdict_to_command(
+             match_template,
+             executable="python3 $HOME/src/pytme/scripts/match_template_filters.py",
+         )
+         _ = subprocess.run(match_template, capture_output=True, shell=True)
+
+         # Some form of labelling is necessary for these matches
+         # 1. All of them are true positives
+         # 2. All of them are true positives up to a certain threshold
+         # 3. Kernel fitting
+         # 4. Perhaps also sensible to include a certain percentage of low scores as true negatives
+         postprocess = create_postprocessing_argdict(args)
+         postprocess = argdict_to_command(postprocess, executable="postprocess.py")
+         _ = subprocess.run(postprocess, capture_output=True, shell=True)
+         args.orientations = f"{args.new_orientations_path}.tsv"
+         orientations = Orientations.from_file(args.orientations)
+         orientations.to_file(f"{args.output_prefix}_{current_iteration}.tsv")
+
+
+ if __name__ == "__main__":
+     main()
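For orientation, a short usage sketch of the argdict_to_command helper added in refine_matches.py: None values are dropped, True booleans become bare flags, False booleans are omitted, and everything else is emitted as a key/value pair before being joined into a shell command. The helper below is copied verbatim from the diff above; the argument values are placeholders chosen for illustration.

    from typing import Dict, List

    def argdict_to_command(input_args: Dict, executable: str) -> List:
        # Copied from refine_matches.py above.
        ret = []
        for key, value in input_args.items():
            if value is None:
                continue
            elif isinstance(value, bool):
                if value:
                    ret.append(key)
            else:
                ret.extend([key, value])
        ret = [str(x) for x in ret]
        ret.insert(0, executable)
        return " ".join(ret)

    # Placeholder arguments illustrating the cases handled above.
    matching_args = {
        "--target": "candidate_stack.h5",   # regular key/value pair
        "-a": 40,                           # non-string values are stringified
        "--use_gpu": True,                  # True booleans become bare flags
        "--invert_target_contrast": False,  # False booleans are dropped
        "--template_mask": None,            # None values are skipped
    }
    print(argdict_to_command(matching_args, executable="python3 match_template_filters.py"))
    # python3 match_template_filters.py --target candidate_stack.h5 -a 40 --use_gpu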