nabu 2025.1.0.dev13__py3-none-any.whl → 2025.1.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. nabu/__init__.py +1 -1
  2. nabu/app/cast_volume.py +12 -1
  3. nabu/app/cli_configs.py +81 -4
  4. nabu/app/estimate_motion.py +54 -0
  5. nabu/app/multicor.py +2 -4
  6. nabu/app/pcaflats.py +116 -0
  7. nabu/app/reconstruct.py +1 -7
  8. nabu/app/reduce_dark_flat.py +5 -2
  9. nabu/estimation/cor.py +1 -1
  10. nabu/estimation/motion.py +557 -0
  11. nabu/estimation/tests/test_motion_estimation.py +471 -0
  12. nabu/estimation/tilt.py +1 -1
  13. nabu/estimation/translation.py +47 -1
  14. nabu/io/cast_volume.py +94 -13
  15. nabu/io/reader.py +32 -1
  16. nabu/io/tests/test_remove_volume.py +152 -0
  17. nabu/pipeline/config_validators.py +42 -43
  18. nabu/pipeline/estimators.py +255 -0
  19. nabu/pipeline/fullfield/chunked.py +67 -43
  20. nabu/pipeline/fullfield/chunked_cuda.py +5 -2
  21. nabu/pipeline/fullfield/nabu_config.py +17 -11
  22. nabu/pipeline/fullfield/processconfig.py +8 -2
  23. nabu/pipeline/fullfield/reconstruction.py +3 -0
  24. nabu/pipeline/params.py +12 -0
  25. nabu/pipeline/tests/test_estimators.py +240 -3
  26. nabu/preproc/ccd.py +53 -3
  27. nabu/preproc/flatfield.py +306 -1
  28. nabu/preproc/shift.py +3 -1
  29. nabu/preproc/tests/test_pcaflats.py +154 -0
  30. nabu/processing/rotation_cuda.py +3 -1
  31. nabu/processing/tests/test_rotation.py +4 -2
  32. nabu/reconstruction/fbp.py +7 -0
  33. nabu/reconstruction/fbp_base.py +31 -7
  34. nabu/reconstruction/fbp_opencl.py +8 -0
  35. nabu/reconstruction/filtering_opencl.py +2 -0
  36. nabu/reconstruction/mlem.py +51 -14
  37. nabu/reconstruction/tests/test_filtering.py +13 -2
  38. nabu/reconstruction/tests/test_mlem.py +91 -62
  39. nabu/resources/dataset_analyzer.py +144 -20
  40. nabu/resources/nxflatfield.py +101 -35
  41. nabu/resources/tests/test_nxflatfield.py +1 -1
  42. nabu/resources/utils.py +16 -10
  43. nabu/stitching/alignment.py +7 -7
  44. nabu/stitching/config.py +22 -20
  45. nabu/stitching/definitions.py +2 -2
  46. nabu/stitching/overlap.py +4 -4
  47. nabu/stitching/sample_normalization.py +5 -5
  48. nabu/stitching/stitcher/post_processing.py +5 -3
  49. nabu/stitching/stitcher/pre_processing.py +24 -20
  50. nabu/stitching/tests/test_config.py +3 -3
  51. nabu/stitching/tests/test_y_preprocessing_stitching.py +11 -8
  52. nabu/stitching/tests/test_z_postprocessing_stitching.py +2 -2
  53. nabu/stitching/tests/test_z_preprocessing_stitching.py +23 -20
  54. nabu/stitching/utils/utils.py +7 -7
  55. nabu/testutils.py +1 -4
  56. nabu/utils.py +13 -0
  57. {nabu-2025.1.0.dev13.dist-info → nabu-2025.1.0rc1.dist-info}/METADATA +3 -4
  58. {nabu-2025.1.0.dev13.dist-info → nabu-2025.1.0rc1.dist-info}/RECORD +62 -57
  59. {nabu-2025.1.0.dev13.dist-info → nabu-2025.1.0rc1.dist-info}/WHEEL +1 -1
  60. {nabu-2025.1.0.dev13.dist-info → nabu-2025.1.0rc1.dist-info}/entry_points.txt +2 -1
  61. nabu/app/correct_rot.py +0 -62
  62. {nabu-2025.1.0.dev13.dist-info → nabu-2025.1.0rc1.dist-info}/licenses/LICENSE +0 -0
  63. {nabu-2025.1.0.dev13.dist-info → nabu-2025.1.0rc1.dist-info}/top_level.txt +0 -0
nabu/__init__.py CHANGED
@@ -1,4 +1,4 @@
- __version__ = "2025.1.0-dev13"
+ __version__ = "2025.1.0-rc1"
  __nabu_modules__ = [
      "app",
      "cuda",
nabu/app/cast_volume.py CHANGED
@@ -19,6 +19,7 @@ from nabu.io.cast_volume import (
      RESCALE_MIN_PERCENTILE,
      cast_volume,
      get_default_output_volume,
+     remove_volume,
  )
  from nabu.pipeline.params import files_formats
  from nabu.utils import convert_str_to_tuple
@@ -109,7 +110,14 @@ def main(argv=None):
          default=None,
          help="Provide url to the histogram - like: '/{path}/my_file.hdf5?path/to/my/data' with my_file.hdf5 is the file containing the histogram. Located under 'path'. And 'path/to/my/data' is the location of the HDF5 dataset",
      )
-
+     parser.add_argument(
+         "--remove-input-volume",
+         "--remove",
+         dest="remove",
+         action="store_true",
+         default=False,
+         help="Whether to remove the input volume after cast. Default is False.",
+     )
      options = parser.parse_args(argv[1:])

      # handle input volume
@@ -270,6 +278,9 @@ def main(argv=None):
          rescale_min_percentile=rescale_min_percentile,
          rescale_max_percentile=rescale_max_percentile,
      )
+     if options.remove:
+         _logger.info(f"Removing {input_volume.data_url.file_path()}")
+         remove_volume(input_volume, check=True)
      exit(0)

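Note: the same remove-after-cast flow is available from Python. A minimal sketch using cast_volume and remove_volume from this diff; the tomoscan volume classes, their constructor arguments, and the output_data_type keyword are assumptions that may differ across versions:

import numpy as np
from tomoscan.esrf.volume.hdf5volume import HDF5Volume
from tomoscan.esrf.volume.tiffvolume import TIFFVolume
from nabu.io.cast_volume import cast_volume, remove_volume

input_volume = HDF5Volume(file_path="/path/to/rec.hdf5", data_path="entry0000")  # placeholder paths
output_volume = TIFFVolume(folder="/path/to/tiff_out")
cast_volume(input_volume, output_volume, output_data_type=np.uint16)
remove_volume(input_volume, check=True)  # new in this release: delete the input once the cast succeeded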
nabu/app/cli_configs.py CHANGED
@@ -8,6 +8,7 @@ from nabu.pipeline.config_validators import str2bool

  from tomoscan.framereducer.method import ReduceMethod

+ reduce_methods = tuple(member.value for member in ReduceMethod)

  BootstrapConfig = {
      "bootstrap": {
@@ -162,7 +163,7 @@ MultiCorConfig = ReconstructConfig.copy()
  MultiCorConfig.update(
      {
          "cor": {
-             "help": "Positions of the center of rotation. It must be a list of comma-separated scalars, or in the form start:stop:step, where start, stop and step can all be floating-point values.",
+             "help": "Absolute positions of the center of rotation. It must be a list of comma-separated scalars, or in the form start:stop:step, where start, stop and step can all be floating-point values.",
              "default": "",
              "mandatory": True,
          },
@@ -322,6 +323,49 @@ CompareVolumesConfig = {
      },
  }

+ EstimateMotionConfig = {
+     "dataset": {
+         "help": "Path to the dataset.",
+         "default": "",
+         "mandatory": True,
+     },
+     "flatfield": {
+         "help": "Whether to perform flatfield normalization. Default is True.",
+         "default": "1",
+         "type": int,
+     },
+     "rot_center": {
+         "help": "Center of rotation. If not provided, will be estimated.",
+         "default": None,
+     },
+     "subsampling": {
+         "help": "For 360-degrees scan, angular subsampling for matching opposite projections. Default is 10.",
+         "default": 10,
+         "type": int,
+     },
+     "deg_xy": {
+         "help": "Polynomial degree in x-y for sample movement polynomial model",
+         "default": 2,
+         "type": int,
+     },
+     "deg_z": {
+         "help": "Polynomial degree in z (vertical) for sample movement polynomial model",
+         "default": 2,
+         "type": int,
+     },
+     "verbose": {
+         "help": "Whether to plot the movement estimation fit",
+         "default": 1,
+     },
+     "output_file": {
+         "help": "Path of the output file containing the sample translations projected in the detector reference frame. This file can be directly used in 'translation_movements_file' of nabu configuration",
+         "default": "correct_motion.txt",
+     },
+     "only": {
+         "help": "Whether to only generate motion file for horizontal or vertical movement: --only horizontal or --only vertical",
+         "default": "",
+     },
+ }

  # Default configuration for "stitching" command
  StitchingConfig = {
@@ -344,7 +388,7 @@ StitchingConfig = {
  # Default configuration for "stitching-bootstrap" command
  BootstrapStitchingConfig = {
      "stitching-type": {
-         "help": f"User can provide stitching type to filter some parameters. Must be in {StitchingType.values()}.",
+         "help": f"User can provide stitching type to filter some parameters. Must be in {[sst for sst in StitchingType]}.",
          "default": None,
      },
      "level": {
@@ -559,12 +603,12 @@ ReduceDarkFlatConfig = {
          "required": False,
      },
      "dark-method": {
-         "help": f"Define the method to be used for computing darks. Valid methods are {ReduceMethod.values()}",
+         "help": f"Define the method to be used for computing darks. Valid methods are {reduce_methods}",
          "default": ReduceMethod.MEAN,
          "required": False,
      },
      "flat-method": {
-         "help": f"Define the method to be used for computing flats. Valid methods are {ReduceMethod.values()}",
+         "help": f"Define the method to be used for computing flats. Valid methods are {reduce_methods}",
          "default": ReduceMethod.MEDIAN,
          "required": False,
      },
@@ -608,6 +652,39 @@
      },
  }

+ PCAFlatsConfig = {
+     "datasets": {"help": "datasets to be stitched together", "default": tuple(), "nargs": "+", "mandatory": True},
+     "flat-method": {
+         "help": f"Define the method to be used for computing flats. Valid methods are {reduce_methods}",
+         "default": ReduceMethod.MEDIAN,
+         "required": False,
+     },
+     "dark-method": {
+         "help": f"Define the method to be used for computing darks. Valid methods are {reduce_methods}",
+         "default": ReduceMethod.MEAN,
+         "required": False,
+     },
+     "overwrite": {
+         "dest": "overwrite",
+         "action": "store_true",
+         "default": False,
+         "help": "Overwrite dark/flats if exists",
+     },
+     "debug": {
+         "dest": "debug",
+         "action": "store_true",
+         "default": False,
+         "help": "Set logging system in debug mode",
+         "required": False,
+     },
+     "output-filename": {
+         "aliases": ("orfl",),
+         "default": None,
+         "help": "Where to save PCA flats. If not provided will be dumped in the current folder as {scan_prefix}_PCAFlats.hdf5",
+         "required": False,
+     },
+ }
+
  ShowReconstructionTimingsConfig = {
      "logfile": {
          "help": "Path to the log file.",
nabu/app/estimate_motion.py ADDED
@@ -0,0 +1,54 @@
+ from os import path
+
+ from ..resources.nxflatfield import update_dataset_info_flats_darks
+ from ..resources.logger import LoggerOrPrint
+ from ..resources.dataset_analyzer import analyze_dataset
+ from ..pipeline.config_validators import convert_to_bool
+ from ..pipeline.estimators import TranslationsEstimator
+ from .utils import parse_params_values
+ from .cli_configs import EstimateMotionConfig
+
+
+ def estimate_motion():
+     args = parse_params_values(
+         EstimateMotionConfig,
+         parser_description="Estimate sample motion and generate 'translation_movements_file' for nabu config file. ",
+     )
+     try:
+         rot_center = float(args["rot_center"])
+     except (ValueError, TypeError):
+         rot_center = None
+
+     logger = LoggerOrPrint(None)
+     dataset_info = analyze_dataset(args["dataset"], logger=logger)
+     do_ff = args["flatfield"]
+
+     if do_ff:
+         update_dataset_info_flats_darks(dataset_info, True, loading_mode="load_if_present")
+
+     est = TranslationsEstimator(
+         dataset_info,
+         do_flatfield=do_ff,
+         rot_center=rot_center,
+         angular_subsampling=args["subsampling"],
+         deg_xy=args["deg_xy"],
+         deg_z=args["deg_z"],
+         shifts_estimator="phase_cross_correlation",
+     )
+
+     estimated_shifts_h, estimated_shifts_v, cor = est.estimate_motion()
+     if convert_to_bool(args["verbose"]):
+         err_vu = est.motion_estimator.get_max_fit_error(cor=rot_center)
+         logger.info("Max fit error in 'u': %.2f pix \t\t Max fit error in 'v': %.2f pix" % (err_vu[1], err_vu[0]))
+         est.motion_estimator.plot_detector_shifts(cor=rot_center)
+         est.motion_estimator.plot_movements(cor=rot_center, angles_rad=dataset_info.rotation_angles)
+
+     out_file = args["output_file"]
+     est.generate_translations_movements_file(filename=out_file, only=args["only"] or None)
+     logger.info(
+         f"Wrote {out_file} - use 'translation_movements_file = {path.abspath(out_file)}' in nabu configuration file to correct for sample movements in the reconstruction'"
+     )
+
+
+ if __name__ == "__main__":
+     estimate_motion()
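Note: the new command reduces to a handful of API calls. A minimal sketch using only names appearing in this diff (the dataset path is a placeholder):

from nabu.resources.dataset_analyzer import analyze_dataset
from nabu.resources.nxflatfield import update_dataset_info_flats_darks
from nabu.pipeline.estimators import TranslationsEstimator

dataset_info = analyze_dataset("/path/to/scan.nx")
update_dataset_info_flats_darks(dataset_info, True, loading_mode="load_if_present")
est = TranslationsEstimator(
    dataset_info,
    do_flatfield=True,
    rot_center=None,  # estimated when not provided
    angular_subsampling=10,
    deg_xy=2,
    deg_z=2,
    shifts_estimator="phase_cross_correlation",
)
shifts_h, shifts_v, cor = est.estimate_motion()
est.generate_translations_movements_file(filename="correct_motion.txt")
# Then set 'translation_movements_file = correct_motion.txt' in the nabu configuration file.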
nabu/app/multicor.py CHANGED
@@ -34,16 +34,16 @@ def main():
          program_version="nabu " + version,
      )

+     cors = get_user_cors(args["cor"])
      reconstructor = get_reconstructor(
          args,
          # Put a dummy CoR to avoid crash in both full-FoV and extended-FoV.
          # It will be overwritten later by the user-defined CoRs
-         overwrite_options={"reconstruction/rotation_axis_position": 10.0},
+         overwrite_options={"reconstruction/rotation_axis_position": cors[0]},
      )

      if reconstructor.delta_z > 1:
          raise ValueError("Only slice reconstruction can be used (have delta_z = %d)" % reconstructor.delta_z)
-
      reconstructor.reconstruct()  # warm-up, spawn pipeline

      pipeline = reconstructor.pipeline
@@ -58,7 +58,6 @@
          pass
      ######

-     cors = get_user_cors(args["cor"])
      options = reconstructor.process_config.processing_options["reconstruction"]
      reconstruct_from_sinos_stack = (options["method"].lower() == "cone") or (
          options["method"].lower() == "mlem" and options["implementation"].lower() == "corrct"
@@ -81,7 +80,6 @@
          pipeline.processing_options["reconstruction"]["rotation_axis_position"] = cor
          pipeline.processing_options["save"]["file_prefix"] = file_prefix + "_%.03f" % cor
          pipeline._init_writer(create_subfolder=False, single_output_file_initialized=False)
-
          # Reconfigure center of rotation
          if not (do_halftomo):
              pipeline.reconstruction.reset_rot_center(cor)
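Note on the "cor" values: they are now documented as absolute positions, and the first user-supplied CoR seeds the reconstructor instead of a dummy value. Per the help text above, the parameter accepts either comma-separated scalars or a start:stop:step range; a hypothetical expansion for illustration (nabu's own get_user_cors may differ):

import numpy as np

def parse_cors(spec: str) -> list[float]:
    # "1020.5:1022.0:0.5" -> [1020.5, 1021.0, 1021.5]; "a,b,c" -> [a, b, c]
    if ":" in spec:
        start, stop, step = (float(s) for s in spec.split(":"))
        return np.arange(start, stop, step).tolist()
    return [float(s) for s in spec.split(",")]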
nabu/app/pcaflats.py ADDED
@@ -0,0 +1,116 @@
+ import sys
+ import os
+ import numpy as np
+ import h5py
+ from .utils import parse_params_values
+ from ..utils import is_writeable
+ from .cli_configs import PCAFlatsConfig
+ from .. import version
+ from ..preproc.flatfield import PCAFlatsDecomposer
+ from ..io.reader import NXDarksFlats
+
+
+ def get_flats_darks_in_nx(filename):
+     dfreader = NXDarksFlats(filename)
+     darks = np.concatenate([d for d in dfreader.get_raw_darks()], axis=0)
+     flats = np.concatenate([f for f in dfreader.get_raw_flats()], axis=0)
+     entry = dfreader.flats_reader.data_path.lstrip("/").split("/")[0]
+     return flats, darks, entry
+
+
+ def get_flats_darks_from_h5(filename):
+     flats = []
+     darks = []
+     with h5py.File(filename, "r") as f:
+         for k, v in f.items():
+             if k == "1.1":
+                 detector_name = decode_bytes(f["1.1/technique/tomoconfig/detector"][()][0])
+             else:
+                 try:
+                     image_key = v["technique/image_key"][()]
+                 except:
+                     raise NotImplementedError(
+                         "Legacy h5 file format is not handled. The entry of the h5 file should contain a 'technique/image_key' group."
+                     )
+                 if image_key == 2:  # Darks
+                     darks.append(v[f"instrument/{detector_name}/data"][()])
+                 elif image_key == 1:  # Flats
+                     flats.append(v[f"instrument/{detector_name}/data"][()])
+
+     flats = np.concatenate([f for f in flats], axis=0)
+     darks = np.concatenate([d for d in darks], axis=0)
+     return flats, darks, "entry0000"  # TODO this will be problematic on the reconstruction side
+
+
+ def pcaflats_decomposition(flats, darks, pcaflats_filename="PCAFlats.h5", overwrite=False, entry="entry0000"):
+     """Compute the PCA decomposition of a series of flats and darks, possibly taken from various scans."""
+     try:
+         decomposer = PCAFlatsDecomposer(flats, darks)
+         decomposer.save_decomposition(pcaflats_filename, overwrite=overwrite, entry=entry)
+         success = True
+     except:
+         success = False
+         raise ValueError("An error occurred in the PCA decomposition.")
+     return success
+
+
+ def decode_bytes(content):
+     if isinstance(content, bytes):
+         return content.decode()
+     else:
+         return content
+
+
+ def main(argv=None):
+     """Compute PCA Flats on a series of datasets (h5 or NX)."""
+     if argv is None:
+         argv = sys.argv[1:]
+
+     args = parse_params_values(
+         PCAFlatsConfig,
+         parser_description="Compute a PCA decomposition of flats acquired from various datasets.",
+         program_version="nabu " + version,
+         user_args=argv,
+     )
+
+     # Get "where to write".
+     abspath = os.path.abspath(args["output_filename"])
+     pcaflats_dir = os.path.dirname(abspath)
+     pcaflats_filename = os.path.basename(abspath)
+
+     if is_writeable(pcaflats_dir):
+         output_path = os.path.join(pcaflats_dir, pcaflats_filename)
+     else:
+         raise ValueError(f"Output dir {pcaflats_dir} is not writeable.")
+
+     # raise error if file exists and overwrite=False
+     if not args["overwrite"] and os.path.exists(output_path):
+         raise FileExistsError(f"Output file {output_path} already exists. Use --overwrite to overwrite it.")
+
+     # Collect raw darks and flats
+     flats_stack = []
+     darks_stack = []
+
+     for dataset in args["datasets"]:
+         filename = os.path.basename(dataset)
+         kind = filename.split(".")[-1]
+         if kind == "nx":
+             flats, darks, entry = get_flats_darks_in_nx(dataset)
+         elif kind in ("h5", "hdf5"):
+             flats, darks, entry = get_flats_darks_from_h5(dataset)
+
+         flats_stack.append(flats)
+         darks_stack.append(darks)
+
+     flats = np.concatenate(flats_stack, axis=0)
+     darks = np.concatenate(darks_stack, axis=0)
+
+     exit(
+         pcaflats_decomposition(
+             flats, darks, pcaflats_filename=args["output_filename"], overwrite=args["overwrite"], entry=entry
+         )
+     )
+
+
+ if __name__ == "__main__":
+     main()
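Note: the decomposition step can also be driven directly from Python. A minimal sketch for a single NX dataset, using only names appearing in this diff (the path is a placeholder):

import numpy as np
from nabu.io.reader import NXDarksFlats
from nabu.preproc.flatfield import PCAFlatsDecomposer

dfreader = NXDarksFlats("/path/to/scan.nx")
flats = np.concatenate(dfreader.get_raw_flats(), axis=0)  # get_raw_flats() yields one stack per series
darks = np.concatenate(dfreader.get_raw_darks(), axis=0)
decomposer = PCAFlatsDecomposer(flats, darks)
decomposer.save_decomposition("PCAFlats.h5", overwrite=True, entry="entry0000")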
nabu/app/reconstruct.py CHANGED
@@ -1,5 +1,5 @@
- from tomoscan.io import HDF5File
  from .. import version
+ from ..io.reader import list_hdf5_entries
  from ..utils import list_match_queries
  from ..pipeline.config import parse_nabu_config_file
  from ..pipeline.config_validators import convert_to_int
@@ -92,12 +92,6 @@ def get_reconstructor(args, overwrite_options=None):
      return reconstructor


- def list_hdf5_entries(fname):
-     with HDF5File(fname, "r") as f:
-         entries = list(f.keys())
-     return entries
-
-
  def main():
      args = parse_params_values(
          ReconstructConfig,
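Note: list_hdf5_entries is moved rather than dropped; it now lives in nabu.io.reader (consistent with the +32 -1 change to nabu/io/reader.py listed above), and its usage is unchanged:

from nabu.io.reader import list_hdf5_entries

entries = list_hdf5_entries("/path/to/dataset.nx")  # e.g. ["entry0000", "entry0001"]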
nabu/app/reduce_dark_flat.py CHANGED
@@ -12,6 +12,9 @@ from tomoscan.factory import Factory
  from silx.io.url import DataUrl


+ reduce_methods = tuple(member.value for member in ReduceMethod)
+
+
  def _create_data_urls(output_file: Optional[str], output_data_path: Optional[str], name: str):
      """
      util function to compute reduced Data and metadata url(s)
@@ -65,8 +68,8 @@ def reduce_dark_flat(
      """
      calculation of the darks / flats calling tomoscan utils function
      """
-     dark_method = ReduceMethod.from_value(dark_method) if dark_method is not None else None
-     flat_method = ReduceMethod.from_value(flat_method) if flat_method is not None else None
+     dark_method = ReduceMethod(dark_method) if dark_method is not None else None
+     flat_method = ReduceMethod(flat_method) if flat_method is not None else None

      # 1. define url where to save the file
      ## 1.1 for darks
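Note: the from_value() helper is replaced by plain Enum call syntax, the standard way to look up an Enum member by its value; a sketch (the member values shown are assumptions):

from tomoscan.framereducer.method import ReduceMethod

method = ReduceMethod("mean")  # same lookup as the former ReduceMethod.from_value("mean")
reduce_methods = tuple(m.value for m in ReduceMethod)  # e.g. ("mean", "median", ...)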
nabu/estimation/cor.py CHANGED
@@ -132,7 +132,7 @@ class CenterOfRotation(AlignmentBase):
          img_2 = self._prepare_image(img_2, roi_yxhw=roi_yxhw, median_filt_shape=median_filt_shape)

          cc = self._compute_correlation_fft(img_1, img_2, padding_mode, high_pass=high_pass, low_pass=low_pass)
-         img_shape = img_2.shape
+         img_shape = cc.shape  # Because cc.shape can differ from img_2.shape (e.g. in case of odd nb of cols)
          cc_vs = np.fft.fftfreq(img_shape[-2], 1 / img_shape[-2])
          cc_hs = np.fft.fftfreq(img_shape[-1], 1 / img_shape[-1])
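Note: the fix matters because a real-to-complex FFT round trip (the likely cause the in-code comment refers to) always yields an even width, so with an odd number of columns the correlation map is narrower than the input; a minimal numpy sketch:

import numpy as np

img = np.ones((4, 5))  # odd number of columns
cc = np.fft.irfft(np.fft.rfft(img, axis=-1), axis=-1)
print(img.shape, cc.shape)  # (4, 5) (4, 4): one column narrower
cc_hs = np.fft.fftfreq(cc.shape[-1], 1 / cc.shape[-1])  # axes must be built from cc.shape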