nabu 2024.2.14__py3-none-any.whl → 2025.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (197)
  1. doc/doc_config.py +32 -0
  2. nabu/__init__.py +1 -1
  3. nabu/app/bootstrap_stitching.py +4 -2
  4. nabu/app/cast_volume.py +16 -14
  5. nabu/app/cli_configs.py +102 -9
  6. nabu/app/compare_volumes.py +1 -1
  7. nabu/app/composite_cor.py +2 -4
  8. nabu/app/diag_to_pix.py +5 -6
  9. nabu/app/diag_to_rot.py +10 -11
  10. nabu/app/double_flatfield.py +18 -5
  11. nabu/app/estimate_motion.py +75 -0
  12. nabu/app/multicor.py +28 -15
  13. nabu/app/parse_reconstruction_log.py +1 -0
  14. nabu/app/pcaflats.py +122 -0
  15. nabu/app/prepare_weights_double.py +1 -2
  16. nabu/app/reconstruct.py +1 -7
  17. nabu/app/reconstruct_helical.py +5 -9
  18. nabu/app/reduce_dark_flat.py +5 -4
  19. nabu/app/rotate.py +3 -1
  20. nabu/app/stitching.py +7 -2
  21. nabu/app/tests/test_reduce_dark_flat.py +2 -2
  22. nabu/app/validator.py +1 -4
  23. nabu/cuda/convolution.py +1 -1
  24. nabu/cuda/fft.py +1 -1
  25. nabu/cuda/medfilt.py +1 -1
  26. nabu/cuda/padding.py +1 -1
  27. nabu/cuda/src/backproj.cu +6 -6
  28. nabu/cuda/src/cone.cu +4 -0
  29. nabu/cuda/src/hierarchical_backproj.cu +14 -0
  30. nabu/cuda/utils.py +2 -2
  31. nabu/estimation/alignment.py +17 -31
  32. nabu/estimation/cor.py +27 -33
  33. nabu/estimation/cor_sino.py +2 -8
  34. nabu/estimation/focus.py +4 -8
  35. nabu/estimation/motion.py +557 -0
  36. nabu/estimation/tests/test_alignment.py +2 -0
  37. nabu/estimation/tests/test_motion_estimation.py +471 -0
  38. nabu/estimation/tests/test_tilt.py +1 -1
  39. nabu/estimation/tilt.py +6 -5
  40. nabu/estimation/translation.py +47 -1
  41. nabu/io/cast_volume.py +108 -18
  42. nabu/io/detector_distortion.py +5 -6
  43. nabu/io/reader.py +45 -6
  44. nabu/io/reader_helical.py +5 -4
  45. nabu/io/tests/test_cast_volume.py +2 -2
  46. nabu/io/tests/test_readers.py +41 -38
  47. nabu/io/tests/test_remove_volume.py +152 -0
  48. nabu/io/tests/test_writers.py +2 -2
  49. nabu/io/utils.py +8 -4
  50. nabu/io/writer.py +1 -2
  51. nabu/misc/fftshift.py +1 -1
  52. nabu/misc/fourier_filters.py +1 -1
  53. nabu/misc/histogram.py +1 -1
  54. nabu/misc/histogram_cuda.py +1 -1
  55. nabu/misc/padding_base.py +1 -1
  56. nabu/misc/rotation.py +1 -1
  57. nabu/misc/rotation_cuda.py +1 -1
  58. nabu/misc/tests/test_binning.py +1 -1
  59. nabu/misc/transpose.py +1 -1
  60. nabu/misc/unsharp.py +1 -1
  61. nabu/misc/unsharp_cuda.py +1 -1
  62. nabu/misc/unsharp_opencl.py +1 -1
  63. nabu/misc/utils.py +1 -1
  64. nabu/opencl/fft.py +1 -1
  65. nabu/opencl/padding.py +1 -1
  66. nabu/opencl/src/backproj.cl +6 -6
  67. nabu/opencl/utils.py +8 -8
  68. nabu/pipeline/config.py +2 -2
  69. nabu/pipeline/config_validators.py +46 -46
  70. nabu/pipeline/datadump.py +3 -3
  71. nabu/pipeline/estimators.py +271 -11
  72. nabu/pipeline/fullfield/chunked.py +103 -67
  73. nabu/pipeline/fullfield/chunked_cuda.py +5 -2
  74. nabu/pipeline/fullfield/computations.py +4 -1
  75. nabu/pipeline/fullfield/dataset_validator.py +0 -1
  76. nabu/pipeline/fullfield/get_double_flatfield.py +147 -0
  77. nabu/pipeline/fullfield/nabu_config.py +36 -17
  78. nabu/pipeline/fullfield/processconfig.py +41 -7
  79. nabu/pipeline/fullfield/reconstruction.py +14 -10
  80. nabu/pipeline/helical/dataset_validator.py +3 -4
  81. nabu/pipeline/helical/fbp.py +4 -4
  82. nabu/pipeline/helical/filtering.py +5 -4
  83. nabu/pipeline/helical/gridded_accumulator.py +10 -11
  84. nabu/pipeline/helical/helical_chunked_regridded.py +1 -0
  85. nabu/pipeline/helical/helical_reconstruction.py +12 -9
  86. nabu/pipeline/helical/helical_utils.py +1 -2
  87. nabu/pipeline/helical/nabu_config.py +2 -1
  88. nabu/pipeline/helical/span_strategy.py +1 -0
  89. nabu/pipeline/helical/weight_balancer.py +2 -3
  90. nabu/pipeline/params.py +20 -3
  91. nabu/pipeline/tests/__init__.py +0 -0
  92. nabu/pipeline/tests/test_estimators.py +240 -3
  93. nabu/pipeline/utils.py +1 -1
  94. nabu/pipeline/writer.py +1 -1
  95. nabu/preproc/alignment.py +0 -10
  96. nabu/preproc/ccd.py +53 -3
  97. nabu/preproc/ctf.py +8 -8
  98. nabu/preproc/ctf_cuda.py +1 -1
  99. nabu/preproc/double_flatfield_cuda.py +2 -2
  100. nabu/preproc/double_flatfield_variable_region.py +0 -1
  101. nabu/preproc/flatfield.py +307 -2
  102. nabu/preproc/flatfield_cuda.py +1 -2
  103. nabu/preproc/flatfield_variable_region.py +3 -3
  104. nabu/preproc/phase.py +2 -4
  105. nabu/preproc/phase_cuda.py +2 -2
  106. nabu/preproc/shift.py +4 -2
  107. nabu/preproc/shift_cuda.py +0 -1
  108. nabu/preproc/tests/test_ctf.py +4 -4
  109. nabu/preproc/tests/test_double_flatfield.py +1 -1
  110. nabu/preproc/tests/test_flatfield.py +1 -1
  111. nabu/preproc/tests/test_paganin.py +1 -3
  112. nabu/preproc/tests/test_pcaflats.py +154 -0
  113. nabu/preproc/tests/test_vshift.py +4 -1
  114. nabu/processing/azim.py +9 -5
  115. nabu/processing/convolution_cuda.py +6 -4
  116. nabu/processing/fft_base.py +7 -3
  117. nabu/processing/fft_cuda.py +25 -164
  118. nabu/processing/fft_opencl.py +28 -6
  119. nabu/processing/fftshift.py +1 -1
  120. nabu/processing/histogram.py +1 -1
  121. nabu/processing/muladd.py +0 -1
  122. nabu/processing/padding_base.py +1 -1
  123. nabu/processing/padding_cuda.py +0 -2
  124. nabu/processing/processing_base.py +12 -6
  125. nabu/processing/rotation_cuda.py +3 -1
  126. nabu/processing/tests/test_fft.py +2 -64
  127. nabu/processing/tests/test_fftshift.py +1 -1
  128. nabu/processing/tests/test_medfilt.py +1 -3
  129. nabu/processing/tests/test_padding.py +1 -1
  130. nabu/processing/tests/test_roll.py +1 -1
  131. nabu/processing/tests/test_rotation.py +4 -2
  132. nabu/processing/unsharp_opencl.py +1 -1
  133. nabu/reconstruction/astra.py +245 -0
  134. nabu/reconstruction/cone.py +39 -9
  135. nabu/reconstruction/fbp.py +7 -0
  136. nabu/reconstruction/fbp_base.py +36 -5
  137. nabu/reconstruction/filtering.py +59 -25
  138. nabu/reconstruction/filtering_cuda.py +22 -21
  139. nabu/reconstruction/filtering_opencl.py +10 -14
  140. nabu/reconstruction/hbp.py +26 -13
  141. nabu/reconstruction/mlem.py +55 -16
  142. nabu/reconstruction/projection.py +3 -5
  143. nabu/reconstruction/sinogram.py +1 -1
  144. nabu/reconstruction/sinogram_cuda.py +0 -1
  145. nabu/reconstruction/tests/test_cone.py +37 -2
  146. nabu/reconstruction/tests/test_deringer.py +4 -4
  147. nabu/reconstruction/tests/test_fbp.py +36 -15
  148. nabu/reconstruction/tests/test_filtering.py +27 -7
  149. nabu/reconstruction/tests/test_halftomo.py +28 -2
  150. nabu/reconstruction/tests/test_mlem.py +94 -64
  151. nabu/reconstruction/tests/test_projector.py +7 -2
  152. nabu/reconstruction/tests/test_reconstructor.py +1 -1
  153. nabu/reconstruction/tests/test_sino_normalization.py +0 -1
  154. nabu/resources/dataset_analyzer.py +210 -24
  155. nabu/resources/gpu.py +4 -4
  156. nabu/resources/logger.py +4 -4
  157. nabu/resources/nxflatfield.py +103 -37
  158. nabu/resources/tests/test_dataset_analyzer.py +37 -0
  159. nabu/resources/tests/test_extract.py +11 -0
  160. nabu/resources/tests/test_nxflatfield.py +5 -5
  161. nabu/resources/utils.py +16 -10
  162. nabu/stitching/alignment.py +8 -11
  163. nabu/stitching/config.py +44 -35
  164. nabu/stitching/definitions.py +2 -2
  165. nabu/stitching/frame_composition.py +8 -10
  166. nabu/stitching/overlap.py +4 -4
  167. nabu/stitching/sample_normalization.py +5 -5
  168. nabu/stitching/slurm_utils.py +2 -2
  169. nabu/stitching/stitcher/base.py +2 -0
  170. nabu/stitching/stitcher/dumper/base.py +0 -1
  171. nabu/stitching/stitcher/dumper/postprocessing.py +1 -1
  172. nabu/stitching/stitcher/post_processing.py +11 -9
  173. nabu/stitching/stitcher/pre_processing.py +37 -31
  174. nabu/stitching/stitcher/single_axis.py +2 -3
  175. nabu/stitching/stitcher_2D.py +2 -1
  176. nabu/stitching/tests/test_config.py +10 -11
  177. nabu/stitching/tests/test_sample_normalization.py +1 -1
  178. nabu/stitching/tests/test_slurm_utils.py +1 -2
  179. nabu/stitching/tests/test_y_preprocessing_stitching.py +11 -8
  180. nabu/stitching/tests/test_z_postprocessing_stitching.py +3 -3
  181. nabu/stitching/tests/test_z_preprocessing_stitching.py +27 -24
  182. nabu/stitching/utils/tests/__init__.py +0 -0
  183. nabu/stitching/utils/tests/test_post-processing.py +1 -0
  184. nabu/stitching/utils/utils.py +16 -18
  185. nabu/tests.py +0 -3
  186. nabu/testutils.py +62 -9
  187. nabu/utils.py +50 -20
  188. {nabu-2024.2.14.dist-info → nabu-2025.1.0.dist-info}/METADATA +7 -7
  189. nabu-2025.1.0.dist-info/RECORD +328 -0
  190. {nabu-2024.2.14.dist-info → nabu-2025.1.0.dist-info}/WHEEL +1 -1
  191. {nabu-2024.2.14.dist-info → nabu-2025.1.0.dist-info}/entry_points.txt +2 -1
  192. nabu/app/correct_rot.py +0 -70
  193. nabu/io/tests/test_detector_distortion.py +0 -178
  194. nabu-2024.2.14.dist-info/RECORD +0 -317
  195. /nabu/{stitching → app}/tests/__init__.py +0 -0
  196. {nabu-2024.2.14.dist-info → nabu-2025.1.0.dist-info}/licenses/LICENSE +0 -0
  197. {nabu-2024.2.14.dist-info → nabu-2025.1.0.dist-info}/top_level.txt +0 -0
nabu/pipeline/fullfield/chunked_cuda.py
@@ -75,8 +75,11 @@ class CudaChunkedPipeline(ChunkedPipeline):
         # Decide when to transfer data to GPU. Normally it's right after reading the data,
         # But sometimes a part of the processing is done on CPU.
         self._when_to_transfer_radios_on_gpu = "read_data"
-        if self.flatfield is not None and self.flatfield.distortion_correction is not None:
-            self._when_to_transfer_radios_on_gpu = "flatfield"
+        if self.flatfield is not None:
+            use_flats_distortion = getattr(self.flatfield, "distortion_correction", None) is not None
+            use_pca_flats = self.processing_options["flatfield"]["method"].lower() == "pca"
+            if use_flats_distortion or use_pca_flats:
+                self._when_to_transfer_radios_on_gpu = "flatfield"

     def _init_cuda(self, cuda_options):
         if not (__has_pycuda__):
nabu/pipeline/fullfield/computations.py
@@ -128,7 +128,10 @@ def estimate_required_memory(
     if process_config.rec_params["method"] == "cone":
         # In cone-beam reconstruction, need both sinograms and reconstruction inside GPU.
         # That's big!
-        total_memory_needed += 2 * data_volume_size
+        mult_factor = 2
+        if rec_config["crop_filtered_data"] is False:
+            mult_factor = 4
+        total_memory_needed += mult_factor * data_volume_size

     if debug:
         print(
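
To put numbers on this multiplier (an illustrative back-of-the-envelope calculation, not taken from nabu's own documentation):

    import numpy as np

    # Hypothetical chunk: 2000 projections of 2048 x 512 pixels, float32
    data_volume_size = 2000 * 2048 * 512 * np.dtype(np.float32).itemsize  # ~8.4 GB
    for crop_filtered_data, mult_factor in [(True, 2), (False, 4)]:
        # Disabling crop_filtered_data doubles the cone-beam GPU budget again
        print(crop_filtered_data, "->", mult_factor * data_volume_size / 1e9, "GB")

The 'crop_filtered_data' option that drives this factor is added to the [reconstruction] section in nabu_config.py below.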
nabu/pipeline/fullfield/dataset_validator.py
@@ -1,4 +1,3 @@
-import os
 from ..dataset_validator import DatasetValidatorBase


nabu/pipeline/fullfield/get_double_flatfield.py (new file)
@@ -0,0 +1,147 @@
+"""
+Double-flatfield:
+- Compute the average of all projections, which gives one resulting image
+- Apply some filter to this image (DFF)
+- Subtract or divide this image from all the projections
+"""
+
+from os import path
+from silx.io.url import DataUrl
+from silx.io.dictdump import h5todict
+
+from nabu.io.utils import get_first_hdf5_entry
+
+from ...utils import is_writeable
+from ...app.double_flatfield import DoubleFlatFieldChunks
+from ...resources.nxflatfield import data_url_exists
+
+rel_file_path_template = "{scan_name}_dff.h5"
+data_path_template = "{entry}/double_flatfield"
+
+
+def get_possible_dff_urls(dataset_info, user_dir, output_dir):
+    """
+    See nabu.resources.nxflatfield.get_frame_possible_urls
+    """
+    entry = dataset_info.hdf5_entry or ""
+
+    def make_dataurl(dirname):
+        file_path = path.join(
+            dirname,
+            rel_file_path_template.format(scan_name=dataset_info.scan_basename),
+        )
+        return DataUrl(
+            file_path=file_path,
+            data_path=data_path_template.format(entry=entry),
+            scheme="silx",
+        )
+
+    urls = {"user": None, "dataset": None, "output": None}
+
+    if user_dir is not None:
+        urls["user"] = make_dataurl(user_dir)
+    urls["dataset"] = make_dataurl(dataset_info.scan_dirname)
+    if output_dir is not None:
+        urls["output"] = make_dataurl(output_dir)
+
+    return urls
+
+
+def compute_and_save_dff(dataset_info, possible_urls, dff_options):
+    if possible_urls["user"] is not None:
+        dff_output_file = possible_urls["user"].file_path()
+    elif is_writeable(path.dirname(possible_urls["dataset"].file_path())):
+        dff_output_file = possible_urls["dataset"].file_path()
+    else:
+        dff_output_file = possible_urls["output"].file_path()
+
+    dataset_info.logger.info("Computing double flatfield")
+    dff = DoubleFlatFieldChunks(
+        None,
+        dff_output_file,
+        dataset_info=dataset_info,
+        chunk_size=dff_options.get("chunk_size", 100),
+        sigma=dff_options.get("dff_sigma", None),
+        do_flatfield=dff_options.get("do_flatfield", True),
+        logger=dataset_info.logger,
+    )
+    dff_image = dff.compute_double_flatfield()
+    return dff.write_double_flatfield(dff_image)
+
+
+def check_existing_dff(dff_url, dff_options, logger):
+    # Check that the DFF exists at the given DataUrl, and that its configuration matches the wanted config
+    # Return the DFF file path
+    if not (data_url_exists(dff_url)):
+        raise ValueError("DFF file not found:", dff_url)
+
+    fname = dff_url.file_path()
+    entry = get_first_hdf5_entry(fname)
+    dff_file_options = h5todict(fname, path=entry + "/double_flatfield/configuration", asarray=False)
+
+    ff_file = dff_file_options.get("do_flatfield", True)
+    ff_user = dff_options.get("do_flatfield", True)
+    # Use "==" instead of "is" here, as h5todict() will return something like numpy.True_ instead of True
+    if ff_file != ff_user:
+        msg = "DFF was computed with flatfield=%s, but you asked flatfield=%s" % (ff_file, ff_user)
+        logger.error(msg)
+        return False
+
+    # Use this because h5todict() returns str("None") instead of None
+    def _correct_none(x):
+        if x in [None, "None"]:
+            return None
+        return x
+
+    sigma_file = _correct_none(dff_file_options.get("dff_sigma", None))
+    sigma_user = _correct_none(dff_options.get("dff_sigma", None))
+    if sigma_file != sigma_user:
+        msg = "DFF was computed with dff_sigma=%s, but you asked dff_sigma=%s" % (sigma_file, sigma_user)
+        logger.error(msg)
+        return False
+
+    return fname
+
+
+# pylint: disable=E1136
+def get_double_flatfield(dataset_info, mode, output_dir=None, darks_flats_dir=None, dff_options=None):
+    """
+    See nabu.resources.nxflatfield.update_dataset_info_flats_darks for the logic
+    """
+    if mode is False:
+        return
+    dff_options = dff_options or {}
+
+    possible_urls = get_possible_dff_urls(dataset_info, darks_flats_dir, output_dir)
+
+    if mode == "force-compute":
+        return compute_and_save_dff(dataset_info, possible_urls, dff_options)
+
+    def _can_load_from(folder_type):
+        if possible_urls.get(folder_type, None) is None:
+            return False
+        return data_url_exists(possible_urls[folder_type])
+
+    where_to_load_from = None
+    if possible_urls["user"] is not None and _can_load_from("user"):
+        where_to_load_from = "user"
+    elif _can_load_from("dataset"):
+        where_to_load_from = "dataset"
+    elif _can_load_from("output"):
+        where_to_load_from = "output"
+
+    if where_to_load_from is None:
+        if mode == "force-load":
+            raise ValueError("Could not load double-flatfield file (using 'force-load')")
+        else:
+            return compute_and_save_dff(dataset_info, possible_urls, dff_options)
+
+    fname = check_existing_dff(possible_urls[where_to_load_from], dff_options, dataset_info.logger)
+    if fname is False:
+        if mode == "force-load":
+            raise ValueError("Could not load double-flatfield file (using 'force-load'): wrong configuration")
+        return compute_and_save_dff(dataset_info, possible_urls, dff_options)
+    return fname
+
+    # One possible corner case: if mode == "force-load" and darks_flats_dir is not None (but the actual folder is empty)
+    # then nabu will load a DFF found elsewhere (if any). We might want to raise an error instead.
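
A minimal usage sketch of this new helper; dataset_info and the paths below are hypothetical placeholders (dataset_info would come from nabu's dataset analyzer), and the mode values mirror the ones handled above:

    dff_file = get_double_flatfield(
        dataset_info,
        True,  # or "force-load" / "force-compute"
        output_dir="/path/to/output",
        darks_flats_dir=None,
        dff_options={"dff_sigma": None, "do_flatfield": True},
    )
    # Returns the path of an HDF5 file holding the DFF image: an existing one
    # when its stored configuration matches, otherwise freshly computed and saved.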
nabu/pipeline/fullfield/nabu_config.py
@@ -1,4 +1,5 @@
-from ..config_validators import *
+# ruff: noqa: F405
+from ..config_validators import *  # noqa: F403

 nabu_config = {
     "dataset": {
@@ -15,14 +16,14 @@ nabu_config = {
             "type": "advanced",
         },
         "nexus_version": {
-            "default": "1.4",
-            "help": "Nexus version to use when browsing the HDF5 dataset. Default is 1.0.",
-            "validator": float_validator,
+            "default": "",
+            "help": "Specify a Nexus version to use when browsing the HDF5 dataset.",
+            "validator": optional_float_validator,
             "type": "advanced",
         },
         "darks_flats_dir": {
             "default": "",
-            "help": "Path to a directory where XXX_flats.h5 and XXX_darks.h5 are to be found, where 'XXX' denotes the dataset basename. If these files are found, then reduced flats/darks will be loaded from them. Otherwise, reduced flats/darks will be saved to there once computed, either in the .nx directory, or in the output directory. Mind that the HDF5 entry corresponds to the one of the dataset.",
+            "help": "Path to a directory where XXX_flats.h5 and XXX_darks.h5 are to be found, where 'XXX' denotes the dataset basename. If these files are found, then reduced flats/darks will be loaded from them. Otherwise, reduced flats/darks will be saved there once computed, either in the .nx directory, or in the output directory. Mind that the HDF5 entry corresponds to the one of the dataset.",
             "validator": optional_directory_location_validator,
             "type": "optional",
         },
@@ -40,7 +41,7 @@ nabu_config = {
         },
         "projections_subsampling": {
             "default": "1",
-            "help": "Projections subsampling factor: take one projection out of 'projection_subsampling'. The format can be an integer (take 1 projection out of N), or N:M (take 1 projection out of N, start with the projection number M)\nFor example: 2 (or 2:0) to reconstruct from even projections, 2:1 to reconstruct from odd projections.",
+            "help": "Projections subsampling factor: take one projection out of 'projections_subsampling'. The format can be an integer (take 1 projection out of N), or N:M (take 1 projection out of N, start with the projection number M)\nFor example: 2 (or 2:0) to reconstruct from even projections, 2:1 to reconstruct from odd projections.",
             "validator": projections_subsampling_validator,
             "type": "advanced",
         },
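
In plain Python, the N:M syntax described in this help text amounts to:

    n_projs = 10
    step, start = 2, 1                        # the "2:1" setting
    print(list(range(start, n_projs, step)))  # [1, 3, 5, 7, 9] -> odd projections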
@@ -60,13 +61,19 @@ nabu_config = {
     "preproc": {
         "flatfield": {
             "default": "1",
-            "help": "How to perform flat-field normalization. The parameter value can be:\n - 1 or True: enabled.\n - 0 or False: disabled\n - forced or force-load: perform flatfield regardless of the dataset by attempting to load darks/flats\n - force-compute: perform flatfield, ignore all .h5 files containing already computed darks/flats.",
-            "validator": flatfield_enabled_validator,
+            "help": "How to perform flat-field normalization. The parameter value can be:\n - 1 or True: enabled.\n - 0 or False: disabled\n - pca: perform a PCA flat-field normalization via Principal Component Analysis decomposition",
+            "validator": flatfield_validator,
             "type": "required",
         },
+        "flatfield_loading_mode": {
+            "default": "load_if_present",
+            "help": "How to load/compute flat-field. This parameter can be:\n - load_if_present (default) or empty string: Use the existing flatfield files, if existing.\n - force-load: perform flatfield regardless of the dataset by attempting to load darks/flats\n - force-compute: perform flatfield, ignore all .h5 files containing already computed darks/flats.",
+            "validator": flatfield_loading_mode_validator,
+            "type": "optional",
+        },
         "flat_distortion_correction_enabled": {
             "default": "0",
-            "help": "Whether to correct for flat distortion. If activated, each radio is correlated with its corresponding flat, in order to determine and correct the flat distortion.",
+            "help": "Whether to correct for flat distortion. If activated, each radiograph is correlated with its corresponding flat, in order to determine and correct the flat distortion.",
             "validator": boolean_validator,
             "type": "advanced",
         },
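
How the new 'flatfield' values are consumed downstream can be condensed as follows (a sketch distilled from the processconfig.py hunks further below; not an actual nabu API):

    def resolve_flatfield(preproc_cfg):
        ff = preproc_cfg["flatfield"]  # True / False / "pca"
        if not ff:
            return None
        method = "pca" if ff == "pca" else "default"
        # SR-current normalization only applies to the standard method
        srcur = preproc_cfg.get("normalize_srcurrent", False) and method == "default"
        return {"method": method, "normalize_srcurrent": srcur}

    print(resolve_flatfield({"flatfield": "pca", "normalize_srcurrent": True}))
    # {'method': 'pca', 'normalize_srcurrent': False}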
@@ -109,10 +116,10 @@ nabu_config = {
             "validator": generic_options_validator,
             "type": "advanced",
         },
-        "double_flatfield_enabled": {
+        "double_flatfield": {
             "default": "0",
-            "help": "Whether to enable the 'double flat-field' filetering for correcting rings artefacts.",
-            "validator": boolean_validator,
+            "help": "Whether to perform 'double flat-field' filtering (this can help to remove rings artefacts). Possible values:\n - 1 or True: enabled.\n - 0 or False: disabled\n - force-load: use an existing DFF file regardless of the dataset\n - force-compute: re-compute the DFF, ignore all existing .h5 files containing already computed DFF",
+            "validator": flatfield_validator,
             "type": "optional",
         },
         "dff_sigma": {
@@ -171,7 +178,7 @@ nabu_config = {
         },
         "rotate_projections_center": {
             "default": "",
-            "help": "Center of rotation when 'tilt_correction' is non-empty. By default the center of rotation is the middle of each radio, i.e ((Nx-1)/2.0, (Ny-1)/2.0).",
+            "help": "Center of rotation when 'tilt_correction' is non-empty. By default the center of rotation is the middle of each radiograph, i.e ((Nx-1)/2.0, (Ny-1)/2.0).",
             "validator": optional_tuple_of_floats_validator,
             "type": "advanced",
         },
@@ -271,7 +278,7 @@ nabu_config = {
         },
         "cor_slice": {
             "default": "",
-            "help": "Which slice to use for estimating the Center of Rotation (CoR). This parameter can be an integer or 'top', 'middle', 'bottom'.\nIf provided, the CoR will be estimated from the correspondig sinogram, and 'cor_options' can contain the parameter 'subsampling'.",
+            "help": "Which slice to use for estimating the Center of Rotation (CoR). This parameter can be an integer or 'top', 'middle', 'bottom'.\nIf provided, the CoR will be estimated from the corresponding sinogram, and 'cor_options' can contain the parameter 'subsampling'.",
             "validator": cor_slice_validator,
             "type": "advanced",
         },
@@ -401,6 +408,12 @@ nabu_config = {
             "validator": nonnegative_integer_validator,
             "type": "advanced",
         },
+        "crop_filtered_data": {
+            "default": "1",
+            "help": "Whether to crop the data after the filtering step in FBP/FDK. This parameter should always be 1 unless you know what you are doing.",
+            "validator": boolean_validator,
+            "type": "advanced",
+        },
         "optim_algorithm": {
             "default": "chambolle-pock",
             "help": "Optimization algorithm for iterative methods",
@@ -472,7 +485,7 @@ nabu_config = {
     },
     "postproc": {
         "output_histogram": {
-            "default": "0",
+            "default": "1",
             "help": "Whether to compute a histogram of the volume.",
             "validator": boolean_validator,
             "type": "optional",
@@ -537,7 +550,7 @@ nabu_config = {
     "pipeline": {
         "save_steps": {
             "default": "",
-            "help": "Save intermediate results. This is a list of comma-separated processing steps, for ex: flatfield, phase, sinogram.\nEach step generates a HDF5 file in the form name_file_prefix.hdf5 (ex. 'sinogram_file_prefix.hdf5')",
+            "help": "Save intermediate results. This is a list of comma-separated processing steps, for ex: flatfield, phase, sinogram.\nEach step generates a HDF5 file in the form name_file_prefix.hdf5 (e.g. 'sinogram_file_prefix.hdf5')",
             "validator": optional_string_validator,
             "type": "optional",
         },
@@ -549,7 +562,7 @@ nabu_config = {
         },
         "steps_file": {
             "default": "",
-            "help": "File where the intermediate processing steps are written. By default it is empty, and intermediate processing steps are written in the same directory as the reconstructions, with a file prefix, ex. sinogram_mydataset.hdf5.",
+            "help": "File where the intermediate processing steps are written. By default it is empty, and intermediate processing steps are written in the same directory as the reconstructions, with a file prefix, e.g. sinogram_mydataset.hdf5.",
             "validator": optional_output_file_path_validator,
             "type": "advanced",
         },
@@ -605,6 +618,12 @@ renamed_keys = {
         "since": "2021.2.0",
         "message": "Option 'flatfield_enabled' has been renamed 'flatfield' in [preproc]",
     },
+    "double_flatfield_enabled": {
+        "section": "preproc",
+        "new_name": "double_flatfield",
+        "since": "2025.1.0",
+        "message": "Option 'double_flatfield_enabled' has been renamed 'double_flatfield' in [preproc]",
+    },
     "rotate_projections": {
         "section": "preproc",
         "new_name": "",
nabu/pipeline/fullfield/processconfig.py
@@ -1,9 +1,10 @@
 import os
 import posixpath
 import numpy as np
+from .get_double_flatfield import get_double_flatfield
 from silx.io import get_data
 from silx.io.url import DataUrl
-from ...utils import copy_dict_items, compare_dicts
+from ...utils import copy_dict_items, compare_dicts, deprecation_warning
 from ...io.utils import hdf5_entry_exists, get_h5_value
 from ...io.reader import import_h5_to_dict
 from ...resources.utils import extract_parameters, get_values_from_file
@@ -11,6 +12,7 @@ from ...resources.nxflatfield import update_dataset_info_flats_darks
 from ...resources.utils import get_quantities_and_units
 from ..estimators import estimate_cor
 from ..processconfig import ProcessConfigBase
+from ..config_validators import convert_to_bool
 from .nabu_config import nabu_config, renamed_keys
 from .dataset_validator import FullFieldDatasetValidator
 from nxtomo.nxobject.nxdetector import ImageKey
@@ -32,6 +34,7 @@ class ProcessConfig(ProcessConfigBase):

     (2) update_dataset_info_with_user_config
         - Update flats/darks
+        - Double-flat-field
         - CoR (value or estimation method)  # no estimation yet
         - rotation angles
         - translations files
@@ -73,10 +76,11 @@ class ProcessConfig(ProcessConfigBase):
         Update the 'dataset_info' (DatasetAnalyzer class instance) data structure with options from user configuration.
         """
         self.logger.debug("Updating dataset information with user configuration")
-        if self.dataset_info.kind == "nx":
+        if self.dataset_info.kind == "nx" and self.nabu_config["preproc"]["flatfield"]:
             update_dataset_info_flats_darks(
                 self.dataset_info,
                 self.nabu_config["preproc"]["flatfield"],
+                loading_mode=self.nabu_config["preproc"]["flatfield_loading_mode"],
                 output_dir=self.nabu_config["output"]["location"],
                 darks_flats_dir=self.nabu_config["dataset"]["darks_flats_dir"],
             )
@@ -89,12 +93,29 @@ class ProcessConfig(ProcessConfigBase):
         self.subsampling_factor = subsampling_factor or 1
         self.subsampling_start = subsampling_start or 0

+        self._get_double_flatfield()
         self._update_dataset_with_user_overwrites()
         self._get_rotation_axis_position()
         self._update_rotation_angles()
         self._get_translation_file("reconstruction", "translation_movements_file", "translations")
         self._get_user_sino_normalization()

+    def _get_double_flatfield(self):
+        self._dff_file = None
+        dff_mode = self.nabu_config["preproc"]["double_flatfield"]
+        if not (dff_mode):
+            return
+        self._dff_file = get_double_flatfield(
+            self.dataset_info,
+            dff_mode,
+            output_dir=self.nabu_config["output"]["location"],
+            darks_flats_dir=self.nabu_config["dataset"]["darks_flats_dir"],
+            dff_options={
+                "dff_sigma": self.nabu_config["preproc"]["dff_sigma"],
+                "do_flatfield": (self.nabu_config["preproc"]["flatfield"] is not False),
+            },
+        )
+
     def _update_dataset_with_user_overwrites(self):
         user_overwrites = self.nabu_config["dataset"]["overwrite_metadata"].strip()
         if user_overwrites in ("", None):
@@ -406,8 +427,10 @@ class ProcessConfig(ProcessConfigBase):
         # Flat-field
         #
         if nabu_config["preproc"]["flatfield"]:
+            ff_method = "pca" if nabu_config["preproc"]["flatfield"] == "pca" else "default"
             tasks.append("flatfield")
             options["flatfield"] = {
+                "method": ff_method,
                 # Data reader handles binning/subsampling by itself,
                 # but FlatField needs "real" indices (after binning/subsampling)
                 "projs_indices": self.projs_indices(subsampling=False),
@@ -415,7 +438,7 @@ class ProcessConfig(ProcessConfigBase):
                 "do_flat_distortion": nabu_config["preproc"]["flat_distortion_correction_enabled"],
                 "flat_distortion_params": extract_parameters(nabu_config["preproc"]["flat_distortion_params"]),
             }
-            normalize_srcurrent = nabu_config["preproc"]["normalize_srcurrent"]
+            normalize_srcurrent = nabu_config["preproc"]["normalize_srcurrent"] and ff_method == "default"
             radios_srcurrent = None
             flats_srcurrent = None
             if normalize_srcurrent:
@@ -439,6 +462,7 @@ class ProcessConfig(ProcessConfigBase):
             if len(dataset_info.darks) > 1:
                 self.logger.warning("Cannot do flat-field with more than one reduced dark. Taking the first one.")
                 dataset_info.darks = dataset_info.darks[sorted(dataset_info.darks.keys())[0]]
+
         #
         # Spikes filter
         #
@@ -451,11 +475,19 @@ class ProcessConfig(ProcessConfigBase):
         #
         # Double flat field
         #
-        if nabu_config["preproc"]["double_flatfield_enabled"]:
+        # ---- COMPAT ----
+        if convert_to_bool(nabu_config["preproc"].get("double_flatfield_enabled", False))[0]:
+            deprecation_warning(
+                "'double_flatfield_enabled' has been renamed to 'double_flatfield'. Please update your configuration file"
+            )
+            nabu_config["preproc"]["double_flatfield"] = True
+
+        # -------------
+        if nabu_config["preproc"]["double_flatfield"]:
             tasks.append("double_flatfield")
             options["double_flatfield"] = {
                 "sigma": nabu_config["preproc"]["dff_sigma"],
-                "processes_file": nabu_config["preproc"]["processes_file"],
+                "processes_file": self._dff_file or nabu_config["preproc"]["processes_file"],
                 "log_min_clip": nabu_config["preproc"]["log_min_clip"],
                 "log_max_clip": nabu_config["preproc"]["log_max_clip"],
             }
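
Together with the renamed_keys entry added in nabu_config.py above, this COMPAT block keeps legacy configuration files working. A rough behavioural sketch (convert_to_bool is assumed, from its usage here, to return a (value, error) pair):

    import warnings

    def apply_dff_compat(preproc):
        # Promote the legacy key, warn, then proceed with the new one
        if str(preproc.get("double_flatfield_enabled", "0")).lower() in ("1", "true", "yes"):
            warnings.warn(
                "'double_flatfield_enabled' has been renamed to 'double_flatfield'",
                DeprecationWarning,
            )
            preproc["double_flatfield"] = True
        return preproc

    print(apply_dff_compat({"double_flatfield_enabled": "1"}))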
@@ -558,6 +590,7 @@ class ProcessConfig(ProcessConfigBase):
             self.rec_params,
             [
                 "method",
+                "iterations",
                 "implementation",
                 "fbp_filter_type",
                 "fbp_filter_cutoff",
@@ -575,6 +608,7 @@ class ProcessConfig(ProcessConfigBase):
                 "sample_detector_dist",
                 "hbp_legs",
                 "hbp_reduction_steps",
+                "crop_filtered_data",
             ],
         )
         rec_options = options["reconstruction"]
@@ -593,8 +627,6 @@ class ProcessConfig(ProcessConfigBase):
             voxel_size,
         )  # pix size is in microns in dataset_info

-        rec_options["iterations"] = nabu_config["reconstruction"]["iterations"]
-
         # x/y/z position information
         def get_mean_pos(position_array):
             if position_array is None:
@@ -616,6 +648,8 @@ class ProcessConfig(ProcessConfigBase):
         rec_options["position"] = mean_positions_xyz
         if rec_options["method"] == "cone" and rec_options["sample_detector_dist"] is None:
             rec_options["sample_detector_dist"] = self.dataset_info.distance  # was checked to be not None earlier
+        if rec_options["method"].lower() == "mlem" and rec_options["implementation"] in [None, ""]:
+            rec_options["implementation"] = "corrct"

         # New key
         rec_options["cor_estimated_auto"] = isinstance(nabu_config["reconstruction"]["rotation_axis_position"], str)
nabu/pipeline/fullfield/reconstruction.py
@@ -7,7 +7,7 @@ from silx.io import get_data
 from silx.io.url import DataUrl
 from tomoscan.esrf.volume.singleframebase import VolumeSingleFrameBase
 from ... import version as nabu_version
-from ...utils import check_supported, subdivide_into_overlapping_segment
+from ...utils import check_supported, first_generator_item, subdivide_into_overlapping_segment
 from ...resources.logger import LoggerOrPrint
 from ...resources.utils import is_hdf5_extension
 from ...io.writer import merge_hdf5_files, NXProcessWriter
@@ -61,7 +61,7 @@ class FullFieldReconstructor:
         Dictionary with cuda options passed to `nabu.cuda.processing.CudaProcessing`


-    Other parameters
+    Other Parameters
     -----------------
     Advanced options can be passed in the 'extra_options' dictionary. These can be:

@@ -120,7 +120,7 @@ class FullFieldReconstructor:
         vm = virtual_memory()
         self.resources["mem_avail_GB"] = vm.available / 1e9
         # Account for other memory constraints. There might be a better way
-        slurm_mem_constraint_MB = int(environ.get("SLURM_MEM_PER_NODE", 0))
+        slurm_mem_constraint_MB = int(environ.get("SLURM_MEM_PER_NODE", 0))  # noqa: PLW1508
         if slurm_mem_constraint_MB > 0:
             self.resources["mem_avail_GB"] = slurm_mem_constraint_MB / 1e3
         #
@@ -261,6 +261,9 @@ class FullFieldReconstructor:
         if (self.process_config.dataset_info.detector_tilt or 0) > 15:
             force_grouped_mode = True
             msg = "Radios rotation with a large angle needs to process full radios"
+        if self.process_config.processing_options.get("flatfield", {}).get("method", "default") == "pca":
+            force_grouped_mode = True
+            msg = "PCA-Flatfield normalization needs to process full radios"
         if self.process_config.resume_from_step == "sinogram" and force_grouped_mode:
             self.logger.warning("Cannot use grouped-radios processing when resuming from sinogram")
             force_grouped_mode = False
@@ -312,7 +315,7 @@ class FullFieldReconstructor:
         sigma = opts["unsharp_sigma"]
         # nabu uses cutoff = 4
         cutoff = 4
-        gaussian_kernel_size = int(ceil(2 * cutoff * sigma + 1))
+        gaussian_kernel_size = ceil(2 * cutoff * sigma + 1)
         self.logger.debug("Unsharp mask margin: %d pixels" % gaussian_kernel_size)
         return (gaussian_kernel_size, gaussian_kernel_size)

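A quick check of that margin formula; math.ceil already returns an int in Python 3, which is why the int() cast could be dropped:

    from math import ceil

    cutoff = 4
    for sigma in (0.8, 1.5, 3.0):
        print(sigma, "->", ceil(2 * cutoff * sigma + 1), "pixels")  # 8, 13, 25
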
@@ -360,8 +363,8 @@ class FullFieldReconstructor:
         d2 = rec_cfg["sample_detector_dist"]
         n_z, _ = self.process_config.radio_shape(binning=True)

-        delta_z = self.process_config.rec_delta_z  # accounts_for_binning
-        overlap = ceil(delta_z * d2 / (d1 + d2))  # sqrt(2) missing ?
+        # delta_z = self.process_config.rec_delta_z  # accounts_for_binning
+        # overlap = ceil(delta_z * d2 / (d1 + d2))  # sqrt(2) missing ?

         max_overlap = ceil(n_z * d2 / (d1 + d2))  # sqrt(2) missing ?

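For intuition about max_overlap, with purely illustrative distances:

    from math import ceil

    d1, d2 = 100.0, 50.0  # hypothetical source-sample / sample-detector distances
    n_z = 2048            # detector rows
    print(ceil(n_z * d2 / (d1 + d2)))  # 683 rows of overlap between vertical chunks
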
@@ -483,7 +486,7 @@ class FullFieldReconstructor:
             {
                 "sub_region": (
                     (0, self.n_angles),
-                    (curr_z_min - margin_up, curr_z_max + margin_down),
+                    (int(curr_z_min - margin_up), int(curr_z_max + margin_down)),
                     (0, self.chunk_shape[-1]),
                 ),
                 "margin": ((margin_up, margin_down), (0, 0)),
@@ -690,7 +693,7 @@ class FullFieldReconstructor:
         # Prevent issue when out_dir is empty, which happens only if dataset/location is a relative path.
         # TODO this should be prevented earlier
         if out_dir is None or len(out_dir.strip()) == 0:
-            out_dir = dirname(dirname(self.results[list(self.results.keys())[0]]))
+            out_dir = dirname(dirname(self.results[first_generator_item(self.results.keys())]))
         #
         if output_file is None:
             output_file = join(out_dir, prefix + out_cfg["file_prefix"]) + ".hdf5"
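
first_generator_item itself is not shown in this diff; judging from its name and these call sites, it presumably returns the first element of an iterable without materializing a full list, i.e. something like (an assumption, not nabu's actual code):

    def first_generator_item(generator):
        # Assumed equivalent of list(iterable)[0], without building the list
        return next(iter(generator))

    print(first_generator_item({"a": 1, "b": 2}.keys()))  # a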
@@ -771,7 +774,8 @@ class FullFieldReconstructor:
         out_cfg = self.process_config.nabu_config["output"]
         if output_file is None:
             output_file = (
-                join(dirname(list(self._histograms.values())[0]), out_cfg["file_prefix"] + "_histogram") + ".hdf5"
+                join(dirname(first_generator_item(self._histograms.values())), out_cfg["file_prefix"] + "_histogram")
+                + ".hdf5"
             )
         local_files = self.get_relative_files(files=list(self._histograms.values()))
         #
@@ -823,7 +827,7 @@ class FullFieldReconstructor:
     def merge_data_dumps(self, axis=1):
         # Collect in a dict where keys are step names (instead of task keys)
         dumps = {}
-        for task_key, data_dumps in self._data_dumps.items():
+        for task_key, data_dumps in self._data_dumps.items():  # noqa: PERF102
             for step_name, fname in data_dumps.items():
                 fname = join(basename(dirname(fname)), basename(fname))
                 if step_name not in dumps:
nabu/pipeline/helical/dataset_validator.py
@@ -1,4 +1,4 @@
-from ..fullfield.dataset_validator import *
+from ..fullfield.dataset_validator import FullFieldDatasetValidator
 from ...utils import copy_dict_items


@@ -10,10 +10,9 @@ class HelicalDatasetValidator(FullFieldDatasetValidator):

     def _check_slice_indices(self):
         """Slice indices can be far beyond what fullfield pipeline accepts, no check here, but
-        Nabu expects that rec_region is initialised here"""
+        Nabu expects that rec_region is initialised here
+        """

         what = ["start_x", "end_x", "start_y", "end_y", "start_z", "end_z"]

         self.rec_region = copy_dict_items(self.rec_params, what)
-
-        return
nabu/pipeline/helical/fbp.py
@@ -1,6 +1,6 @@
-from ...reconstruction.fbp import *
+import numpy as np
+from ...reconstruction.fbp import Backprojector
 from .filtering import HelicalSinoFilter
-from ...utils import convert_index


 class BackprojectorHelical(Backprojector):
@@ -25,9 +25,9 @@ class BackprojectorHelical(Backprojector):

     def set_custom_angles_and_axis_corrections(self, angles_rad, x_per_proj):
         """To arbitrarily change angles
-        Parameters
-        ==========

+        Parameters
+        ----------
         angles_rad: array of floats
             one angle per each projection in radians

nabu/pipeline/helical/filtering.py
@@ -68,26 +68,26 @@ class HelicalSinoFilter(CudaSinoFilter):
             "padding",
             filename=get_cuda_srcfile("helical_padding.cu"),
             signature="PPfiiiii",
-            options=[str("-DMIRROR_EDGES")],
+            options=["-DMIRROR_EDGES"],
         )
         self._pad_mirror_constant_kernel = self.cuda.kernel(
             "padding",
             filename=get_cuda_srcfile("helical_padding.cu"),
             signature="PPfiiiiiff",
-            options=[str("-DMIRROR_CONSTANT")],
+            options=["-DMIRROR_CONSTANT"],
         )

         self._pad_mirror_edges_variable_rot_pos_kernel = self.cuda.kernel(
             "padding",
             filename=get_cuda_srcfile("helical_padding.cu"),
             signature="PPPiiiii",
-            options=[str("-DMIRROR_EDGES_VARIABLE_ROT_POS")],
+            options=["-DMIRROR_EDGES_VARIABLE_ROT_POS"],
         )
         self._pad_mirror_constant_variable_rot_pos_kernel = self.cuda.kernel(
             "padding",
             filename=get_cuda_srcfile("helical_padding.cu"),
             signature="PPPiiiiiff",
-            options=[str("-DMIRROR_CONSTANT_VARIABLE_ROT_POS")],
+            options=["-DMIRROR_CONSTANT_VARIABLE_ROT_POS"],
         )

         self.d_mirror_indexes = self.cuda.allocate_array(
@@ -179,6 +179,7 @@ class HelicalSinoFilter(CudaSinoFilter):
         """
         Perform the sinogram filtering.
         redefined here to use also mirror data
+
         Parameters
         ----------
         sino: numpy.ndarray or pycuda.gpuarray.GPUArray