nabu 2023.2.1__py3-none-any.whl → 2024.1.0rc3__py3-none-any.whl
This diff compares the contents of two package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- doc/conf.py +1 -1
- doc/doc_config.py +32 -0
- nabu/__init__.py +2 -1
- nabu/app/bootstrap_stitching.py +1 -1
- nabu/app/cli_configs.py +122 -2
- nabu/app/composite_cor.py +27 -2
- nabu/app/correct_rot.py +70 -0
- nabu/app/create_distortion_map_from_poly.py +42 -18
- nabu/app/diag_to_pix.py +358 -0
- nabu/app/diag_to_rot.py +449 -0
- nabu/app/generate_header.py +4 -3
- nabu/app/histogram.py +2 -2
- nabu/app/multicor.py +6 -1
- nabu/app/parse_reconstruction_log.py +151 -0
- nabu/app/prepare_weights_double.py +83 -22
- nabu/app/reconstruct.py +5 -1
- nabu/app/reconstruct_helical.py +7 -0
- nabu/app/reduce_dark_flat.py +6 -3
- nabu/app/rotate.py +4 -4
- nabu/app/stitching.py +16 -2
- nabu/app/tests/test_reduce_dark_flat.py +18 -2
- nabu/app/validator.py +4 -4
- nabu/cuda/convolution.py +8 -376
- nabu/cuda/fft.py +4 -0
- nabu/cuda/kernel.py +4 -4
- nabu/cuda/medfilt.py +5 -158
- nabu/cuda/padding.py +5 -71
- nabu/cuda/processing.py +23 -2
- nabu/cuda/src/ElementOp.cu +78 -0
- nabu/cuda/src/backproj.cu +28 -2
- nabu/cuda/src/fourier_wavelets.cu +2 -2
- nabu/cuda/src/normalization.cu +23 -0
- nabu/cuda/src/padding.cu +2 -2
- nabu/cuda/src/transpose.cu +16 -0
- nabu/cuda/utils.py +39 -0
- nabu/estimation/alignment.py +10 -1
- nabu/estimation/cor.py +808 -38
- nabu/estimation/cor_sino.py +7 -9
- nabu/estimation/tests/test_cor.py +85 -3
- nabu/io/reader.py +26 -18
- nabu/io/tests/test_cast_volume.py +3 -3
- nabu/io/tests/test_detector_distortion.py +3 -3
- nabu/io/tiffwriter_zmm.py +2 -2
- nabu/io/utils.py +14 -4
- nabu/io/writer.py +5 -3
- nabu/misc/fftshift.py +6 -0
- nabu/misc/histogram.py +5 -285
- nabu/misc/histogram_cuda.py +8 -104
- nabu/misc/kernel_base.py +3 -121
- nabu/misc/padding_base.py +5 -69
- nabu/misc/processing_base.py +3 -107
- nabu/misc/rotation.py +5 -62
- nabu/misc/rotation_cuda.py +5 -65
- nabu/misc/transpose.py +6 -0
- nabu/misc/unsharp.py +3 -78
- nabu/misc/unsharp_cuda.py +5 -52
- nabu/misc/unsharp_opencl.py +8 -85
- nabu/opencl/fft.py +6 -0
- nabu/opencl/kernel.py +21 -6
- nabu/opencl/padding.py +5 -72
- nabu/opencl/processing.py +27 -5
- nabu/opencl/src/backproj.cl +3 -3
- nabu/opencl/src/fftshift.cl +65 -12
- nabu/opencl/src/padding.cl +2 -2
- nabu/opencl/src/roll.cl +96 -0
- nabu/opencl/src/transpose.cl +16 -0
- nabu/pipeline/config_validators.py +63 -3
- nabu/pipeline/dataset_validator.py +2 -2
- nabu/pipeline/estimators.py +193 -35
- nabu/pipeline/fullfield/chunked.py +34 -17
- nabu/pipeline/fullfield/chunked_cuda.py +7 -5
- nabu/pipeline/fullfield/computations.py +48 -13
- nabu/pipeline/fullfield/nabu_config.py +13 -13
- nabu/pipeline/fullfield/processconfig.py +10 -5
- nabu/pipeline/fullfield/reconstruction.py +1 -2
- nabu/pipeline/helical/fbp.py +5 -0
- nabu/pipeline/helical/filtering.py +12 -9
- nabu/pipeline/helical/gridded_accumulator.py +179 -33
- nabu/pipeline/helical/helical_chunked_regridded.py +262 -151
- nabu/pipeline/helical/helical_chunked_regridded_cuda.py +4 -11
- nabu/pipeline/helical/helical_reconstruction.py +56 -18
- nabu/pipeline/helical/span_strategy.py +1 -1
- nabu/pipeline/helical/tests/test_accumulator.py +4 -0
- nabu/pipeline/params.py +23 -2
- nabu/pipeline/processconfig.py +3 -8
- nabu/pipeline/tests/test_chunk_reader.py +78 -0
- nabu/pipeline/tests/test_estimators.py +120 -2
- nabu/pipeline/utils.py +25 -0
- nabu/pipeline/writer.py +2 -0
- nabu/preproc/ccd_cuda.py +9 -7
- nabu/preproc/ctf.py +21 -26
- nabu/preproc/ctf_cuda.py +25 -25
- nabu/preproc/double_flatfield.py +14 -2
- nabu/preproc/double_flatfield_cuda.py +7 -11
- nabu/preproc/flatfield_cuda.py +23 -27
- nabu/preproc/phase.py +19 -24
- nabu/preproc/phase_cuda.py +21 -21
- nabu/preproc/shift_cuda.py +58 -28
- nabu/preproc/tests/test_ctf.py +5 -5
- nabu/preproc/tests/test_double_flatfield.py +2 -2
- nabu/preproc/tests/test_vshift.py +13 -2
- nabu/processing/__init__.py +0 -0
- nabu/processing/convolution_cuda.py +375 -0
- nabu/processing/fft_base.py +163 -0
- nabu/processing/fft_cuda.py +256 -0
- nabu/processing/fft_opencl.py +54 -0
- nabu/processing/fftshift.py +134 -0
- nabu/processing/histogram.py +286 -0
- nabu/processing/histogram_cuda.py +103 -0
- nabu/processing/kernel_base.py +126 -0
- nabu/processing/medfilt_cuda.py +159 -0
- nabu/processing/muladd.py +29 -0
- nabu/processing/muladd_cuda.py +68 -0
- nabu/processing/padding_base.py +71 -0
- nabu/processing/padding_cuda.py +75 -0
- nabu/processing/padding_opencl.py +77 -0
- nabu/processing/processing_base.py +123 -0
- nabu/processing/roll_opencl.py +64 -0
- nabu/processing/rotation.py +63 -0
- nabu/processing/rotation_cuda.py +66 -0
- nabu/processing/tests/__init__.py +0 -0
- nabu/processing/tests/test_fft.py +268 -0
- nabu/processing/tests/test_fftshift.py +71 -0
- nabu/{misc → processing}/tests/test_histogram.py +2 -4
- nabu/{cuda → processing}/tests/test_medfilt.py +1 -1
- nabu/processing/tests/test_muladd.py +54 -0
- nabu/{cuda → processing}/tests/test_padding.py +119 -75
- nabu/processing/tests/test_roll.py +63 -0
- nabu/{misc → processing}/tests/test_rotation.py +3 -2
- nabu/processing/tests/test_transpose.py +72 -0
- nabu/{misc → processing}/tests/test_unsharp.py +41 -8
- nabu/processing/transpose.py +126 -0
- nabu/processing/unsharp.py +79 -0
- nabu/processing/unsharp_cuda.py +53 -0
- nabu/processing/unsharp_opencl.py +75 -0
- nabu/reconstruction/fbp.py +34 -10
- nabu/reconstruction/fbp_base.py +35 -16
- nabu/reconstruction/fbp_opencl.py +7 -12
- nabu/reconstruction/filtering.py +2 -2
- nabu/reconstruction/filtering_cuda.py +13 -14
- nabu/reconstruction/filtering_opencl.py +3 -4
- nabu/reconstruction/projection.py +2 -0
- nabu/reconstruction/rings.py +158 -1
- nabu/reconstruction/rings_cuda.py +218 -58
- nabu/reconstruction/sinogram_cuda.py +16 -12
- nabu/reconstruction/tests/test_deringer.py +116 -14
- nabu/reconstruction/tests/test_fbp.py +22 -31
- nabu/reconstruction/tests/test_filtering.py +11 -2
- nabu/resources/dataset_analyzer.py +89 -26
- nabu/resources/nxflatfield.py +2 -2
- nabu/resources/tests/test_nxflatfield.py +1 -1
- nabu/resources/utils.py +9 -2
- nabu/stitching/alignment.py +184 -0
- nabu/stitching/config.py +241 -39
- nabu/stitching/definitions.py +6 -0
- nabu/stitching/frame_composition.py +4 -2
- nabu/stitching/overlap.py +99 -3
- nabu/stitching/sample_normalization.py +60 -0
- nabu/stitching/slurm_utils.py +10 -10
- nabu/stitching/tests/test_alignment.py +99 -0
- nabu/stitching/tests/test_config.py +16 -1
- nabu/stitching/tests/test_overlap.py +68 -2
- nabu/stitching/tests/test_sample_normalization.py +49 -0
- nabu/stitching/tests/test_slurm_utils.py +5 -5
- nabu/stitching/tests/test_utils.py +3 -33
- nabu/stitching/tests/test_z_stitching.py +391 -22
- nabu/stitching/utils.py +144 -202
- nabu/stitching/z_stitching.py +309 -126
- nabu/testutils.py +18 -0
- nabu/thirdparty/tomocupy_remove_stripe.py +586 -0
- nabu/utils.py +32 -6
- {nabu-2023.2.1.dist-info → nabu-2024.1.0rc3.dist-info}/LICENSE +1 -1
- {nabu-2023.2.1.dist-info → nabu-2024.1.0rc3.dist-info}/METADATA +5 -5
- nabu-2024.1.0rc3.dist-info/RECORD +296 -0
- {nabu-2023.2.1.dist-info → nabu-2024.1.0rc3.dist-info}/WHEEL +1 -1
- {nabu-2023.2.1.dist-info → nabu-2024.1.0rc3.dist-info}/entry_points.txt +5 -1
- nabu/conftest.py +0 -14
- nabu/opencl/fftshift.py +0 -92
- nabu/opencl/tests/test_fftshift.py +0 -55
- nabu/opencl/tests/test_padding.py +0 -84
- nabu-2023.2.1.dist-info/RECORD +0 -252
- /nabu/cuda/src/{fftshift.cu → dfi_fftshift.cu} +0 -0
- {nabu-2023.2.1.dist-info → nabu-2024.1.0rc3.dist-info}/top_level.txt +0 -0
nabu/reconstruction/tests/test_fbp.py
CHANGED
@@ -4,10 +4,14 @@ from scipy.ndimage import shift
 from nabu.pipeline.params import fbp_filters
 from nabu.utils import clip_circle
 from nabu.testutils import get_data, generate_tests_scenarios, __do_long_tests__
-from nabu.cuda.utils import get_cuda_context, __has_pycuda__
+from nabu.cuda.utils import get_cuda_context, __has_pycuda__
 from nabu.opencl.utils import get_opencl_context, __has_pyopencl__
 
-
+from nabu.processing.fft_cuda import has_skcuda, has_vkfft as has_vkfft_cu
+from nabu.processing.fft_opencl import has_vkfft as has_vkfft_cl
+
+__has_pycuda__ = __has_pycuda__ and (has_skcuda() or has_vkfft_cu())
+__has_pyopencl__ = __has_pyopencl__ and has_vkfft_cl()
 
 if __has_pycuda__:
     from nabu.reconstruction.fbp import CudaBackprojector
@@ -22,7 +26,7 @@ if __do_long_tests__:
         "backend": ["cuda", "opencl"],
         "input_on_gpu": [False, True],
         "output_on_gpu": [False, True],
-        "
+        "use_textures": [True, False],
     }
 )
 
@@ -34,8 +38,8 @@ def bootstrap(request):
     cls.ref_512 = get_data("mri_rec_astra.npz")["data"]
     # always use contiguous arrays
     cls.sino_511 = np.ascontiguousarray(cls.sino_512[:, :-1])
-
-    cls.
+    # Could be set to 5.0e-2 when using textures. When not using textures, interpolation slightly differs
+    cls.tol = 5.1e-2
 
     if __has_pycuda__:
         cls.cuda_ctx = get_cuda_context(cleanup_at_exit=False)
@@ -56,19 +60,19 @@ class TestFBP:
     def _get_backprojector(self, config, *bp_args, **bp_kwargs):
         if config["backend"] == "cuda":
             if not (__has_pycuda__):
-                pytest.skip("Need pycuda + scikit-cuda")
+                pytest.skip("Need pycuda + (scikit-cuda or pyvkfft)")
             Backprojector = CudaBackprojector
             ctx = self.cuda_ctx
         else:
             if not (__has_pyopencl__):
-                pytest.skip("Need pyopencl")
+                pytest.skip("Need pyopencl + pyvkfft")
             Backprojector = OpenCLBackprojector
             ctx = self.opencl_ctx
-
-
-
-
-
+        if config.get("use_textures", True) is False:
+            # patch "extra_options"
+            extra_options = bp_kwargs.pop("extra_options", {})
+            extra_options["use_textures"] = False
+            bp_kwargs["extra_options"] = extra_options
         return Backprojector(*bp_args, **bp_kwargs, backend_options={"ctx": ctx})
 
     @staticmethod
@@ -84,13 +88,6 @@ class TestFBP:
             res = res.get()
         return res
 
-    def get_error_tolerance(self, config):
-        tol = self.tol
-        if config["backend"] == "opencl":
-            tol = self.tol_without_texture
-            tol += 1e-3
-        return tol
-
     @pytest.mark.parametrize("config", scenarios)
     def test_fbp_512(self, config):
         """
@@ -102,7 +99,7 @@ class TestFBP:
         delta_clipped = self.clip_to_inner_circle(res - self.ref_512)
         err_max = np.max(np.abs(delta_clipped))
 
-        assert err_max < self.
+        assert err_max < self.tol, "Something wrong with config=%s" % (str(config))
 
     @pytest.mark.parametrize("config", scenarios)
     def test_fbp_511(self, config):
@@ -116,7 +113,7 @@ class TestFBP:
         delta_clipped = self.clip_to_inner_circle(res - ref)
         err_max = np.max(np.abs(delta_clipped))
 
-        assert err_max < self.
+        assert err_max < self.tol, "Something wrong with config=%s" % (str(config))
 
     @pytest.mark.parametrize("config", scenarios)
     def test_fbp_roi(self, config):
@@ -146,7 +143,7 @@ class TestFBP:
         }
         for roi, ref in cases.values():
             err_max = backproject_roi(roi, ref)
-            assert err_max < self.
+            assert err_max < self.tol, "Something wrong with ROI = %s for config=%s" % (
                 str(roi),
                 str(config),
             )
@@ -183,6 +180,7 @@ class TestFBP:
         """
         sino = self.sino_512
         tol = 1e-5
+
         for rot_center in [None, sino.shape[1] / 2.0 - 10, sino.shape[1] / 2.0 + 15]:
             B = self._get_backprojector(
                 config, sino.shape, rot_center=rot_center, extra_options={"clip_outer_circle": True}
@@ -195,15 +193,8 @@ class TestFBP:
             res_noclip = B0.fbp(sino)
             ref = self.clip_to_inner_circle(res_noclip, radius_factor=1)
 
-
-
-                and config["backend"] == "opencl"
-                and config.get("opencl_use_textures", True) is False
-            ):
-                # TODO problem when not using textures here - though again outside of circle
-                tol = 10
-
-            err_max = np.max(np.abs(res - ref))
+            abs_diff = np.abs(res - ref)
+            err_max = np.max(abs_diff)
             assert err_max < tol, "Max error is too high for rot_center=%s ; %s" % (str(rot_center), str(config))
 
     @pytest.mark.parametrize("config", scenarios)
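Note on the `use_textures` scenario above: the test injects the flag through the backprojector's `extra_options` dict rather than as a direct keyword, so other options the caller passed are preserved. The snippet below is an illustrative, self-contained sketch of that kwargs-patching pattern (plain Python; the helper name is hypothetical and this is not nabu's API):

    # Illustrative sketch (hypothetical helper, not nabu code): mirrors how the test above
    # routes the "use_textures" scenario into the backprojector's extra_options dict
    # without clobbering options the caller already passed.
    def patch_use_textures(bp_kwargs: dict, use_textures: bool) -> dict:
        extra_options = dict(bp_kwargs.pop("extra_options", {}))
        extra_options["use_textures"] = use_textures
        bp_kwargs["extra_options"] = extra_options
        return bp_kwargs

    if __name__ == "__main__":
        kwargs = {"extra_options": {"clip_outer_circle": True}}
        print(patch_use_textures(kwargs, use_textures=False))
        # -> {'extra_options': {'clip_outer_circle': True, 'use_textures': False}}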
nabu/reconstruction/tests/test_filtering.py
CHANGED
@@ -94,7 +94,14 @@ class TestSinoFilter:
             self.sino, sino_filter.dwidth_padded, filter_name=config["filter_name"], padding_mode=config["padding_mode"]
         )
 
-
+        if not np.allclose(res.get(), ref, atol=6e-5):
+            from spire.utils import ims
+
+            ims([res.get(), ref, res.get() - ref])
+
+        assert np.allclose(res.get(), ref, atol=6e-5), "test_cuda_filter: something wrong with config=%s" % (
+            str(config)
+        )
 
     @pytest.mark.skipif(
         not (__has_pyopencl__ and __has_vkfft__), reason="Need OpenCL + pyopencl + pyvkfft to use OpenCLSinoFilter"
@@ -121,4 +128,6 @@ class TestSinoFilter:
             self.sino, sino_filter.dwidth_padded, filter_name=config["filter_name"], padding_mode=config["padding_mode"]
         )
 
-        assert np.allclose(res.get(), ref, atol=6e-5)
+        assert np.allclose(res.get(), ref, atol=6e-5), "test_opencl_filter: something wrong with config=%s" % (
+            str(config)
+        )
nabu/resources/dataset_analyzer.py
CHANGED
@@ -1,21 +1,16 @@
 import os
+from bisect import bisect_left
 import numpy as np
 from silx.io import get_data
 from silx.io.url import DataUrl
 from tomoscan.esrf.scan.edfscan import EDFTomoScan
-from tomoscan.esrf.scan.
-
+from tomoscan.esrf.scan.nxtomoscan import NXtomoScan
 from ..utils import check_supported
 from ..io.utils import get_compacted_dataslices
 from .utils import is_hdf5_extension, get_values_from_file
 from .logger import LoggerOrPrint
 
-
-from packaging.version import parse as parse_version
-from tomoscan.version import version as tomoscan_version
-
-_tomoscan_has_nxversion = parse_version(tomoscan_version) > parse_version("0.6.0")
-#
+from ..pipeline.utils import nabu_env_settings
 
 
 class DatasetAnalyzer:
@@ -56,25 +51,29 @@ class DatasetAnalyzer:
             "output_dir": None,
             "exclude_projections": None,
             "hdf5_entry": None,
+            "nx_version": 1.0,
         }
-        if _tomoscan_has_nxversion:
-            advanced_options["nx_version"] = 1.0
         # --
         advanced_options.update(extra_options)
         self.extra_options = advanced_options
 
+    # pylint: disable=E1136
     def _get_excluded_projections(self):
+        self._ignore_projections_indices = None
+        self._need_rebuild_tomoscan_object_to_exclude_projections = False
        excluded_projs = self.extra_options["exclude_projections"]
        if excluded_projs is None:
            return
-
-
-
+        if excluded_projs["type"] == "indices":
+            projs_idx = get_values_from_file(excluded_projs["file"], any_size=True).astype(np.int32).tolist()
+            self._ignore_projections_indices = projs_idx
+        else:
+            self._need_rebuild_tomoscan_object_to_exclude_projections = True
 
     def _init_dataset_scan(self, **kwargs):
         if self._scanner is None:
             raise ValueError("Base class")
-        if self._scanner is
+        if self._scanner is NXtomoScan:
             if self.extra_options.get("hdf5_entry", None) is not None:
                 kwargs["entry"] = self.extra_options["hdf5_entry"]
             if self.extra_options.get("nx_version", None) is not None:
@@ -82,10 +81,50 @@ class DatasetAnalyzer:
         if self._scanner is EDFTomoScan:
             # Assume 1 frame per file (otherwise too long to open each file)
             kwargs["n_frames"] = 1
+
         self.dataset_scanner = self._scanner(  # pylint: disable=E1102
-            self.location, ignore_projections=self.
+            self.location, ignore_projections=self._ignore_projections_indices, **kwargs
         )
         self.projections = self.dataset_scanner.projections
+
+        # ---
+        if self._need_rebuild_tomoscan_object_to_exclude_projections:
+            # pylint: disable=E1136
+            exclude_projs = self.extra_options["exclude_projections"]
+            rot_angles_deg = np.rad2deg(self.rotation_angles)
+            self._rotation_angles = None  # prevent caching
+            # tomoscan only supports ignore_projections=<list of integers>
+            # However this is cumbersome to use, it's more convenient to use angular range or list of angles
+            # But having angles instead of indices implies to already have information on current scan angular range
+            ignore_projections_indices = []
+            if exclude_projs["type"] == "angular_range":
+                exclude_angle_min, exclude_angle_max = exclude_projs["range"]
+                projections_indices = np.array(sorted(self.dataset_scanner.projections.keys()))
+                for proj_idx, angle in zip(projections_indices, rot_angles_deg):
+                    if exclude_angle_min <= angle and angle <= exclude_angle_max:
+                        ignore_projections_indices.append(proj_idx)
+            elif exclude_projs["type"] == "angles":
+                excluded_angles = get_values_from_file(exclude_projs["file"], any_size=True).astype(np.float32).tolist()
+                for excluded_angle in excluded_angles:
+                    proj_idx = bisect_left(rot_angles_deg, excluded_angle)
+                    if proj_idx < rot_angles_deg.size:
+                        ignore_projections_indices.append(proj_idx)
+            # Rebuild the dataset_scanner instance
+            self._ignore_projections_indices = ignore_projections_indices
+            self.dataset_scanner = self._scanner(  # pylint: disable=E1102
+                self.location, ignore_projections=self._ignore_projections_indices, **kwargs
+            )
+        # ---
+        if self._ignore_projections_indices is not None:
+            self.logger.info("Excluding projections: %s" % str(self._ignore_projections_indices))
+
+        if nabu_env_settings.skip_tomoscan_checks:
+            self.logger.warning(
+                " WARNING: according to nabu_env_settings.skip_tomoscan_checks, skipping virtual layout integrity check of tomoscan which is time consuming"
+            )
+            self.dataset_scanner.set_check_behavior(run_check=False, raise_error=False)
+
+        self.projections = self.dataset_scanner.projections
         self.flats = self.dataset_scanner.flats
         self.darks = self.dataset_scanner.darks
         self.n_angles = len(self.dataset_scanner.projections)
@@ -106,7 +145,8 @@ class DatasetAnalyzer:
         self._energy = None
         self._pixel_size = None
         self._distance = None
-        self.
+        self._flats_srcurrent = None
+        self._projections_srcurrent = None
 
     @property
     def energy(self):
@@ -183,20 +223,43 @@ class DatasetAnalyzer:
     def detector_tilt(self, tilt):
         self._detector_tilt = tilt
 
-
-    def projections_srcurrent(self):
-        """
-        Return the synchrotron electric current for each projection.
-        """
+    def _get_srcurrent(self, indices):
         srcurrent = self.dataset_scanner.electric_current
         if srcurrent is None or len(srcurrent) == 0:
             return None
         srcurrent_all = np.array(srcurrent)
-
-        if np.any(projections_indices >= len(srcurrent_all)):
+        if np.any(indices >= len(srcurrent_all)):
             self.logger.error("Something wrong with SRCurrent: not enough values!")
             return None
-        return srcurrent_all[
+        return srcurrent_all[indices].astype("f")
+
+    @property
+    def projections_srcurrent(self):
+        """
+        Return the synchrotron electric current for each projection.
+        """
+        if self._projections_srcurrent is None:
+            projections_indices = np.array(sorted(self.projections.keys()))
+            self._projections_srcurrent = self._get_srcurrent(projections_indices)
+        return self._projections_srcurrent
+
+    @projections_srcurrent.setter
+    def projections_srcurrent(self, val):
+        self._projections_srcurrent = val
+
+    @property
+    def flats_srcurrent(self):
+        """
+        Return the synchrotron electric current for each flat image.
+        """
+        if self._flats_srcurrent is None:
+            flats_indices = np.array(sorted(self.flats.keys()))
+            self._flats_srcurrent = self._get_srcurrent(flats_indices)
+        return self._flats_srcurrent
+
+    @flats_srcurrent.setter
+    def flats_srcurrent(self, val):
+        self._flats_srcurrent = val
 
     def check_defined_attribute(self, name, error_msg=None):
         """
@@ -252,7 +315,7 @@ class EDFDatasetAnalyzer(DatasetAnalyzer):
         scan_range = self.dataset_scanner.scan_range
         if scan_range is not None:
             fullturn = abs(scan_range - 360) < abs(scan_range - 180)
-            angles = np.linspace(0, scan_range, num=self.
+            angles = np.linspace(0, scan_range, num=len(self.projections), endpoint=fullturn, dtype="f")
             self._rotation_angles = np.deg2rad(angles)
         return self._rotation_angles
 
@@ -262,7 +325,7 @@ class HDF5DatasetAnalyzer(DatasetAnalyzer):
     HDF5 dataset analyzer
     """
 
-    _scanner =
+    _scanner = NXtomoScan
     kind = "hdf5"
 
     @property
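Note on the projection-exclusion logic added to `_init_dataset_scan` above: tomoscan only accepts `ignore_projections` as a list of integer indices, so angular exclusions have to be resolved against the scan's rotation angles first. The sketch below re-creates that angles-to-indices mapping with plain numpy under simplified assumptions (sorted angles in degrees, hypothetical helper name; not nabu code):

    from bisect import bisect_left
    import numpy as np

    # Standalone sketch of the angles -> indices mapping used above (hypothetical helper,
    # not nabu's API): an angular range or a list of angles is turned into the projection
    # indices that tomoscan expects.
    def angles_to_excluded_indices(proj_indices, angles_deg, exclude):
        angles_deg = list(angles_deg)  # assumed sorted, in degrees
        excluded = []
        if exclude["type"] == "angular_range":
            angle_min, angle_max = exclude["range"]
            for idx, angle in zip(proj_indices, angles_deg):
                if angle_min <= angle <= angle_max:
                    excluded.append(int(idx))
        elif exclude["type"] == "angles":
            for angle in exclude["angles"]:
                pos = bisect_left(angles_deg, angle)  # position of the nearest angle >= requested
                if pos < len(angles_deg):
                    excluded.append(int(proj_indices[pos]))
        return excluded

    if __name__ == "__main__":
        indices = np.arange(10)
        angles = np.linspace(0, 180, 10, endpoint=False)  # 0, 18, ..., 162
        print(angles_to_excluded_indices(indices, angles, {"type": "angular_range", "range": (40, 90)}))
        # -> [3, 4, 5]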
nabu/resources/nxflatfield.py
CHANGED
@@ -2,7 +2,7 @@ import os
 import numpy as np
 from silx.io.url import DataUrl
 from tomoscan.io import HDF5File
-from tomoscan.esrf.scan.
+from tomoscan.esrf.scan.nxtomoscan import NXtomoScan
 from ..utils import check_supported, is_writeable
 
 
@@ -61,7 +61,7 @@ def get_metadata_url(url, frame_type):
     Return the url of the metadata stored alongside flats/darks
     """
     check_supported(frame_type, ["flats", "darks"], "frame type")
-    template_url = getattr(
+    template_url = getattr(NXtomoScan, "REDUCED_%s_METADATAURLS" % frame_type.upper())[0]
     return DataUrl(
         file_path=url.file_path(),
         data_path=template_url.data_path(),
nabu/resources/tests/test_nxflatfield.py
CHANGED
@@ -4,7 +4,7 @@ from shutil import rmtree
 import pytest
 import numpy as np
 from silx.io import get_data
-from
+from nxtomo.nxobject.nxdetector import ImageKey
 from nabu.testutils import generate_nx_dataset
 from nabu.resources.nxflatfield import update_dataset_info_flats_darks
 from nabu.resources.dataset_analyzer import HDF5DatasetAnalyzer
nabu/resources/utils.py
CHANGED
@@ -1,7 +1,8 @@
 from ast import literal_eval
 import numpy as np
 from psutil import virtual_memory, cpu_count
-from
+from pyunitsystem.metricsystem import MetricSystem
+from pyunitsystem.energysystem import EnergySI
 
 
 def get_values_from_file(fname, n_values=None, shape=None, sep=None, any_size=False):
@@ -162,6 +163,12 @@ def get_quantities_and_units(string, sep=";"):
         value, unit = value_and_unit.split()
         val = float(value)
         # Convert to SI
-
+        try:
+            # handle metrics
+            conversion_factor = MetricSystem.from_str(unit).value
+        except ValueError:
+            # handle energies
+            conversion_factor = EnergySI.from_str(unit).value / EnergySI.KILOELECTRONVOLT.value
+
         result[quantity_name] = val * conversion_factor
     return result
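Note on the unit-conversion fallback added to `get_quantities_and_units` above: a metric conversion is attempted first, and energies are handled when that fails. The toy sketch below shows the same try/except pattern with a hand-rolled conversion table instead of pyunitsystem's `MetricSystem`/`EnergySI` (all names here are hypothetical, for illustration only):

    # Toy sketch of the same try/except fallback, with a hand-rolled table instead of
    # pyunitsystem's MetricSystem / EnergySI. Hypothetical helper, for illustration only.
    _METRIC_TO_SI = {"m": 1.0, "mm": 1e-3, "um": 1e-6, "nm": 1e-9}
    _ENERGY_TO_KEV = {"ev": 1e-3, "kev": 1.0, "mev": 1e3}

    def convert_value(value: float, unit: str) -> float:
        try:
            # handle metric (length) units first...
            factor = _METRIC_TO_SI[unit.lower()]
        except KeyError:
            # ...then fall back to energy units, expressed in keV
            factor = _ENERGY_TO_KEV[unit.lower()]
        return value * factor

    if __name__ == "__main__":
        print(convert_value(1.2, "mm"))    # -> 0.0012
        print(convert_value(19000, "eV"))  # -> 19.0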
nabu/stitching/alignment.py
ADDED
@@ -0,0 +1,184 @@
+import h5py
+import numpy
+from typing import Union
+from silx.utils.enum import Enum as _Enum
+from tomoscan.volumebase import VolumeBase
+from tomoscan.esrf.volume.hdf5volume import HDF5Volume
+from nabu.io.utils import DatasetReader
+
+
+class AlignmentAxis2(_Enum):
+    CENTER = "center"
+    LEFT = "left"
+    RIGTH = "right"
+
+
+class AlignmentAxis1(_Enum):
+    FRONT = "front"
+    CENTER = "center"
+    BACK = "back"
+
+
+def align_horizontally(data: numpy.ndarray, alignment: AlignmentAxis2, new_width: int, pad_mode="constant"):
+    """
+    Align data horizontally to make sure new data width will ne `new_width`.
+
+    :param numpy.ndarray data: data to align
+    :param HAlignment alignment: alignment strategy
+    :param int new_width: output data width
+    """
+    current_width = data.shape[-1]
+    alignment = AlignmentAxis2.from_value(alignment)
+
+    if current_width > new_width:
+        raise ValueError(f"data.shape[-1] ({data.shape[-1]}) > new_width ({new_width}). Unable to crop data")
+    elif current_width == new_width:
+        return data
+    else:
+        if alignment is AlignmentAxis2.CENTER:
+            left_width = (new_width - current_width) // 2
+            right_width = (new_width - current_width) - left_width
+        elif alignment is AlignmentAxis2.LEFT:
+            left_width = 0
+            right_width = new_width - current_width
+        elif alignment is AlignmentAxis2.RIGTH:
+            left_width = new_width - current_width
+            right_width = 0
+        else:
+            raise ValueError(f"alignment {alignment.value} is not handled")
+
+        assert left_width >= 0, f"pad width must be positive - left width isn't ({left_width})"
+        assert right_width >= 0, f"pad width must be positive - right width isn't ({right_width})"
+        return numpy.pad(
+            data,
+            pad_width=((0, 0), (left_width, right_width)),
+            mode=pad_mode,
+        )
+
+
+class PaddedRawData:
+    """
+    Util class to extend a data when necessary
+    Must to aplpy to a volume and to an hdf5dataset - array
+    The idea behind is to avoid loading all the data in memory
+    """
+
+    def __init__(self, data: Union[numpy.ndarray, h5py.Dataset], axis_1_pad_width: tuple) -> None:
+        self._axis_1_pad_width = numpy.array(axis_1_pad_width)
+        if not (self._axis_1_pad_width.size == 2 and self._axis_1_pad_width[0] >= 0 and self._axis_1_pad_width[1] >= 0):
+            raise ValueError(f"'axis_1_pad_width' expects to positive elements. Get {axis_1_pad_width}")
+        self._raw_data = data
+        self._raw_data_end = None
+        # note: for now we return only frames with zeros for padded frames.
+        # in the future we could imagine having a method and miror existing volume or extend the closest frame, or get a mean value...
+        self._empty_frame = None
+        self._dtype = None
+        self._shape = None
+        self._raw_data_shape = self.raw_data.shape
+
+    @staticmethod
+    def get_empty_frame(shape, dtype):
+        return numpy.zeros(
+            shape=shape,
+            dtype=dtype,
+        )
+
+    @property
+    def empty_frame(self):
+        if self._empty_frame is None:
+            self._empty_frame = self.get_empty_frame(
+                shape=(self.shape[0], 1, self.shape[2]),
+                dtype=self.dtype,
+            )
+        return self._empty_frame
+
+    @property
+    def shape(self):
+        if self._shape is None:
+            self._shape = tuple(
+                (
+                    self._raw_data_shape[0],
+                    numpy.sum(
+                        numpy.array(self._axis_1_pad_width),
+                    )
+                    + self._raw_data_shape[1],
+                    self._raw_data_shape[2],
+                )
+            )
+        return self._shape
+
+    @property
+    def raw_data(self):
+        return self._raw_data
+
+    @property
+    def raw_data_start(self):
+        return self._axis_1_pad_width[0]
+
+    @property
+    def raw_data_end(self):
+        if self._raw_data_end is None:
+            self._raw_data_end = self._axis_1_pad_width[0] + self._raw_data_shape[1]
+        return self._raw_data_end
+
+    @property
+    def dtype(self):
+        if self._dtype is None:
+            self._dtype = self.raw_data.dtype
+        return self._dtype
+
+    def __getitem__(self, args):
+        if not isinstance(args, tuple) and len(args) == 3:
+            raise ValueError("only handles 3D slicing")
+        elif not (args[0] == slice(None, None, None) and args[2] == slice(None, None, None)):
+            raise ValueError(
+                "slicing only handled along axis 1. First and third tuple item are expected to be empty slice as slice(None, None, None)"
+            )
+        else:
+            if numpy.isscalar(args[1]):
+                args = (
+                    args[0],
+                    slice(args[1], args[1] + 1, 1),
+                    args[2],
+                )
+
+            start = args[1].start
+            if start is None:
+                start = 0
+            stop = args[1].stop
+            if stop is None:
+                stop = self.shape[1]
+            step = args[1].step
+            # some test
+            if start < 0 or stop < 0:
+                raise ValueError("only positive position are handled")
+            if start >= stop:
+                raise ValueError("start >= stop")
+            if stop > self.shape[1]:
+                raise ValueError("stop > self.shape[1]")
+            if step not in (1, None):
+                raise ValueError("for now PaddedVolume only handles steps of 1")
+
+            first_part_array = None
+            if start < self.raw_data_start and (stop - start > 0):
+                stop_first_part = min(stop, self.raw_data_start)
+                first_part_array = numpy.repeat(self.empty_frame, repeats=stop_first_part - start, axis=1)
+                start = stop_first_part
+
+            third_part_array = None
+            if stop > self.raw_data_end and (stop - start > 0):
+                if stop > self.shape[1]:
+                    raise ValueError("requested slice is out of boundaries")
+                start_third_part = max(start, self.raw_data_end)
+                third_part_array = numpy.repeat(self.empty_frame, repeats=stop - start_third_part, axis=1)
+                stop = self.raw_data_end
+
+            if start >= self.raw_data_start and stop >= self.raw_data_start and (stop - start > 0):
+                second_part_array = self.raw_data[:, start - self.raw_data_start : stop - self.raw_data_start, :]
+            else:
+                second_part_array = None
+
+            parts = tuple(filter(lambda a: a is not None, (first_part_array, second_part_array, third_part_array)))
+            return numpy.hstack(
+                parts,
+            )