nabu 2023.2.1__py3-none-any.whl → 2024.1.0rc3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- doc/conf.py +1 -1
- doc/doc_config.py +32 -0
- nabu/__init__.py +2 -1
- nabu/app/bootstrap_stitching.py +1 -1
- nabu/app/cli_configs.py +122 -2
- nabu/app/composite_cor.py +27 -2
- nabu/app/correct_rot.py +70 -0
- nabu/app/create_distortion_map_from_poly.py +42 -18
- nabu/app/diag_to_pix.py +358 -0
- nabu/app/diag_to_rot.py +449 -0
- nabu/app/generate_header.py +4 -3
- nabu/app/histogram.py +2 -2
- nabu/app/multicor.py +6 -1
- nabu/app/parse_reconstruction_log.py +151 -0
- nabu/app/prepare_weights_double.py +83 -22
- nabu/app/reconstruct.py +5 -1
- nabu/app/reconstruct_helical.py +7 -0
- nabu/app/reduce_dark_flat.py +6 -3
- nabu/app/rotate.py +4 -4
- nabu/app/stitching.py +16 -2
- nabu/app/tests/test_reduce_dark_flat.py +18 -2
- nabu/app/validator.py +4 -4
- nabu/cuda/convolution.py +8 -376
- nabu/cuda/fft.py +4 -0
- nabu/cuda/kernel.py +4 -4
- nabu/cuda/medfilt.py +5 -158
- nabu/cuda/padding.py +5 -71
- nabu/cuda/processing.py +23 -2
- nabu/cuda/src/ElementOp.cu +78 -0
- nabu/cuda/src/backproj.cu +28 -2
- nabu/cuda/src/fourier_wavelets.cu +2 -2
- nabu/cuda/src/normalization.cu +23 -0
- nabu/cuda/src/padding.cu +2 -2
- nabu/cuda/src/transpose.cu +16 -0
- nabu/cuda/utils.py +39 -0
- nabu/estimation/alignment.py +10 -1
- nabu/estimation/cor.py +808 -38
- nabu/estimation/cor_sino.py +7 -9
- nabu/estimation/tests/test_cor.py +85 -3
- nabu/io/reader.py +26 -18
- nabu/io/tests/test_cast_volume.py +3 -3
- nabu/io/tests/test_detector_distortion.py +3 -3
- nabu/io/tiffwriter_zmm.py +2 -2
- nabu/io/utils.py +14 -4
- nabu/io/writer.py +5 -3
- nabu/misc/fftshift.py +6 -0
- nabu/misc/histogram.py +5 -285
- nabu/misc/histogram_cuda.py +8 -104
- nabu/misc/kernel_base.py +3 -121
- nabu/misc/padding_base.py +5 -69
- nabu/misc/processing_base.py +3 -107
- nabu/misc/rotation.py +5 -62
- nabu/misc/rotation_cuda.py +5 -65
- nabu/misc/transpose.py +6 -0
- nabu/misc/unsharp.py +3 -78
- nabu/misc/unsharp_cuda.py +5 -52
- nabu/misc/unsharp_opencl.py +8 -85
- nabu/opencl/fft.py +6 -0
- nabu/opencl/kernel.py +21 -6
- nabu/opencl/padding.py +5 -72
- nabu/opencl/processing.py +27 -5
- nabu/opencl/src/backproj.cl +3 -3
- nabu/opencl/src/fftshift.cl +65 -12
- nabu/opencl/src/padding.cl +2 -2
- nabu/opencl/src/roll.cl +96 -0
- nabu/opencl/src/transpose.cl +16 -0
- nabu/pipeline/config_validators.py +63 -3
- nabu/pipeline/dataset_validator.py +2 -2
- nabu/pipeline/estimators.py +193 -35
- nabu/pipeline/fullfield/chunked.py +34 -17
- nabu/pipeline/fullfield/chunked_cuda.py +7 -5
- nabu/pipeline/fullfield/computations.py +48 -13
- nabu/pipeline/fullfield/nabu_config.py +13 -13
- nabu/pipeline/fullfield/processconfig.py +10 -5
- nabu/pipeline/fullfield/reconstruction.py +1 -2
- nabu/pipeline/helical/fbp.py +5 -0
- nabu/pipeline/helical/filtering.py +12 -9
- nabu/pipeline/helical/gridded_accumulator.py +179 -33
- nabu/pipeline/helical/helical_chunked_regridded.py +262 -151
- nabu/pipeline/helical/helical_chunked_regridded_cuda.py +4 -11
- nabu/pipeline/helical/helical_reconstruction.py +56 -18
- nabu/pipeline/helical/span_strategy.py +1 -1
- nabu/pipeline/helical/tests/test_accumulator.py +4 -0
- nabu/pipeline/params.py +23 -2
- nabu/pipeline/processconfig.py +3 -8
- nabu/pipeline/tests/test_chunk_reader.py +78 -0
- nabu/pipeline/tests/test_estimators.py +120 -2
- nabu/pipeline/utils.py +25 -0
- nabu/pipeline/writer.py +2 -0
- nabu/preproc/ccd_cuda.py +9 -7
- nabu/preproc/ctf.py +21 -26
- nabu/preproc/ctf_cuda.py +25 -25
- nabu/preproc/double_flatfield.py +14 -2
- nabu/preproc/double_flatfield_cuda.py +7 -11
- nabu/preproc/flatfield_cuda.py +23 -27
- nabu/preproc/phase.py +19 -24
- nabu/preproc/phase_cuda.py +21 -21
- nabu/preproc/shift_cuda.py +58 -28
- nabu/preproc/tests/test_ctf.py +5 -5
- nabu/preproc/tests/test_double_flatfield.py +2 -2
- nabu/preproc/tests/test_vshift.py +13 -2
- nabu/processing/__init__.py +0 -0
- nabu/processing/convolution_cuda.py +375 -0
- nabu/processing/fft_base.py +163 -0
- nabu/processing/fft_cuda.py +256 -0
- nabu/processing/fft_opencl.py +54 -0
- nabu/processing/fftshift.py +134 -0
- nabu/processing/histogram.py +286 -0
- nabu/processing/histogram_cuda.py +103 -0
- nabu/processing/kernel_base.py +126 -0
- nabu/processing/medfilt_cuda.py +159 -0
- nabu/processing/muladd.py +29 -0
- nabu/processing/muladd_cuda.py +68 -0
- nabu/processing/padding_base.py +71 -0
- nabu/processing/padding_cuda.py +75 -0
- nabu/processing/padding_opencl.py +77 -0
- nabu/processing/processing_base.py +123 -0
- nabu/processing/roll_opencl.py +64 -0
- nabu/processing/rotation.py +63 -0
- nabu/processing/rotation_cuda.py +66 -0
- nabu/processing/tests/__init__.py +0 -0
- nabu/processing/tests/test_fft.py +268 -0
- nabu/processing/tests/test_fftshift.py +71 -0
- nabu/{misc → processing}/tests/test_histogram.py +2 -4
- nabu/{cuda → processing}/tests/test_medfilt.py +1 -1
- nabu/processing/tests/test_muladd.py +54 -0
- nabu/{cuda → processing}/tests/test_padding.py +119 -75
- nabu/processing/tests/test_roll.py +63 -0
- nabu/{misc → processing}/tests/test_rotation.py +3 -2
- nabu/processing/tests/test_transpose.py +72 -0
- nabu/{misc → processing}/tests/test_unsharp.py +41 -8
- nabu/processing/transpose.py +126 -0
- nabu/processing/unsharp.py +79 -0
- nabu/processing/unsharp_cuda.py +53 -0
- nabu/processing/unsharp_opencl.py +75 -0
- nabu/reconstruction/fbp.py +34 -10
- nabu/reconstruction/fbp_base.py +35 -16
- nabu/reconstruction/fbp_opencl.py +7 -12
- nabu/reconstruction/filtering.py +2 -2
- nabu/reconstruction/filtering_cuda.py +13 -14
- nabu/reconstruction/filtering_opencl.py +3 -4
- nabu/reconstruction/projection.py +2 -0
- nabu/reconstruction/rings.py +158 -1
- nabu/reconstruction/rings_cuda.py +218 -58
- nabu/reconstruction/sinogram_cuda.py +16 -12
- nabu/reconstruction/tests/test_deringer.py +116 -14
- nabu/reconstruction/tests/test_fbp.py +22 -31
- nabu/reconstruction/tests/test_filtering.py +11 -2
- nabu/resources/dataset_analyzer.py +89 -26
- nabu/resources/nxflatfield.py +2 -2
- nabu/resources/tests/test_nxflatfield.py +1 -1
- nabu/resources/utils.py +9 -2
- nabu/stitching/alignment.py +184 -0
- nabu/stitching/config.py +241 -39
- nabu/stitching/definitions.py +6 -0
- nabu/stitching/frame_composition.py +4 -2
- nabu/stitching/overlap.py +99 -3
- nabu/stitching/sample_normalization.py +60 -0
- nabu/stitching/slurm_utils.py +10 -10
- nabu/stitching/tests/test_alignment.py +99 -0
- nabu/stitching/tests/test_config.py +16 -1
- nabu/stitching/tests/test_overlap.py +68 -2
- nabu/stitching/tests/test_sample_normalization.py +49 -0
- nabu/stitching/tests/test_slurm_utils.py +5 -5
- nabu/stitching/tests/test_utils.py +3 -33
- nabu/stitching/tests/test_z_stitching.py +391 -22
- nabu/stitching/utils.py +144 -202
- nabu/stitching/z_stitching.py +309 -126
- nabu/testutils.py +18 -0
- nabu/thirdparty/tomocupy_remove_stripe.py +586 -0
- nabu/utils.py +32 -6
- {nabu-2023.2.1.dist-info → nabu-2024.1.0rc3.dist-info}/LICENSE +1 -1
- {nabu-2023.2.1.dist-info → nabu-2024.1.0rc3.dist-info}/METADATA +5 -5
- nabu-2024.1.0rc3.dist-info/RECORD +296 -0
- {nabu-2023.2.1.dist-info → nabu-2024.1.0rc3.dist-info}/WHEEL +1 -1
- {nabu-2023.2.1.dist-info → nabu-2024.1.0rc3.dist-info}/entry_points.txt +5 -1
- nabu/conftest.py +0 -14
- nabu/opencl/fftshift.py +0 -92
- nabu/opencl/tests/test_fftshift.py +0 -55
- nabu/opencl/tests/test_padding.py +0 -84
- nabu-2023.2.1.dist-info/RECORD +0 -252
- /nabu/cuda/src/{fftshift.cu → dfi_fftshift.cu} +0 -0
- {nabu-2023.2.1.dist-info → nabu-2024.1.0rc3.dist-info}/top_level.txt +0 -0
nabu/estimation/cor_sino.py
CHANGED
@@ -34,18 +34,17 @@ class SinoCor:
         - accurate. Try to refine COR to 1/10 pixel
     """

-    def __init__(self,
+    def __init__(self, img_1, img_2, logger=None):
         """ """
         self.logger = LoggerOrPrint(logger)
-        self.sx =
-        self.sy = sinogram.shape[0]
+        self.sx = img_1.shape[1]

-        # algorithm cannot accept odd number of projs
-        nproj2 =
+        # algorithm cannot accept odd number of projs. This is handled in the SinoCORFinder class.
+        nproj2 = img_1.shape[0]

         # extract upper and lower part of sinogram, flipping H the upper part
-        self.data1 =
-        self.data2 =
+        self.data1 = img_1
+        self.data2 = img_2

         self.rcor_abs = round(self.sx / 2.0)
         self.cor_acc = round(self.sx / 2.0)
@@ -177,8 +176,7 @@ class SinoCorInterface:
         self._logger = logger

     def find_shift(self, img_1, img_2, side="right", window_width=None, neighborhood=7, shift_value=0.1, **kwargs):
-
-        cor_finder = SinoCor(sinogram, logger=self._logger)
+        cor_finder = SinoCor(img_1, img_2, logger=self._logger)
         cor_finder.estimate_cor_coarse(side=side, window_width=window_width)
         cor = cor_finder.estimate_cor_fine(neighborhood=neighborhood, shift_value=shift_value)
         # offset will be added later - keep compatibility with result from AlignmentBase.find_shift()
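
Usage sketch (illustrative, not part of the diff): with the constructor change above, SinoCor no longer slices the sinogram itself; the caller passes the two half-turn images, the second one already flipped horizontally, as SinoCorInterface.find_shift and the updated test do. A minimal example with a synthetic sinogram standing in for real data (the estimate is meaningless on random input):

    import numpy as np
    from nabu.estimation.cor_sino import SinoCor

    # Synthetic full-turn sinogram: (n_angles, n_pixels), even number of projections
    sinogram = np.random.rand(500, 2048).astype("f")
    nproj2 = sinogram.shape[0] // 2

    img_1 = sinogram[:nproj2]              # first half-turn
    img_2 = np.fliplr(sinogram[nproj2:])   # second half-turn, flipped horizontally

    finder = SinoCor(img_1, img_2)
    finder.estimate_cor_coarse(side="right", window_width=None)
    cor = finder.estimate_cor_fine(neighborhood=7, shift_value=0.1)
    print(cor)  # scalar CoR estimate, in pixels from the left edge of the detector (see the test below)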
nabu/estimation/tests/test_cor.py
CHANGED
@@ -5,11 +5,14 @@ import scipy.ndimage
 import h5py
 from nabu.testutils import utilstest, __do_long_tests__
 from nabu.testutils import get_data as nabu_get_data
+
 from nabu.estimation.cor import (
     CenterOfRotation,
     CenterOfRotationAdaptiveSearch,
     CenterOfRotationGrowingWindow,
     CenterOfRotationSlidingWindow,
+    CenterOfRotationFourierAngles,
+    CenterOfRotationOctaveAccurate,
 )
 from nabu.estimation.cor_sino import SinoCor

@@ -30,6 +33,27 @@ def bootstrap_cor_win(request):
     cls.data_ha_sino, cls.cor_ha_sn_pix = get_cor_win_sino_data_h5("halftomo_1_sino.npz")


+@pytest.fixture(scope="class")
+def bootstrap_cor_accurate(request):
+    cls = request.cls
+    cls.abs_tol = 0.2
+    cls.image_pair_stylo, cls.cor_pos_abs_stylo = get_cor_win_proj_data_h5("stylo_accurate.npz")
+    cls.image_pair_blc12781, cls.cor_pos_abs_blc12781 = get_cor_win_proj_data_h5("blc12781_accurate.npz")
+
+
+@pytest.fixture(scope="class")
+def bootstrap_cor_fourier(request):
+    cls = request.cls
+    cls.abs_tol = 0.2
+    dataset_relpath = os.path.join("sino_bamboo_hercules_for_test.npz")
+    dataset_downloaded_path = utilstest.getfile(dataset_relpath)
+    a = np.load(dataset_downloaded_path)
+    cls.sinos = a["sinos"]
+    cls.angles = a["angles"]
+    cls.true_cor = a["true_cor"]
+    cls.estimated_cor_from_motor = a["estimated_cor_from_motor"]
+
+
 def get_cor_data_h5(*dataset_path):
     """
     Get a dataset file from silx.org/pub/nabu/data
@@ -241,6 +265,7 @@ class TestCor:
         assert "Images need to be of the same shape" in str(ex.value), message


+@pytest.mark.skipif(not (__do_long_tests__), reason="Need NABU_LONG_TESTS=1 for this test")
 @pytest.mark.usefixtures("bootstrap_cor", "bootstrap_cor_win")
 class TestCorWindowSlide:
     def test_proj_center_axis_lft(self):
@@ -309,6 +334,7 @@ class TestCorWindowSlide:
         assert np.isclose(self.cor_ha_sn_pix, cor_position, atol=self.abs_tol * 5), message


+@pytest.mark.skipif(not (__do_long_tests__), reason="need NABU_LONG_TESTS for this test")
 @pytest.mark.usefixtures("bootstrap_cor", "bootstrap_cor_win")
 class TestCorWindowGrow:
     def test_proj_center_axis_cen(self):
@@ -379,12 +405,68 @@ class TestCoarseToFineSinoCor:
         Test nabu.estimation.cor_sino.SinoCor
         """
         sino_halftomo = np.vstack([self.data_ha_sino[0], self.data_ha_sino[1]])
-        sino_cor = SinoCor(
+        sino_cor = SinoCor(self.data_ha_sino[0], np.fliplr(self.data_ha_sino[1]))
         cor_coarse = sino_cor.estimate_cor_coarse()
-        assert np.isscalar(cor_coarse), f"cor_position expected to be a
+        assert np.isscalar(cor_coarse), f"cor_position expected to be a scalar, {type(cor_coarse)} returned"
         cor_fine = sino_cor.estimate_cor_fine()
-        assert np.isscalar(cor_fine), f"cor_position expected to be a scale, {type(
+        assert np.isscalar(cor_fine), f"cor_position expected to be a scale, {type(cor_fine)} returned"

         cor_ref = self.cor_ha_sn_pix + sino_halftomo.shape[-1] / 2.0
         message = "Computed CoR %f " % cor_fine + " and expected CoR %f do not coincide" % cor_ref
         assert abs(cor_fine - cor_ref) < self.abs_tol * 2, message
+
+
+@pytest.mark.usefixtures("bootstrap_cor_accurate")
+class TestCorOctaveAccurate:
+    def test_cor_accurate_positive_shift(self):
+        detector_width = self.image_pair_stylo[0].shape[1]
+        CoR_calc = CenterOfRotationOctaveAccurate()
+        cor_position = CoR_calc.find_shift(self.image_pair_stylo[0], np.fliplr(self.image_pair_stylo[1]), "center")
+        cor_position = cor_position + detector_width / 2
+        assert np.isscalar(cor_position), f"cor_position expected to be a scalar, {type(cor_position)} returned"
+        message = f"Computed CoR {cor_position} and expected CoR {self.cor_pos_abs_stylo} do not coincide."
+        assert np.isclose(self.cor_pos_abs_stylo, cor_position, atol=self.abs_tol), message
+
+    def test_cor_accurate_negative_shift(self):
+        detector_width = self.image_pair_blc12781[0].shape[1]
+        CoR_calc = CenterOfRotationOctaveAccurate()
+        cor_position = CoR_calc.find_shift(
+            self.image_pair_blc12781[0], np.fliplr(self.image_pair_blc12781[1]), "center"
+        )
+        cor_position = cor_position + detector_width / 2
+        assert np.isscalar(cor_position), f"cor_position expected to be a scalar, {type(cor_position)} returned"
+        message = f"Computed CoR {cor_position} and expected CoR {self.cor_pos_abs_blc12781} do not coincide."
+        assert np.isclose(self.cor_pos_abs_blc12781, cor_position, atol=self.abs_tol), message
+
+
+@pytest.mark.usefixtures("bootstrap_cor_fourier", "bootstrap_cor_win")
+class TestCorFourierAngle:
+    def test_sino_right_axis_with_near_pos(self):
+        sino1 = self.data_ha_sino[0, :, :]
+        sino2 = np.fliplr(self.data_ha_sino[1, :, :])
+        start_angle = np.pi / 4
+        angles = np.linspace(start_angle, start_angle + 2 * np.pi, 2 * sino1.shape[0])
+
+        cor_options = {"side": 740, "refine": True}
+
+        CoR_calc = CenterOfRotationFourierAngles(cor_options=cor_options)
+        cor_position = CoR_calc.find_shift(sino1, sino2, angles, side="right")
+        assert np.isscalar(cor_position), f"cor_position expected to be a scale, {type(cor_position)} returned"
+
+        message = "Computed CoR %f " % cor_position + " and expected CoR %f do not coincide" % self.cor_ha_sn_pix
+        assert np.isclose(self.cor_ha_sn_pix, cor_position, atol=self.abs_tol * 3), message
+
+    def test_sino_right_axis_with_ignore(self):
+        sino1 = self.data_ha_sino[0, :, :]
+        sino2 = np.fliplr(self.data_ha_sino[1, :, :])
+        start_angle = np.pi / 4
+        angles = np.linspace(start_angle, start_angle + 2 * np.pi, 2 * sino1.shape[0])
+
+        cor_options = {"side": "ignore", "refine": True}
+
+        CoR_calc = CenterOfRotationFourierAngles(cor_options=cor_options)
+        cor_position = CoR_calc.find_shift(sino1, sino2, angles, side="right")
+        assert np.isscalar(cor_position), f"cor_position expected to be a scale, {type(cor_position)} returned"
+
+        message = "Computed CoR %f " % cor_position + " and expected CoR %f do not coincide" % self.cor_ha_sn_pix
+        assert np.isclose(self.cor_ha_sn_pix, cor_position, atol=self.abs_tol * 3), message
nabu/io/reader.py
CHANGED
@@ -9,9 +9,9 @@ from ..misc.binning import binning as image_binning
 from ..utils import subsample_dict, get_3D_subregion, get_num_threads

 try:
-    from
+    from fabio.edfimage import EdfImage
 except ImportError:
-
+    EdfImage = None


 class Reader:
@@ -122,21 +122,21 @@ class EDFReader(Reader):
         A class for reading series of EDF Files.
         Multi-frames EDF are not supported.
         """
-        if
-            raise ImportError("Need
+        if EdfImage is None:
+            raise ImportError("Need fabio to use this reader")
         super().__init__(sub_region=sub_region)
+        self._reader = EdfImage()
+        self._first_fname = None

     def read(self, fname):
-
-
-
-
-
-            Size=(self.end_x - self.start_x, self.end_y - self.start_y),
-        )
+        if self._first_fname is None:
+            self._first_fname = fname
+        self._reader.read(fname)
+        if self.sub_region is None:
+            data = self._reader.data
         else:
-            data =
-
+            data = self._reader.fast_read_roi(fname, (slice(self.start_y, self.end_y), slice(self.start_x, self.end_x)))
+        self._reader.close()
         return data

     def get_data(self, data_url):
@@ -279,9 +279,10 @@ class ChunkReader:
         Whether to bin the data. If multi-dimensional binning is done,
         the parameter must be in the form (binning_x, binning_y).
         Each image will be binned by these factors.
-    dataset_subsampling: int, optional
-
-        then one image out of `n` will be read.
+    dataset_subsampling: int or tuple, optional
+        Subsampling factor when reading the images.
+        If an integer `n` is provided, then one image out of `n` will be read.
+        If a tuple of integers (step, begin) is given, the data is read as data[begin::step]
     num_threads: int, optional
         Number of threads to use for binning the data.
         Default is to use all available threads.
@@ -328,6 +329,10 @@ class ChunkReader:
     def _set_files(self, files):
         if len(files) == 0:
             raise ValueError("Expected at least one data file")
+        self._files_begin_idx = 0
+        if isinstance(self.dataset_subsampling, (tuple, list)):
+            self._files_begin_idx = self.dataset_subsampling[1]
+            self.dataset_subsampling = self.dataset_subsampling[0]
         self.n_files = len(files)
         self.files = files
         self._sorted_files_indices = sorted(files.keys())
@@ -469,10 +474,13 @@ class ChunkReader:
             self._fileindex_to_idx[fileidx] = i

     def _load_multi(self):
-        urls_compacted = get_compacted_dataslices(
+        urls_compacted = get_compacted_dataslices(
+            self.files, subsampling=self.dataset_subsampling, begin=self._files_begin_idx
+        )
         loaded = {}
         start_idx = 0
-
+        sorted_files_indices = sorted(urls_compacted.keys())
+        for idx in sorted_files_indices:
             url = urls_compacted[idx]
             url_str = str(url)
             is_loaded = loaded.get(url_str, False)
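
The (step, begin) form of dataset_subsampling documented above follows plain numpy slicing: _set_files() splits the tuple into a begin index and an integer step, and the frames read are data[begin::step]. A short sketch of the semantics (the frames array is a stand-in for a projection stack):

    import numpy as np

    frames = np.arange(12)                 # stand-in for 12 projection indices

    dataset_subsampling = (4, 1)           # (step, begin), the new tuple form
    step, begin = dataset_subsampling      # what ChunkReader._set_files() does internally
    print(frames[begin::step])             # [1 5 9]: one frame out of 4, starting at index 1

    print(frames[::4])                     # [0 4 8]: the integer form keeps the old behaviour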
nabu/io/tests/test_cast_volume.py
CHANGED
@@ -14,7 +14,7 @@ from tomoscan.esrf.volume import (
 )
 from nabu.io.writer import __have_jp2k__
 from tomoscan.esrf.scan.edfscan import EDFTomoScan
-from tomoscan.esrf.scan.
+from tomoscan.esrf.scan.nxtomoscan import NXtomoScan
 import pytest
 import h5py
 import os
@@ -137,7 +137,7 @@ def test_find_histogram_single_frame_volume(tmp_path):

     assert find_histogram(
         volume=volume,
-        scan=
+        scan=NXtomoScan(scan=str(tmp_path), entry="entry"),
     ) == DataUrl(
         file_path=histogram_file,
         data_path="entry",
@@ -174,7 +174,7 @@ def test_find_histogram_multi_tiff_volume(tmp_path):

     assert find_histogram(
         volume=volume,
-        scan=
+        scan=NXtomoScan(scan=str(tmp_path), entry="entry"),
     ) == DataUrl(
         file_path=histogram_file,
         data_path="entry",
nabu/io/tests/test_detector_distortion.py
CHANGED
@@ -1,9 +1,9 @@
+import pytest
 import numpy as np
 import scipy.ndimage
-from nabu.io.detector_distortion import DetectorDistortionBase
 from scipy import sparse
-from nabu.
-import
+from nabu.io.detector_distortion import DetectorDistortionBase
+from nabu.processing.rotation import Rotation, __have__skimage__

 if __have__skimage__:
     import skimage
nabu/io/tiffwriter_zmm.py
CHANGED
@@ -83,10 +83,10 @@ class TIFFWriter(StandardTIFFWriter): # pylint: disable=E0102
         part_mm = int(value_mm)
         rest_um = (value_mm - part_mm) * 1000
         part_um = int(rest_um)
-        rest_nm = (rest_um - part_um) *
+        rest_nm = (rest_um - part_um) * 10
         part_nm = int(rest_nm)

-        curr_rel_filename = prefix + "_{}{:
+        curr_rel_filename = prefix + "_{}{:04d}p{:03d}{:1d}".format(sign, part_mm, part_um, part_nm) + ext

         fname = path.join(dirname, curr_rel_filename)
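
Worked example (illustrative): the new pattern encodes the z position as millimetres, micrometres, and a final digit in tenths of a micrometre (despite the rest_nm variable name, the factor is 10, not 1000). The prefix, sign and ext values below are assumptions, since they are computed elsewhere in the class:

    value_mm = 12.25                            # position: 12 mm + 250 µm + 0.0 µm
    sign, prefix, ext = "", "rec", ".tiff"      # hypothetical; set elsewhere in TIFFWriter
    part_mm = int(value_mm)                     # 12
    rest_um = (value_mm - part_mm) * 1000       # 250.0
    part_um = int(rest_um)                      # 250
    rest_nm = (rest_um - part_um) * 10          # 0.0 (tenths of a micrometre)
    part_nm = int(rest_nm)
    name = prefix + "_{}{:04d}p{:03d}{:1d}".format(sign, part_mm, part_um, part_nm) + ext
    print(name)                                 # rec_0012p2500.tiff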
nabu/io/utils.py
CHANGED
@@ -1,6 +1,5 @@
 import os
 from typing import Optional
-import warnings
 import contextlib
 import h5py
 import numpy as np
@@ -11,7 +10,7 @@ from tomoscan.io import HDF5File


 # This function might be moved elsewhere
-def get_compacted_dataslices(urls, subsampling=None):
+def get_compacted_dataslices(urls, subsampling=None, begin=0):
     """
     Regroup urls to get the data more efficiently.
     Build a structure mapping files indices to information on
@@ -54,7 +53,9 @@ def get_compacted_dataslices(urls, subsampling=None):
         return urls

     sorted_files_indices = sorted(urls.keys())
-
+    # if begin > 0:
+    #     sorted_files_indices = sorted_files_indices[begin:]
+    idx0 = sorted_files_indices[begin]
     first_url = urls[idx0]

     merged_indices = [[idx0]]
@@ -62,7 +63,11 @@ def get_compacted_dataslices(urls, subsampling=None):
    data_location = [[first_url.file_path(), first_url.data_path(), _convert_to_slice(first_url.data_slice())]]
    pos = 0
    curr_fp, curr_dp, curr_slice = data_location[pos]
-
+    skip_next = 0
+    for idx in sorted_files_indices[begin + 1 :]:
+        if skip_next > 1:
+            skip_next -= 1
+            continue
         url = urls[idx]
         next_slice = _convert_to_slice(url.data_slice())
         if (
@@ -74,7 +79,12 @@ def get_compacted_dataslices(urls, subsampling=None):
             merged_slices = merge_slices(curr_slice, next_slice, step=subsampling)
             data_location[pos][-1] = merged_slices
             curr_slice = merged_slices
+            skip_next = 0
         else:  # "jump"
+            if begin > 0 and skip_next == 0:
+                # Skip the "begin" next urls (first of a new block)
+                skip_next = begin
+                continue
             pos += 1
             merged_indices.append([idx])
             data_location.append([url.file_path(), url.data_path(), _convert_to_slice(url.data_slice())])
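
Conceptually, the new begin parameter lets the compaction start the data[begin::step] pattern at an arbitrary frame while still collapsing the retained frames into as few strided reads as possible. A standalone sketch with plain indices (no DataUrl objects), under the simplifying assumption of a single contiguous block of frames:

    # Frames retained by a (step, begin) subsampling, and the single strided
    # slice that a contiguous block of them compacts into.
    step, begin = 3, 2
    indices = list(range(20))                  # stand-in for sorted(urls.keys())
    kept = indices[begin::step]                # [2, 5, 8, 11, 14, 17]
    compacted = slice(kept[0], kept[-1] + 1, step)
    print(kept)
    print(compacted)                           # slice(2, 18, 3)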
nabu/io/writer.py
CHANGED
@@ -795,15 +795,15 @@ class HSTVolWriter(Writer):
         super().__init__(fname)
         self.append = append
         self._vol_writer = RawVolume(fname, overwrite=True, append=append)
+        self._hst_metadata = kwargs.get("hst_metadata", {})

-
-    def generate_metadata(data, **kwargs):
+    def generate_metadata(self, data, **kwargs):
         n_z, n_y, n_x = data.shape
         metadata = {
             "NUM_X": n_x,
             "NUM_Y": n_y,
             "NUM_Z": n_z,
-            "voxelSize":
+            "voxelSize": 40.0,
             "BYTEORDER": "LOWBYTEFIRST",
             "ValMin": kwargs.get("ValMin", 0.0),
             "ValMax": kwargs.get("ValMin", 1.0),
@@ -812,6 +812,8 @@ class HSTVolWriter(Writer):
             "S1": 0.0,
             "S2": 0.0,
         }
+        for key, default_val in metadata.items():
+            metadata[key] = kwargs.get(key, None) or self._hst_metadata.get(key, None) or default_val
         return metadata

     @staticmethod
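
The loop added to generate_metadata resolves each .vol header field with a simple precedence rule: an explicit keyword argument wins over the hst_metadata dict passed to the writer (per the kwargs.get("hst_metadata", {}) above), which in turn wins over the built-in default. A small sketch of that rule, with illustrative values (voxelSize and NUM_X are real keys from the dict above):

    defaults = {"NUM_X": 2048, "voxelSize": 40.0, "BYTEORDER": "LOWBYTEFIRST"}
    hst_metadata = {"voxelSize": 6.5}      # e.g. passed as HSTVolWriter(fname, hst_metadata={"voxelSize": 6.5})
    kwargs = {"NUM_X": 1024}

    resolved = {k: kwargs.get(k, None) or hst_metadata.get(k, None) or default for k, default in defaults.items()}
    print(resolved)   # {'NUM_X': 1024, 'voxelSize': 6.5, 'BYTEORDER': 'LOWBYTEFIRST'}

Note that the chain uses or, so a falsy override such as 0 or 0.0 falls back to the default rather than being kept.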