nabu-2024.1.9-py3-none-any.whl → nabu-2024.2.0-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- nabu/__init__.py +1 -1
- nabu/app/bootstrap.py +2 -3
- nabu/app/cast_volume.py +4 -2
- nabu/app/cli_configs.py +5 -0
- nabu/app/composite_cor.py +1 -1
- nabu/app/create_distortion_map_from_poly.py +5 -6
- nabu/app/diag_to_pix.py +7 -19
- nabu/app/diag_to_rot.py +14 -29
- nabu/app/double_flatfield.py +32 -44
- nabu/app/parse_reconstruction_log.py +3 -0
- nabu/app/reconstruct.py +53 -15
- nabu/app/reconstruct_helical.py +2 -2
- nabu/app/stitching.py +27 -13
- nabu/app/tests/test_reduce_dark_flat.py +4 -1
- nabu/cuda/kernel.py +11 -2
- nabu/cuda/processing.py +2 -2
- nabu/cuda/src/cone.cu +77 -0
- nabu/cuda/src/hierarchical_backproj.cu +271 -0
- nabu/cuda/utils.py +0 -6
- nabu/estimation/alignment.py +5 -19
- nabu/estimation/cor.py +173 -599
- nabu/estimation/cor_sino.py +356 -26
- nabu/estimation/focus.py +63 -11
- nabu/estimation/tests/test_cor.py +124 -58
- nabu/estimation/tests/test_focus.py +6 -6
- nabu/estimation/tilt.py +2 -1
- nabu/estimation/utils.py +5 -33
- nabu/io/__init__.py +1 -1
- nabu/io/cast_volume.py +1 -1
- nabu/io/reader.py +416 -21
- nabu/io/tests/test_readers.py +422 -0
- nabu/io/tests/test_writers.py +1 -102
- nabu/io/writer.py +4 -433
- nabu/opencl/kernel.py +14 -3
- nabu/opencl/processing.py +8 -0
- nabu/pipeline/config_validators.py +5 -2
- nabu/pipeline/datadump.py +12 -5
- nabu/pipeline/estimators.py +162 -188
- nabu/pipeline/fullfield/chunked.py +168 -92
- nabu/pipeline/fullfield/chunked_cuda.py +7 -3
- nabu/pipeline/fullfield/computations.py +2 -7
- nabu/pipeline/fullfield/dataset_validator.py +0 -4
- nabu/pipeline/fullfield/nabu_config.py +37 -13
- nabu/pipeline/fullfield/processconfig.py +22 -13
- nabu/pipeline/fullfield/reconstruction.py +13 -9
- nabu/pipeline/helical/helical_chunked_regridded.py +1 -1
- nabu/pipeline/helical/helical_chunked_regridded_cuda.py +1 -0
- nabu/pipeline/helical/helical_reconstruction.py +1 -1
- nabu/pipeline/params.py +21 -1
- nabu/pipeline/processconfig.py +1 -12
- nabu/pipeline/reader.py +146 -0
- nabu/pipeline/tests/test_estimators.py +44 -72
- nabu/pipeline/utils.py +4 -2
- nabu/pipeline/writer.py +10 -2
- nabu/preproc/ccd_cuda.py +1 -1
- nabu/preproc/ctf.py +14 -7
- nabu/preproc/ctf_cuda.py +2 -3
- nabu/preproc/double_flatfield.py +5 -12
- nabu/preproc/double_flatfield_cuda.py +2 -2
- nabu/preproc/flatfield.py +5 -1
- nabu/preproc/flatfield_cuda.py +5 -1
- nabu/preproc/phase.py +24 -73
- nabu/preproc/phase_cuda.py +5 -8
- nabu/preproc/tests/test_ctf.py +11 -7
- nabu/preproc/tests/test_flatfield.py +67 -122
- nabu/preproc/tests/test_paganin.py +54 -30
- nabu/processing/azim.py +206 -0
- nabu/processing/convolution_cuda.py +1 -1
- nabu/processing/fft_cuda.py +15 -17
- nabu/processing/histogram.py +2 -0
- nabu/processing/histogram_cuda.py +2 -1
- nabu/processing/kernel_base.py +3 -0
- nabu/processing/muladd_cuda.py +1 -0
- nabu/processing/padding_opencl.py +1 -1
- nabu/processing/roll_opencl.py +1 -0
- nabu/processing/rotation_cuda.py +2 -2
- nabu/processing/tests/test_fft.py +17 -10
- nabu/processing/unsharp_cuda.py +1 -1
- nabu/reconstruction/cone.py +104 -40
- nabu/reconstruction/fbp.py +3 -0
- nabu/reconstruction/fbp_base.py +7 -2
- nabu/reconstruction/filtering.py +20 -7
- nabu/reconstruction/filtering_cuda.py +7 -1
- nabu/reconstruction/hbp.py +424 -0
- nabu/reconstruction/mlem.py +99 -0
- nabu/reconstruction/reconstructor.py +2 -0
- nabu/reconstruction/rings_cuda.py +19 -19
- nabu/reconstruction/sinogram_cuda.py +1 -0
- nabu/reconstruction/sinogram_opencl.py +3 -1
- nabu/reconstruction/tests/test_cone.py +10 -5
- nabu/reconstruction/tests/test_deringer.py +7 -6
- nabu/reconstruction/tests/test_fbp.py +124 -10
- nabu/reconstruction/tests/test_filtering.py +13 -11
- nabu/reconstruction/tests/test_halftomo.py +30 -4
- nabu/reconstruction/tests/test_mlem.py +91 -0
- nabu/reconstruction/tests/test_reconstructor.py +8 -3
- nabu/resources/dataset_analyzer.py +142 -92
- nabu/resources/gpu.py +1 -0
- nabu/resources/nxflatfield.py +134 -125
- nabu/resources/templates/id16a_fluo.conf +42 -0
- nabu/resources/tests/test_extract.py +10 -0
- nabu/resources/tests/test_nxflatfield.py +2 -2
- nabu/stitching/alignment.py +80 -24
- nabu/stitching/config.py +105 -68
- nabu/stitching/definitions.py +1 -0
- nabu/stitching/frame_composition.py +68 -60
- nabu/stitching/overlap.py +91 -51
- nabu/stitching/single_axis_stitching.py +32 -0
- nabu/stitching/slurm_utils.py +6 -6
- nabu/stitching/stitcher/__init__.py +0 -0
- nabu/stitching/stitcher/base.py +124 -0
- nabu/stitching/stitcher/dumper/__init__.py +3 -0
- nabu/stitching/stitcher/dumper/base.py +94 -0
- nabu/stitching/stitcher/dumper/postprocessing.py +356 -0
- nabu/stitching/stitcher/dumper/preprocessing.py +60 -0
- nabu/stitching/stitcher/post_processing.py +555 -0
- nabu/stitching/stitcher/pre_processing.py +1068 -0
- nabu/stitching/stitcher/single_axis.py +484 -0
- nabu/stitching/stitcher/stitcher.py +0 -0
- nabu/stitching/stitcher/y_stitcher.py +13 -0
- nabu/stitching/stitcher/z_stitcher.py +45 -0
- nabu/stitching/stitcher_2D.py +278 -0
- nabu/stitching/tests/test_config.py +12 -37
- nabu/stitching/tests/test_frame_composition.py +33 -59
- nabu/stitching/tests/test_overlap.py +149 -7
- nabu/stitching/tests/test_utils.py +1 -1
- nabu/stitching/tests/test_y_preprocessing_stitching.py +132 -0
- nabu/stitching/tests/{test_z_stitching.py → test_z_postprocessing_stitching.py} +167 -561
- nabu/stitching/tests/test_z_preprocessing_stitching.py +431 -0
- nabu/stitching/utils/__init__.py +1 -0
- nabu/stitching/utils/post_processing.py +281 -0
- nabu/stitching/utils/tests/test_post-processing.py +21 -0
- nabu/stitching/{utils.py → utils/utils.py} +79 -52
- nabu/stitching/y_stitching.py +27 -0
- nabu/stitching/z_stitching.py +32 -2263
- nabu/testutils.py +1 -152
- nabu/thirdparty/tomocupy_remove_stripe.py +43 -9
- nabu/utils.py +158 -61
- {nabu-2024.1.9.dist-info → nabu-2024.2.0.dist-info}/METADATA +10 -3
- {nabu-2024.1.9.dist-info → nabu-2024.2.0.dist-info}/RECORD +144 -121
- nabu/io/tiffwriter_zmm.py +0 -99
- nabu/pipeline/fallback_utils.py +0 -149
- nabu/pipeline/helical/tests/test_accumulator.py +0 -158
- nabu/pipeline/helical/tests/test_pipeline_elements_full.py +0 -355
- nabu/pipeline/helical/tests/test_strategy.py +0 -61
- nabu/pipeline/helical/utils.py +0 -51
- nabu/pipeline/tests/test_chunk_reader.py +0 -74
- {nabu-2024.1.9.dist-info → nabu-2024.2.0.dist-info}/LICENSE +0 -0
- {nabu-2024.1.9.dist-info → nabu-2024.2.0.dist-info}/WHEEL +0 -0
- {nabu-2024.1.9.dist-info → nabu-2024.2.0.dist-info}/entry_points.txt +0 -0
- {nabu-2024.1.9.dist-info → nabu-2024.2.0.dist-info}/top_level.txt +0 -0
nabu/io/writer.py
CHANGED
@@ -5,18 +5,17 @@ from posixpath import join as posix_join
 from datetime import datetime
 import numpy as np
 from h5py import VirtualSource, VirtualLayout
-from silx.io.dictdump import dicttoh5
+from silx.io.dictdump import dicttoh5
 from silx.io.url import DataUrl
 
 try:
     from tomoscan.io import HDF5File
 except:
     from h5py import File as HDF5File
-from tomoscan.esrf import
+from tomoscan.esrf import RawVolume
 from tomoscan.esrf.volume.jp2kvolume import has_glymur as __have_jp2k__
 from .. import version as nabu_version
-from ..utils import merged_shape
-from ..misc.utils import rescale_data
+from ..utils import merged_shape
 from .utils import convert_dict_values
 
 
@@ -39,169 +38,6 @@ class Writer:
         return self.fname
 
 
-class TomoscanNXProcessWriter(Writer):
-    """
-    A class to write Nexus file with a processing result - using tomoscan.volumes as a backend
-    """
-
-    def __init__(self, fname, entry=None, filemode="a", overwrite=False):
-        """
-        Initialize a NXProcessWriter.
-
-        Parameters
-        -----------
-        fname: str
-            Path to the HDF5 file.
-        entry: str, optional
-            Entry in the HDF5 file. Default is "entry"
-        """
-        super().__init__(fname)
-        self._set_entry(entry)
-        self._filemode = filemode
-        # TODO: notify file mode is deprecated
-        self.overwrite = overwrite
-
-    def _set_entry(self, entry):
-        self.entry = entry or "entry"
-        data_path = posix_join("/", self.entry)
-        self.data_path = data_path
-
-    def _write_npadday(self, result, volume, nx_info):
-        if result.ndim == 2:
-            result = result.reshape(1, result.shape[0], result.shape[1])
-        volume.data = result
-
-        self._update_volume_metadata(volume)
-
-        volume.save()
-        results_path = posix_join(nx_info["nx_process_path"], "results", nx_info["data_name"])
-        process_name = nx_info["process_name"]
-        process_info = nx_info["process_info"]
-        process_info.update(
-            {
-                f"{process_name}/results@NX_class": "NXdata",
-                f"{process_name}/results@signal": nx_info["data_name"],
-            }
-        )
-        if nx_info.get("is_frames_stack", True):
-            process_info.update({f"{process_name}/results@interpretation": "image"})
-        if nx_info.get("direct_access", False):
-            # prepare the direct access plots
-            process_info.update(
-                {
-                    f"{process_name}@default": "results",
-                    "@default": f"{process_name}/results",
-                }
-            )
-        return results_path
-
-    def _write_dict(self, result, volume, nx_info):
-        self._update_volume_metadata(volume)
-        volume.save_metadata()  # if result is a dictionary then we only have some metadata to be saved
-        results_path = posix_join(nx_info["nx_process_path"], "results")
-        proc_result_key = posix_join(nx_info["process_name"], "results")
-        proc_result = convert_dict_values(result, {None: "None"})
-        process_info = nx_info["process_info"]
-        process_info.update({proc_result_key: proc_result})
-        return results_path
-
-    def _write_virtual_layout(self, result, volume, nx_info):
-        # TODO: add test on tomoscan to ensure this use case is handled
-        volume.data = result
-        self._update_volume_metadata(volume)
-        volume.save()
-        results_path = posix_join(nx_info["nx_process_path"], "results", nx_info["data_name"])
-        return results_path
-
-    @staticmethod
-    def _update_volume_metadata(volume):
-        if volume.metadata is not None:
-            volume.metadata = convert_dict_values(
-                volume.metadata,
-                {None: "None"},
-            )
-
-    def write(
-        self,
-        result,
-        process_name,
-        processing_index=0,
-        config=None,
-        data_name="data",
-        is_frames_stack=True,
-        direct_access=True,
-    ) -> str:
-        """
-        Write the result in the current NXProcess group.
-
-        Parameters
-        ----------
-        result: numpy.ndarray
-            Array containing the processing result
-        process_name: str
-            Name of the processing
-        processing_index: int
-            Index of the processing (in a pipeline)
-        config: dict, optional
-            Dictionary containing the configuration.
-        """
-        entry_path = self.data_path
-        nx_process_path = "/".join([entry_path, process_name])
-
-        if config is not None:
-            config.update({"@NX_class": "NXcollection"})
-
-        nabu_process_info = {
-            "@NX_class": "NXentry",
-            f"{process_name}@NX_class": "NXprocess",
-            f"{process_name}/program": "nabu",
-            f"{process_name}/version": nabu_version,
-            f"{process_name}/date": get_datetime(),
-            f"{process_name}/sequence_index": np.int32(processing_index),
-        }
-
-        # Create HDF5Volume object with initial information
-        volume = HDF5Volume(
-            data_url=DataUrl(
-                file_path=self.fname,
-                data_path=f"{nx_process_path}/results/{data_name}",
-                scheme="silx",
-            ),
-            metadata_url=DataUrl(
-                file_path=self.fname,
-                data_path=f"{nx_process_path}/configuration",
-            ),
-            metadata=config,
-            overwrite=self.overwrite,
-        )
-        if isinstance(result, dict):
-            write_method = self._write_dict
-        elif isinstance(result, np.ndarray):
-            write_method = self._write_npadday
-        elif isinstance(result, VirtualLayout):
-            write_method = self._write_virtual_layout
-        else:
-            raise TypeError(f"'result' must be a dict, numpy array or h5py.VirtualLayout, not {type(result)}")
-        nx_info = {
-            "process_name": process_name,
-            "nx_process_path": nx_process_path,
-            "process_info": nabu_process_info,
-            "data_name": data_name,
-            "is_frames_stack": is_frames_stack,
-            "direct_access": direct_access,
-        }
-        results_path = write_method(result, volume, nx_info)
-
-        dicttonx(
-            nabu_process_info,
-            h5file=self.fname,
-            h5path=entry_path,
-            update_mode="replace",
-            mode="a",
-        )
-        return results_path
-
-
 ###################################################################################################
 ## Nabu original code for NXProcessWriter - also works for non-3D data, does not depend on tomoscan
 ###################################################################################################
@@ -534,258 +370,11 @@ def merge_hdf5_files(
         data_name=data_name,
         is_frames_stack=True,
     )
+    # pylint: disable=E0606
    if base_dir is not None and prev_cwd != getcwd():
        chdir(prev_cwd)
 
 
-class TIFFWriter(Writer):
-    def __init__(self, fname, multiframe=False, start_index=0, filemode=None, append=False, big_tiff=None):
-        """
-        Tiff writer.
-
-        Parameters
-        -----------
-        fname: str
-            Path to the output file name
-        multiframe: bool, optional
-            Whether to write all data in one single file. Default is False.
-        start_index: int, optional
-            When writing a stack of images, each image is written in a dedicated file
-            (unless multiframe is set to True).
-            In this case, the output is a series of files `filename_0000.tif`,
-            `filename_0001.tif`, etc. This parameter is the starting index for
-            file names.
-            This option is ignored when multiframe is True.
-        filemode: str, optional
-            DEPRECATED. Will be ignored. Please refer to 'append'
-        append: bool, optional
-            Whether to append data to the file rather than overwriting. Default is False.
-        big_tiff: bool, optional
-            Whether to write in "big tiff" format: https://www.awaresystems.be/imaging/tiff/bigtiff.html
-            Default is True when multiframe is True.
-            Note that default "standard" tiff cannot exceed 4 GB.
-
-        Notes
-        ------
-        If multiframe is False (default), then each image will be written in a
-        dedicated tiff file.
-        """
-        super().__init__(fname)
-        self.multiframe = multiframe
-        self.start_index = start_index
-        self.append = append
-        if big_tiff is None:
-            big_tiff = multiframe
-        if multiframe and not big_tiff:
-            # raise error ?
-            print("big_tiff was set to False while multiframe was set to True. This will probably be problematic.")
-        self.big_tiff = big_tiff
-        # Compat.
-        self.filemode = filemode
-        if filemode is not None:
-            deprecation_warning("Ignored parameter 'filemode'. Please use the 'append' parameter")
-
-    def write(self, data, *args, config=None, **kwargs):
-        ext = None
-        if not isinstance(data, np.ndarray):
-            raise TypeError(f"data is expected to be a numpy array and not {type(data)}")
-        # Single image, or multiple image in the same file
-        if self.multiframe:
-            volume = MultiTIFFVolume(
-                self.fname,
-                data=data,
-                metadata={
-                    "config": config,
-                },
-                append=self.append,
-            )
-            file_path = self.fname
-        # Multiple image, one file per image
-        else:
-            if data.ndim == 2:
-                data = data.reshape(1, data.shape[0], data.shape[1])
-            file_path, ext = path.splitext(self.fname)
-            volume = TIFFVolume(
-                path.dirname(file_path),
-                volume_basename=path.basename(file_path),
-                data=data,
-                metadata={
-                    "config": config,
-                },
-                start_index=self.start_index,
-                data_extension=ext.lstrip("."),
-                overwrite=True,
-            )
-        volume.save()
-
-
-class EDFWriter(Writer):
-    def __init__(self, fname, start_index=0, filemode="w"):
-        """
-        EDF (ESRF Data Format) writer.
-
-        Parameters
-        -----------
-        fname: str
-            Path to the output file name
-        start_index: int, optional
-            When writing a stack of images, each image is written in a dedicated file
-            In this case, the output is a series of files `filename_0000.tif`,
-            `filename_0001.edf`, etc. This parameter is the starting index for
-            file names.
-        """
-        super().__init__(fname)
-        self.filemode = filemode
-        self.start_index = start_index
-
-    def write(self, data, *args, config=None, **kwargs):
-        if not isinstance(data, np.ndarray):
-            raise TypeError(f"data is expected to be a numpy array and not {type(data)}")
-        header = {
-            "software": "nabu",
-            "data": get_datetime(),
-        }
-        if data.ndim == 2:
-            data = data.reshape(1, data.shape[0], data.shape[1])
-
-        volume = EDFVolume(path.dirname(self.fname), data=data, start_index=self.start_index, header=header)
-        volume.save()
-
-
-class JP2Writer(Writer):
-    def __init__(
-        self,
-        fname,
-        start_index=0,
-        filemode="wb",
-        psnr=None,
-        cratios=None,
-        auto_convert=True,
-        float_clip_values=None,
-        n_threads=None,
-        overwrite=False,
-        single_file=True,
-    ):
-        """
-        JPEG2000 writer. This class requires the python package `glymur` and the
-        library `libopenjp2`.
-
-        Parameters
-        -----------
-        fname: str
-            Path to the output file name
-        start_index: int, optional
-            When writing a stack of images, each image is written in a dedicated file
-            The output is a series of files `filename_0000.tif`, `filename_0001.tif`, etc.
-            This parameter is the starting index for file names.
-        psnr: list of int, optional
-            The PSNR (Peak Signal-to-Noise ratio) for each jpeg2000 layer.
-            This defines a quality metric for lossy compression.
-            The number "0" stands for lossless compression.
-        cratios: list of int, optional
-            Compression ratio for each jpeg2000 layer
-        auto_convert: bool, optional
-            Whether to automatically cast floating point data to uint16.
-            Default is True.
-        float_clip_values: tuple of floats, optional
-            If set to a tuple of two values (min, max), then each image values will be clipped
-            to these minimum and maximum values.
-        n_threads: int, optional
-            Number of threads to use for encoding. Default is the number of available threads.
-            Needs libopenjpeg >= 2.4.0.
-        """
-        super().__init__(fname)
-        if not (__have_jp2k__):
-            raise ValueError("Need glymur python package and libopenjp2 library")
-        self.n_threads = n_threads
-        # self.setup_multithread_encoding(n_threads=n_threads, what_if_not_available="ignore")
-        self.filemode = filemode
-        self.start_index = start_index
-        self.single_file = single_file
-        self.auto_convert = auto_convert
-        if psnr is not None and np.isscalar(psnr):
-            psnr = [psnr]
-        self.psnr = psnr
-        self.cratios = cratios
-        self._vmin = None
-        self._vmax = None
-        self.overwrite = overwrite
-        self.clip_float = False
-        if float_clip_values is not None:
-            self._float_clip_min, self._float_clip_max = float_clip_values
-            self.clip_float = True
-
-    def write(self, data, *args, **kwargs):
-        if not isinstance(data, np.ndarray):
-            raise TypeError(f"data is expected to be a numpy array and not {type(data)}")
-
-        if data.ndim == 2:
-            data = data.reshape(1, data.shape[0], data.shape[1])
-
-        if self.single_file and data.ndim == 3 and data.shape[0] == 1:
-            # case we will have a single file as output
-            data_url = DataUrl(
-                file_path=path.dirname(self.fname),
-                data_path=self.fname,
-                scheme=JP2KVolume.DEFAULT_DATA_SCHEME,
-            )
-            metadata_url = DataUrl(
-                file_path=path.dirname(self.fname),
-                data_path=f"{path.dirname(self.fname)}/{path.basename(self.fname)}_info.txt",
-                scheme=JP2KVolume.DEFAULT_METADATA_SCHEME,
-            )
-            volume_basename = None
-            folder = None
-            extension = None
-        else:
-            # case we need to save it as set of file
-            file_path, ext = path.splitext(self.fname)
-            data_url = None
-            metadata_url = None
-            volume_basename = path.basename(file_path)
-            folder = path.dirname(self.fname)
-            extension = ext.lstrip(".")
-
-        volume = JP2KVolume(
-            folder=folder,
-            start_index=self.start_index,
-            cratios=self.cratios,
-            psnr=self.psnr,
-            n_threads=self.n_threads,
-            volume_basename=volume_basename,
-            data_url=data_url,
-            metadata_url=metadata_url,
-            data_extension=extension,
-            overwrite=self.overwrite,
-        )
-
-        if data.dtype != np.uint16 and self.auto_convert:
-            if self.clip_float:
-                data = np.clip(data, self._float_clip_min, self._float_clip_max)
-            data = rescale_data(data, 0, 65535, data_min=self._vmin, data_max=self._vmax)
-            data = data.astype(np.uint16)
-
-        volume.data = data
-        config = kwargs.get("config", None)
-        if config is not None:
-            volume.metadata = {"config": config}
-        volume.save()
-
-
-class NPYWriter(Writer):
-    def write(self, result, *args, **kwargs):
-        np.save(self.fname, result)
-
-
-class NPZWriter(Writer):
-    def write(self, result, *args, **kwargs):
-        save_args = {"result": result}
-        config = kwargs.get("config", None)
-        if config is not None:
-            save_args["configuration"] = config
-        np.savez(self.fname, **save_args)
-
-
 class HSTVolWriter(Writer):
     """
     A writer to mimic PyHST2 ".vol" files
@@ -872,21 +461,3 @@ class HSTVolVolume(HSTVolWriter):
 
     def browse_data_files(self):
         return [self.fname]
-
-
-# Unused - kept for compat.
-Writers = {
-    "h5": NXProcessWriter,
-    "hdf5": NXProcessWriter,
-    "nx": NXProcessWriter,
-    "nexus": NXProcessWriter,
-    "npy": NPYWriter,
-    "npz": NPZWriter,
-    "tif": TIFFWriter,
-    "tiff": TIFFWriter,
-    "j2k": JP2Writer,
-    "jp2": JP2Writer,
-    "jp2k": JP2Writer,
-    "edf": EDFWriter,
-    "vol": HSTVolWriter,
-}
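Note: the writer classes removed above (TIFFWriter, EDFWriter, JP2Writer, NPYWriter, NPZWriter) were thin wrappers around tomoscan volume objects, which this release drops in favor of using tomoscan directly. A minimal sketch of the equivalent one-file-per-image TIFF write, based only on the calls visible in the removed code (the exact import path is an assumption):

```python
# Sketch only: mirrors the removed TIFFWriter's one-file-per-image branch.
# Import path assumed from tomoscan's layout; parameters taken from the removed code.
import numpy as np
from tomoscan.esrf.volume.tiffvolume import TIFFVolume

data = np.zeros((5, 64, 64), dtype=np.float32)  # stack of 5 images: (n_z, n_y, n_x)
volume = TIFFVolume(
    "/tmp/output",              # folder receiving output_0000.tif, output_0001.tif, ...
    volume_basename="output",
    data=data,
    start_index=0,
    overwrite=True,
)
volume.save()
```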
nabu/opencl/kernel.py
CHANGED
@@ -1,6 +1,9 @@
 import pyopencl.array as parray
 from pyopencl import Program, CommandQueue, kernel_work_group_info
-from ..utils import
+from ..utils import (
+    deprecation_warning,
+    catch_warnings,
+)  # TODO use warnings.catch_warnings once python < 3.11 is dropped
 from ..processing.kernel_base import KernelBase
 
 
@@ -34,9 +37,16 @@ class OpenCLKernel(KernelBase):
         filename=None,
         src=None,
         automation_params=None,
+        silent_compilation_warnings=False,
         **build_kwargs,
     ):
-        super().__init__(
+        super().__init__(
+            kernel_name,
+            filename=filename,
+            src=src,
+            automation_params=automation_params,
+            silent_compilation_warnings=silent_compilation_warnings,
+        )
         if queue is not None:
             self.ctx = queue.context
             self.queue = queue
@@ -49,7 +59,8 @@ class OpenCLKernel(KernelBase):
     def compile_kernel_source(self, kernel_name, build_kwargs):
         self.build_kwargs = build_kwargs
         self.kernel_name = kernel_name
-
+        with catch_warnings(action=("ignore" if self.silent_compilation_warnings else None)):  # pylint: disable=E1123
+            self.program = Program(self.ctx, self.src).build(**self.build_kwargs)
 
     def get_kernel(self):
         self.kernel = None
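The TODO above refers to `warnings.catch_warnings()` gaining its `action` parameter in Python 3.11. A sketch of what a back-ported helper in `nabu.utils` could look like (illustrative, not necessarily nabu's actual implementation):

```python
import warnings
from contextlib import contextmanager

@contextmanager
def catch_warnings(action=None):
    # Back-port of warnings.catch_warnings(action=...), added in Python 3.11:
    # entering the context snapshots the warning filters, so the simplefilter()
    # call below is automatically undone on exit.
    with warnings.catch_warnings():
        if action is not None:
            warnings.simplefilter(action)  # e.g. "ignore" to silence build warnings
        yield
```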
nabu/opencl/processing.py
CHANGED
@@ -14,6 +14,7 @@ else:
     dtype_to_ctype = MissingComponentError("pyopencl")
 
 
+# pylint: disable=E0606
 class OpenCLProcessing(ProcessingBase):
     array_class = OpenCLArray
     dtype_to_ctype = dtype_to_ctype
@@ -44,6 +45,13 @@ class OpenCLProcessing(ProcessingBase):
             queue_init_kwargs = {"properties": cl.command_queue_properties.PROFILING_ENABLE}
             queue = cl.CommandQueue(self.ctx, **queue_init_kwargs)
         self.queue = queue
+        dev_types = {
+            cl.device_type.CPU: "cpu",
+            cl.device_type.GPU: "gpu",
+            cl.device_type.ACCELERATOR: "accelerator",
+            -1: "unknown",
+        }
+        self.device_type = dev_types.get(self.ctx.devices[0].type, "unknown")
 
         # TODO push_context, pop_context ?
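For reference, the new `device_type` attribute maps pyopencl's device-type enum to a plain string, so callers can branch without touching the enum. A hedged usage sketch (assuming `OpenCLProcessing` can be constructed with defaults):

```python
from nabu.opencl.processing import OpenCLProcessing

proc = OpenCLProcessing()  # picks a default context/queue; constructor signature assumed
if proc.device_type == "cpu":
    # e.g. fall back to smaller work sizes on CPU OpenCL implementations
    print("OpenCL is running on a CPU device:", proc.ctx.devices[0].name)
```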
nabu/pipeline/config_validators.py
CHANGED
@@ -445,9 +445,12 @@ def fbp_filter_name_validator(val):
 
 
 @validator
-def
+def reconstruction_implementation_validator(val):
     return name_range_checker(
-        val,
+        val,
+        set(reco_implementations.values()),
+        "Reconstruction method implementation",
+        replacements=reco_implementations,
     )
 
 
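`name_range_checker` and `reco_implementations` come from nabu's validator utilities and are not shown in this diff. A standalone sketch of the behavior such a checker typically implements (hypothetical, for illustration only):

```python
def name_range_checker(val, valid_names, description, replacements=None):
    # Normalize the user-supplied name, apply alias replacements,
    # then verify it belongs to the allowed set.
    val = val.strip().lower()
    val = (replacements or {}).get(val, val)
    if val not in valid_names:
        raise ValueError(f"Unknown {description} '{val}'. Available: {sorted(valid_names)}")
    return val
```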
nabu/pipeline/datadump.py
CHANGED
@@ -50,7 +50,7 @@ class DataDumpManager:
         self._z_sub_region = self.sub_region[1]
         self.z_min = self._z_sub_region[0]
         self.margin = get_subregion(margin, ndim=2)  # ((U, D), (L, R))
-        self.margin_up = self.margin[0][0]
+        self.margin_up = self.margin[0][0] or 0
         self.start_index = self.z_min + self.margin_up
         self.delta_z = self._z_sub_region[-1] - self._z_sub_region[-2]
 
@@ -131,8 +131,8 @@ class DataDumpManager:
         relative_end_z = relative_start_z + self.delta_z
         # When using binning, every step after "read" results in smaller-sized data.
         # Therefore dumped data has shape (ceil(n_angles/subsampling), n_z//binning_z, n_x//binning_x)
-        relative_start_z //= self.process_config.
-        relative_end_z //= self.process_config.
+        relative_start_z //= self.process_config.binning_z
+        relative_end_z //= self.process_config.binning_z
         # (n_angles, n_z, n_x)
         subregion = (None, None, relative_start_z, relative_end_z, None, None)
         return subregion
@@ -146,7 +146,7 @@ class DataDumpManager:
         )
         # TODO check
 
-    def dump_data_to_file(self, step_name, data):
+    def dump_data_to_file(self, step_name, data, crop_margin=False):
         if step_name not in self.data_dump:
             return
         writer = self.data_dump[step_name]
@@ -154,7 +154,14 @@ class DataDumpManager:
         if __has_pycuda__:
             if isinstance(data, garray.GPUArray):
                 data = data.get()
-
+
+        margin_up = self.margin[0][0] or None
+        margin_down = self.margin[0][1] or None
+        margin_down = -margin_down if margin_down is not None else None  # pylint: disable=E1130
+        if crop_margin and (margin_up is not None or margin_down is not None):
+            data = data[:, margin_up:margin_down, :]
+        metadata = {"dump_sub_region": {"sub_region": self.sub_region, "margin": self.margin}}
+        writer.write_data(data, metadata=metadata)
 
     def __repr__(self):
         res = "%s(%s, margin=%s)" % (self.__class__.__name__, str(self.sub_region), str(self.margin))
|