nabu 2022.3.0a1-py3-none-any.whl → 2023.1.0a2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. nabu/__init__.py +1 -1
  2. nabu/app/bootstrap.py +7 -1
  3. nabu/app/cast_volume.py +8 -2
  4. nabu/app/cli_configs.py +69 -0
  5. nabu/app/composite_cor.py +97 -0
  6. nabu/app/create_distortion_map_from_poly.py +118 -0
  7. nabu/app/nx_z_splitter.py +1 -1
  8. nabu/app/prepare_weights_double.py +21 -16
  9. nabu/app/reconstruct_helical.py +0 -1
  10. nabu/app/utils.py +10 -5
  11. nabu/cuda/processing.py +1 -0
  12. nabu/cuda/tests/test_padding.py +1 -0
  13. nabu/cuda/utils.py +1 -0
  14. nabu/distributed/__init__.py +0 -0
  15. nabu/distributed/utils.py +57 -0
  16. nabu/distributed/worker.py +543 -0
  17. nabu/estimation/cor.py +3 -7
  18. nabu/estimation/cor_sino.py +2 -1
  19. nabu/estimation/distortion.py +6 -4
  20. nabu/io/cast_volume.py +10 -1
  21. nabu/io/detector_distortion.py +305 -0
  22. nabu/io/reader.py +37 -7
  23. nabu/io/reader_helical.py +0 -3
  24. nabu/io/tests/test_cast_volume.py +16 -4
  25. nabu/io/tests/test_detector_distortion.py +178 -0
  26. nabu/io/tests/test_writers.py +2 -2
  27. nabu/io/tiffwriter_zmm.py +2 -3
  28. nabu/io/writer.py +84 -1
  29. nabu/io/writer_BACKUP_193259.py +556 -0
  30. nabu/io/writer_BACKUP_193381.py +556 -0
  31. nabu/io/writer_BASE_193259.py +548 -0
  32. nabu/io/writer_BASE_193381.py +548 -0
  33. nabu/io/writer_LOCAL_193259.py +550 -0
  34. nabu/io/writer_LOCAL_193381.py +550 -0
  35. nabu/io/writer_REMOTE_193259.py +557 -0
  36. nabu/io/writer_REMOTE_193381.py +557 -0
  37. nabu/misc/fourier_filters.py +2 -0
  38. nabu/misc/rotation.py +0 -1
  39. nabu/misc/tests/test_rotation.py +1 -0
  40. nabu/pipeline/config_validators.py +10 -0
  41. nabu/pipeline/datadump.py +1 -1
  42. nabu/pipeline/dataset_validator.py +0 -1
  43. nabu/pipeline/detector_distortion_provider.py +20 -0
  44. nabu/pipeline/estimators.py +35 -21
  45. nabu/pipeline/fallback_utils.py +1 -1
  46. nabu/pipeline/fullfield/chunked.py +30 -15
  47. nabu/pipeline/fullfield/chunked_black.py +881 -0
  48. nabu/pipeline/fullfield/chunked_cuda.py +34 -4
  49. nabu/pipeline/fullfield/chunked_fb.py +966 -0
  50. nabu/pipeline/fullfield/chunked_google.py +921 -0
  51. nabu/pipeline/fullfield/chunked_pep8.py +920 -0
  52. nabu/pipeline/fullfield/computations.py +7 -6
  53. nabu/pipeline/fullfield/dataset_validator.py +1 -1
  54. nabu/pipeline/fullfield/grouped_cuda.py +6 -0
  55. nabu/pipeline/fullfield/nabu_config.py +15 -3
  56. nabu/pipeline/fullfield/processconfig.py +5 -0
  57. nabu/pipeline/fullfield/reconstruction.py +1 -2
  58. nabu/pipeline/helical/gridded_accumulator.py +1 -8
  59. nabu/pipeline/helical/helical_chunked_regridded.py +48 -33
  60. nabu/pipeline/helical/helical_reconstruction.py +1 -9
  61. nabu/pipeline/helical/nabu_config.py +11 -14
  62. nabu/pipeline/helical/span_strategy.py +11 -4
  63. nabu/pipeline/helical/tests/test_accumulator.py +0 -3
  64. nabu/pipeline/helical/tests/test_pipeline_elements_full.py +0 -6
  65. nabu/pipeline/helical/tests/test_strategy.py +0 -1
  66. nabu/pipeline/helical/weight_balancer.py +0 -1
  67. nabu/pipeline/params.py +4 -0
  68. nabu/pipeline/processconfig.py +6 -2
  69. nabu/pipeline/writer.py +9 -4
  70. nabu/preproc/distortion.py +4 -3
  71. nabu/preproc/double_flatfield.py +16 -4
  72. nabu/preproc/double_flatfield_cuda.py +3 -2
  73. nabu/preproc/double_flatfield_variable_region.py +13 -4
  74. nabu/preproc/flatfield.py +29 -7
  75. nabu/preproc/flatfield_cuda.py +0 -1
  76. nabu/preproc/flatfield_variable_region.py +5 -2
  77. nabu/preproc/phase.py +0 -1
  78. nabu/preproc/phase_cuda.py +0 -1
  79. nabu/preproc/tests/test_ctf.py +4 -3
  80. nabu/preproc/tests/test_flatfield.py +6 -7
  81. nabu/reconstruction/fbp_opencl.py +1 -1
  82. nabu/reconstruction/filtering.py +0 -1
  83. nabu/reconstruction/tests/test_fbp.py +1 -0
  84. nabu/resources/dataset_analyzer.py +0 -1
  85. nabu/resources/templates/bm05_pag.conf +34 -0
  86. nabu/resources/templates/id16_ctf.conf +2 -1
  87. nabu/resources/tests/test_nxflatfield.py +0 -1
  88. nabu/resources/tests/test_units.py +0 -1
  89. nabu/stitching/frame_composition.py +7 -1
  90. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/METADATA +2 -7
  91. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/RECORD +96 -75
  92. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/WHEEL +1 -1
  93. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/entry_points.txt +2 -1
  94. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/LICENSE +0 -0
  95. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/top_level.txt +0 -0
  96. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/zip-safe +0 -0
nabu/io/writer.py CHANGED
@@ -11,7 +11,7 @@ try:
     from tomoscan.io import HDF5File
 except:
     from h5py import File as HDF5File
-from tomoscan.esrf import EDFVolume, HDF5Volume, TIFFVolume, MultiTIFFVolume, JP2KVolume
+from tomoscan.esrf import EDFVolume, HDF5Volume, TIFFVolume, MultiTIFFVolume, JP2KVolume, RawVolume
 from tomoscan.esrf.volume.jp2kvolume import has_glymur as __have_jp2k__
 from .. import version as nabu_version
 from ..utils import merged_shape, deprecation_warning
@@ -720,6 +720,88 @@ class NPZWriter(Writer):
         np.savez(self.fname, **save_args)
 
 
+class HSTVolWriter(Writer):
+    """
+    A writer to mimic PyHST2 ".vol" files
+    """
+
+    def __init__(self, fname, append=False, **kwargs):
+        super().__init__(fname)
+        self.append = append
+        self._vol_writer = RawVolume(fname, overwrite=True, append=append)
+
+    @staticmethod
+    def generate_metadata(data, **kwargs):
+        n_z, n_y, n_x = data.shape
+        metadata = {
+            "NUM_X": n_x,
+            "NUM_Y": n_y,
+            "NUM_Z": n_z,
+            "voxelSize": kwargs.get("voxelSize", 40.0),
+            "BYTEORDER": "LOWBYTEFIRST",
+            "ValMin": kwargs.get("ValMin", 0.0),
+            "ValMax": kwargs.get("ValMax", 1.0),
+            "s1": 0.0,
+            "s2": 0.0,
+            "S1": 0.0,
+            "S2": 0.0,
+        }
+        return metadata
+
+    @staticmethod
+    def sanitize_metadata(metadata):
+        # To be fixed in RawVolume
+        for what in ["NUM_X", "NUM_Y", "NUM_Z"]:
+            metadata[what] = int(metadata[what])
+        for what in ["voxelSize", "ValMin", "ValMax", "s1", "s2", "S1", "S2"]:
+            metadata[what] = float(metadata[what])
+
+    def write(self, data, *args, config=None, **kwargs):
+        existing_metadata = self._vol_writer.load_metadata()
+        new_metadata = self.generate_metadata(data)
+        if len(existing_metadata) == 0 or not self.append:
+            # First write, or append==False
+            metadata = new_metadata
+        else:
+            # Append write: update the existing metadata
+            metadata = existing_metadata.copy()
+            self.sanitize_metadata(metadata)
+            metadata["NUM_Z"] += new_metadata["NUM_Z"]
+        self._vol_writer.data = data
+        self._vol_writer.metadata = metadata
+        self._vol_writer.save()
+        # Also save the metadata as a .xml file
+        self._vol_writer.save_metadata(
+            url=DataUrl(
+                scheme="lxml",
+                file_path=self._vol_writer.metadata_url.file_path().replace(".info", ".xml"),
+            )
+        )
+
+
+class HSTVolVolume(HSTVolWriter):
+    """
+    An interface to HSTVolWriter with the same API as tomoscan.esrf.volume.
+    This is really not ideal, see nabu:#381
+    """
+
+    def __init__(self, **kwargs):
+        file_path = kwargs.pop("file_path", None)
+        if file_path is None:
+            raise ValueError("Missing mandatory 'file_path' parameter")
+        super().__init__(file_path, append=kwargs.pop("append", False), **kwargs)
+        self.data = None
+        self.metadata = None
+
+    def save(self):
+        if self.data is None:
+            raise ValueError("Must set data first")
+        self.write(self.data)
+
+    def save_metadata(self):
+        pass  # already done for the HST part - proper metadata is not supported
+
+
 # Unused - kept for compat.
 Writers = {
     "h5": NXProcessWriter,
@@ -734,4 +816,5 @@ Writers = {
     "jp2": JP2Writer,
     "jp2k": JP2Writer,
     "edf": EDFWriter,
+    "vol": HSTVolWriter,
 }
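For reference, a minimal usage sketch of the new ".vol" writer added above (hypothetical file name and data; assumes nabu 2023.1.0a2 with tomoscan's RawVolume available, as imported in this diff):

    import numpy as np
    from nabu.io.writer import HSTVolWriter

    # Write a first stack of slices to a PyHST2-style .vol file, then append
    # a second stack; the NUM_Z metadata field is updated accordingly.
    writer = HSTVolWriter("rec.vol", append=True)
    writer.write(np.zeros((64, 256, 256), dtype="f"))  # creates rec.vol and its .info/.xml metadata
    writer.write(np.ones((64, 256, 256), dtype="f"))   # appends: NUM_Z goes from 64 to 128
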
nabu/io/writer_BACKUP_193259.py ADDED
@@ -0,0 +1,556 @@
+from glob import glob
+from os import path, getcwd, chdir
+from posixpath import join as posix_join
+from datetime import datetime
+import numpy as np
+from h5py import VirtualSource, VirtualLayout
+from silx.io.dictdump import dicttonx
+from tomoscan.io import HDF5File
+from tomoscan.esrf import EDFVolume
+from tomoscan.esrf import HDF5Volume
+from silx.io.url import DataUrl
+from tomoscan.esrf import TIFFVolume, MultiTIFFVolume
+from tomoscan.esrf import JP2KVolume
+from .. import version as nabu_version
+from ..utils import merged_shape, deprecation_warning
+from ..misc.utils import rescale_data
+from .utils import check_h5py_version, convert_dict_values
+
+try:
+    from glymur import Jp2k, set_option as glymur_set_option
+    from glymur.version import openjpeg_version, version as glymur_version
+
+    __have_jp2k__ = True
+except ImportError:
+    __have_jp2k__ = False
+
+def get_datetime():
+    """
+    Function used by some writers to indicate the current date.
+    """
+    return datetime.now().replace(microsecond=0).isoformat()
+
+
+class Writer:
+    """
+    Base class for all writers.
+    """
+
+    def __init__(self, fname):
+        self.fname = fname
+
+    def get_filename(self):
+        return self.fname
+
+
+class NXProcessWriter(Writer):
+    """
+    A class to write Nexus file with a processing result.
+    """
+
+    def __init__(self, fname, entry=None, filemode=None, overwrite=False):
+        """
+        Initialize a NXProcessWriter.
+
+        Parameters
+        -----------
+        fname: str
+            Path to the HDF5 file.
+        entry: str, optional
+            Entry in the HDF5 file. Default is "entry"
+        """
+        super().__init__(fname)
+        self._set_entry(entry)
+        self.overwrite = overwrite
+        check_h5py_version()
+        if filemode is not None:
+            deprecation_warning("'filemode' is deprecated and has no effect", func_name="nxprocess_init")
+
+    def _set_entry(self, entry):
+        self.entry = entry or "entry"
+        self.data_path = posix_join("/", self.entry)
+
+    def write(self, result, process_name, processing_index=0, config=None, data_name="data", is_frames_stack=True) -> str:
+        """
+        Write the result in the current NXProcess group.
+
+        Parameters
+        ----------
+        result: numpy.ndarray
+            Array containing the processing result
+        process_name: str
+            Name of the processing
+        processing_index: int
+            Index of the processing (in a pipeline)
+        config: dict, optional
+            Dictionary containing the configuration.
+        """
+        entry_path = self.data_path
+        nx_process_path = "/".join([entry_path, process_name])
+
+        if config is not None:
+            config.update({"@NX_class": "NXcollection"})
+
+        volume = HDF5Volume(
+            data_url=DataUrl(
+                file_path=self.fname,
+                data_path=f"{nx_process_path}/results/{data_name}",
+                scheme="silx",
+            ),
+            metadata_url=DataUrl(
+                file_path=self.fname,
+                data_path=f"{nx_process_path}/configuration",
+                scheme="silx",
+            ),
+            metadata=config,
+            overwrite=self.overwrite,
+        )
+
+        # Dispatch on the type of 'result'
+        if isinstance(result, dict):
+            pass
+        elif isinstance(result, np.ndarray):
+            if result.ndim == 2:
+                result = result.reshape(1, result.shape[0], result.shape[1])
+            volume.data = result
+        elif isinstance(result, VirtualLayout):
+            # TODO: add test on tomoscan to ensure this use case is handled
+            volume.data = result
+        else:
+            raise TypeError(f"result is expected to be a dict or a numpy array. Not {type(result)}")
+
+        if volume.metadata is not None:
+            volume.metadata = convert_dict_values(volume.metadata, {None: "None"})
+        # if result is a dictionary then we only have some metadata to be saved
+        if isinstance(result, dict):
+            volume.save_metadata()
+            results_path = posix_join(nx_process_path, "results")
+        else:
+            volume.save()
+            results_path = posix_join(nx_process_path, "results", data_name)
+
+        # Add nabu-specific information
+        nabu_process_info = {
+            "@NX_class": "NXentry",
+            f"{process_name}@NX_class": "NXprocess",
+            f"{process_name}/program": "nabu",
+            f"{process_name}/version": nabu_version,
+            f"{process_name}/date": get_datetime(),
+            f"{process_name}/sequence_index": np.int32(processing_index),
+        }
+        if isinstance(result, np.ndarray):
+            nabu_process_info.update(
+                {
+                    f"{process_name}/results@NX_class": "NXdata",
+                    f"{process_name}/results@signal": data_name,
+                }
+            )
+            if is_frames_stack:
+                nabu_process_info.update({f"{process_name}/results@interpretation": "image"})
+
+            # Prepare the default plots for direct visualization
+            nabu_process_info.update(
+                {
+                    f"{process_name}@default": "results",
+                    "@default": f"{process_name}/results",
+                }
+            )
+        elif isinstance(result, dict):
+            nabu_process_info.update(
+                {
+                    f"{process_name}/results": convert_dict_values(result, {None: "None"}),
+                }
+            )
+
+        dicttonx(
+            nabu_process_info,
+            h5file=self.fname,
+            h5path=entry_path,
+            update_mode="replace",
+            mode="a",
+        )
+        return results_path
+
+
+def create_virtual_layout(files_or_pattern, h5_path, base_dir=None, axis=0):
+    """
+    Create a HDF5 virtual layout.
+
+    Parameters
+    ----------
+    files_or_pattern: str or list
+        A list of file names, or a wildcard pattern.
+        If a list is provided, it will not be sorted! This will have to be
+        done before calling this function.
+    h5_path: str
+        Path inside the HDF5 input file(s)
+    base_dir: str, optional
+        Base directory when using relative file names.
+    axis: int, optional
+        Data axis to merge. Default is 0.
+    """
+    prev_cwd = None
+    if base_dir is not None:
+        prev_cwd = getcwd()
+        chdir(base_dir)
+    if isinstance(files_or_pattern, str):
+        files_list = glob(files_or_pattern)
+        files_list.sort()
+    else:  # list
+        files_list = files_or_pattern
+    if files_list == []:
+        raise ValueError("Nothing found as pattern %s" % files_or_pattern)
+    virtual_sources = []
+    shapes = []
+    for fname in files_list:
+        with HDF5File(fname, "r", swmr=True) as fid:
+            shape = fid[h5_path].shape
+        vsource = VirtualSource(fname, name=h5_path, shape=shape)
+        virtual_sources.append(vsource)
+        shapes.append(shape)
+    total_shape = merged_shape(shapes, axis=axis)
+
+    virtual_layout = VirtualLayout(shape=total_shape, dtype="f")
+    start_idx = 0
+    for vsource, shape in zip(virtual_sources, shapes):
+        n_imgs = shape[axis]
+        # Perhaps there is a more elegant way
+        if axis == 0:
+            virtual_layout[start_idx : start_idx + n_imgs] = vsource
+        elif axis == 1:
+            virtual_layout[:, start_idx : start_idx + n_imgs, :] = vsource
+        elif axis == 2:
+            virtual_layout[:, :, start_idx : start_idx + n_imgs] = vsource
+        else:
+            raise ValueError("Only axis 0,1,2 are supported")
+        start_idx += n_imgs
+
+    if base_dir is not None:
+        chdir(prev_cwd)
+    return virtual_layout
+
+
+def merge_hdf5_files(
+    files_or_pattern, h5_path, output_file, process_name,
+    output_entry=None, output_filemode="a", data_name="data",
+    processing_index=0, config=None, base_dir=None,
+    axis=0, overwrite=False
+):
+    """
+    Merge a series of HDF5 files into a single output file, using a virtual layout.
+
+    Parameters
+    -----------
+    files_or_pattern: str or list
+        A list of file names, or a wildcard pattern.
+        If a list is provided, it will not be sorted! This will have to be
+        done before calling this function.
+    h5_path: str
+        Path inside the HDF5 input file(s)
+    output_file: str
+        Path of the output file
+    process_name: str
+        Name of the process
+    output_entry: str, optional
+        Output HDF5 root entry (default is "/entry")
+    output_filemode: str, optional
+        File mode for output file. Default is "a" (append)
+    processing_index: int, optional
+        Processing index for the output file. Default is 0.
+    config: dict, optional
+        Dictionary describing the configuration needed to get the results.
+    base_dir: str, optional
+        Base directory when using relative file names.
+    axis: int, optional
+        Data axis to merge. Default is 0.
+    overwrite: bool, optional
+        Whether to overwrite already existing data in the final file.
+        Default is False.
+    """
+    if base_dir is not None:
+        prev_cwd = getcwd()
+    virtual_layout = create_virtual_layout(files_or_pattern, h5_path, base_dir=base_dir, axis=axis)
+    nx_file = NXProcessWriter(
+        output_file,
+        entry=output_entry, filemode=output_filemode, overwrite=overwrite
+    )
+    nx_file.write(
+        virtual_layout,
+        process_name,
+        processing_index=processing_index,
+        config=config,
+        data_name=data_name,
+        is_frames_stack=True,
+    )
+    if base_dir is not None and prev_cwd != getcwd():
+        chdir(prev_cwd)
+
+
+class TIFFWriter(Writer):
+    def __init__(self, fname, multiframe=False, start_index=0, filemode=None, append=False, big_tiff=None):
+        """
+        Tiff writer.
+
+        Parameters
+        -----------
+        fname: str
+            Path to the output file name
+        multiframe: bool, optional
+            Whether to write all data in one single file. Default is False.
+        start_index: int, optional
+            When writing a stack of images, each image is written in a dedicated file
+            (unless multiframe is set to True).
+            In this case, the output is a series of files `filename_0000.tif`,
+            `filename_0001.tif`, etc. This parameter is the starting index for
+            file names.
+            This option is ignored when multiframe is True.
+        filemode: str, optional
+            DEPRECATED. Will be ignored. Please refer to 'append'
+        append: bool, optional
+            Whether to append data to the file rather than overwriting. Default is False.
+        big_tiff: bool, optional
+            Whether to write in "big tiff" format: https://www.awaresystems.be/imaging/tiff/bigtiff.html
+            Default is True when multiframe is True.
+            Note that default "standard" tiff cannot exceed 4 GB.
+
+        Notes
+        ------
+        If multiframe is False (default), then each image will be written in a
+        dedicated tiff file.
+        """
+        super().__init__(fname)
+        self.multiframe = multiframe
+        self.start_index = start_index
+        self.append = append
+        if big_tiff is None:
+            big_tiff = multiframe
+        if multiframe and not big_tiff:
+            # raise error ?
+            print("big_tiff was set to False while multiframe was set to True. This will probably be problematic.")
+        self.big_tiff = big_tiff
+        # Compat.
+        self.filemode = filemode
+        if filemode is not None:
+            deprecation_warning("Ignored parameter 'filemode'. Please use the 'append' parameter")
+
+    def write(self, data, *args, config=None, **kwargs):
+        ext = None
+        if not isinstance(data, np.ndarray):
+            raise TypeError(f"data is expected to be a numpy array and not {type(data)}")
+        # Single image, or multiple images in the same file
+        if self.multiframe:
+            volume = MultiTIFFVolume(
+                self.fname,
+                data=data,
+                metadata={"config": config},
+            )
+            file_path = self.fname
+        # Multiple images, one file per image
+        else:
+            if data.ndim == 2 or (data.ndim == 3 and data.shape[0] == 1):
+                data = data.reshape(1, data.shape[-2], data.shape[-1])
+                file_path = self.fname
+                volume = MultiTIFFVolume(
+                    self.fname,
+                    data=data,
+                    metadata={"config": config},
+                )
+            else:
+                file_path, ext = path.splitext(self.fname)
+                volume = TIFFVolume(
+                    data_url=DataUrl(
+                        file_path=path.dirname(file_path),
+                        data_path=path.basename(file_path) + "_{index_zfill4}" + ext,
+                        scheme="silx",
+                    ),
+                    metadata_url=DataUrl(
+                        file_path=path.dirname(file_path),
+                        data_path=path.basename(file_path) + ".txt",
+                        scheme="silx",
+                    ),
+                    data=data,
+                    start_index=self.start_index,
+                )
+
+        volume.save()
+
+
+class EDFWriter(Writer):
+    def __init__(self, fname, start_index=0, filemode="w"):
+        """
+        EDF (ESRF Data Format) writer.
+
+        Parameters
+        -----------
+        fname: str
+            Path to the output file name
+        start_index: int, optional
+            When writing a stack of images, each image is written in a dedicated file.
+            In this case, the output is a series of files `filename_0000.edf`,
+            `filename_0001.edf`, etc. This parameter is the starting index for
+            file names.
+        """
+        super().__init__(fname)
+        self.filemode = filemode
+        self.start_index = start_index
+
+    def write(self, data, *args, config=None, **kwargs):
+        if not isinstance(data, np.ndarray):
+            raise TypeError(f"data is expected to be a numpy array and not {type(data)}")
+        header = {
+            "software": "nabu",
+            "date": get_datetime(),
+        }
+        if data.ndim == 2:
+            data = data.reshape(1, data.shape[0], data.shape[1])
+
+        volume = EDFVolume(
+            path.dirname(self.fname),
+            data=data,
+            start_index=self.start_index,
+            header=header,
+        )
+        volume.save()
+
+
+class JP2Writer(Writer):
+    def __init__(
+        self, fname, start_index=0, filemode="wb",
+        psnr=None, cratios=None, auto_convert=True, float_clip_values=None, n_threads=None
+    ):
+        """
+        JPEG2000 writer. This class requires the python package `glymur` and the
+        library `libopenjp2`.
+
+        Parameters
+        -----------
+        fname: str
+            Path to the output file name
+        start_index: int, optional
+            When writing a stack of images, each image is written in a dedicated file.
+            The output is a series of files `filename_0000.jp2`, `filename_0001.jp2`, etc.
+            This parameter is the starting index for file names.
+        psnr: list of int, optional
+            The PSNR (Peak Signal-to-Noise Ratio) for each jpeg2000 layer.
+            This defines a quality metric for lossy compression.
+            The number "0" stands for lossless compression.
+        cratios: list of int, optional
+            Compression ratio for each jpeg2000 layer
+        auto_convert: bool, optional
+            Whether to automatically cast floating point data to uint16.
+            Default is True.
+        float_clip_values: tuple of floats, optional
+            If set to a tuple of two values (min, max), then each image will be clipped
+            to these minimum and maximum values.
+        n_threads: int, optional
+            Number of threads to use for encoding. Default is the number of available threads.
+            Needs libopenjpeg >= 2.4.0.
+        """
+        super().__init__(fname)
+        if not __have_jp2k__:
+            raise ValueError("Need glymur python package and libopenjp2 library")
+        self.n_threads = n_threads
+        # self.setup_multithread_encoding(n_threads=n_threads, what_if_not_available="ignore")
+        # self.filemode = filemode
+        self.start_index = start_index
+        self.auto_convert = auto_convert
+        if psnr is not None and np.isscalar(psnr):
+            psnr = [psnr]
+        self.psnr = psnr
+        self.cratios = cratios
+        self._vmin = None
+        self._vmax = None
+        self.clip_float = False
+        if float_clip_values is not None:
+            self._float_clip_min, self._float_clip_max = float_clip_values
+            self.clip_float = True
+
+    def write(self, data, *args, **kwargs):
+        if not isinstance(data, np.ndarray):
+            raise TypeError(f"data is expected to be a numpy array and not {type(data)}")
+
+        if data.ndim == 2:
+            data = data.reshape(1, data.shape[0], data.shape[1])
+
+        file_path, ext = path.splitext(self.fname)
+        if data.ndim == 3 and data.shape[0] == 1:
+            # Single frame
+            data_url = DataUrl(
+                file_path=path.dirname(file_path),
+                data_path=None,
+                scheme="silx",
+            )
+            metadata_url = DataUrl(
+                file_path=path.dirname(file_path),
+                data_path=path.basename(file_path) + ".txt",
+                scheme="silx",
+            )
+        else:
+            # One file per frame
+            data_url = DataUrl(
+                file_path=file_path,
+                data_path=path.basename(file_path) + "_{index_zfill4}" + ext,
+                scheme="silx",
+            )
+            metadata_url = DataUrl(
+                file_path=file_path,
+                data_path=path.basename(file_path) + ".info",
+                scheme="silx",
+            )
+
+        volume = JP2KVolume(
+            data_url=data_url,
+            metadata_url=metadata_url,
+            start_index=self.start_index,
+            cratios=self.cratios,
+            psnr=self.psnr,
+            n_threads=self.n_threads,
+        )
+
+        if data.dtype != np.uint16 and self.auto_convert:
+            if self.clip_float:
+                data = np.clip(data, self._float_clip_min, self._float_clip_max)
+            data = rescale_data(data, 0, 65535, data_min=self._vmin, data_max=self._vmax)
+            data = data.astype(np.uint16)
+
+        volume.data = data
+        config = kwargs.get("config", None)
+        if config is not None:
+            volume.metadata = {"config": config}
+        volume.save()
+
+
+Writers = {
+    "h5": NXProcessWriter,
+    "hdf5": NXProcessWriter,
+    "nx": NXProcessWriter,
+    "nexus": NXProcessWriter,
+    "tif": TIFFWriter,
+    "tiff": TIFFWriter,
+    "j2k": JP2Writer,
+    "jp2": JP2Writer,
+    "jp2k": JP2Writer,
+    "edf": EDFWriter,
+}
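The create_virtual_layout helper in the file above builds an HDF5 virtual layout spanning several files. A minimal sketch of how it can be used (hypothetical file names and HDF5 paths; assumes the same helper is exposed by nabu.io.writer, of which this backup file appears to be a copy, and that the per-chunk files already exist):

    from h5py import File
    from nabu.io.writer import create_virtual_layout

    # Hypothetical inputs: per-chunk reconstruction files, each holding a
    # 3D dataset at the same internal path.
    layout = create_virtual_layout(
        "rec_chunk_*.h5",                     # wildcard pattern, sorted internally
        "entry/reconstruction/results/data",  # path inside each input file
        axis=0,                               # stack the chunks along the first axis
    )

    # Write the layout as a virtual dataset: the output file only references
    # the source files instead of copying their data.
    with File("rec_merged.h5", "a") as f:
        f.create_virtual_dataset("merged", layout)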