nabu 2022.3.0a1__py3-none-any.whl → 2023.1.0a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. nabu/__init__.py +1 -1
  2. nabu/app/bootstrap.py +7 -1
  3. nabu/app/cast_volume.py +8 -2
  4. nabu/app/cli_configs.py +69 -0
  5. nabu/app/composite_cor.py +97 -0
  6. nabu/app/create_distortion_map_from_poly.py +118 -0
  7. nabu/app/nx_z_splitter.py +1 -1
  8. nabu/app/prepare_weights_double.py +21 -16
  9. nabu/app/reconstruct_helical.py +0 -1
  10. nabu/app/utils.py +10 -5
  11. nabu/cuda/processing.py +1 -0
  12. nabu/cuda/tests/test_padding.py +1 -0
  13. nabu/cuda/utils.py +1 -0
  14. nabu/distributed/__init__.py +0 -0
  15. nabu/distributed/utils.py +57 -0
  16. nabu/distributed/worker.py +543 -0
  17. nabu/estimation/cor.py +3 -7
  18. nabu/estimation/cor_sino.py +2 -1
  19. nabu/estimation/distortion.py +6 -4
  20. nabu/io/cast_volume.py +10 -1
  21. nabu/io/detector_distortion.py +305 -0
  22. nabu/io/reader.py +37 -7
  23. nabu/io/reader_helical.py +0 -3
  24. nabu/io/tests/test_cast_volume.py +16 -4
  25. nabu/io/tests/test_detector_distortion.py +178 -0
  26. nabu/io/tests/test_writers.py +2 -2
  27. nabu/io/tiffwriter_zmm.py +2 -3
  28. nabu/io/writer.py +84 -1
  29. nabu/io/writer_BACKUP_193259.py +556 -0
  30. nabu/io/writer_BACKUP_193381.py +556 -0
  31. nabu/io/writer_BASE_193259.py +548 -0
  32. nabu/io/writer_BASE_193381.py +548 -0
  33. nabu/io/writer_LOCAL_193259.py +550 -0
  34. nabu/io/writer_LOCAL_193381.py +550 -0
  35. nabu/io/writer_REMOTE_193259.py +557 -0
  36. nabu/io/writer_REMOTE_193381.py +557 -0
  37. nabu/misc/fourier_filters.py +2 -0
  38. nabu/misc/rotation.py +0 -1
  39. nabu/misc/tests/test_rotation.py +1 -0
  40. nabu/pipeline/config_validators.py +10 -0
  41. nabu/pipeline/datadump.py +1 -1
  42. nabu/pipeline/dataset_validator.py +0 -1
  43. nabu/pipeline/detector_distortion_provider.py +20 -0
  44. nabu/pipeline/estimators.py +35 -21
  45. nabu/pipeline/fallback_utils.py +1 -1
  46. nabu/pipeline/fullfield/chunked.py +30 -15
  47. nabu/pipeline/fullfield/chunked_black.py +881 -0
  48. nabu/pipeline/fullfield/chunked_cuda.py +34 -4
  49. nabu/pipeline/fullfield/chunked_fb.py +966 -0
  50. nabu/pipeline/fullfield/chunked_google.py +921 -0
  51. nabu/pipeline/fullfield/chunked_pep8.py +920 -0
  52. nabu/pipeline/fullfield/computations.py +7 -6
  53. nabu/pipeline/fullfield/dataset_validator.py +1 -1
  54. nabu/pipeline/fullfield/grouped_cuda.py +6 -0
  55. nabu/pipeline/fullfield/nabu_config.py +15 -3
  56. nabu/pipeline/fullfield/processconfig.py +5 -0
  57. nabu/pipeline/fullfield/reconstruction.py +1 -2
  58. nabu/pipeline/helical/gridded_accumulator.py +1 -8
  59. nabu/pipeline/helical/helical_chunked_regridded.py +48 -33
  60. nabu/pipeline/helical/helical_reconstruction.py +1 -9
  61. nabu/pipeline/helical/nabu_config.py +11 -14
  62. nabu/pipeline/helical/span_strategy.py +11 -4
  63. nabu/pipeline/helical/tests/test_accumulator.py +0 -3
  64. nabu/pipeline/helical/tests/test_pipeline_elements_full.py +0 -6
  65. nabu/pipeline/helical/tests/test_strategy.py +0 -1
  66. nabu/pipeline/helical/weight_balancer.py +0 -1
  67. nabu/pipeline/params.py +4 -0
  68. nabu/pipeline/processconfig.py +6 -2
  69. nabu/pipeline/writer.py +9 -4
  70. nabu/preproc/distortion.py +4 -3
  71. nabu/preproc/double_flatfield.py +16 -4
  72. nabu/preproc/double_flatfield_cuda.py +3 -2
  73. nabu/preproc/double_flatfield_variable_region.py +13 -4
  74. nabu/preproc/flatfield.py +29 -7
  75. nabu/preproc/flatfield_cuda.py +0 -1
  76. nabu/preproc/flatfield_variable_region.py +5 -2
  77. nabu/preproc/phase.py +0 -1
  78. nabu/preproc/phase_cuda.py +0 -1
  79. nabu/preproc/tests/test_ctf.py +4 -3
  80. nabu/preproc/tests/test_flatfield.py +6 -7
  81. nabu/reconstruction/fbp_opencl.py +1 -1
  82. nabu/reconstruction/filtering.py +0 -1
  83. nabu/reconstruction/tests/test_fbp.py +1 -0
  84. nabu/resources/dataset_analyzer.py +0 -1
  85. nabu/resources/templates/bm05_pag.conf +34 -0
  86. nabu/resources/templates/id16_ctf.conf +2 -1
  87. nabu/resources/tests/test_nxflatfield.py +0 -1
  88. nabu/resources/tests/test_units.py +0 -1
  89. nabu/stitching/frame_composition.py +7 -1
  90. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/METADATA +2 -7
  91. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/RECORD +96 -75
  92. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/WHEEL +1 -1
  93. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/entry_points.txt +2 -1
  94. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/LICENSE +0 -0
  95. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/top_level.txt +0 -0
  96. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/zip-safe +0 -0
@@ -0,0 +1,557 @@
+ from glob import glob
+ from os import path, getcwd, chdir
+ from posixpath import join as posix_join
+ from datetime import datetime
+ import numpy as np
+ from h5py import VirtualSource, VirtualLayout
+ from tomoscan.io import HDF5File
+ from tomoscan.esrf import EDFVolume
+ from tomoscan.esrf import HDF5Volume
+ from silx.io.url import DataUrl
+ from tomoscan.esrf import TIFFVolume, MultiTIFFVolume
+ from tomoscan.esrf import JP2KVolume
+ from .. import version as nabu_version
+ from ..utils import merged_shape, deprecation_warning
+ from ..misc.utils import rescale_data
+ from .utils import check_h5py_version, convert_dict_values
+ from silx.io.dictdump import dicttonx
+ try:
+     from glymur import Jp2k, set_option as glymur_set_option
+     from glymur.version import openjpeg_version, version as glymur_version
+     __have_jp2k__ = True
+ except ImportError:
+     __have_jp2k__ = False
+
+ def get_datetime():
+     """
+     Function used by some writers to indicate the current date.
+     """
+     return datetime.now().replace(microsecond=0).isoformat()
+
+
+ class Writer:
+     """
+     Base class for all writers.
+     """
+     def __init__(self, fname):
+         self.fname = fname
+
+     def get_filename(self):
+         return self.fname
+
+
+ class NXProcessWriter(Writer):
+     """
+     A class to write a Nexus file with a processing result.
+     """
+     def __init__(self, fname, entry=None, filemode=None, overwrite=False):
+         """
+         Initialize a NXProcessWriter.
+
+         Parameters
+         ----------
+         fname: str
+             Path to the HDF5 file.
+         entry: str, optional
+             Entry in the HDF5 file. Default is "entry".
+         filemode: str, optional
+             DEPRECATED. This parameter has no effect.
+         overwrite: bool, optional
+             Whether to overwrite already existing data. Default is False.
+         """
+         super().__init__(fname)
+         self._set_entry(entry)
+         self.overwrite = overwrite
+         check_h5py_version()
+         if filemode is not None:
+             deprecation_warning(
+                 "'filemode' is deprecated and has no effect", func_name="nxprocess_init"
+             )
+
+     def _set_entry(self, entry):
+         self.entry = entry or "entry"
+         self.data_path = posix_join("/", self.entry)
+
+     def write(self, result, process_name, processing_index=0, config=None, data_name="data", is_frames_stack=True) -> str:
+         """
+         Write the result in the current NXProcess group.
+
+         Parameters
+         ----------
+         result: numpy.ndarray, dict or h5py.VirtualLayout
+             Object containing the processing result
+         process_name: str
+             Name of the processing
+         processing_index: int
+             Index of the processing (in a pipeline)
+         config: dict, optional
+             Dictionary containing the configuration.
+         data_name: str, optional
+             Name of the dataset in the "results" group. Default is "data".
+         is_frames_stack: bool, optional
+             Whether the data is a stack of frames. Default is True.
+         """
+         entry_path = self.data_path
+         nx_process_path = "/".join([entry_path, process_name])
+
+         if config is not None:
+             config.update(
+                 {
+                     "@NX_class": "NXcollection",
+                 }
+             )
+
+         volume = HDF5Volume(
+             data_url=DataUrl(
+                 file_path=self.fname,
+                 data_path=f"{nx_process_path}/results/{data_name}",
+                 scheme="silx",
+             ),
+             metadata_url=DataUrl(
+                 file_path=self.fname,
+                 data_path=f"{nx_process_path}/configuration",
+                 scheme="silx",
+             ),
+             metadata=config,
+             overwrite=self.overwrite,
+         )
+
+         # Check which case we are in
+         if isinstance(result, dict):
+             pass
+         elif isinstance(result, np.ndarray):
+             if result.ndim == 2:
+                 result = result.reshape(1, result.shape[0], result.shape[1])
+             volume.data = result
+         elif isinstance(result, VirtualLayout):
+             # TODO: add test on tomoscan to ensure this use case is handled
+             volume.data = result
+         else:
+             raise TypeError(f"result is expected to be a dict or a numpy array. Not {type(result)}")
+
+         if volume.metadata is not None:
+             volume.metadata = convert_dict_values(
+                 volume.metadata,
+                 {None: "None"},
+             )
+         # If result is a dictionary, then we only have some metadata to be saved
+         if isinstance(result, dict):
+             volume.save_metadata()
+             results_path = posix_join(nx_process_path, "results")
+         else:
+             volume.save()
+             results_path = posix_join(nx_process_path, "results", data_name)
+
+         # Add nabu-specific information
+         nabu_process_info = {
+             "@NX_class": "NXentry",
+             f"{process_name}@NX_class": "NXprocess",
+             f"{process_name}/program": "nabu",
+             f"{process_name}/version": nabu_version,
+             f"{process_name}/date": get_datetime(),
+             f"{process_name}/sequence_index": np.int32(processing_index),
+         }
+         if isinstance(result, np.ndarray):
+             nabu_process_info.update(
+                 {
+                     f"{process_name}/results@NX_class": "NXdata",
+                     f"{process_name}/results@signal": data_name,
+                 }
+             )
+             if is_frames_stack:
+                 nabu_process_info.update(
+                     {
+                         f"{process_name}/results@interpretation": "image",
+                     }
+                 )
+
+             # Prepare the direct-access plots (NXdata "default" attributes)
+             nabu_process_info.update(
+                 {
+                     f"{process_name}@default": "results",
+                     "@default": f"{process_name}/results",
+                 }
+             )
+         elif isinstance(result, dict):
+             nabu_process_info.update(
+                 {
+                     f"{process_name}/results": convert_dict_values(
+                         result,
+                         {None: "None"},
+                     ),
+                 }
+             )
+
+         dicttonx(
+             nabu_process_info,
+             h5file=self.fname,
+             h5path=entry_path,
+             update_mode="replace",
+             mode="a",
+         )
+         return results_path
+
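+ # Usage sketch (illustrative only; the file name, entry and array shown here
+ # are assumptions, not part of the module):
+ #
+ #     writer = NXProcessWriter("recons.hdf5", entry="entry0000", overwrite=True)
+ #     path_in_file = writer.write(
+ #         np.zeros((5, 64, 64), dtype="f"), "reconstruction", config={"method": "FBP"}
+ #     )
+ #     # path_in_file == "/entry0000/reconstruction/results/data"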
+
+
+ def create_virtual_layout(files_or_pattern, h5_path, base_dir=None, axis=0):
+     """
+     Create a HDF5 virtual layout.
+
+     Parameters
+     ----------
+     files_or_pattern: str or list
+         A list of file names, or a wildcard pattern.
+         If a list is provided, it will not be sorted! This will have to be
+         done before calling this function.
+     h5_path: str
+         Path inside the HDF5 input file(s)
+     base_dir: str, optional
+         Base directory when using relative file names.
+     axis: int, optional
+         Data axis to merge. Default is 0.
+     """
+     prev_cwd = None
+     if base_dir is not None:
+         prev_cwd = getcwd()
+         chdir(base_dir)
+     if isinstance(files_or_pattern, str):
+         files_list = glob(files_or_pattern)
+         files_list.sort()
+     else:  # list
+         files_list = files_or_pattern
+     if files_list == []:
+         raise ValueError("Nothing found for pattern %s" % files_or_pattern)
+     virtual_sources = []
+     shapes = []
+     for fname in files_list:
+         with HDF5File(fname, "r", swmr=True) as fid:
+             shape = fid[h5_path].shape
+         vsource = VirtualSource(fname, name=h5_path, shape=shape)
+         virtual_sources.append(vsource)
+         shapes.append(shape)
+     total_shape = merged_shape(shapes, axis=axis)
+
+     virtual_layout = VirtualLayout(
+         shape=total_shape,
+         dtype='f'
+     )
+     start_idx = 0
+     for vsource, shape in zip(virtual_sources, shapes):
+         n_imgs = shape[axis]
+         # Perhaps there is a more elegant way to do this
+         if axis == 0:
+             virtual_layout[start_idx:start_idx + n_imgs] = vsource
+         elif axis == 1:
+             virtual_layout[:, start_idx:start_idx + n_imgs, :] = vsource
+         elif axis == 2:
+             virtual_layout[:, :, start_idx:start_idx + n_imgs] = vsource
+         else:
+             raise ValueError("Only axis 0, 1 and 2 are supported")
+         start_idx += n_imgs
+
+     if base_dir is not None:
+         chdir(prev_cwd)
+     return virtual_layout
+
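+ # Usage sketch (illustrative only; file names and dataset path are assumptions):
+ #
+ #     layout = create_virtual_layout("chunk_*.h5", "/entry/data", axis=0)
+ #     with HDF5File("merged.h5", "a") as f:
+ #         f.create_virtual_dataset("/entry/data", layout)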
+
+
+ def merge_hdf5_files(
+     files_or_pattern, h5_path, output_file, process_name,
+     output_entry=None, output_filemode="a", data_name="data",
+     processing_index=0, config=None, base_dir=None,
+     axis=0, overwrite=False
+ ):
+     """
+     Merge several HDF5 files into one file, using a HDF5 virtual layout.
+
+     Parameters
+     ----------
+     files_or_pattern: str or list
+         A list of file names, or a wildcard pattern.
+         If a list is provided, it will not be sorted! This will have to be
+         done before calling this function.
+     h5_path: str
+         Path inside the HDF5 input file(s)
+     output_file: str
+         Path of the output file
+     process_name: str
+         Name of the process
+     output_entry: str, optional
+         Output HDF5 root entry (default is "/entry")
+     output_filemode: str, optional
+         File mode for output file. Default is "a" (append)
+     data_name: str, optional
+         Name of the dataset in the output "results" group. Default is "data".
+     processing_index: int, optional
+         Processing index for the output file. Default is 0.
+     config: dict, optional
+         Dictionary describing the configuration needed to get the results.
+     base_dir: str, optional
+         Base directory when using relative file names.
+     axis: int, optional
+         Data axis to merge. Default is 0.
+     overwrite: bool, optional
+         Whether to overwrite already existing data in the final file.
+         Default is False.
+     """
+     if base_dir is not None:
+         prev_cwd = getcwd()
+     virtual_layout = create_virtual_layout(files_or_pattern, h5_path, base_dir=base_dir, axis=axis)
+     nx_file = NXProcessWriter(
+         output_file,
+         entry=output_entry, filemode=output_filemode, overwrite=overwrite
+     )
+     nx_file.write(
+         virtual_layout,
+         process_name,
+         processing_index=processing_index,
+         config=config,
+         data_name=data_name,
+         is_frames_stack=True
+     )
+     if base_dir is not None and prev_cwd != getcwd():
+         chdir(prev_cwd)
+
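+ # Usage sketch (illustrative only; paths, dataset path and process name are
+ # assumptions):
+ #
+ #     merge_hdf5_files(
+ #         "rec_*.h5", "/entry/reconstruction/results/data", "merged.h5",
+ #         "reconstruction", output_entry="entry", config={"source": "rec_*.h5"},
+ #     )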
+
+
+ class TIFFWriter(Writer):
+     def __init__(self, fname, multiframe=False, start_index=0, filemode=None, append=False, big_tiff=None):
+         """
+         Tiff writer.
+
+         Parameters
+         ----------
+         fname: str
+             Path to the output file name
+         multiframe: bool, optional
+             Whether to write all data in one single file. Default is False.
+         start_index: int, optional
+             When writing a stack of images, each image is written in a dedicated file
+             (unless multiframe is set to True).
+             In this case, the output is a series of files `filename_0000.tif`,
+             `filename_0001.tif`, etc. This parameter is the starting index for
+             file names.
+             This option is ignored when multiframe is True.
+         filemode: str, optional
+             DEPRECATED. Will be ignored. Please refer to 'append'.
+         append: bool, optional
+             Whether to append data to the file rather than overwriting. Default is False.
+         big_tiff: bool, optional
+             Whether to write in "big tiff" format: https://www.awaresystems.be/imaging/tiff/bigtiff.html
+             Default is True when multiframe is True.
+             Note that a default "standard" tiff cannot exceed 4 GB.
+
+         Notes
+         -----
+         If multiframe is False (default), then each image will be written in a
+         dedicated tiff file.
+         """
+         super().__init__(fname)
+         self.multiframe = multiframe
+         self.start_index = start_index
+         self.append = append
+         if big_tiff is None:
+             big_tiff = multiframe
+         if multiframe and not big_tiff:
+             # raise error ?
+             print("big_tiff was set to False while multiframe was set to True. This will probably be problematic.")
+         self.big_tiff = big_tiff
+         # Compat.
+         self.filemode = filemode
+         if filemode is not None:
+             deprecation_warning("Ignored parameter 'filemode'. Please use the 'append' parameter")
+
+     def write(self, data, *args, config=None, **kwargs):
+         ext = None
+         if not isinstance(data, np.ndarray):
+             raise TypeError(f"data is expected to be a numpy array and not {type(data)}")
+         # Single image, or multiple images in the same file
+         if self.multiframe:
+             volume = MultiTIFFVolume(
+                 self.fname,
+                 data=data,
+                 metadata={
+                     "config": config,
+                 },
+             )
+             file_path = self.fname
+         # Multiple images, one file per image
+         else:
+             if data.ndim == 2 or (data.ndim == 3 and data.shape[0] == 1):
+                 data = data.reshape(1, data.shape[-2], data.shape[-1])
+                 file_path = self.fname
+                 volume = MultiTIFFVolume(
+                     self.fname,
+                     data=data,
+                     metadata={
+                         "config": config,
+                     },
+                 )
+             else:
+                 file_path, ext = path.splitext(self.fname)
+                 volume = TIFFVolume(
+                     data_url=DataUrl(
+                         file_path=path.dirname(file_path),
+                         data_path=path.basename(file_path) + "_{index_zfill4}" + ext,
+                         scheme="silx",
+                     ),
+                     metadata_url=DataUrl(
+                         file_path=path.dirname(file_path),
+                         data_path=path.basename(file_path) + ".txt",
+                         scheme="silx",
+                     ),
+                     data=data,
+                     start_index=self.start_index,
+                 )
+
+         volume.save()
+
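+ # Usage sketch (illustrative only; file name and array are assumptions):
+ #
+ #     writer = TIFFWriter("vol.tiff", multiframe=True)  # single big-tiff file
+ #     writer.write(np.zeros((100, 64, 64), dtype="f"))
+ #     # With multiframe=False, a 3D stack is written as vol_0000.tiff, vol_0001.tiff, ...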
+
+
+ class EDFWriter(Writer):
+     def __init__(self, fname, start_index=0, filemode="w"):
+         """
+         EDF (ESRF Data Format) writer.
+
+         Parameters
+         ----------
+         fname: str
+             Path to the output file name
+         start_index: int, optional
+             When writing a stack of images, each image is written in a dedicated file.
+             In this case, the output is a series of files `filename_0000.edf`,
+             `filename_0001.edf`, etc. This parameter is the starting index for
+             file names.
+         filemode: str, optional
+             File mode. Default is "w".
+         """
+         super().__init__(fname)
+         self.filemode = filemode
+         self.start_index = start_index
+
+     def write(self, data, *args, config=None, **kwargs):
+         if not isinstance(data, np.ndarray):
+             raise TypeError(f"data is expected to be a numpy array and not {type(data)}")
+         header = {
+             "software": "nabu",
+             "date": get_datetime(),
+         }
+         if data.ndim == 2:
+             data = data.reshape(1, data.shape[0], data.shape[1])
+
+         volume = EDFVolume(
+             path.dirname(self.fname),
+             data=data,
+             start_index=self.start_index,
+             header=header
+         )
+         volume.save()
+
+
+ class JP2Writer(Writer):
+     def __init__(
+         self, fname, start_index=0, filemode="wb",
+         psnr=None, cratios=None, auto_convert=True, float_clip_values=None, n_threads=None
+     ):
+         """
+         JPEG2000 writer. This class requires the python package `glymur` and the
+         library `libopenjp2`.
+
+         Parameters
+         ----------
+         fname: str
+             Path to the output file name
+         start_index: int, optional
+             When writing a stack of images, each image is written in a dedicated file.
+             The output is a series of files `filename_0000.jp2`, `filename_0001.jp2`, etc.
+             This parameter is the starting index for file names.
+         filemode: str, optional
+             Unused; kept for backward compatibility.
+         psnr: list of int, optional
+             The PSNR (Peak Signal-to-Noise Ratio) for each jpeg2000 layer.
+             This defines a quality metric for lossy compression.
+             The number "0" stands for lossless compression.
+         cratios: list of int, optional
+             Compression ratio for each jpeg2000 layer
+         auto_convert: bool, optional
+             Whether to automatically cast floating point data to uint16.
+             Default is True.
+         float_clip_values: tuple of floats, optional
+             If set to a tuple of two values (min, max), then the values of each image
+             will be clipped to this minimum and maximum before casting.
+         n_threads: int, optional
+             Number of threads to use for encoding. Default is the number of available threads.
+             Needs libopenjpeg >= 2.4.0.
+         """
+         super().__init__(fname)
+         if not __have_jp2k__:
+             raise ValueError("Need glymur python package and libopenjp2 library")
+         self.n_threads = n_threads
+         # self.setup_multithread_encoding(n_threads=n_threads, what_if_not_available="ignore")
+         # self.filemode = filemode
+         self.start_index = start_index
+         self.auto_convert = auto_convert
+         if psnr is not None and np.isscalar(psnr):
+             psnr = [psnr]
+         self.psnr = psnr
+         self.cratios = cratios
+         self._vmin = None
+         self._vmax = None
+         self.clip_float = False
+         if float_clip_values is not None:
+             self._float_clip_min, self._float_clip_max = float_clip_values
+             self.clip_float = True
+
+     def write(self, data, *args, **kwargs):
+         if not isinstance(data, np.ndarray):
+             raise TypeError(f"data is expected to be a numpy array and not {type(data)}")
+
+         if data.ndim == 2:
+             data = data.reshape(1, data.shape[0], data.shape[1])
+
+         file_path, ext = path.splitext(self.fname)
+         if data.ndim == 3 and data.shape[0] == 1:
+             # Single frame: write one file, named after self.fname
+             data_url = DataUrl(
+                 file_path=path.dirname(file_path),
+                 data_path=None,
+                 scheme="silx",
+             )
+             metadata_url = DataUrl(
+                 file_path=path.dirname(file_path),
+                 data_path=path.basename(file_path) + ".txt",
+                 scheme="silx",
+             )
+         else:
+             # Stack of frames: write one file per frame
+             data_url = DataUrl(
+                 file_path=path.dirname(file_path),
+                 data_path=path.basename(file_path) + "_{index_zfill4}" + ext,
+                 scheme="silx",
+             )
+             metadata_url = DataUrl(
+                 file_path=path.dirname(file_path),
+                 data_path=path.basename(file_path) + ".info",
+                 scheme="silx",
+             )
+
+         volume = JP2KVolume(
+             data_url=data_url,
+             metadata_url=metadata_url,
+             start_index=self.start_index,
+             cratios=self.cratios,
+             psnr=self.psnr,
+             n_threads=self.n_threads,
+         )
+
+         if data.dtype != np.uint16 and self.auto_convert:
+             if self.clip_float:
+                 data = np.clip(data, self._float_clip_min, self._float_clip_max)
+             data = rescale_data(data, 0, 65535, data_min=self._vmin, data_max=self._vmax)
+             data = data.astype(np.uint16)
+
+         volume.data = data
+         config = kwargs.get("config", None)
+         if config is not None:
+             volume.metadata = {"config": config}
+         volume.save()
+
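+ # Usage sketch (illustrative only; requires glymur + libopenjp2; file name and
+ # array are assumptions):
+ #
+ #     writer = JP2Writer("slice.jp2", cratios=[10], float_clip_values=(0.0, 1.0))
+ #     writer.write(np.random.rand(1, 64, 64).astype("f"))  # clipped, rescaled to uint16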
+
+
+ Writers = {
+     "h5": NXProcessWriter,
+     "hdf5": NXProcessWriter,
+     "nx": NXProcessWriter,
+     "nexus": NXProcessWriter,
+     "tif": TIFFWriter,
+     "tiff": TIFFWriter,
+     "j2k": JP2Writer,
+     "jp2": JP2Writer,
+     "jp2k": JP2Writer,
+     "edf": EDFWriter,
+ }
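+ # The mapping above is meant for selecting a writer class from a file
+ # extension, e.g. (illustrative sketch):
+ #
+ #     writer_cls = Writers["tiff"]      # -> TIFFWriter
+ #     writer_cls("out.tiff").write(np.zeros((3, 16, 16), dtype="f"))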