nabu 2025.1.0.dev14__py3-none-any.whl → 2025.1.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. doc/doc_config.py +32 -0
  2. nabu/__init__.py +1 -1
  3. nabu/app/cast_volume.py +9 -1
  4. nabu/app/cli_configs.py +80 -3
  5. nabu/app/estimate_motion.py +54 -0
  6. nabu/app/multicor.py +2 -4
  7. nabu/app/pcaflats.py +116 -0
  8. nabu/app/reconstruct.py +1 -7
  9. nabu/app/reduce_dark_flat.py +5 -2
  10. nabu/estimation/cor.py +1 -1
  11. nabu/estimation/motion.py +557 -0
  12. nabu/estimation/tests/test_motion_estimation.py +471 -0
  13. nabu/estimation/tilt.py +1 -1
  14. nabu/estimation/translation.py +47 -1
  15. nabu/io/cast_volume.py +100 -13
  16. nabu/io/reader.py +32 -1
  17. nabu/io/tests/test_remove_volume.py +152 -0
  18. nabu/pipeline/config_validators.py +42 -43
  19. nabu/pipeline/estimators.py +255 -0
  20. nabu/pipeline/fullfield/chunked.py +67 -43
  21. nabu/pipeline/fullfield/chunked_cuda.py +5 -2
  22. nabu/pipeline/fullfield/nabu_config.py +20 -14
  23. nabu/pipeline/fullfield/processconfig.py +17 -3
  24. nabu/pipeline/fullfield/reconstruction.py +4 -1
  25. nabu/pipeline/params.py +12 -0
  26. nabu/pipeline/tests/test_estimators.py +240 -3
  27. nabu/preproc/ccd.py +53 -3
  28. nabu/preproc/flatfield.py +306 -1
  29. nabu/preproc/shift.py +3 -1
  30. nabu/preproc/tests/test_pcaflats.py +154 -0
  31. nabu/processing/rotation_cuda.py +3 -1
  32. nabu/processing/tests/test_rotation.py +4 -2
  33. nabu/reconstruction/astra.py +245 -0
  34. nabu/reconstruction/fbp.py +7 -0
  35. nabu/reconstruction/fbp_base.py +31 -7
  36. nabu/reconstruction/fbp_opencl.py +8 -0
  37. nabu/reconstruction/filtering_opencl.py +2 -0
  38. nabu/reconstruction/mlem.py +47 -13
  39. nabu/reconstruction/tests/test_filtering.py +13 -2
  40. nabu/reconstruction/tests/test_mlem.py +91 -62
  41. nabu/resources/dataset_analyzer.py +144 -20
  42. nabu/resources/nxflatfield.py +101 -35
  43. nabu/resources/tests/test_nxflatfield.py +1 -1
  44. nabu/resources/utils.py +16 -10
  45. nabu/stitching/alignment.py +7 -7
  46. nabu/stitching/config.py +22 -20
  47. nabu/stitching/definitions.py +2 -2
  48. nabu/stitching/overlap.py +4 -4
  49. nabu/stitching/sample_normalization.py +5 -5
  50. nabu/stitching/stitcher/post_processing.py +5 -3
  51. nabu/stitching/stitcher/pre_processing.py +24 -20
  52. nabu/stitching/tests/test_config.py +3 -3
  53. nabu/stitching/tests/test_y_preprocessing_stitching.py +11 -8
  54. nabu/stitching/tests/test_z_postprocessing_stitching.py +2 -2
  55. nabu/stitching/tests/test_z_preprocessing_stitching.py +23 -20
  56. nabu/stitching/utils/utils.py +7 -7
  57. nabu/testutils.py +1 -4
  58. nabu/utils.py +13 -0
  59. {nabu-2025.1.0.dev14.dist-info → nabu-2025.1.0rc2.dist-info}/METADATA +3 -4
  60. {nabu-2025.1.0.dev14.dist-info → nabu-2025.1.0rc2.dist-info}/RECORD +64 -57
  61. {nabu-2025.1.0.dev14.dist-info → nabu-2025.1.0rc2.dist-info}/WHEEL +1 -1
  62. {nabu-2025.1.0.dev14.dist-info → nabu-2025.1.0rc2.dist-info}/entry_points.txt +2 -1
  63. nabu/app/correct_rot.py +0 -62
  64. {nabu-2025.1.0.dev14.dist-info → nabu-2025.1.0rc2.dist-info}/licenses/LICENSE +0 -0
  65. {nabu-2025.1.0.dev14.dist-info → nabu-2025.1.0rc2.dist-info}/top_level.txt +0 -0
nabu/io/cast_volume.py CHANGED
@@ -1,10 +1,12 @@
 import os
-
-from tomoscan.esrf.volume.singleframebase import VolumeSingleFrameBase
-from nabu.misc.utils import rescale_data
-from nabu.pipeline.params import files_formats
+import logging
+from glob import glob
+from shutil import rmtree
+import numpy
+from silx.io.utils import get_data
+from silx.io.url import DataUrl
 from tomoscan.volumebase import VolumeBase
-from tomoscan.scanbase import TomoScanBase
+from tomoscan.esrf.volume.singleframebase import VolumeSingleFrameBase
 from tomoscan.esrf.volume import (
     EDFVolume,
     HDF5Volume,
@@ -13,11 +15,10 @@ from tomoscan.esrf.volume import (
     TIFFVolume,
 )
 from tomoscan.io import HDF5File
-from silx.io.utils import get_data
-import numpy
-from silx.io.url import DataUrl
-from typing import Optional
-import logging
+from ..utils import first_generator_item
+from ..misc.utils import rescale_data
+from ..pipeline.params import files_formats
+from .reader import get_hdf5_file_all_virtual_sources, list_hdf5_entries

 _logger = logging.getLogger(__name__)

@@ -133,11 +134,12 @@ def cast_volume(
     output_data_type: numpy.dtype,
     data_min=None,
     data_max=None,
-    scan: Optional[TomoScanBase] = None,
+    scan=None,
     rescale_min_percentile=RESCALE_MIN_PERCENTILE,
     rescale_max_percentile=RESCALE_MAX_PERCENTILE,
     save=True,
     store=False,
+    remove_input_volume: bool = False,
 ) -> VolumeBase:
     """
     cast givent volume to output_volume of 'output_data_type' type
@@ -247,6 +249,11 @@ def cast_volume(
     except (OSError, KeyError):
         # if no metadata provided and or saved in disk or if some key are missing
         pass
+
+    if remove_input_volume:
+        _logger.info(f"Removing {input_volume.data_url.file_path()}")
+        remove_volume(input_volume, check=True)
+
     return output_volume


@@ -283,7 +290,7 @@ def clamp_and_rescale_data(
     return rescaled_data


-def find_histogram(volume: VolumeBase, scan: Optional[TomoScanBase] = None) -> Optional[DataUrl]:
+def find_histogram(volume: VolumeBase, scan=None):
     """
     Look for histogram of the provided url. If found one return the DataUrl of the nabu histogram
     """
@@ -330,7 +337,7 @@ def find_histogram(volume: VolumeBase, scan: Optional[TomoScanBase] = None) -> O
         data_path = getattr(scan, "entry/histogram/results/data", "entry/histogram/results/data")
     else:

-        def get_file_entries(file_path: str) -> Optional[tuple]:
+        def get_file_entries(file_path: str):
            if os.path.exists(file_path):
                with HDF5File(file_path, mode="r") as h5s:
                    return tuple(h5s.keys())
@@ -408,3 +415,83 @@ def _min_max_from_histo(url: DataUrl, rescale_min_percentile: int, rescale_max_p
     return _get_hst_saturations(
         hist, bins, numpy.float32(rescale_min_percentile), numpy.float32(rescale_max_percentile)
     )
+
+
+def _remove_volume_singleframe(volume, check=True):
+    volume_directory = volume.data_url.file_path()
+    if check:
+        volume_files = set(volume.browse_data_files())
+        files_names_pattern = os.path.join(volume_directory, "*." + volume.data_extension)
+        files_on_disk = set(glob(files_names_pattern))
+        # Don't check strict equality here, as some files on disk might be already removed.
+        # i.e, there should be no more files on disk than expected files in the volume
+        if not (files_on_disk.issubset(volume_files)):
+            raise RuntimeError(f"Unexpected files present in {volume_directory}: {files_on_disk - volume_files}")
+        # TODO also check for metadata file(s) ?
+    rmtree(volume_directory)
+
+
+def _remove_volume_multiframe(volume, check=True):
+    file_path = volume.data_url.file_path()
+    if check:
+        if not (os.path.isfile(file_path)):
+            raise RuntimeError(f"Expected a file: {file_path}")
+    os.remove(file_path)
+
+
+def _remove_volume_hdf5(volume, check=True):
+    file_path = volume.data_url.file_path()
+    entry = volume.data_url.data_path().lstrip("/").split("/")[0]
+
+    # Nabu HDF5 reconstructions have a folder alongside the HDF5 file, with the same prefix
+    # For example the HDF5 file "/path/to/rec.hdf5" has an associated directory "/path/to/rec"
+    associated_dir, _ = os.path.splitext(os.path.basename(file_path))
+    associated_dir_abs = os.path.join(os.path.dirname(file_path), associated_dir)
+
+    with HDF5File(file_path, "r") as f:
+        fdesc = f[entry]
+        virtual_sources = get_hdf5_file_all_virtual_sources(fdesc, return_only_filenames=True)
+
+    # TODO check if this is legitimate. Nabu reconstruction will only do one VS (for entry/reconstruction/results/data).
+    # Bliss/Lima do have multiple VS (flats/darks/projs), but we generally don't want to remove raw data ?
+    if len(virtual_sources) > 1:
+        raise ValueError("Found more than one virtual source - this looks weird. Interrupting.")
+    #
+    if len(virtual_sources) > 0:
+        h5path, virtual_source_files_paths = first_generator_item(virtual_sources[0].items())
+        if len(virtual_source_files_paths) == 1:
+            target_dir = os.path.dirname(virtual_source_files_paths[0])
+        else:
+            target_dir = os.path.commonpath(virtual_source_files_paths)
+        target_dir_abs = os.path.join(os.path.dirname(file_path), target_dir)
+        if check and (target_dir_abs != associated_dir_abs):
+            raise ValueError(
+                f"The virtual sources in {file_path}:{h5path} reference the directory {target_dir}, but expected was {associated_dir}"
+            )
+        if os.path.isdir(target_dir_abs):
+            rmtree(associated_dir_abs)
+    os.remove(file_path)
+
+
+def remove_volume(volume, check=True):
+    """
+    Remove files belonging to a volume, claim disk space.
+
+    Parameters
+    ----------
+    volume: tomoscan.esrf.volume
+        Volume object
+    check: bool, optional
+        Whether to check if the files that would be removed do not have extra other files ; interrupt the operation if so.
+
+    """
+    if isinstance(volume, (EDFVolume, JP2KVolume, TIFFVolume)):
+        _remove_volume_singleframe(volume, check=check)
+    elif isinstance(volume, MultiTIFFVolume):
+        _remove_volume_multiframe(volume, check=check)
+    elif isinstance(volume, HDF5Volume):
+        if len(list_hdf5_entries(volume.file_path)) > 1:
+            raise NotImplementedError("Removing a HDF5 volume with more than one entry is not supported")
+        _remove_volume_hdf5(volume, check=check)
+    else:
+        raise TypeError("Unknown type of volume")
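For context, a minimal sketch of how the new removal API could be used. It assumes cast_volume takes the input volume, output volume and target dtype as its first arguments (consistent with the hunks above); the paths are hypothetical:

    import numpy
    from tomoscan.esrf.volume import TIFFVolume
    from nabu.io.cast_volume import cast_volume, remove_volume

    # Cast a float32 reconstruction to uint16, then delete the input files
    input_vol = TIFFVolume(folder="/path/to/rec_float32", volume_basename="rec")
    output_vol = TIFFVolume(folder="/path/to/rec_uint16", volume_basename="rec")
    cast_volume(input_vol, output_vol, numpy.uint16, remove_input_volume=True)

    # A volume can also be removed directly; check=True aborts if the
    # target directory contains files not belonging to the volume
    remove_volume(TIFFVolume(folder="/path/to/old_rec", volume_basename="rec"), check=True)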
nabu/io/reader.py CHANGED
@@ -4,6 +4,7 @@ from math import ceil
 from multiprocessing.pool import ThreadPool
 from posixpath import sep as posix_sep, join as posix_join
 import numpy as np
+from h5py import Dataset
 from silx.io import get_data
 from silx.io.dictdump import h5todict
 from tomoscan.io import HDF5File
@@ -770,7 +771,7 @@ class NXDarksFlats:
         reduced_frames = [self._reduce_func[method](frames, axis=0) for frames in raw_frames]
         reader = getattr(self, "%s_reader" % what)
         if as_dict:
-            return {k: v for k, v in zip([s.start for s in reader._image_key_slices], reduced_frames)}  # noqa: C416
+            return {k: v for k, v in zip([s.start for s in reader._image_key_slices], reduced_frames)}
         return reduced_frames

    def get_raw_darks(self, force_reload=False, as_multiple_array=True):
@@ -987,6 +988,12 @@ def get_entry_from_h5_path(h5_path):
     return v[0] or v[1]


+def list_hdf5_entries(fname):
+    with HDF5File(fname, "r") as f:
+        entries = list(f.keys())
+    return entries
+
+
 def check_virtual_sources_exist(fname, data_path):
     with HDF5File(fname, "r") as f:
         if data_path not in f:
@@ -1006,6 +1013,30 @@ def check_virtual_sources_exist(fname, data_path):
     return True


+def get_hdf5_file_all_virtual_sources(file_path_or_obj, return_only_filenames=False):
+    result = []
+
+    def collect_vsources(name, obj):
+        if isinstance(obj, Dataset) and obj.is_virtual:
+            vs = obj.virtual_sources()
+            if return_only_filenames:
+                vs = [vs_.file_name for vs_ in vs]
+            result.append({name: vs})
+
+    _self_opened_file = False
+    if isinstance(file_path_or_obj, str):
+        fdesc = HDF5File(file_path_or_obj, "r")
+        _self_opened_file = True
+    else:
+        fdesc = file_path_or_obj
+
+    fdesc.visititems(collect_vsources)
+
+    if _self_opened_file:
+        fdesc.close()
+    return result
+
+
 def import_h5_to_dict(h5file, h5path, asarray=False):
     """
     Wrapper on top of silx.io.dictdump.dicttoh5 replacing "None" with None
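A short sketch of how the two new helpers fit together (the file name is hypothetical). Each element returned by get_hdf5_file_all_virtual_sources maps a dataset path to its virtual sources, or to plain file names when return_only_filenames=True:

    from nabu.io.reader import list_hdf5_entries, get_hdf5_file_all_virtual_sources

    fname = "/path/to/rec.hdf5"
    print(list_hdf5_entries(fname))  # e.g. ['entry']

    for item in get_hdf5_file_all_virtual_sources(fname, return_only_filenames=True):
        for dataset_path, source_files in item.items():
            print(dataset_path, source_files)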
nabu/io/tests/test_remove_volume.py ADDED
@@ -0,0 +1,152 @@
+from pathlib import Path
+from os import path, mkdir, rename
+import numpy as np
+import pytest
+from tomoscan.esrf.volume import (
+    EDFVolume,
+    HDF5Volume,
+    JP2KVolume,
+    MultiTIFFVolume,
+    TIFFVolume,
+)
+from tomoscan.esrf.volume.jp2kvolume import has_glymur
+from nabu.io.writer import merge_hdf5_files
+from nabu.io.cast_volume import remove_volume
+
+
+def test_remove_single_frame_volume(tmpdir):
+    """
+    Test volume removal for tiff, jp2 and EDF
+    """
+    # Have to use a not-too-small size because of jp2k
+    data = np.arange(10 * 40 * 50, dtype="f").reshape((10, 40, 50))
+
+    volume_classes = [EDFVolume, TIFFVolume, JP2KVolume]
+    if not (has_glymur):
+        volume_classes.pop()
+    for volume_cls in volume_classes:
+        ext = volume_cls.DEFAULT_DATA_EXTENSION
+        folder = path.join(tmpdir, f"{ext}_vol")
+        volume_basename = f"{ext}_basename"
+
+        vol_writer = volume_cls(folder=folder, volume_basename=volume_basename, overwrite=True, start_index=0)
+        vol_writer.data = data
+        vol_writer.save()
+
+        vol_reader = volume_cls(folder=folder, volume_basename=volume_basename)
+        assert path.isdir(folder), f"Expected to find a folder f{folder}"
+        remove_volume(vol_reader)
+        assert not (path.isdir(folder)), f"Expected to have removed the folder f{folder}"
+
+        vol_writer.save()
+        vol_reader = volume_cls(folder=folder, volume_basename=volume_basename)
+        Path(path.join(folder, f"unexpected.{ext}")).touch()
+        with pytest.raises(RuntimeError) as exc:
+            remove_volume(vol_reader, check=True)
+        assert "Unexpected files present" in str(exc.value), "Expected check to find extraneous files"
+
+
+def test_remove_multiframe_volume(tmpdir):
+    """
+    Test volume removal for "multiframe" formats (HDF5, tiff3D)
+    The HDF5 files considered in this test do not have virtual sources
+    """
+    data = np.arange(3 * 4 * 5, dtype="f").reshape((3, 4, 5))
+
+    for ext, volume_cls in {"h5": HDF5Volume, "tiff": MultiTIFFVolume}.items():
+        file_path = path.join(tmpdir, f"{ext}_vol.{ext}")
+
+        init_kwargs = {"file_path": file_path}
+        if ext == "h5":
+            init_kwargs["data_path"] = "entry"
+        vol_writer = volume_cls(**init_kwargs)
+        vol_writer.data = data
+        vol_writer.save()
+
+        vol_reader = volume_cls(**init_kwargs)
+        assert path.isfile(file_path), f"Expected to find a {ext} volume at {file_path}"
+        remove_volume(vol_reader)
+        assert not (path.isfile(file_path)), f"Expected to have removed f{file_path}"
+
+
+def test_remove_hdf5_multiple_entries(tmpdir):
+    data = np.arange(3 * 4 * 5, dtype="f").reshape((3, 4, 5))
+    file_path = path.join(tmpdir, "h5_vol.h5")
+    vol_writer_1 = HDF5Volume(file_path=file_path, data_path="entry0000")
+    vol_writer_1.data = data
+    vol_writer_1.save()
+    vol_writer_2 = HDF5Volume(file_path=file_path, data_path="entry0001")
+    vol_writer_2.data = data + 10
+    vol_writer_2.save()
+    vol_reader = HDF5Volume(file_path=file_path, data_path="entry0000")
+    with pytest.raises(NotImplementedError) as exc:
+        remove_volume(vol_reader, check=True)
+    assert "Removing a HDF5 volume with more than one entry is not supported" in str(
+        exc.value
+    ), "Expected an error message"
+
+
+def test_remove_nabu_hdf5_reconstruction(tmpdir):
+    """
+    Test removal of HDF5 reconstruction generated by nabu (i.e with virtual sources)
+    """
+
+    entry = "entry"
+    process_name = "reconstruction"
+
+    master_file_path = path.join(tmpdir, "sample_naburec.hdf5")
+    associated_dir = path.join(tmpdir, "sample_naburec")
+    if not (path.isdir(associated_dir)):
+        mkdir(associated_dir)
+
+    n_chunks = 5
+    local_files = []
+    for i in range(n_chunks):
+        fname = "sample_naburec_%06d.h5" % i
+        partial_rec_abspath = path.join(associated_dir, fname)
+        local_files.append(f"sample_naburec/{fname}")
+        # local_files.append(fname)
+        vol = HDF5Volume(partial_rec_abspath, data_path=f"{entry}/{process_name}")
+        vol.data = np.arange(3 * 4 * 5, dtype="f").reshape((3, 4, 5))
+        vol.save()
+
+    h5_path = f"{entry}/{process_name}/results/data"
+
+    merge_hdf5_files(
+        local_files,
+        h5_path,
+        master_file_path,
+        process_name,
+        output_entry=entry,
+        output_filemode="a",
+        processing_index=0,
+        config=None,
+        base_dir=path.dirname(associated_dir),
+        axis=0,
+        overwrite=True,
+    )
+
+    assert path.isfile(master_file_path), f"Expected to find the master file at {master_file_path}"
+    assert path.isdir(associated_dir)
+    for local_file in local_files:
+        partial_rec_file = path.join(tmpdir, local_file)
+        assert path.isfile(partial_rec_file), f"Expected to find partial file number {i} at {partial_rec_file}"
+
+    # Check that the virtual links are handled properly
+    # sample_rec.hdf5 should reference sample_rec/sample_rec_{i}.h5
+    renamed_master_file_path = (
+        path.join(path.dirname(master_file_path), path.basename(master_file_path).split(".")[0]) + "_renamed" + ".h5"
+    )
+    rename(master_file_path, renamed_master_file_path)
+    h5_vol = HDF5Volume(file_path=renamed_master_file_path, data_path=f"{entry}/{process_name}")
+    with pytest.raises(ValueError) as exc:
+        remove_volume(h5_vol)
+    expected_error_message = f"The virtual sources in {renamed_master_file_path}:{process_name}/results/data reference the directory sample_naburec, but expected was sample_naburec_renamed"
+    assert str(exc.value) == expected_error_message
+
+    # Check removal in normal circumstances
+    rename(renamed_master_file_path, master_file_path)
+    h5_vol = HDF5Volume(file_path=master_file_path, data_path=f"{entry}/{process_name}")
+    remove_volume(h5_vol)
+    assert not (path.isfile(master_file_path)), f"Expected to find the master file at {master_file_path}"
+    assert not (path.isdir(associated_dir))
nabu/pipeline/config_validators.py CHANGED
@@ -2,7 +2,7 @@
 import os

 path = os.path
-from ..utils import check_supported, is_writeable
+from ..utils import check_supported, deprecation_warning, is_writeable
 from .params import *  # noqa: F403

 """
@@ -97,12 +97,21 @@ def convert_to_bool_noerr(val):
     return res


-def name_range_checker(name, valid_names, descr, replacements=None):
+def name_range_checker(name, available_names, descr):
+    """
+    Check whether a parameter name is valid, against a list or dictionary of names.
+    """
     name = name.strip().lower()
-    if replacements is not None and name in replacements:
-        name = replacements[name]
-    valid = name in valid_names
-    assert valid, "Invalid %s '%s'. Available are %s" % (descr, name, str(valid_names))
+    valid = name in available_names
+    if isinstance(available_names, dict):
+        # handle replacements, eg. {"edge": "edges"}
+        name = available_names[name]
+        # we could use .keys() instead to be more permissive to the user
+        available_names_str = str(set(available_names.values()))
+    else:
+        # assuming list
+        available_names_str = str(available_names)
+    assert valid, "Invalid %s '%s'. Available are %s" % (descr, name, available_names_str)
     return name


@@ -338,9 +347,7 @@ def cor_validator(val):
         return val_float
     if len(val.strip()) == 0:
         return None
-    val = name_range_checker(
-        val.lower(), set(cor_methods.values()), "center of rotation estimation method", replacements=cor_methods
-    )
+    val = name_range_checker(val, cor_methods, "center of rotation estimation method")
     return val


@@ -351,9 +358,7 @@
         return val_float
     if len(val.strip()) == 0:
         return None
-    val = name_range_checker(
-        val.lower(), set(tilt_methods.values()), "automatic detector tilt estimation method", replacements=tilt_methods
-    )
+    val = name_range_checker(val, tilt_methods, "automatic detector tilt estimation method")
     return val


@@ -395,53 +400,55 @@ def cor_slice_validator(val):


 @validator
-def flatfield_enabled_validator(val):
-    return name_range_checker(val, set(flatfield_modes.values()), "flatfield mode", replacements=flatfield_modes)
+def flatfield_validator(val):
+    ret = name_range_checker(val, flatfield_modes, "flatfield mode")
+    if ret in ["force-load", "force-compute"]:
+        deprecation_warning(
+            f"Using 'flatfield = {ret}' is deprecated since version 2025.1.0. Please use the parameter 'flatfield_loading_mode'",
+        )
+    return ret
+
+
+@validator
+def flatfield_loading_mode_validator(val):
+    return name_range_checker(val, flatfield_loading_mode, "flatfield mode")


 @validator
 def phase_method_validator(val):
-    return name_range_checker(
-        val, set(phase_retrieval_methods.values()), "phase retrieval method", replacements=phase_retrieval_methods
-    )
+    return name_range_checker(val, phase_retrieval_methods, "phase retrieval method")


 @validator
 def detector_distortion_correction_validator(val):
     return name_range_checker(
         val,
-        set(detector_distortion_correction_methods.values()),
+        detector_distortion_correction_methods,
         "detector_distortion_correction_methods",
-        replacements=detector_distortion_correction_methods,
     )


 @validator
 def unsharp_method_validator(val):
-    return name_range_checker(
-        val, set(unsharp_methods.values()), "unsharp mask method", replacements=phase_retrieval_methods
-    )
+    return name_range_checker(val, unsharp_methods, "unsharp mask method")


 @validator
 def padding_mode_validator(val):
-    return name_range_checker(val, set(padding_modes.values()), "padding mode", replacements=padding_modes)
+    return name_range_checker(val, padding_modes, "padding mode")


 @validator
 def reconstruction_method_validator(val):
-    return name_range_checker(
-        val, set(reconstruction_methods.values()), "reconstruction method", replacements=reconstruction_methods
-    )
+    return name_range_checker(val, reconstruction_methods, "reconstruction method")


 @validator
 def fbp_filter_name_validator(val):
     return name_range_checker(
         val,
-        set(fbp_filters.values()),
+        fbp_filters,
         "FBP filter",
-        replacements=fbp_filters,
     )


@@ -449,29 +456,24 @@ def fbp_filter_name_validator(val):
 def reconstruction_implementation_validator(val):
     return name_range_checker(
         val,
-        set(reco_implementations.values()),
+        reco_implementations,
         "Reconstruction method implementation",
-        replacements=reco_implementations,
     )


 @validator
 def optimization_algorithm_name_validator(val):
-    return name_range_checker(
-        val, set(optim_algorithms.values()), "optimization algorithm name", replacements=iterative_methods
-    )
+    return name_range_checker(val, optim_algorithms, "optimization algorithm name")


 @validator
 def output_file_format_validator(val):
-    return name_range_checker(val, set(files_formats.values()), "output file format", replacements=files_formats)
+    return name_range_checker(val, files_formats, "output file format")


 @validator
 def distribution_method_validator(val):
-    val = name_range_checker(
-        val, set(distribution_methods.values()), "workload distribution method", replacements=distribution_methods
-    )
+    val = name_range_checker(val, distribution_methods, "workload distribution method")
     # TEMP.
     if val != "local":
         raise NotImplementedError("Computation method '%s' is not implemented yet" % val)
@@ -481,9 +483,7 @@ def distribution_method_validator(val):

 @validator
 def sino_normalization_validator(val):
-    val = name_range_checker(
-        val, set(sino_normalizations.values()), "sinogram normalization method", replacements=sino_normalizations
-    )
+    val = name_range_checker(val, sino_normalizations, "sinogram normalization method")
     return val


@@ -491,9 +491,8 @@ def sino_normalization_validator(val):
 def sino_deringer_methods(val):
     val = name_range_checker(
         val,
-        set(rings_methods.values()),
+        rings_methods,
         "sinogram rings artefacts correction method",
-        replacements=rings_methods,
     )
     return val

@@ -556,7 +555,7 @@ def nonempty_string_validator(val):

 @validator
 def logging_validator(val):
-    return name_range_checker(val, set(log_levels.values()), "logging level", replacements=log_levels)
+    return name_range_checker(val, log_levels, "logging level")


 @validator
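To illustrate the new signature of name_range_checker: with a list, only membership is checked; with a dict, the keys are the accepted spellings and the values the canonical names. A toy example (the names below are made up, not actual nabu parameters):

    name_range_checker("mean", ["mean", "median"], "reduction method")
    # -> "mean"

    padding = {"edge": "edges", "edges": "edges", "zeros": "zeros"}
    name_range_checker("edge", padding, "padding mode")
    # -> "edges"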