nabu 2022.3.0a1__py3-none-any.whl → 2023.1.0a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96) hide show
  1. nabu/__init__.py +1 -1
  2. nabu/app/bootstrap.py +7 -1
  3. nabu/app/cast_volume.py +8 -2
  4. nabu/app/cli_configs.py +69 -0
  5. nabu/app/composite_cor.py +97 -0
  6. nabu/app/create_distortion_map_from_poly.py +118 -0
  7. nabu/app/nx_z_splitter.py +1 -1
  8. nabu/app/prepare_weights_double.py +21 -16
  9. nabu/app/reconstruct_helical.py +0 -1
  10. nabu/app/utils.py +10 -5
  11. nabu/cuda/processing.py +1 -0
  12. nabu/cuda/tests/test_padding.py +1 -0
  13. nabu/cuda/utils.py +1 -0
  14. nabu/distributed/__init__.py +0 -0
  15. nabu/distributed/utils.py +57 -0
  16. nabu/distributed/worker.py +543 -0
  17. nabu/estimation/cor.py +3 -7
  18. nabu/estimation/cor_sino.py +2 -1
  19. nabu/estimation/distortion.py +6 -4
  20. nabu/io/cast_volume.py +10 -1
  21. nabu/io/detector_distortion.py +305 -0
  22. nabu/io/reader.py +37 -7
  23. nabu/io/reader_helical.py +0 -3
  24. nabu/io/tests/test_cast_volume.py +16 -4
  25. nabu/io/tests/test_detector_distortion.py +178 -0
  26. nabu/io/tests/test_writers.py +2 -2
  27. nabu/io/tiffwriter_zmm.py +2 -3
  28. nabu/io/writer.py +84 -1
  29. nabu/io/writer_BACKUP_193259.py +556 -0
  30. nabu/io/writer_BACKUP_193381.py +556 -0
  31. nabu/io/writer_BASE_193259.py +548 -0
  32. nabu/io/writer_BASE_193381.py +548 -0
  33. nabu/io/writer_LOCAL_193259.py +550 -0
  34. nabu/io/writer_LOCAL_193381.py +550 -0
  35. nabu/io/writer_REMOTE_193259.py +557 -0
  36. nabu/io/writer_REMOTE_193381.py +557 -0
  37. nabu/misc/fourier_filters.py +2 -0
  38. nabu/misc/rotation.py +0 -1
  39. nabu/misc/tests/test_rotation.py +1 -0
  40. nabu/pipeline/config_validators.py +10 -0
  41. nabu/pipeline/datadump.py +1 -1
  42. nabu/pipeline/dataset_validator.py +0 -1
  43. nabu/pipeline/detector_distortion_provider.py +20 -0
  44. nabu/pipeline/estimators.py +35 -21
  45. nabu/pipeline/fallback_utils.py +1 -1
  46. nabu/pipeline/fullfield/chunked.py +30 -15
  47. nabu/pipeline/fullfield/chunked_black.py +881 -0
  48. nabu/pipeline/fullfield/chunked_cuda.py +34 -4
  49. nabu/pipeline/fullfield/chunked_fb.py +966 -0
  50. nabu/pipeline/fullfield/chunked_google.py +921 -0
  51. nabu/pipeline/fullfield/chunked_pep8.py +920 -0
  52. nabu/pipeline/fullfield/computations.py +7 -6
  53. nabu/pipeline/fullfield/dataset_validator.py +1 -1
  54. nabu/pipeline/fullfield/grouped_cuda.py +6 -0
  55. nabu/pipeline/fullfield/nabu_config.py +15 -3
  56. nabu/pipeline/fullfield/processconfig.py +5 -0
  57. nabu/pipeline/fullfield/reconstruction.py +1 -2
  58. nabu/pipeline/helical/gridded_accumulator.py +1 -8
  59. nabu/pipeline/helical/helical_chunked_regridded.py +48 -33
  60. nabu/pipeline/helical/helical_reconstruction.py +1 -9
  61. nabu/pipeline/helical/nabu_config.py +11 -14
  62. nabu/pipeline/helical/span_strategy.py +11 -4
  63. nabu/pipeline/helical/tests/test_accumulator.py +0 -3
  64. nabu/pipeline/helical/tests/test_pipeline_elements_full.py +0 -6
  65. nabu/pipeline/helical/tests/test_strategy.py +0 -1
  66. nabu/pipeline/helical/weight_balancer.py +0 -1
  67. nabu/pipeline/params.py +4 -0
  68. nabu/pipeline/processconfig.py +6 -2
  69. nabu/pipeline/writer.py +9 -4
  70. nabu/preproc/distortion.py +4 -3
  71. nabu/preproc/double_flatfield.py +16 -4
  72. nabu/preproc/double_flatfield_cuda.py +3 -2
  73. nabu/preproc/double_flatfield_variable_region.py +13 -4
  74. nabu/preproc/flatfield.py +29 -7
  75. nabu/preproc/flatfield_cuda.py +0 -1
  76. nabu/preproc/flatfield_variable_region.py +5 -2
  77. nabu/preproc/phase.py +0 -1
  78. nabu/preproc/phase_cuda.py +0 -1
  79. nabu/preproc/tests/test_ctf.py +4 -3
  80. nabu/preproc/tests/test_flatfield.py +6 -7
  81. nabu/reconstruction/fbp_opencl.py +1 -1
  82. nabu/reconstruction/filtering.py +0 -1
  83. nabu/reconstruction/tests/test_fbp.py +1 -0
  84. nabu/resources/dataset_analyzer.py +0 -1
  85. nabu/resources/templates/bm05_pag.conf +34 -0
  86. nabu/resources/templates/id16_ctf.conf +2 -1
  87. nabu/resources/tests/test_nxflatfield.py +0 -1
  88. nabu/resources/tests/test_units.py +0 -1
  89. nabu/stitching/frame_composition.py +7 -1
  90. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/METADATA +2 -7
  91. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/RECORD +96 -75
  92. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/WHEEL +1 -1
  93. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/entry_points.txt +2 -1
  94. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/LICENSE +0 -0
  95. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/top_level.txt +0 -0
  96. {nabu-2022.3.0a1.dist-info → nabu-2023.1.0a2.dist-info}/zip-safe +0 -0
nabu/__init__.py CHANGED
@@ -1,4 +1,4 @@
1
- __version__ = "2022.3.0-a1"
1
+ __version__ = "2023.1.0-alpha2"
2
2
  __nabu_modules__ = [
3
3
  "app",
4
4
  "cuda",
nabu/app/bootstrap.py CHANGED
@@ -3,6 +3,7 @@ from glob import glob
3
3
  from ..utils import get_folder_path
4
4
  from ..pipeline.config import generate_nabu_configfile, parse_nabu_config_file
5
5
  from ..pipeline.fullfield.nabu_config import nabu_config as default_fullfield_config
6
+ from ..pipeline.helical.nabu_config import nabu_config as helical_fullfield_config
6
7
  from .utils import parse_params_values
7
8
  from .cli_configs import BootstrapConfig
8
9
 
@@ -46,9 +47,14 @@ def bootstrap():
46
47
  exit(1)
47
48
  prefilled_values["dataset"]["location"] = user_dataset
48
49
 
50
+ if args["helical"]:
51
+ my_config = helical_fullfield_config
52
+ else:
53
+ my_config = default_fullfield_config
54
+
49
55
  generate_nabu_configfile(
50
56
  args["output"],
51
- default_fullfield_config,
57
+ my_config,
52
58
  comments=not (no_comments),
53
59
  options_level=opts_level,
54
60
  prefilled_values=prefilled_values,
nabu/app/cast_volume.py CHANGED
@@ -27,8 +27,9 @@ from nabu.pipeline.params import files_formats
27
27
  _logger = logging.getLogger(__name__)
28
28
 
29
29
 
30
- def main(argv):
31
-
30
+ def main(argv=None):
31
+ if argv is None:
32
+ argv = sys.argv
32
33
  _volume_url_helps = "\n".join(
33
34
  [
34
35
  f"- {(volume.__name__).ljust(15)}: {volume.example_defined_from_str_identifier()}"
@@ -100,6 +101,11 @@ def main(argv):
100
101
 
101
102
  if os.path.exists(options.input_volume):
102
103
  volumes = guess_volumes(options.input_volume)
104
+
105
+ def is_not_histogram(vol_identifier):
106
+ return not (hasattr(vol_identifier, "data_path") and vol_identifier.data_path.endswith("histogram"))
107
+
108
+ volumes = tuple(filter(is_not_histogram, volumes))
103
109
  if len(volumes) == 0:
104
110
  _logger.error(f"no valid volume found in {options.input_volume}")
105
111
  exit(1)
nabu/app/cli_configs.py CHANGED
@@ -37,6 +37,7 @@ BootstrapConfig = {
37
37
  "help": "Use a template configuration file. Available are: id19_pag, id16_holo, id16_ctf. You can also define your own templates via the NABU_TEMPLATES_PATH environment variable.",
38
38
  "default": "",
39
39
  },
40
+ "helical": {"help": "Prepare configuration file for helical", "default": 0, "required": False, "type": int},
40
41
  }
41
42
 
42
43
  # Default configuration for "zsplit" command
@@ -352,3 +353,71 @@ ShrinkConfig = {
352
353
  "type": int,
353
354
  },
354
355
  }
356
+
357
+ CompositeCorConfig = {
358
+ "--filename_template": {
359
+ "required": True,
360
+ "help": """The filename template. It can optionally contain a segment equal to "X"*ndigits which will be replaced by the stage number if several stages are requested by the user""",
361
+ },
362
+ "--entry_name": {
363
+ "required": False,
364
+ "help": "Optional. The entry_name. It defaults to entry0000",
365
+ "default": "entry0000",
366
+ },
367
+ "--num_of_stages": {
368
+ "type": int,
369
+ "required": False,
370
+ "help": "Optional. How many stages. Example: from 0 to 43 -> --num_of_stages 44. It is optional. ",
371
+ },
372
+ "--oversampling": {
373
+ "type": int,
374
+ "default": 4,
375
+ "required": False,
376
+ "help": "Oversampling in the research of the axis position. Defaults to 4 ",
377
+ },
378
+ "--n_subsampling_y": {
379
+ "type": int,
380
+ "default": 10,
381
+ "required": False,
382
+ "help": "How many lines we are going to take from each radio. Defaults to 10.",
383
+ },
384
+ "--theta_interval": {
385
+ "type": float,
386
+ "default": 5,
387
+ "required": False,
388
+ "help": "Angular step for composing the image. Default to 5",
389
+ },
390
+ "--first_stage": {"type": int, "default": None, "required": False, "help": "Optional. The first stage. "},
391
+ "--output_file": {
392
+ "type": str,
393
+ "required": False,
394
+ "help": "Optional. Where the list of cors will be written. Default is the filename postixed with cors.txt",
395
+ },
396
+ "--cor_options": {
397
+ "type": str,
398
+ "help": """the cor_options string used by Nabu. Example
399
+ --cor_options "side='near'; near_pos = 300.0; near_width = 20.0"
400
+ """,
401
+ "required": True,
402
+ },
403
+ }
404
+
405
+ CreateDistortionMapHorizontallyMatchedFromPolyConfig = {
406
+ "--nz": {"type": int, "help": "vertical dimension of the detector", "required": True},
407
+ "--nx": {"type": int, "help": "horizontal dimension of the detector", "required": True},
408
+ "--center_z": {"type": float, "help": "vertical position of the optical center", "required": True},
409
+ "--center_x": {"type": float, "help": "horizontal position of the optical center", "required": True},
410
+ "--c4": {"type": float, "help": "order 4 coefficient", "required": True},
411
+ "--c2": {"type": float, "help": "order 2 coefficient", "required": True},
412
+ "--target_file": {"type": str, "help": "The map output filename", "required": True},
413
+ "--axis_pos": {
414
+ "type": float,
415
+ "default": None,
416
+ "help": "Optional argument. If given it will be corrected for use with the produced map. The value is printed, or given as return argument if the utility is used from a script",
417
+ "required": False,
418
+ },
419
+ "--loglevel": {
420
+ "help": "Logging level. Can be 'debug', 'info', 'warning', 'error'. Default is 'info'.",
421
+ "default": "info",
422
+ },
423
+ }
@@ -0,0 +1,97 @@
1
+ import logging
2
+ import os
3
+ import sys
4
+ import numpy as np
5
+ import re
6
+
7
+ from nabu.resources.dataset_analyzer import HDF5DatasetAnalyzer
8
+ from nabu.pipeline.estimators import CompositeCOREstimator
9
+ from nabu.resources.nxflatfield import update_dataset_info_flats_darks
10
+ from .. import version
11
+ from .cli_configs import CompositeCorConfig
12
+ from .utils import parse_params_values
13
+ from ..utils import DictToObj
14
+
15
+
16
+ def main(user_args=None):
17
+ "Application to extract with the composite cor finder the center of rotation for a scan or a series of scans"
18
+
19
+ if user_args is None:
20
+ user_args = sys.argv[1:]
21
+
22
+ args = DictToObj(
23
+ parse_params_values(
24
+ CompositeCorConfig,
25
+ parser_description=main.__doc__,
26
+ program_version="nabu " + version,
27
+ user_args=user_args,
28
+ )
29
+ )
30
+
31
+ if len(os.path.dirname(args.filename_template)) == 0:
32
+ # To make sure that other utility routines can succesfully deal with path within the current directory
33
+ args.filename_template = os.path.join(".", args.filename_template)
34
+
35
+ if args.first_stage is not None:
36
+ if args.num_of_stages is None:
37
+ args.num_of_stages = 1
38
+ # if the first_stage parameter has been given then
39
+ # we are using numbers to form the names of the files.
40
+ # The filename must containe a XX..X substring which will be replaced
41
+ pattern = re.compile("[X]+")
42
+ ps = pattern.findall(args.filename_template)
43
+ if len(ps) == 0:
44
+ message = f""" You have specified the "first_stage" parameter, with an integer.
45
+ Therefore the "filename_template" parameter is expected to containe a XX..X subsection
46
+ but none was found in the passed parameter which is {args.filename_template}
47
+ """
48
+ raise ValueError(message)
49
+ ls = list(map(len, ps))
50
+ idx = np.argmax(ls)
51
+
52
+ args.filename_template = args.filename_template.replace(ps[idx], "{i_stage:" + "0" + str(ls[idx]) + "d}")
53
+
54
+ if args.num_of_stages is None:
55
+ # this way it works also in the simple case where
56
+ # only the filename is provided together with the cor options
57
+ num_of_stages = 1
58
+ first_stage = 0
59
+ else:
60
+ num_of_stages = args.num_of_stages
61
+ first_stage = args.first_stage
62
+ cor_list = []
63
+ for iz in range(first_stage, first_stage + num_of_stages):
64
+ if args.num_of_stages is not None:
65
+ nexus_name = args.filename_template.format(i_stage=iz)
66
+ else:
67
+ nexus_name = args.filename_template
68
+
69
+ dataset_info = HDF5DatasetAnalyzer(nexus_name)
70
+
71
+ update_dataset_info_flats_darks(dataset_info, flatfield_mode=1)
72
+
73
+ cor_finder = CompositeCOREstimator(
74
+ dataset_info,
75
+ oversampling=args.oversampling,
76
+ theta_interval=args.theta_interval,
77
+ n_subsampling_y=args.n_subsampling_y,
78
+ take_log=True,
79
+ spike_threshold=0.04,
80
+ cor_options=args.cor_options,
81
+ )
82
+
83
+ cor_position = cor_finder.find_cor()
84
+
85
+ cor_list.append(cor_position)
86
+
87
+ cor_list = np.array(cor_list).T
88
+
89
+ if args.output_file is not None:
90
+ output_name = args.output_file
91
+ else:
92
+ output_name = os.path.splitext(args.filename_template)[0] + "_cors.txt"
93
+
94
+ np.savetxt(
95
+ output_name,
96
+ cor_list,
97
+ )
@@ -0,0 +1,118 @@
1
+ from .. import version
2
+ import numpy as np
3
+ import h5py
4
+ import argparse
5
+ import sys
6
+ from .cli_configs import CreateDistortionMapHorizontallyMatchedFromPolyConfig
7
+ from .utils import parse_params_values
8
+ from ..utils import DictToObj
9
+ from ..resources.logger import Logger, LoggerOrPrint
10
+
11
+
12
def horizontal_match(user_args=None):
    """This application builds two arrays. Let us call them map_x and map_z. Both are 2D arrays with shape given by (nz, nx).
    These maps are meant to be used to generate a corrected detector image, using them to obtain the pixel (i,j) of the corrected
    image by interpolating the raw data at position ( map_z(i,j), map_x(i,j) ).

    This map is determined by a user given polynomial P(rs) in the radial variable rs = sqrt( (z-center_z)**2 + (x-center_x)**2 ) / (nx/2)
    where center_z and center_x give the center around which the deformation is centered.

    The perfect position (zp,xp) , that would be observed on a perfect detector, of a photon observed at pixel (z,x) of the distorted detector is:

    (zp, xp) = (center_z, center_x) + P(rs) * ( z - center_z , x - center_x )

    The polynomial is given by P(rs) = rs *(1 + c2 * rs**2 + c4 * rs**4)

    The map is rescaled and reshifted so that a perfect match is realised at the borders of a horizontal line passing by the center. This ensures coerence
    with the procedure of pixel size calibration which is performed moving a needle horizontally and reading the motor positions at the extreme positions.

    The maps are written in the target file, creating it as hdf5 file, in the datasets

    "/coords_source_x"
    "/coords_source_z"

    The URLs of these two maps can be used for the detector correction of type "map_xz"
    in the nabu configuration file as in this example

    [dataset]
    ...
    detector_distortion_correction = map_xz
    detector_distortion_correction_options = map_x="silx:./map_coordinates.h5?path=/coords_source_x" ; map_z="silx:./map_coordinates.h5?path=/coords_source_z"

    """

    if user_args is None:
        user_args = sys.argv[1:]

    args = DictToObj(
        parse_params_values(
            CreateDistortionMapHorizontallyMatchedFromPolyConfig,
            parser_description=horizontal_match.__doc__,
            program_version="nabu " + version,
            user_args=user_args,
        )
    )
    logger = Logger("horizontal_match", level=args.loglevel, logfile="horizontal_match.log")

    nz, nx = args.nz, args.nx
    center_x, center_z = (args.center_x, args.center_z)

    c4 = args.c4
    c2 = args.c2

    # P(r) expressed in pixel units: r * (1 + c2*rs**2 + c4*rs**4) with
    # rs = r / (nx/2), built by composing with the change of variable below.
    quartic = np.poly1d([c4, 0, c2, 0, 1, 0.0])
    reduce_var = np.poly1d([1.0 / (nx / 2), 0])
    poly = nx / 2 * quartic(reduce_var)

    left_border = 0 - center_x
    right_border = nx - 1 - center_x

    def affine_match(lb, rb, pp):
        # Affine rescaling/shift that makes pp agree with the identity
        # at the two horizontal borders.
        p_left = pp(lb)
        p_right = pp(rb)

        rescaling = (p_right - p_left) / (rb - lb)
        shift = -lb * rescaling + p_left
        return rescaling, shift

    final_grid_rescaling, final_grid_shift = affine_match(left_border, right_border, poly)

    coords_z, coords_x = np.indices([nz, nx])

    coords_z = ((coords_z - center_z) * final_grid_rescaling).astype("d")
    coords_x = ((coords_x - center_x) * final_grid_rescaling + final_grid_shift).astype("d")

    r_target = np.sqrt(coords_z * coords_z + coords_x * coords_x)

    # Newton iterations: solve poly(r_source) == r_target for r_source,
    # starting from the target radii themselves.
    r_source = r_target

    poly_prime = poly.deriv()

    for _ in range(10):
        residual = poly(r_source) - r_target
        slope = poly_prime(r_source)
        r_source = r_source - residual / slope

    # avoid 0/0 at the optical center
    r_source[r_target < 1] = 1
    r_target[r_target < 1] = 1

    coords_source_z = coords_z * r_source / r_target + center_z
    coords_source_x = coords_x * r_source / r_target + center_x

    with h5py.File(args.target_file, "w") as f:
        f["coords_source_x"] = coords_source_x
        f["coords_source_z"] = coords_source_z

    if args.axis_pos is None:
        return None

    # Correct the rotation-axis position through the same forward map.
    coord_axis = args.axis_pos - center_x
    new_pos = (poly(coord_axis) - final_grid_shift) / final_grid_rescaling + center_x
    logger.info("New axis position at %e it was previously %e " % (new_pos, args.axis_pos))

    return new_pos
nabu/app/nx_z_splitter.py CHANGED
@@ -89,7 +89,7 @@ class NXZSplitter:
89
89
  masks = [(z_transl == z) for z in different_z]
90
90
  for i_z, mask in enumerate(masks):
91
91
  fname_curr_z = path.join(
92
- self.output_dir, path.splitext(path.basename(self.fname))[0] + str("_%04d" % i_z) + self._ext
92
+ self.output_dir, path.splitext(path.basename(self.fname))[0] + str("_%06d" % i_z) + self._ext
93
93
  )
94
94
  self.logger.info("Creating %s" % fname_curr_z)
95
95
  copy_file(self.fname, fname_curr_z)
@@ -5,12 +5,13 @@ from scipy.special import erf
5
5
  import sys
6
6
  import os
7
7
  from scipy.ndimage import gaussian_filter
8
+ from tomoscan.esrf.scan.hdf5scan import ImageKey, HDF5TomoScan
8
9
 
9
10
 
10
11
  def main():
11
12
  """auxiliary program that can be called to create default input detector profiles, for nabu helical,
12
13
  concerning the weights of the pixels and the "double flat" renormalisation denominator.
13
- The result is an hdf5 file that can be used as a "processes" nabu file and is used by nabu-helical.
14
+ The result is an hdf5 file that can be used as a "processes_file" in the nabu configuration and is used by nabu-helical.
14
15
  In particulars cases the user may have fancy masks and correction map and will provide its own processes file,
15
16
  and will not need this.
16
17
 
@@ -27,32 +28,35 @@ def main():
27
28
  Then the resulting file can be used as processes file in the configuration file of nabu-helical
28
29
 
29
30
  """
30
- if len(sys.argv) not in [3, 4]:
31
+ if len(sys.argv) not in [3, 4, 5]:
31
32
  message = f""" Usage:
32
- {os.path.basename(sys.argv[0])} nexus_file_name entry_name [target file name]
33
+ {os.path.basename(sys.argv[0])} nexus_file_name entry_name [target_file name [transition_width]]
33
34
  """
34
35
  print(message)
35
36
  sys.exit(1)
36
37
 
37
38
  file_name = sys.argv[1]
38
39
  entry_name = sys.argv[2]
39
- if len(sys.argv) != 4:
40
- process_file_name = "double.h5"
41
- else:
40
+ process_file_name = "double.h5"
41
+ transition_width = 50.0
42
+
43
+ if len(sys.argv) in [4, 5]:
42
44
  process_file_name = sys.argv[3]
45
+ if len(sys.argv) in [5]:
46
+ transition_width = float(sys.argv[4])
43
47
 
44
- f = h5py.File(file_name, "r")
48
+ scan = HDF5TomoScan(file_name, entry_name)
49
+ scan_flats, metadata_flats = scan.load_reduced_flats(return_info=True)
45
50
 
46
- im_keys = f[entry_name]["data/image_key"][()]
47
- which_slices = list(np.where(np.equal(im_keys, 1))[0])
48
- mappe = np.array([f[entry_name]["data/data"][i] for i in which_slices])
49
- mappe = mappe.sum(0)
50
- f.close()
51
+ mappe = 0
52
+ for key, flat in scan_flats.items():
53
+ mappe += flat
54
+ mappe = mappe / len(list(scan_flats.keys()))
51
55
 
52
- create_heli_maps(mappe, process_file_name, entry_name)
56
+ create_heli_maps(mappe, process_file_name, entry_name, transition_width)
53
57
 
54
58
 
55
- def create_heli_maps(profile, process_file_name, entry_name):
59
+ def create_heli_maps(profile, process_file_name, entry_name, transition_width):
56
60
  profile = profile / profile.max()
57
61
  profile = profile.astype("f")
58
62
 
@@ -64,7 +68,6 @@ def create_heli_maps(profile, process_file_name, entry_name):
64
68
  fd = h5py.File(process_file_name, "w")
65
69
 
66
70
  def f(L, m, w):
67
-
68
71
  x = np.arange(L)
69
72
 
70
73
  d = (x - L + m).astype("f")
@@ -77,6 +80,8 @@ def create_heli_maps(profile, process_file_name, entry_name):
77
80
 
78
81
  border = f(profile.shape[1], 20, 13.33)
79
82
 
83
+ border_v = f(profile.shape[0], int(round(transition_width / 2)), transition_width / 4)
84
+
80
85
  path_weights = entry_name + "/weights_field/results/data"
81
86
  path_double = entry_name + "/double_flatfield/results/data"
82
87
 
@@ -85,5 +90,5 @@ def create_heli_maps(profile, process_file_name, entry_name):
85
90
  if path_double in fd:
86
91
  del fd[path_double]
87
92
 
88
- fd[path_weights] = profile * border
93
+ fd[path_weights] = (profile * border) * border_v[:, None]
89
94
  fd[path_double] = np.ones_like(profile)
@@ -64,7 +64,6 @@ def main_helical():
64
64
  Nx, Ny = proc.dataset_info.radio_dims
65
65
 
66
66
  if proc.nabu_config["reconstruction"]["auto_size"]:
67
-
68
67
  if 2 * rot_center > Nx:
69
68
  w = int(round(2 * rot_center))
70
69
  else:
nabu/app/utils.py CHANGED
@@ -1,16 +1,21 @@
1
1
  from argparse import ArgumentParser
2
2
 
3
3
 
4
- def parse_params_values(Params, parser_description=None, program_version=None):
4
+ def parse_params_values(Params, parser_description=None, program_version=None, user_args=None):
5
5
  parser = ArgumentParser(description=parser_description)
6
6
  for param_name, vals in Params.items():
7
- optional = not (vals.pop("mandatory", False))
8
- if optional:
9
- param_name = "--" + param_name
7
+ if param_name[0] != "-":
8
+ # It would be better to use "required" and not to pop it.
9
+ # required is an accepted keyword for argparse
10
+ optional = not (vals.pop("mandatory", False))
11
+ if optional:
12
+ param_name = "--" + param_name
10
13
  parser.add_argument(param_name, **vals)
11
14
  if program_version is not None:
12
15
  parser.add_argument("--version", "-V", action="version", version=program_version)
13
- args = parser.parse_args()
16
+
17
+ args = parser.parse_args(args=user_args)
18
+
14
19
  args_dict = args.__dict__
15
20
  return args_dict
16
21
 
nabu/cuda/processing.py CHANGED
@@ -7,6 +7,7 @@ if __has_pycuda__:
7
7
 
8
8
  dev_attrs = cuda.device_attribute
9
9
 
10
+
10
11
  # NB: we must detach from a context before creating another context
11
12
  class CudaProcessing:
12
13
  def __init__(self, device_id=None, ctx=None, stream=None, cleanup_at_exit=True):
@@ -18,6 +18,7 @@ scenarios_legacy = [
18
18
  },
19
19
  ]
20
20
 
21
+
21
22
  # parametrize with fixture and "params=" will launch a new class for each scenario.
22
23
  # the attributes set to "cls" will remain for all the tests done in this class
23
24
  # with the current scenario.
nabu/cuda/utils.py CHANGED
@@ -45,6 +45,7 @@ def get_cuda_context(device_id=None, cleanup_at_exit=True):
45
45
  # Unlike Context.make_context(), the newly-created context is not made current.
46
46
  context = cuda.Device(device_id).retain_primary_context()
47
47
  context.push()
48
+
48
49
  # Register a clean-up function at exit
49
50
  def _finish_up(context):
50
51
  if context is not None:
File without changes
@@ -0,0 +1,57 @@
1
+ from os import getpid
2
+ from datetime import datetime
3
+ from socket import gethostname
4
+
5
+
6
# Default formatter
FORMATTER = "[{hostname}/{pid}/{worker}] {day}/{month}/{year} {hour}:{min}:{sec} {message}"


def format_message(message, worker_name=None, replacements=None):
    """
    Format a message with additional information.
    The formatting is defined by `nabu.distributed.utils.FORMATTER`.

    Parameters
    -----------
    message: str
        Message to log.
    worker_name: str, optional
        Name of the current worker.
    replacements: dict, optional
        Dictionary of additional patterns that should be replaced in the
        formatted result.

    Returns
    -------
    str
        The formatted message.
    """
    now = datetime.now()
    # poor man's templating engine
    formatters = {
        "hostname": gethostname(),
        "pid": getpid(),
        "worker": worker_name if worker_name is not None else "",
        "day": now.strftime("%d"),
        "month": now.strftime("%m"),
        "year": now.strftime("%Y"),
        "hour": now.strftime("%H"),
        "min": now.strftime("%M"),
        "sec": now.strftime("%S"),
        "message": message,
    }
    # Substitute whole "{key}" tokens. The previous implementation stripped the
    # braces and then did bare substring replacement, which corrupted any value
    # that happened to contain a later key (e.g. a hostname like "worker01"
    # lost its "worker" part when the "worker" key was processed).
    result = FORMATTER
    for fmt_key, fmt_val in formatters.items():
        result = result.replace("{" + fmt_key + "}", str(fmt_val))
    if replacements is not None:
        for what, replacement in replacements.items():
            result = result.replace(what, replacement)
    return result
48
+
49
+
50
def log(message):
    """
    Log a message using the current formatter.
    """
    formatted = format_message(message)
    print(formatted)
55
+
56
+
57
+