ewoksid02 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. ewoksid02/__init__.py +0 -0
  2. ewoksid02/ocl/__init__.py +0 -0
  3. ewoksid02/resources/__init__.py +8 -0
  4. ewoksid02/resources/saxs_loop.json +96 -0
  5. ewoksid02/resources/template_saxs.yaml +37 -0
  6. ewoksid02/scripts/__init__.py +0 -0
  7. ewoksid02/scripts/__main__.py +70 -0
  8. ewoksid02/scripts/parsers.py +224 -0
  9. ewoksid02/scripts/saxs/__init__.py +0 -0
  10. ewoksid02/scripts/saxs/main.py +255 -0
  11. ewoksid02/scripts/saxs/slurm_python_post_script.py +3 -0
  12. ewoksid02/scripts/saxs/slurm_python_pre_script.py +5 -0
  13. ewoksid02/scripts/utils.py +21 -0
  14. ewoksid02/scripts/xpcs/__init__.py +0 -0
  15. ewoksid02/scripts/xpcs/__main__.py +3 -0
  16. ewoksid02/tasks/__init__.py +7 -0
  17. ewoksid02/tasks/averagetask.py +179 -0
  18. ewoksid02/tasks/azimuthaltask.py +272 -0
  19. ewoksid02/tasks/cavingtask.py +170 -0
  20. ewoksid02/tasks/dahuprocessingtask.py +71 -0
  21. ewoksid02/tasks/end.py +35 -0
  22. ewoksid02/tasks/id02processingtask.py +2582 -0
  23. ewoksid02/tasks/looptask.py +672 -0
  24. ewoksid02/tasks/metadatatask.py +879 -0
  25. ewoksid02/tasks/normalizationtask.py +204 -0
  26. ewoksid02/tasks/scalerstask.py +46 -0
  27. ewoksid02/tasks/secondaryscatteringtask.py +159 -0
  28. ewoksid02/tasks/sumtask.py +45 -0
  29. ewoksid02/tests/__init__.py +3 -0
  30. ewoksid02/tests/conftest.py +639 -0
  31. ewoksid02/tests/debug.py +64 -0
  32. ewoksid02/tests/test_2scat_node.py +119 -0
  33. ewoksid02/tests/test_ave_node.py +106 -0
  34. ewoksid02/tests/test_azim_node.py +89 -0
  35. ewoksid02/tests/test_cave_node.py +118 -0
  36. ewoksid02/tests/test_norm_node.py +190 -0
  37. ewoksid02/tests/test_saxs.py +69 -0
  38. ewoksid02/tests/test_sumtask.py +10 -0
  39. ewoksid02/tests/utils.py +514 -0
  40. ewoksid02/utils/__init__.py +22 -0
  41. ewoksid02/utils/average.py +158 -0
  42. ewoksid02/utils/blissdata.py +1157 -0
  43. ewoksid02/utils/caving.py +851 -0
  44. ewoksid02/utils/cupyutils.py +42 -0
  45. ewoksid02/utils/io.py +722 -0
  46. ewoksid02/utils/normalization.py +804 -0
  47. ewoksid02/utils/pyfai.py +424 -0
  48. ewoksid02/utils/secondaryscattering.py +597 -0
  49. ewoksid02-0.1.0.dist-info/METADATA +76 -0
  50. ewoksid02-0.1.0.dist-info/RECORD +54 -0
  51. ewoksid02-0.1.0.dist-info/WHEEL +5 -0
  52. ewoksid02-0.1.0.dist-info/entry_points.txt +5 -0
  53. ewoksid02-0.1.0.dist-info/licenses/LICENSE.md +20 -0
  54. ewoksid02-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,204 @@
1
+ from contextlib import ExitStack
2
+ from ewoksid02.tasks.id02processingtask import ID02ProcessingTask
3
+ from ewoksid02.utils.normalization import (
4
+ normalize_dataset,
5
+ )
6
+ from ewoksid02.utils.pyfai import get_persistent_azimuthal_integrator
7
+
8
+
9
class NormalizationTask(
    ID02ProcessingTask,
    optional_input_names=[
        "filename_mask",
        "filename_dark",
        "filename_flat",
        "Dummy",
        "DDummy",
        "NormalizationFactor",
        "polarization_factor",
        "polarization_axis_offset",
        "Center_1",
        "Center_2",
        "PSize_1",
        "PSize_2",
        "BSize_1",
        "BSize_2",
        "SampleDistance",
        "WaveLength",
        "DetectorRotation_1",
        "DetectorRotation_2",
        "DetectorRotation_3",
        "pin_monitor",
        "header_pin_monitor",
        "variance_formula",
        "algorithm",
        "dark_filter",
        "dark_filter_quantil_lower",
        "dark_filter_quantil_upper",
    ],
):
    """The `NormalizationTask` class is responsible for normalizing datasets in the ID02 SAXS pipeline.

    It extends the `ID02ProcessingTask` class and provides additional functionality to apply a standard pyFAI normalization:
    - Methods to read monitor values from the metadata file or from blissdata
    - Methods to read normalization parameters from the metadata file or from the headers
    - Methods to cache pyFAI azimuthal integrator and apply normalization
    - Applies corrections such as masking, dark frame subtraction, flat field correction, and polarization adjustments.

    Optional Inputs:
    - filename_mask (str): Path to the mask file for correcting detector gaps or bad pixels.
    - filename_dark (str): Path to the file for dark current correction.
    - filename_flat (str): Path to the file for flat field correction.
    - Dummy (float): Value to replace invalid pixels in the dataset.
    - DDummy (float): Tolerance for dummy pixel replacement.
    - NormalizationFactor (float): Factor for normalizing the dataset.
    - polarization_factor (float): Factor for polarization correction.
    - polarization_axis_offset (float): Axis for polarization correction.
    - Center_1 (float): Beam center in the first dimension.
    - Center_2 (float): Beam center in the second dimension.
    - PSize_1 (float): Pixel size 1.
    - PSize_2 (float): Pixel size 2.
    - BSize_1 (float): Pixel binning 1.
    - BSize_2 (float): Pixel binning 2.
    - SampleDistance (float): Sample to detector distance in meters.
    - WaveLength (float): Wavelength of beam in meters.
    - DetectorRotation_1 (float): rot2 of pyFAI.
    - DetectorRotation_2 (float): rot1 of pyFAI.
    - DetectorRotation_3 (float): rot3 of pyFAI.
    - pin_monitor (str): Pin to the monitor stream.
    - header_pin_monitor (str): Header key used to monitor values.
    - variance_formula (str): Formula for calculating variance in the dataset.
    - algorithm (str): Implementation to perform the normalization (cython or cupy).
    - dark_filter (str): Filter to use in case the dark-current file is a multi-frame scan file.
    - dark_filter_quantil_lower (str): In case the dark_filter is quantil, lower limit.
    - dark_filter_quantil_upper (str): In case the dark_filter is quantil, upper limit.
    """

    def run(self):
        # Delegate the full task life cycle to the base class, tagging this
        # task's output tree as the "norm" processing type.
        super().run(processing_type="norm")

    def get_processing_inputs(self) -> dict:
        """Collect every keyword argument needed by ``normalize_dataset``.

        Parameters are resolved through the base class (``get_parameter`` /
        ``get_input_value``), which — per the class docstring — falls back to
        the metadata file or headers when no explicit input was given.

        Returns:
            dict: Keyword arguments for ``normalize_dataset``, including the
            cached pyFAI azimuthal integrator and the per-frame monitor values.
        """
        monitor_values = self.get_monitor_1_values()

        # Geometry parameters, grouped so they can be forwarded both to the
        # integrator factory and (via **) to normalize_dataset below.
        params_azimuthalintegrator = {
            "Center_1": self.get_parameter("Center_1"),
            "Center_2": self.get_parameter("Center_2"),
            "PSize_1": self.get_parameter("PSize_1"),
            "PSize_2": self.get_parameter("PSize_2"),
            "SampleDistance": self.get_parameter("SampleDistance"),
            "WaveLength": self.get_parameter("WaveLength"),
            "BSize_1": self.get_parameter("BSize_1"),
            "BSize_2": self.get_parameter("BSize_2"),
            "DetectorRotation_1": self.get_parameter("DetectorRotation_1"),
            "DetectorRotation_2": self.get_parameter("DetectorRotation_2"),
            "DetectorRotation_3": self.get_parameter("DetectorRotation_3"),
        }

        # The integrator is cached ("persistent") and keyed on the frame
        # shape plus the geometry, so repeated loop iterations reuse it.
        azimuthal_integrator = get_persistent_azimuthal_integrator(
            data_signal_shape=self.dataset_signal[0].shape,
            **params_azimuthalintegrator,
        )

        params_normalization = {
            "filename_mask": self.get_input_value(
                "filename_mask",
                self.get_mask_gaps_filename(),
            ),
            "filename_dark": self.get_input_value(
                "filename_dark",
                self.get_dark_filename(),
            ),
            "filename_flat": self.get_input_value(
                "filename_flat",
                self.get_flat_filename(),
            ),
            "Dummy": self.get_parameter("Dummy"),
            "DDummy": self.get_parameter("DDummy"),
            "variance_formula": self.get_parameter("variance_formula"),
            "binning": (
                self.get_parameter("BSize_1", to_integer=True),
                self.get_parameter("BSize_2", to_integer=True),
            ),
            "algorithm": self.get_input_value("algorithm", "cython"),
            # NOTE(review): "datatype" is not declared in optional_input_names
            # above — confirm the base class accepts undeclared input names.
            "datatype": self.get_input_value("datatype", "float32"),
            "monitor_values": monitor_values,
            "azimuthal_integrator": azimuthal_integrator,
            "NormalizationFactor": self.get_parameter("NormalizationFactor"),
            **params_azimuthalintegrator,
        }

        # Polarization correction is enabled either by an explicit factor or
        # by a "do_polarization" flag.
        # NOTE(review): "do_polarization" is not in optional_input_names —
        # presumably resolved from headers/metadata by get_parameter; verify.
        do_polarization = self.get_parameter(
            "polarization_factor"
        ) or self.get_parameter("do_polarization")
        if do_polarization:
            params_normalization["polarization_factor"] = self.get_parameter(
                "polarization_factor"
            )
            params_normalization["polarization_axis_offset"] = self.get_parameter(
                "polarization_axis_offset", to_integer=True
            )

        # Dark-filter options only make sense when a dark file is available.
        # NOTE(review): the keyword "defaut" (three calls below) looks like a
        # typo for "default" — confirm against the signature of
        # ID02ProcessingTask.get_parameter before renaming.
        if params_normalization.get("filename_dark"):
            params_normalization["dark_filter"] = self.get_parameter(
                "dark_filter", defaut="quantil"
            )
            params_normalization["dark_filter_quantil_lower"] = self.get_parameter(
                "dark_filter_quantil_lower", defaut=0.1
            )
            params_normalization["dark_filter_quantil_upper"] = self.get_parameter(
                "dark_filter_quantil_upper", defaut=0.9
            )

        return params_normalization

    def process(self) -> None:
        """Normalize ``self.dataset_signal`` and publish the three outputs.

        Skips silently when the base class vetoes processing or when there
        are no frames to process.
        """
        do_process = super().process()
        # Explicit identity check: only an explicit False veto aborts
        # (a None return from the base class still allows processing).
        if do_process is False:
            return

        with ExitStack() as stack:
            if len(self.dataset_signal) == 0:
                return

            # Benchmark is entered on the stack so it is closed even if
            # normalization raises.
            self.bench_process = self.Benchmark(
                nb_frames=len(self.dataset_signal), benchmark_name="processing"
            )
            stack.enter_context(self.bench_process)

            processing_params = self.get_processing_inputs()
            # Kept on the instance so save()/metadata writers can record them.
            self.processing_params = processing_params

            self.log_info("Performing normalization...")

            (
                dataset_signal_normalized,
                dataset_variance_normalized,
                dataset_sigma_normalized,
            ) = normalize_dataset(
                dataset_signal=self.dataset_signal,
                **processing_params,
            )
            self.log_debug("Processing done")
            self.outputs.dataset_signal = dataset_signal_normalized
            self.outputs.dataset_variance = dataset_variance_normalized
            self.outputs.dataset_sigma = dataset_sigma_normalized

        self._log_benchmark(self.bench_process)

    def save(self) -> None:
        """Persist results; the actual writing is done by the base class."""
        do_save = super().save()
        # NOTE(review): nothing happens after the guard — this override only
        # normalizes the early-return; confirm that is intentional.
        if not do_save:
            return

    def get_monitor_1_values(
        self,
    ):
        """Generic method to read monitor_values online (from blissdata streams)
        or offline (from a group in the scalers file)

        Returns the monitor values for the frame range currently being
        processed (``self.index_range_last``).
        """
        stream_monitor_info = self.get_stream_monitor_1()
        # _read_from_stream returns a triple; only the values are needed here.
        monitor_values, _, __ = self._read_from_stream(
            stream_object=stream_monitor_info.get("stream"),
            slice_init=self.index_range_last[0],
            slice_end=self.index_range_last[-1],
        )
        return monitor_values
@@ -0,0 +1,46 @@
1
+ import threading
2
+ from contextlib import ExitStack
3
+
4
+ from ewoksid02.tasks.id02processingtask import ID02ProcessingTask
5
+
6
# Module-wide lock: ScalersTask.save() enters it before appending to the
# processing file, serializing concurrent writers within this process.
lock = threading.Lock()
7
+
8
+
9
class ScalersTask(
    ID02ProcessingTask,
):
    """The `ScalersTask` class is responsible for creating a scalers file, that contains a large chunk of the metadata for reprocessing.
    It extends the `ID02ProcessingTask` class and provides additional functionality to apply a standard pyFAI normalization:
    - Methods to read monitor values from the metadata file or from blissdata
    - Methods to read normalization parameters from the metadata file or from the headers
    - Methods to cache pyFAI azimuthal integrator and apply normalization
    """

    def run(self):
        # Full life cycle is handled by the base class under the "scalers" tag.
        super().run(processing_type="scalers")

    def process(self) -> None:
        # This task performs no per-frame computation; all of its work
        # happens at save time.
        return

    def save(
        self,
    ):
        """Write the scalers/metadata file when saving is enabled."""
        if not self.do_save:
            return

        with ExitStack() as exit_stack:
            self._set_output_variables()
            self._create_processing_file()

            # Bail out if file creation failed or saving was disabled meanwhile.
            if not (self.processing_filename and self.do_save):
                return

            # Serialize writers, then time the metadata append.
            exit_stack.enter_context(lock)
            benchmark = self.Benchmark(
                nb_frames=len(self.dataset_signal), benchmark_name="saving"
            )
            exit_stack.enter_context(benchmark)
            self.processing_params = {}
            # The ExitStack itself is handed over so the writer can register
            # further resources on it.
            self._update_id02_metadata(exit_stack)
@@ -0,0 +1,159 @@
1
+ from contextlib import ExitStack
2
+
3
+ import numexpr
4
+ from silx.io.h5py_utils import open_item as open_item_silx
5
+
6
+ from ewoksid02.tasks.id02processingtask import ID02ProcessingTask
7
+ from ewoksid02.utils.secondaryscattering import process_dataset_2scat
8
+
9
# Default half-size (in pixels) of the ROI around the window scattering.
# NOTE(review): this constant is not referenced in the visible code of this
# module — confirm it is (still) used, e.g. as the WindowRoiSize fallback.
DEFAULT_WINDOW_ROI_SIZE = 120
# Default backend for the secondary-scattering correction ("numpy" or "cupy").
DEFAULT_ALGORITHM = "numpy"
11
+
12
+
13
class SecondaryScatteringTask(
    ID02ProcessingTask,
    optional_input_names=[
        "filename_window_wagon",
        "WindowRoiSize",
        "Dummy",
        "Center_1",
        "Center_2",
        "algorithm",
        "pre_caving",
        "filename_mask_static",
        "filename_mask_reference",
        "flip_caving",
        "save_secondary_scattering",
        "BSize_1",
        "BSize_2",
    ],
    output_names=[
        "secondary_scattering",
    ],
):
    """The `SecondaryScatteringTask` class is responsible for calculating and correcting the scattering coming from
    the window that separates the wagon from the flying tube.
    It can be used to correct any scattering source that is close to the detector.

    Optional Inputs:
    - filename_window_wagon (str): Path to the mask file used for defining the scattering window WAXS pattern.
    - WindowRoiSize (float): Distance parameter for subdata extraction during secondary scattering correction.
    - Dummy (float): Value to perform a pre-caving step (to mask the detector gaps)
    - Center_1 (float): Beam center in the first dimension.
    - Center_2 (float): Beam center in the second dimension.
    - algorithm (str): Implementation to perform the secondary scattering correction and pre-caving (numpy or cupy).
    - pre_caving (bool): To perform a caving step before the correction.
    - filename_mask_static (str): Path to the mask file used for the caving operation.
    - filename_mask_reference (str): Path to the reference mask file (kind of a negative mask).
    - flip_caving (bool): Cave the image with its flipped projection, both horizontal and vertical. WARNING: it is physically not correct!
    - save_secondary_scattering (bool): Flag to save the secondary scattering dataset. Default is `False`.
    - BSize_1 (float): Pixel binning 1.
    - BSize_2 (float): Pixel binning 2.
    Outputs:
    - secondary_scattering (numpy.ndarray): Dataset with the calculated secondary scattering.
    """

    def run(self):
        # Delegate the life cycle to the base class under the "2scat" tag.
        super().run(processing_type="2scat")

    def get_processing_inputs(self) -> dict:
        """Collect every keyword argument needed by ``process_dataset_2scat``.

        Values come from explicit task inputs when given, otherwise from the
        base-class parameter/metadata helpers.

        Returns:
            dict: Keyword arguments for ``process_dataset_2scat``.
        """
        processing_params = {
            "filename_window_wagon": self.get_input_value(
                "filename_window_wagon",
                self.get_mask_window(),
            ),
            "WindowRoiSize": self.get_parameter(
                "WindowRoiSize",
            ),
            "Dummy": self.get_parameter("Dummy"),
            "Center_1": self.get_parameter("Center_1"),
            "Center_2": self.get_parameter("Center_2"),
            # NOTE(review): NormalizationTask uses
            # get_parameter(..., to_integer=True) for the same conversion;
            # int(...) here raises TypeError if the parameter resolves to
            # None — confirm the intended behavior on missing BSize_*.
            "binning": (
                int(self.get_parameter("BSize_1")),
                int(self.get_parameter("BSize_2")),
            ),
            "algorithm": self.get_input_value("algorithm", DEFAULT_ALGORITHM),
            "pre_caving": self.get_input_value("pre_caving", True),
            "filename_mask_static": self.get_input_value(
                "filename_mask_static",
                self.get_mask_gaps_filename(),
            ),
            "filename_mask_reference": self.get_input_value(
                "filename_mask_reference",
                self.get_mask_beamstop_filename(),
            ),
            # Fallback for flip_caving comes from the "nw_cave_flip" header.
            "flip_caving": self.get_input_value(
                "flip_caving",
                bool(self.get_from_headers("nw_cave_flip")),
            ),
        }

        return processing_params

    def process(self) -> None:
        """Compute and apply the secondary-scattering correction.

        Publishes corrected signal/variance/sigma datasets plus the
        calculated secondary-scattering dataset.
        """
        do_process = super().process()
        # Only an explicit False veto aborts (None still allows processing).
        if do_process is False:
            return

        with ExitStack() as stack:
            self.bench_process = self.Benchmark(
                nb_frames=len(self.dataset_signal), benchmark_name="processing"
            )
            stack.enter_context(self.bench_process)

            processing_params = self.get_processing_inputs()
            # Kept on the instance so save()/metadata writers can record them.
            self.processing_params = processing_params

            # Variance resolution order: use an explicit variance dataset if
            # present; otherwise synthesize one from sigma (sigma**2, with
            # non-positive sigmas replaced by Dummy); otherwise pass None.
            if self.dataset_variance is not None:
                dataset_variance = self.dataset_variance
            elif self.dataset_sigma is not None:
                # The two locals below look unused but are read by name from
                # this frame by numexpr.evaluate — hence the noqa markers.
                dataset_sigma = self.dataset_sigma  # noqa
                # NOTE(review): "Dummy" is always a key of processing_params,
                # so the 0.0 fallback only guards a missing key, not a None
                # value; numexpr will fail if Dummy is None — confirm.
                Dummy = processing_params.get("Dummy", 0.0)  # noqa
                dataset_variance = numexpr.evaluate(
                    "where(dataset_sigma <= 0.0, Dummy, dataset_sigma ** 2)"
                )
            else:
                dataset_variance = None

            self.log_info("Performing secondary scattering correction...")

            (
                corrected_dataset_signal,
                corrected_dataset_variance,
                corrected_dataset_sigma,
                secondary_scattering,
            ) = process_dataset_2scat(
                dataset_signal=self.dataset_signal,
                dataset_variance=dataset_variance,
                **processing_params,
            )

            self.outputs.dataset_signal = corrected_dataset_signal
            self.outputs.dataset_variance = corrected_dataset_variance
            self.outputs.dataset_sigma = corrected_dataset_sigma
            self.outputs.secondary_scattering = secondary_scattering

        self._log_benchmark(self.bench_process)

    def save(self) -> None:
        """Optionally append the secondary-scattering dataset to the NXdata
        group of the output file (base class handles everything else)."""
        do_save = super().save()
        if not do_save:
            return

        # Saving the secondary-scattering dataset itself is opt-in.
        if not self.get_input_value("save_secondary_scattering", False):
            return

        # Destination link is of the form "<filename>::<h5 path>".
        filename_output, h5path_nxdata = self.links["destination"]["nxdata"].split("::")
        params = {
            "filename": filename_output,
            "name": h5path_nxdata,
            "retry_timeout": 0.1,
            "mode": "a",
        }
        with open_item_silx(**params) as nexus_data_grp:
            self._update_dataset(
                added_dataset=self.outputs.secondary_scattering,
                index_read=self.index_range_last,
                h5_group=nexus_data_grp,
                h5_dataset_name="secondary_scattering",
            )
@@ -0,0 +1,45 @@
1
+ import time
2
+
3
+ from ewokscore import Task
4
+
5
+
6
class SumTask(
    Task,
    input_names=["a"],
    optional_input_names=["b", "delay"],
    output_names=["result"],
):
    """Add two numbers: ``result = a + b`` (``b`` defaults to 0).

    An optional ``delay`` (seconds) pauses the task before the result
    is published.
    """

    def run(self):
        # Missing optional inputs fall back to neutral defaults.
        addend = self.get_input_value("b", 0)
        total = self.inputs.a + addend
        time.sleep(self.get_input_value("delay", 0))
        self.outputs.result = total
18
+
19
+
20
class SumTask1(
    Task,
    input_names=["a"],
    optional_input_names=["b", "delay"],
    output_names=["result"],
):
    """Add two numbers: ``result = a + b`` (``b`` defaults to 0).

    An optional ``delay`` (seconds) pauses the task before the result
    is published.
    """

    def run(self):
        # Missing optional inputs fall back to neutral defaults.
        addend = self.get_input_value("b", 0)
        total = self.inputs.a + addend
        time.sleep(self.get_input_value("delay", 0))
        self.outputs.result = total
32
+
33
+
34
class SumTask2(
    Task,
    input_names=["a"],
    optional_input_names=["b", "delay"],
    output_names=["result"],
):
    """Add two numbers: ``result = a + b`` (``b`` defaults to 0).

    An optional ``delay`` (seconds) pauses the task before the result
    is published.
    """

    def run(self):
        # Missing optional inputs fall back to neutral defaults.
        addend = self.get_input_value("b", 0)
        total = self.inputs.a + addend
        time.sleep(self.get_input_value("delay", 0))
        self.outputs.result = total
@@ -0,0 +1,3 @@
1
import os

# Absolute-or-relative path of the directory containing this package's
# __init__.py; lets the test suite address files shipped next to it.
TEST_FOLDER = os.path.dirname(__file__)