ewoksid02-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. ewoksid02/__init__.py +0 -0
  2. ewoksid02/ocl/__init__.py +0 -0
  3. ewoksid02/resources/__init__.py +8 -0
  4. ewoksid02/resources/saxs_loop.json +96 -0
  5. ewoksid02/resources/template_saxs.yaml +37 -0
  6. ewoksid02/scripts/__init__.py +0 -0
  7. ewoksid02/scripts/__main__.py +70 -0
  8. ewoksid02/scripts/parsers.py +224 -0
  9. ewoksid02/scripts/saxs/__init__.py +0 -0
  10. ewoksid02/scripts/saxs/main.py +255 -0
  11. ewoksid02/scripts/saxs/slurm_python_post_script.py +3 -0
  12. ewoksid02/scripts/saxs/slurm_python_pre_script.py +5 -0
  13. ewoksid02/scripts/utils.py +21 -0
  14. ewoksid02/scripts/xpcs/__init__.py +0 -0
  15. ewoksid02/scripts/xpcs/__main__.py +3 -0
  16. ewoksid02/tasks/__init__.py +7 -0
  17. ewoksid02/tasks/averagetask.py +179 -0
  18. ewoksid02/tasks/azimuthaltask.py +272 -0
  19. ewoksid02/tasks/cavingtask.py +170 -0
  20. ewoksid02/tasks/dahuprocessingtask.py +71 -0
  21. ewoksid02/tasks/end.py +35 -0
  22. ewoksid02/tasks/id02processingtask.py +2582 -0
  23. ewoksid02/tasks/looptask.py +672 -0
  24. ewoksid02/tasks/metadatatask.py +879 -0
  25. ewoksid02/tasks/normalizationtask.py +204 -0
  26. ewoksid02/tasks/scalerstask.py +46 -0
  27. ewoksid02/tasks/secondaryscatteringtask.py +159 -0
  28. ewoksid02/tasks/sumtask.py +45 -0
  29. ewoksid02/tests/__init__.py +3 -0
  30. ewoksid02/tests/conftest.py +639 -0
  31. ewoksid02/tests/debug.py +64 -0
  32. ewoksid02/tests/test_2scat_node.py +119 -0
  33. ewoksid02/tests/test_ave_node.py +106 -0
  34. ewoksid02/tests/test_azim_node.py +89 -0
  35. ewoksid02/tests/test_cave_node.py +118 -0
  36. ewoksid02/tests/test_norm_node.py +190 -0
  37. ewoksid02/tests/test_saxs.py +69 -0
  38. ewoksid02/tests/test_sumtask.py +10 -0
  39. ewoksid02/tests/utils.py +514 -0
  40. ewoksid02/utils/__init__.py +22 -0
  41. ewoksid02/utils/average.py +158 -0
  42. ewoksid02/utils/blissdata.py +1157 -0
  43. ewoksid02/utils/caving.py +851 -0
  44. ewoksid02/utils/cupyutils.py +42 -0
  45. ewoksid02/utils/io.py +722 -0
  46. ewoksid02/utils/normalization.py +804 -0
  47. ewoksid02/utils/pyfai.py +424 -0
  48. ewoksid02/utils/secondaryscattering.py +597 -0
  49. ewoksid02-0.1.0.dist-info/METADATA +76 -0
  50. ewoksid02-0.1.0.dist-info/RECORD +54 -0
  51. ewoksid02-0.1.0.dist-info/WHEEL +5 -0
  52. ewoksid02-0.1.0.dist-info/entry_points.txt +5 -0
  53. ewoksid02-0.1.0.dist-info/licenses/LICENSE.md +20 -0
  54. ewoksid02-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,3 @@
+ print(
+     "This python script will be executed after the slurm job and after the bash post-script"
+ )
@@ -0,0 +1,5 @@
+ import pyopencl
+
+ print("This is a python script executed before the slurm job execution")
+
+ print(f"OpenCL platforms: {[p.get_devices() for p in pyopencl.get_platforms()]}")
@@ -0,0 +1,21 @@
+ WORKER_MODULE = "scattering"
+ PRE_SCRIPT = "module load {WORKER_MODULE}; python3 -m ewoksid02.scripts.saxs.slurm_python_pre_script"
+ PYTHON_CMD = "python3"
+ POST_SCRIPT = "python3 -m ewoksid02.scripts.saxs.slurm_python_post_script"
+ EWOKS_CMD = "ewoks execute {workflow} --engine ppf -o pool_type=thread --inputs=all -o convert_destination={destination_filename}"
+ SLURM_URL = "http://slurm-api.esrf.fr:6820"
+
+ SLURM_JOB_PARAMETERS_SAXS = {
+     "partition": "gpu-long",
+     "time": "02:00:00",  # 2 hours
+     "tasks_per_node": 1,
+     "cpus_per_task": 1,
+     "memory_per_cpu": "100G",
+     "tres_per_job": "gres/gpu:1",
+     "constraints": "l40s",  # a40, a100, v100, l40s
+ }
+
+ ID02_EXECUTION_PARAMETERS = {
+     "engine": "ppf",
+     "pool_type": "thread",
+ }
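
The constants above are string templates rather than ready-to-run commands, and the code that fills them is not part of this excerpt. A minimal sketch of filling them with str.format, where the workflow path and destination filename are hypothetical placeholders:

    from ewoksid02.scripts.utils import EWOKS_CMD, PRE_SCRIPT, WORKER_MODULE

    # Hypothetical values, for illustration only
    pre_script = PRE_SCRIPT.format(WORKER_MODULE=WORKER_MODULE)
    ewoks_cmd = EWOKS_CMD.format(
        workflow="saxs_loop.json",
        destination_filename="saxs_loop_filled.json",
    )
    print(pre_script)
    print(ewoks_cmd)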
@@ -0,0 +1,3 @@
+ def main(args):
+     """Main function to trigger the XPCS pipeline."""
+     print("xpcs", args)
@@ -0,0 +1,7 @@
+ ###
+ from .id02processingtask import ID02ProcessingTask  # noqa
+ from .normalizationtask import NormalizationTask  # noqa
+ from .secondaryscatteringtask import SecondaryScatteringTask  # noqa
+ from .cavingtask import CavingBeamstopTask, CavingGapsTask, CavingTask  # noqa
+ from .azimuthaltask import AzimuthalTask  # noqa
+ from .averagetask import AverageTask  # noqa
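
These imports expose the processing tasks at the package level, which lets an ewoks workflow reference them by dotted path. The real SAXS graph ships as ewoksid02/resources/saxs_loop.json (not shown in this diff); the sketch below chains two tasks with invented node ids and an assumed data mapping, purely to illustrate the pattern:

    from ewoks import execute_graph

    # Illustrative graph only; the node ids and data mapping are assumptions,
    # not the contents of saxs_loop.json.
    workflow = {
        "graph": {"id": "saxs_sketch"},
        "nodes": [
            {
                "id": "azim",
                "task_type": "class",
                "task_identifier": "ewoksid02.tasks.AzimuthalTask",
            },
            {
                "id": "ave",
                "task_type": "class",
                "task_identifier": "ewoksid02.tasks.AverageTask",
            },
        ],
        "links": [
            {
                "source": "azim",
                "target": "ave",
                "data_mapping": [
                    {"source_output": "radial_array", "target_input": "radial_array"},
                    {"source_output": "azimuth_array", "target_input": "azimuth_array"},
                ],
            }
        ],
    }

    # engine and pool_type match ID02_EXECUTION_PARAMETERS in scripts/utils.py
    execute_graph(workflow, engine="ppf", pool_type="thread")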
@@ -0,0 +1,179 @@
+ from contextlib import ExitStack
+
+ from pyFAI.units import to_unit
+ from silx.io.h5py_utils import open_item as open_item_silx
+
+ from ewoksid02.tasks.id02processingtask import ID02ProcessingTask
+ from ewoksid02.utils.average import calculate_average, get_array_limit
+
+
+ class AverageTask(
+     ID02ProcessingTask,
+     optional_input_names=[
+         "dataset_sum_signal",
+         "dataset_sum_normalization",
+         "dataset_sum_variance",
+         "radial_array",
+         "azimuth_array",
+         "Dummy",
+         "unit",
+         "azimuth_range",
+         "pca_parameters",
+     ],
+     output_names=[
+         "dataset_average_signal_norm",
+         "radial_array",
+     ],
+ ):
+     """The `AverageTask` class calculates the average of datasets in the ID02 SAXS pipeline.
+     It extends the `ID02ProcessingTask` class and adds the inputs and processing logic
+     specific to averaging. If `azimuth_range` is not provided, a full average is performed.
+
+     Optional Inputs:
+     - dataset_sum_signal (numpy.ndarray): Sum of the signal, non-normalized, from an ai.integrate2d result.
+     - dataset_sum_normalization (numpy.ndarray): Sum of the normalized pixels, from an ai.integrate2d result.
+     - dataset_sum_variance (numpy.ndarray): Sum of the variance, from an ai.integrate2d result.
+     - radial_array (numpy.ndarray): Radial axis array for the dataset.
+     - azimuth_array (numpy.ndarray): Azimuthal axis array for the dataset.
+     - Dummy (float): Value to replace invalid pixels in the dataset.
+     - unit (str): Unit for the radial axis (e.g., "q_nm^-1").
+     - azimuth_range (list of tuples): Azimuthal ranges for averaging (e.g., `[(0, 90), (270, 360)]`).
+     - pca_parameters (dict): Parameters for Principal Component Analysis (PCA), if applicable.
+     Outputs:
+     - dataset_average_signal_norm (numpy.ndarray): Normalized average signal dataset.
+     - radial_array (numpy.ndarray): Radial axis array for the averaged data.
+     """
+
+     def run(self):
+         super().run(processing_type="ave")
+
+     def get_processing_inputs(self) -> dict:
+         azimuth_range = self.get_input_value("azimuth_range", None)
+         array_ranges = None
+
+         if azimuth_range is None:
+             self.log_info("There is no azimuth_range. A full average will be done.")
+             array_ranges = [(0, -1)]
+         else:
+             azimuth_array = self.get_input_value("azimuth_array", None)
+             if azimuth_array is None:
+                 self.log_warning(
+                     "There is no azimuth_array, so azimuth ranges cannot be transformed into array limits. A full average will be done."
+                 )
+                 array_ranges = [(0, -1)]
+             else:
+                 array_ranges = [
+                     get_array_limit(azimuth_array=azimuth_array, azimuth_range=az_range)
+                     for az_range in azimuth_range
+                 ]
+
+         params_ave = {
+             "array_ranges": array_ranges,
+             "Dummy": self.get_parameter("Dummy"),
+             "azimuth_range": azimuth_range,
+         }
+
+         return params_ave
+
+     def process(self):
+         do_process = super().process()
+         if do_process is False:
+             return
+
+         with ExitStack() as stack:
+             self.bench_process = self.Benchmark(
+                 nb_frames=len(self.dataset_signal), benchmark_name="processing"
+             )
+             stack.enter_context(self.bench_process)
+
+             processing_params = self.get_processing_inputs()
+             self.processing_params = processing_params
+
+             self.log_info("Performing 1D integration...")
+
+             (
+                 dataset_average_intensity,
+                 dataset_average_signal_norm,
+                 dataset_average_variance,
+                 dataset_average_sigma,
+             ) = calculate_average(
+                 dataset_intensity=self.dataset_signal,
+                 dataset_sum_signal=self.get_input_value("dataset_sum_signal", None),
+                 dataset_sum_norm=self.get_input_value(
+                     "dataset_sum_normalization", None
+                 ),
+                 dataset_sum_variance=self.get_input_value("dataset_sum_variance", None),
+                 calculate_variance=self.get_input_value("save_variance", False),
+                 **processing_params,
+             )
+
+             self.outputs.dataset_signal = dataset_average_intensity
+             self.outputs.dataset_average_signal_norm = dataset_average_signal_norm
+             self.outputs.dataset_variance = dataset_average_variance
+             self.outputs.dataset_sigma = dataset_average_sigma
+             self.outputs.radial_array = self.get_input_value("radial_array", None)
+
+         self._log_benchmark(self.bench_process)
+
+     def save(self) -> None:
+         do_save = super().save()
+         if not do_save:
+             return
+
+         filename_output, h5path_nxdata = self.links["destination"]["nxdata"].split("::")
+         params = {
+             "filename": filename_output,
+             "name": h5path_nxdata,
+             "retry_timeout": 0.1,
+             "mode": "a",
+         }
+         with open_item_silx(**params) as nexus_data_grp:
+             self._update_dataset(
+                 added_dataset=self.outputs.dataset_average_signal_norm,
+                 h5_group=nexus_data_grp,
+                 h5_dataset_name="data_signal_norm",
+                 index_read=self.index_range_last,
+             )
+
+             # Add NeXus data information (only once)
+             unit = self.get_parameter("unit")
+             radial_unit = to_unit(unit)
+
+             if radial_unit is not None and radial_unit.short_name not in nexus_data_grp:
+                 if self.get_input_value("radial_array", None) is not None:
+                     radial_dset = nexus_data_grp.create_dataset(
+                         name=radial_unit.short_name,
+                         data=self.get_input_value("radial_array", None),
+                     )
+                     radial_dset.attrs["unit"] = str(radial_unit)
+                     radial_dset.attrs["interpretation"] = "scalar"
+                     radial_dset.attrs["axis"] = "2"
+                     nexus_data_grp.attrs["axes"] = [".", radial_unit.short_name]
+                     nexus_data_grp.attrs["signal"] = "data"
+
+     def processing_info(self) -> list:
+         azimuth_range = self.get_input_value("azimuth_range", [0, 360])
+         return [{"h5path": "entry_0000", "name": "ave_limits", "value": azimuth_range}]
+
+     def _save_in_gallery(self) -> None:
+         if self.loop_nb != 1:
+             return
+
+         dataset_signal = self.outputs.dataset_signal
+         if dataset_signal is None or dataset_signal.size == 0:
+             return
+
+         import matplotlib.pyplot as plt
+
+         unit = self.get_parameter("unit")
+         radial_unit = to_unit(unit)
+
+         signal = dataset_signal.mean(axis=0)
+         q = self.outputs.radial_array
+         fig, ax = plt.subplots(figsize=(15, 10))
+         ax.plot(q, signal)
+         ax.set_xlabel(f"{radial_unit.short_name} ({radial_unit.unit_symbol})")
+         ax.set_ylabel("Intensity (arb. units)")
+         ax.set_title(f"scan {self.scan_nb} - average")
+         filename_png = self._get_filename_gallery()
+         fig.savefig(filename_png)
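
get_array_limit is implemented in ewoksid02/utils/average.py, which this diff does not show. A minimal numpy sketch of the idea it presumably implements, turning an azimuthal range in degrees into index limits on a monotonically increasing azimuth array (the helper name and exact behaviour below are assumptions):

    import numpy

    def azimuth_range_to_indices(azimuth_array, azimuth_range):
        # Assumed behaviour, for illustration: return the index limits of the
        # azimuthal bins that fall inside the requested (start, stop) range.
        start, stop = azimuth_range
        i0 = int(numpy.searchsorted(azimuth_array, start, side="left"))
        i1 = int(numpy.searchsorted(azimuth_array, stop, side="right"))
        return i0, i1

    chi = numpy.linspace(-180, 180, 360)  # roughly one bin per degree
    print(azimuth_range_to_indices(chi, (0, 90)))  # (180, 270)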
@@ -0,0 +1,272 @@
+ from contextlib import ExitStack
+
+ import numexpr
+ from pyFAI.units import to_unit
+ from silx.io.h5py_utils import open_item as open_item_silx
+
+ from ewoksid02.tasks.id02processingtask import ID02ProcessingTask
+ from ewoksid02.utils.pyfai import (
+     get_gpu_method,
+     get_persistent_azimuthal_integrator,
+     _get_persistent_pyfai_worker,
+     process_dataset_azim,
+     guess_npt2_rad,
+ )
+
+ DEFAULT_NPT_RAD = 1600
+ DEFAULT_NPT_AZIM = 360
+
+
+ class AzimuthalTask(
+     ID02ProcessingTask,
+     optional_input_names=[
+         "filename_mask",
+         "filename_dark",
+         "npt2_rad",
+         "npt2_azim",
+         "unit",
+         "Dummy",
+         "DDummy",
+         "Center_1",
+         "Center_2",
+         "PSize_1",
+         "PSize_2",
+         "BSize_1",
+         "BSize_2",
+         "SampleDistance",
+         "WaveLength",
+         "DetectorRotation_1",
+         "DetectorRotation_2",
+         "DetectorRotation_3",
+         "method",
+         "integration_options",
+         "do_variance_formula",
+         "variance_formula",
+         "save_sum",
+     ],
+     output_names=[
+         "dataset_sum_signal",
+         "dataset_sum_normalization",
+         "dataset_sum_variance",
+         "radial_array",
+         "azimuth_array",
+     ],
+ ):
+     """The `AzimuthalTask` class performs azimuthal integration on datasets in the ID02 SAXS pipeline.
+     It extends the `ID02ProcessingTask` class and adds the integration-specific
+     inputs and processing logic, using the pyFAI library.
+
+     Optional Inputs:
+     - filename_mask (str): Path to the mask file for masking invalid pixels.
+     - filename_dark (str): Path to the file with a dark-current correction.
+     - npt2_rad (int): Number of radial bins for the integration.
+     - npt2_azim (int): Number of azimuthal bins for the integration.
+     - unit (str): Unit for the radial axis (e.g., "q_nm^-1").
+     - Dummy (float): Value to replace invalid pixels in the dataset.
+     - DDummy (float): Tolerance for dummy pixel replacement.
+     - Center_1 (float): Beam center in the first dimension.
+     - Center_2 (float): Beam center in the second dimension.
+     - PSize_1 (float): Pixel size in the first dimension.
+     - PSize_2 (float): Pixel size in the second dimension.
+     - BSize_1 (float): Pixel binning factor in the first dimension.
+     - BSize_2 (float): Pixel binning factor in the second dimension.
+     - SampleDistance (float): Sample-to-detector distance in meters.
+     - WaveLength (float): Wavelength of the beam in meters.
+     - DetectorRotation_1 (float): rot2 of pyFAI.
+     - DetectorRotation_2 (float): rot1 of pyFAI.
+     - DetectorRotation_3 (float): rot3 of pyFAI.
+     - method (str): Integration method to be used by pyFAI (e.g., "bbox", "csr").
+     - integration_options (dict): Additional options for pyFAI integration.
+     - do_variance_formula (bool): Flag to enable variance calculation using a formula. Default is `False`.
+     - variance_formula (str): Formula for calculating variance in the dataset.
+     - save_sum (bool): Whether to save the sum_signal, sum_normalization and sum_variance arrays.
+     Outputs:
+     - dataset_sum_signal (numpy.ndarray): Summed signal dataset after azimuthal integration.
+     - dataset_sum_normalization (numpy.ndarray): Summed normalization dataset after azimuthal integration.
+     - dataset_sum_variance (numpy.ndarray): Summed variance dataset after azimuthal integration.
+     - radial_array (numpy.ndarray): Radial axis array for the integrated data.
+     - azimuth_array (numpy.ndarray): Azimuthal axis array for the integrated data.
+     """
+
+     def run(self):
+         super().run(processing_type="azim")
+
+     def get_processing_inputs(
+         self,
+     ) -> dict:
+         params_azimuthalintegrator = {
+             "Center_1": self.get_parameter("Center_1"),
+             "Center_2": self.get_parameter("Center_2"),
+             "PSize_1": self.get_parameter("PSize_1"),
+             "PSize_2": self.get_parameter("PSize_2"),
+             "SampleDistance": self.get_parameter("SampleDistance"),
+             "WaveLength": self.get_parameter("WaveLength"),
+             "BSize_1": self.get_parameter("BSize_1"),
+             "BSize_2": self.get_parameter("BSize_2"),
+             "DetectorRotation_1": self.get_parameter("DetectorRotation_1"),
+             "DetectorRotation_2": self.get_parameter("DetectorRotation_2"),
+             "DetectorRotation_3": self.get_parameter("DetectorRotation_3"),
+         }
+
+         azimuthal_integrator = get_persistent_azimuthal_integrator(
+             data_signal_shape=self.dataset_signal[0].shape,
+             **params_azimuthalintegrator,
+         )
+
+         processing_params = {
+             "filename_mask": self.get_input_value(
+                 "filename_mask",
+                 self.get_mask_beamstop_filename(),
+             ),
+             "filename_dark": self.get_input_value(
+                 "filename_dark",
+                 self.get_dark_filename(),
+             ),
+             "Dummy": self.get_parameter("Dummy"),
+             "DDummy": self.get_parameter("DDummy"),
+             "npt2_rad": self.get_parameter("npt2_rad"),
+             "npt2_azim": self.get_parameter("npt2_azim"),
+             "unit": self.get_parameter("unit", default="q_nm^-1"),
+             "method": self.get_input_value("method", get_gpu_method()),
+             "integration_options": self.get_input_value("integration_options", {}),
+             "do_variance_formula": self.get_input_value("do_variance_formula", False),
+             "variance_formula": self.get_parameter("variance_formula"),
+             "datatype": self.get_input_value("datatype", "float32"),
+             "binning": (
+                 self.get_parameter(
+                     "BSize_1",
+                 ),
+                 self.get_parameter(
+                     "BSize_2",
+                 ),
+             ),
+             "save_variance": self.get_input_value("save_variance", False),
+             "save_sum": self.get_input_value("save_sum", False),
+             "azimuthal_integrator": azimuthal_integrator,
+             **params_azimuthalintegrator,
+         }
+         if not processing_params.get("npt2_rad"):
+             processing_params["npt2_rad"] = guess_npt2_rad(
+                 azimuthal_integrator=azimuthal_integrator
+             )
+         if not processing_params.get("npt2_azim"):
+             processing_params["npt2_azim"] = DEFAULT_NPT_AZIM
+         if not processing_params.get("method"):
+             processing_params["method"] = get_gpu_method()
+         return processing_params
+
+     def process(self) -> None:
+         do_process = super().process()
+         if do_process is False:
+             return
+
+         with ExitStack() as stack:
+             self.bench_process = self.Benchmark(
+                 nb_frames=len(self.dataset_signal), benchmark_name="processing"
+             )
+             stack.enter_context(self.bench_process)
+
+             processing_params = self.get_processing_inputs()
+             self.processing_params = processing_params
+
+             if self.dataset_variance is not None:
+                 dataset_variance = self.dataset_variance
+             elif self.dataset_sigma is not None:
+                 dataset_sigma = self.dataset_sigma  # noqa
+                 Dummy = processing_params.get("Dummy", 0.0)  # noqa
+                 dataset_variance = numexpr.evaluate(
+                     "where(dataset_sigma <= 0.0, Dummy, dataset_sigma ** 2)"
+                 )
+             else:
+                 dataset_variance = None
+
+             self.log_info("Performing 2D integration...")
+
+             (
+                 dataset_signal_azim,
+                 dataset_variance_azim,
+                 dataset_sigma_azim,
+                 dataset_sumsignal_azim,
+                 dataset_sumnorm_azim,
+                 dataset_sumvariance_azim,
+                 array_radial,
+                 array_azim,
+             ) = process_dataset_azim(
+                 dataset_signal=self.dataset_signal,
+                 dataset_variance=dataset_variance,
+                 **processing_params,
+             )
+
+             self.outputs.dataset_signal = dataset_signal_azim
+             self.outputs.dataset_variance = dataset_variance_azim
+             self.outputs.dataset_sigma = dataset_sigma_azim
+             self.outputs.dataset_sum_signal = dataset_sumsignal_azim
+             self.outputs.dataset_sum_normalization = dataset_sumnorm_azim
+             self.outputs.dataset_sum_variance = dataset_sumvariance_azim
+             self.outputs.radial_array = array_radial
+             self.outputs.azimuth_array = array_azim
+
+         self._log_benchmark(self.bench_process)
+
+     def save(self) -> None:
+         do_save = super().save()
+         if not do_save:
+             return
+
+         filename_output, h5path_nxdata = self.links["destination"]["nxdata"].split("::")
+         params = {
+             "filename": filename_output,
+             "name": h5path_nxdata,
+             "retry_timeout": 0.1,
+             "mode": "a",
+         }
+         with open_item_silx(**params) as nexus_data_grp:
+             self._update_dataset(
+                 added_dataset=self.outputs.dataset_sum_signal,
+                 h5_group=nexus_data_grp,
+                 h5_dataset_name="sum_signal",
+                 index_read=self.index_range_last,
+             )
+
+             self._update_dataset(
+                 added_dataset=self.outputs.dataset_sum_normalization,
+                 h5_group=nexus_data_grp,
+                 h5_dataset_name="sum_normalization",
+                 index_read=self.index_range_last,
+             )
+
+             self._update_dataset(
+                 added_dataset=self.outputs.dataset_sum_variance,
+                 h5_group=nexus_data_grp,
+                 h5_dataset_name="sum_variance",
+                 index_read=self.index_range_last,
+             )
+
+             unit = self.get_parameter("unit")
+             radial_unit = to_unit(unit)
+
+             # Update radial and azimuthal arrays only once
+             if radial_unit and self.outputs.radial_array is not None:
+                 if radial_unit.short_name not in nexus_data_grp:
+                     radial_dset = nexus_data_grp.create_dataset(
+                         name=radial_unit.short_name,
+                         data=self.outputs.radial_array,
+                     )
+                     radial_dset.attrs["axis"] = "3"
+                     radial_dset.attrs["interpretation"] = "scalar"
+                     radial_dset.attrs["unit"] = str(radial_unit)
+
+             if self.outputs.azimuth_array is not None:
+                 if "chi" not in nexus_data_grp:
+                     chi_dset = nexus_data_grp.create_dataset(
+                         name="chi",
+                         data=self.outputs.azimuth_array,
+                     )
+                     chi_dset.attrs["axis"] = "2"
+                     chi_dset.attrs["interpretation"] = "scalar"
+                     chi_dset.attrs["unit"] = "deg"
+
+             nexus_data_grp.attrs["axes"] = [".", "chi", radial_unit.short_name]
+
+     def _get_last_worker(self):
+         return _get_persistent_pyfai_worker(**self.get_processing_inputs())
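
get_persistent_azimuthal_integrator lives in ewoksid02/utils/pyfai.py, which this diff does not include, so the exact mapping from the ID02 header parameters to pyFAI geometry is not visible here. One plausible sketch of such a mapping, assuming Center_* are beam-centre coordinates in pixels, PSize_* are pixel sizes in metres, and the rot1/rot2 assignment follows the docstring above:

    from pyFAI.azimuthalIntegrator import AzimuthalIntegrator

    def make_integrator(params: dict) -> AzimuthalIntegrator:
        # Sketch only: the real conversion is done by
        # get_persistent_azimuthal_integrator and may differ.
        return AzimuthalIntegrator(
            dist=params["SampleDistance"],  # metres
            poni1=params["Center_2"] * params["PSize_2"],  # metres, slow axis
            poni2=params["Center_1"] * params["PSize_1"],  # metres, fast axis
            pixel1=params["PSize_2"],
            pixel2=params["PSize_1"],
            rot1=params["DetectorRotation_2"],  # per the docstring mapping
            rot2=params["DetectorRotation_1"],
            rot3=params["DetectorRotation_3"],
            wavelength=params["WaveLength"],
        )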
@@ -0,0 +1,170 @@
+ from contextlib import ExitStack
+
+ import numpy
+
+ from ewoksid02.tasks.id02processingtask import ID02ProcessingTask
+ from ewoksid02.utils.caving import process_data_caving
+
+ DEFAULT_ALGORITHM = "numpy"
+
+
+ class CavingTask(
+     ID02ProcessingTask,
+     optional_input_names=[
+         "Dummy",
+         "Center_1",
+         "Center_2",
+         "filename_mask_static",
+         "filename_mask_reference",
+         "flip_caving",
+         "algorithm",
+     ],
+ ):
+     """The `CavingTask` class applies a "caving" operation to datasets in the ID02 SAXS pipeline.
+     Caving is applied to the pixels whose intensity matches the dummy value, plus the pixels flagged by a mask file.
+
+     Optional Inputs:
+     - Dummy (float): Value to replace invalid pixels in the dataset.
+     - Center_1 (float): Beam center in the first dimension.
+     - Center_2 (float): Beam center in the second dimension.
+     - filename_mask_static (str): Path to the mask file used for the caving operation.
+     - filename_mask_reference (str): Path to the reference mask file (a kind of negative mask).
+     - flip_caving (bool): Cave the image using its flipped projection, both horizontal and vertical. WARNING: this is not physically correct!
+     - algorithm (str): Implementation used to perform the caving ("numpy" or "cupy").
+     """
+
+     def run(self, processing_type: str = "caving"):
+         super().run(processing_type=processing_type)
+
+     def get_processing_inputs(self) -> dict:
+         processing_params = {
+             "Dummy": self.get_parameter("Dummy"),
+             "Center_1": self.get_parameter("Center_1"),
+             "Center_2": self.get_parameter("Center_2"),
+             "filename_mask_static": self.get_input_value("filename_mask_static", None),
+             "filename_mask_reference": self.get_input_value(
+                 "filename_mask_reference", None
+             ),
+             "flip_caving": self.get_input_value(
+                 "flip_caving",
+                 bool(self.get_from_headers("cave_flip")),
+             ),
+             "algorithm": self.get_input_value("algorithm", DEFAULT_ALGORITHM),
+         }
+         return processing_params
+
+     def process(self) -> None:
+         do_process = super().process()
+         if do_process is False:
+             return
+
+         with ExitStack() as stack:
+             self.bench_process = self.Benchmark(
+                 nb_frames=len(self.dataset_signal), benchmark_name="processing"
+             )
+             stack.enter_context(self.bench_process)
+             processing_params = self.get_processing_inputs()
+             self.processing_params = processing_params
+
+             self.log_info("Performing caving...")
+             caved_dataset = process_data_caving(
+                 data=self.dataset_signal,
+                 **processing_params,
+             )
+
+             if not self.get_input_value("save_variance", False):
+                 caved_dataset_variance = None
+             elif self.dataset_variance is not None:
+                 caved_dataset_variance = process_data_caving(
+                     data=self.dataset_variance,
+                     **processing_params,
+                 )
+             else:
+                 caved_dataset_variance = numpy.zeros_like(caved_dataset)
+
+             if self.dataset_sigma is not None:
+                 caved_dataset_sigma = process_data_caving(
+                     data=self.dataset_sigma,
+                     **processing_params,
+                 )
+             else:
+                 caved_dataset_sigma = numpy.zeros_like(caved_dataset)
+
+             self.log_debug("Caving processing done")
+             self.outputs.dataset_signal = caved_dataset
+             self.outputs.dataset_variance = caved_dataset_variance
+             self.outputs.dataset_sigma = caved_dataset_sigma
+
+         self._log_benchmark(self.bench_process)
+
+     def save(self) -> None:
+         do_save = super().save()
+         if not do_save:
+             return
+
+
+ class CavingGapsTask(
+     CavingTask,
+ ):
+     """The `CavingGapsTask` inherits from `CavingTask`.
+     Here, the caving parameters are taken from the headers so that the caving operation
+     covers the detector gaps while avoiding the beamstop. Hence, no explicit mask filenames are needed.
+     """
+
+     def run(self, processing_type: str = "gaps"):
+         super().run(processing_type=processing_type)
+
+     def get_processing_inputs(self) -> dict:
+         processing_params = {
+             "Dummy": self.get_parameter("Dummy"),
+             "Center_1": self.get_parameter("Center_1"),
+             "Center_2": self.get_parameter(
+                 "Center_2",
+             ),
+             "filename_mask_static": self.get_input_value(
+                 "filename_mask_static",
+                 self.get_mask_gaps_filename(),
+             ),
+             "filename_mask_reference": self.get_input_value(
+                 "filename_mask_reference",
+                 self.get_mask_beamstop_filename(),
+             ),
+             "flip_caving": self.get_input_value(
+                 "flip_caving",
+                 bool(self.get_from_headers("nw_cave_flip")),
+             ),
+             "algorithm": self.get_input_value("algorithm", DEFAULT_ALGORITHM),
+         }
+
+         return processing_params
+
+
+ class CavingBeamstopTask(
+     CavingTask,
+ ):
+     """The `CavingBeamstopTask` inherits from `CavingTask`.
+     Here, the caving parameters are taken from the headers to perform the caving operation
+     and cover the beamstop. Hence, no explicit mask filename is needed.
+     """
+
+     def run(self, processing_type: str = "cave"):
+         super().run(processing_type=processing_type)
+
+     def get_processing_inputs(self) -> dict:
+         processing_params = {
+             "Dummy": self.get_parameter("Dummy"),
+             "Center_1": self.get_parameter("Center_1"),
+             "Center_2": self.get_parameter("Center_2"),
+             "filename_mask_static": self.get_input_value(
+                 "filename_mask_static",
+                 self.get_mask_beamstop_filename(),
+             ),
+             "filename_mask_reference": None,
+             "flip_caving": self.get_input_value(
+                 "flip_caving",
+                 bool(self.get_from_headers("nw_cave_flip")),
+             ),
+             "algorithm": self.get_input_value("algorithm", DEFAULT_ALGORITHM),
+         }
+
+         return processing_params
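
process_data_caving is implemented in ewoksid02/utils/caving.py, outside this excerpt. A minimal numpy sketch of the flip-caving idea described in the CavingTask docstring, assuming for simplicity that the beam centre sits at the array centre (the real implementation takes Center_1/Center_2 and the mask files into account):

    import numpy

    def flip_cave(frame: numpy.ndarray, dummy: float) -> numpy.ndarray:
        # Replace dummy-valued pixels with the horizontally-and-vertically
        # flipped image, i.e. a 180-degree rotation about the array centre.
        # As the docstring warns, this is a cosmetic fill, not physically correct.
        flipped = frame[::-1, ::-1]
        return numpy.where(frame == dummy, flipped, frame)

    frame = numpy.arange(16.0).reshape(4, 4)
    frame[0, :2] = -10.0  # simulate masked pixels, with Dummy = -10
    print(flip_cave(frame, dummy=-10.0))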