ewoksid02 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ewoksid02/__init__.py +0 -0
- ewoksid02/ocl/__init__.py +0 -0
- ewoksid02/resources/__init__.py +8 -0
- ewoksid02/resources/saxs_loop.json +96 -0
- ewoksid02/resources/template_saxs.yaml +37 -0
- ewoksid02/scripts/__init__.py +0 -0
- ewoksid02/scripts/__main__.py +70 -0
- ewoksid02/scripts/parsers.py +224 -0
- ewoksid02/scripts/saxs/__init__.py +0 -0
- ewoksid02/scripts/saxs/main.py +255 -0
- ewoksid02/scripts/saxs/slurm_python_post_script.py +3 -0
- ewoksid02/scripts/saxs/slurm_python_pre_script.py +5 -0
- ewoksid02/scripts/utils.py +21 -0
- ewoksid02/scripts/xpcs/__init__.py +0 -0
- ewoksid02/scripts/xpcs/__main__.py +3 -0
- ewoksid02/tasks/__init__.py +7 -0
- ewoksid02/tasks/averagetask.py +179 -0
- ewoksid02/tasks/azimuthaltask.py +272 -0
- ewoksid02/tasks/cavingtask.py +170 -0
- ewoksid02/tasks/dahuprocessingtask.py +71 -0
- ewoksid02/tasks/end.py +35 -0
- ewoksid02/tasks/id02processingtask.py +2582 -0
- ewoksid02/tasks/looptask.py +672 -0
- ewoksid02/tasks/metadatatask.py +879 -0
- ewoksid02/tasks/normalizationtask.py +204 -0
- ewoksid02/tasks/scalerstask.py +46 -0
- ewoksid02/tasks/secondaryscatteringtask.py +159 -0
- ewoksid02/tasks/sumtask.py +45 -0
- ewoksid02/tests/__init__.py +3 -0
- ewoksid02/tests/conftest.py +639 -0
- ewoksid02/tests/debug.py +64 -0
- ewoksid02/tests/test_2scat_node.py +119 -0
- ewoksid02/tests/test_ave_node.py +106 -0
- ewoksid02/tests/test_azim_node.py +89 -0
- ewoksid02/tests/test_cave_node.py +118 -0
- ewoksid02/tests/test_norm_node.py +190 -0
- ewoksid02/tests/test_saxs.py +69 -0
- ewoksid02/tests/test_sumtask.py +10 -0
- ewoksid02/tests/utils.py +514 -0
- ewoksid02/utils/__init__.py +22 -0
- ewoksid02/utils/average.py +158 -0
- ewoksid02/utils/blissdata.py +1157 -0
- ewoksid02/utils/caving.py +851 -0
- ewoksid02/utils/cupyutils.py +42 -0
- ewoksid02/utils/io.py +722 -0
- ewoksid02/utils/normalization.py +804 -0
- ewoksid02/utils/pyfai.py +424 -0
- ewoksid02/utils/secondaryscattering.py +597 -0
- ewoksid02-0.1.0.dist-info/METADATA +76 -0
- ewoksid02-0.1.0.dist-info/RECORD +54 -0
- ewoksid02-0.1.0.dist-info/WHEEL +5 -0
- ewoksid02-0.1.0.dist-info/entry_points.txt +5 -0
- ewoksid02-0.1.0.dist-info/licenses/LICENSE.md +20 -0
- ewoksid02-0.1.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
from ewoksid02.tasks.secondaryscatteringtask import SecondaryScatteringTask
|
|
2
|
+
|
|
3
|
+
from .utils import check_h5groups_common, check_h5groups_equivalent, execute_ewoks
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def test_eiger2_2scat_numpy(
    inputs_task_generic,
    inputs_task_2scat,
    tmp_path,
    dataset_signal_norm,
    dataset_sigma_norm,
    filename_processed_2scat_full,
):
    """Run ``SecondaryScatteringTask`` with the default (numpy) algorithm and
    compare the produced HDF5 groups against the reference file.

    Bug fix: the equivalence check previously pointed *both* URLs at the file
    produced by this test, so it compared the output against itself and could
    never fail. ``url_reference`` now points at the reference file.
    """
    processing_filename_numpy = str(tmp_path / "id02test_eiger2_2scat_numpy.h5")
    inputs_2scat_numpy = {
        **inputs_task_generic,
        **inputs_task_2scat,
        "processing_filename": processing_filename_numpy,
        "dataset_signal": dataset_signal_norm,
        "dataset_sigma": dataset_sigma_norm,
    }
    task_2scat_numpy = SecondaryScatteringTask(inputs_2scat_numpy)
    task_2scat_numpy.run()

    # Reference comes from the known-good processed file, not the test output.
    check_h5groups_equivalent(
        url_reference=f"silx://{filename_processed_2scat_full}?path=/entry_0000/PyFAI/result_2scat/",
        url_test=f"silx://{processing_filename_numpy}?path=/entry_0000/PyFAI/result_2scat/",
    )

    check_h5groups_common(
        filename_reference=filename_processed_2scat_full,
        filename_test=processing_filename_numpy,
    )
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def test_eiger2_2scat_cupy(
    cupy_available,
    inputs_task_generic,
    inputs_task_2scat,
    tmp_path,
    dataset_signal_norm,
    dataset_sigma_norm,
    filename_processed_2scat_full,
):
    """Run ``SecondaryScatteringTask`` with the cupy algorithm and compare the
    produced HDF5 groups against the reference file.

    Bug fixes:
    - the equivalence check compared the produced file against itself (both
      URLs used the test output); the reference URL now points at the
      reference file;
    - the output filename was copy-pasted from the numpy test
      (``..._numpy.h5``) and is renamed to reflect the cupy algorithm.
    """
    if not cupy_available:
        # No GPU/cupy in this environment: nothing to test.
        return

    processing_filename_cupy = str(tmp_path / "id02test_eiger2_2scat_cupy.h5")
    inputs_2scat_cupy = {
        **inputs_task_generic,
        **inputs_task_2scat,
        "processing_filename": processing_filename_cupy,
        "dataset_signal": dataset_signal_norm,
        "dataset_sigma": dataset_sigma_norm,
    }
    inputs_2scat_cupy["algorithm"] = "cupy"
    task_2scat_cupy = SecondaryScatteringTask(inputs_2scat_cupy)
    task_2scat_cupy.run()

    # Reference comes from the known-good processed file, not the test output.
    check_h5groups_equivalent(
        url_reference=f"silx://{filename_processed_2scat_full}?path=/entry_0000/PyFAI/result_2scat/",
        url_test=f"silx://{processing_filename_cupy}?path=/entry_0000/PyFAI/result_2scat/",
    )

    check_h5groups_common(
        filename_reference=filename_processed_2scat_full,
        filename_test=processing_filename_cupy,
    )
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def test_eiger2_2scat_workflow(
    workflow_norm_2scat,
    inputs_task_generic,
    inputs_task_norm,
    inputs_task_2scat,
    tmp_path,
    filename_processed_norm_reference,
    filename_processed_2scat_full,
):
    """Execute the norm→2scat ewoks workflow and validate both outputs
    against their reference files."""
    # Generic inputs go to every node; the rest are routed per node id.
    ewoks_inputs = [
        {"name": key, "value": value, "all": True}
        for key, value in inputs_task_generic.items()
    ]
    ewoks_inputs.extend(
        {"name": key, "value": value, "id": "norm"}
        for key, value in inputs_task_norm.items()
    )
    ewoks_inputs.extend(
        {"name": key, "value": value, "id": "2scat"}
        for key, value in inputs_task_2scat.items()
    )

    out_norm = str(tmp_path / "id02test_eiger2_norm.h5")
    out_2scat = str(tmp_path / "id02test_eiger2_2scat.h5")
    for node_id, filename in (("norm", out_norm), ("2scat", out_2scat)):
        ewoks_inputs.append(
            {"name": "processing_filename", "value": filename, "id": node_id}
        )

    _ = execute_ewoks(
        graph=workflow_norm_2scat,
        inputs=ewoks_inputs,
    )

    # Check each step's result group, then the shared groups of the files.
    for reference, produced, step in (
        (filename_processed_norm_reference, out_norm, "norm"),
        (filename_processed_2scat_full, out_2scat, "2scat"),
    ):
        check_h5groups_equivalent(
            url_reference=f"silx://{reference}?path=/entry_0000/PyFAI/result_{step}/",
            url_test=f"silx://{produced}?path=/entry_0000/PyFAI/result_{step}/",
        )
        check_h5groups_common(
            filename_reference=reference,
            filename_test=produced,
        )
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
from ewoksid02.tasks.averagetask import AverageTask
|
|
2
|
+
|
|
3
|
+
from .utils import check_h5groups_common, check_h5groups_equivalent, execute_ewoks
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def test_eiger2_ave_full(
    inputs_task_generic,
    inputs_task_ave,
    tmp_path,
    dataset_signal_azim_new,
    dataset_sigma_azim_new,
    dataset_sumsignal_azim_new,
    dataset_sumnorm_azim_new,
    dataset_sumvariance_azim_new,
    dataset_radial_array,
    dataset_azimuthal_array,
    filename_processed_ave_full,
):
    """Run ``AverageTask`` on azimuthal-integration outputs and compare the
    result group against the full reference file."""
    output_file = str(tmp_path / "id02test_eiger2_ave.h5")
    task_inputs = dict(inputs_task_generic)
    task_inputs.update(inputs_task_ave)
    task_inputs.update(
        {
            "processing_filename": output_file,
            "dataset_signal": dataset_signal_azim_new,
            "dataset_sigma": dataset_sigma_azim_new,
            "dataset_sum_signal": dataset_sumsignal_azim_new,
            "dataset_sum_normalization": dataset_sumnorm_azim_new,
            "dataset_sum_variance": dataset_sumvariance_azim_new,
            "radial_array": dataset_radial_array,
            "azimuth_array": dataset_azimuthal_array,
        }
    )
    AverageTask(task_inputs).run()

    check_h5groups_equivalent(
        url_reference=f"silx://{filename_processed_ave_full}?path=/entry_0000/PyFAI/result_ave/",
        url_test=f"silx://{output_file}?path=/entry_0000/PyFAI/result_ave/",
    )

    check_h5groups_common(
        filename_reference=filename_processed_ave_full,
        filename_test=output_file,
    )
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def test_eiger2_ave_workflow(
    workflow_norm_2scat_cave_azim_ave,
    inputs_task_generic,
    inputs_task_norm,
    inputs_task_2scat,
    inputs_task_cave,
    inputs_task_azim,
    inputs_task_ave,
    tmp_path,
    filename_processed_norm_reference,
    filename_processed_2scat_full,
    filename_processed_cave_full,
    filename_processed_azim_full,
    filename_processed_ave_full,
):
    """Execute the norm→2scat→cave→azim→ave workflow and validate every
    step's output against its reference file."""
    # Generic inputs target all nodes; per-step inputs target their node id.
    ewoks_inputs = [
        {"name": key, "value": value, "all": True}
        for key, value in inputs_task_generic.items()
    ]
    per_node = (
        ("norm", inputs_task_norm),
        ("2scat", inputs_task_2scat),
        ("cave", inputs_task_cave),
        ("azim", inputs_task_azim),
        ("ave", inputs_task_ave),
    )
    for node_id, node_inputs in per_node:
        ewoks_inputs.extend(
            {"name": key, "value": value, "id": node_id}
            for key, value in node_inputs.items()
        )

    # One output file per processing node, all under tmp_path.
    output_files = {
        "norm": str(tmp_path / "id02test_eiger2_norm.h5"),
        "2scat": str(tmp_path / "id02test_eiger2_2scat.h5"),
        "cave": str(tmp_path / "id02test_eiger2_cave.h5"),
        "azim": str(tmp_path / "id02test_eiger2_azim.h5"),
        "ave": str(tmp_path / "id02test_eiger2_ave.h5"),
    }
    ewoks_inputs.extend(
        {"name": "processing_filename", "value": filename, "id": node_id}
        for node_id, filename in output_files.items()
    )

    _ = execute_ewoks(
        graph=workflow_norm_2scat_cave_azim_ave,
        inputs=ewoks_inputs,
    )

    references = {
        "norm": filename_processed_norm_reference,
        "2scat": filename_processed_2scat_full,
        "cave": filename_processed_cave_full,
        "azim": filename_processed_azim_full,
        "ave": filename_processed_ave_full,
    }
    for step, reference in references.items():
        produced = output_files[step]
        check_h5groups_equivalent(
            url_reference=f"silx://{reference}?path=/entry_0000/PyFAI/result_{step}",
            url_test=f"silx://{produced}?path=/entry_0000/PyFAI/result_{step}",
        )

        check_h5groups_common(
            filename_reference=reference,
            filename_test=produced,
        )
|
|
@@ -0,0 +1,89 @@
|
|
|
1
|
+
from ewoksid02.tasks.azimuthaltask import AzimuthalTask
|
|
2
|
+
|
|
3
|
+
from .utils import check_h5groups_common, check_h5groups_equivalent, execute_ewoks
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def test_eiger2_azim_full(
    inputs_task_generic,
    inputs_task_azim,
    tmp_path,
    dataset_signal_cave_new,
    dataset_sigma_cave_new,
    filename_processed_azim_full,
):
    """Run ``AzimuthalTask`` on caved data and compare the result group
    against the full reference file."""
    output_file = str(tmp_path / "id02test_eiger2_azim.h5")
    task_inputs = dict(inputs_task_generic)
    task_inputs.update(inputs_task_azim)
    task_inputs.update(
        {
            "processing_filename": output_file,
            "dataset_signal": dataset_signal_cave_new,
            "dataset_sigma": dataset_sigma_cave_new,
        }
    )
    AzimuthalTask(task_inputs).run()

    check_h5groups_equivalent(
        url_reference=f"silx://{filename_processed_azim_full}?path=/entry_0000/PyFAI/result_azim/",
        url_test=f"silx://{output_file}?path=/entry_0000/PyFAI/result_azim/",
    )

    check_h5groups_common(
        filename_reference=filename_processed_azim_full,
        filename_test=output_file,
    )
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def test_eiger2_azim_workflow(
    workflow_norm_2scat_cave_azim,
    inputs_task_generic,
    inputs_task_norm,
    inputs_task_2scat,
    inputs_task_cave,
    inputs_task_azim,
    tmp_path,
    filename_processed_norm_reference,
    filename_processed_2scat_full,
    filename_processed_cave_full,
    filename_processed_azim_full,
):
    """Execute the norm→2scat→cave→azim workflow and validate every step's
    output against its reference file."""
    ewoks_inputs = [
        {"name": key, "value": value, "all": True}
        for key, value in inputs_task_generic.items()
    ]
    for node_id, node_inputs in (
        ("norm", inputs_task_norm),
        ("2scat", inputs_task_2scat),
        ("cave", inputs_task_cave),
        ("azim", inputs_task_azim),
    ):
        ewoks_inputs.extend(
            {"name": key, "value": value, "id": node_id}
            for key, value in node_inputs.items()
        )

    output_files = {
        "norm": str(tmp_path / "id02test_eiger2_norm.h5"),
        "2scat": str(tmp_path / "id02test_eiger2_2scat.h5"),
        "cave": str(tmp_path / "id02test_eiger2_cave.h5"),
        "azim": str(tmp_path / "id02test_eiger2_azim.h5"),
    }
    ewoks_inputs.extend(
        {"name": "processing_filename", "value": filename, "id": node_id}
        for node_id, filename in output_files.items()
    )

    _ = execute_ewoks(
        graph=workflow_norm_2scat_cave_azim,
        inputs=ewoks_inputs,
    )

    references = {
        "norm": filename_processed_norm_reference,
        "2scat": filename_processed_2scat_full,
        "cave": filename_processed_cave_full,
        "azim": filename_processed_azim_full,
    }
    for step, reference in references.items():
        produced = output_files[step]
        check_h5groups_equivalent(
            url_reference=f"silx://{reference}?path=/entry_0000/PyFAI/result_{step}",
            url_test=f"silx://{produced}?path=/entry_0000/PyFAI/result_{step}",
        )

        check_h5groups_common(
            filename_reference=reference,
            filename_test=produced,
        )
|
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
from ewoksid02.tasks.cavingtask import CavingBeamstopTask
|
|
2
|
+
|
|
3
|
+
from .utils import check_h5groups_common, check_h5groups_equivalent, execute_ewoks
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def test_eiger2_cave_numpy(
    inputs_task_generic,
    inputs_task_cave,
    tmp_path,
    dataset_signal_2scat,
    dataset_sigma_2scat,
    filename_processed_cave_full,
):
    """Run ``CavingBeamstopTask`` with the default (numpy) algorithm and
    compare the result group against the full reference file."""
    output_file = str(tmp_path / "id02test_eiger2_cave.h5")
    task_inputs = dict(inputs_task_generic)
    task_inputs.update(inputs_task_cave)
    task_inputs.update(
        {
            "processing_filename": output_file,
            "dataset_signal": dataset_signal_2scat,
            "dataset_sigma": dataset_sigma_2scat,
        }
    )
    CavingBeamstopTask(task_inputs).run()

    check_h5groups_equivalent(
        url_reference=f"silx://{filename_processed_cave_full}?path=/entry_0000/PyFAI/result_cave/",
        url_test=f"silx://{output_file}?path=/entry_0000/PyFAI/result_cave/",
    )

    check_h5groups_common(
        filename_reference=filename_processed_cave_full,
        filename_test=output_file,
    )
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def test_eiger2_cave_cupy(
    cupy_available,
    inputs_task_generic,
    inputs_task_cave,
    tmp_path,
    dataset_signal_2scat,
    dataset_sigma_2scat,
    filename_processed_cave_full,
):
    """Run ``CavingBeamstopTask`` with the cupy algorithm and compare the
    result group against the full reference file."""
    if not cupy_available:
        # No GPU/cupy in this environment: nothing to test.
        return

    output_file = str(tmp_path / "id02test_eiger2_cave.h5")
    task_inputs = dict(inputs_task_generic)
    task_inputs.update(inputs_task_cave)
    task_inputs.update(
        {
            "processing_filename": output_file,
            "dataset_signal": dataset_signal_2scat,
            "dataset_sigma": dataset_sigma_2scat,
        }
    )
    # Select the GPU implementation instead of the numpy default.
    task_inputs["algorithm"] = "cupy"
    CavingBeamstopTask(task_inputs).run()

    check_h5groups_equivalent(
        url_reference=f"silx://{filename_processed_cave_full}?path=/entry_0000/PyFAI/result_cave/",
        url_test=f"silx://{output_file}?path=/entry_0000/PyFAI/result_cave/",
    )

    check_h5groups_common(
        filename_reference=filename_processed_cave_full,
        filename_test=output_file,
    )
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def test_eiger2_cave_workflow(
    workflow_norm_2scat_cave,
    inputs_task_generic,
    inputs_task_norm,
    inputs_task_2scat,
    inputs_task_cave,
    tmp_path,
    filename_processed_norm_reference,
    filename_processed_2scat_full,
    filename_processed_cave_full,
):
    """Execute the norm→2scat→cave workflow and validate every step's output
    against its reference file."""
    ewoks_inputs = [
        {"name": key, "value": value, "all": True}
        for key, value in inputs_task_generic.items()
    ]
    for node_id, node_inputs in (
        ("norm", inputs_task_norm),
        ("2scat", inputs_task_2scat),
        ("cave", inputs_task_cave),
    ):
        ewoks_inputs.extend(
            {"name": key, "value": value, "id": node_id}
            for key, value in node_inputs.items()
        )

    output_files = {
        "norm": str(tmp_path / "id02test_eiger2_norm.h5"),
        "2scat": str(tmp_path / "id02test_eiger2_2scat.h5"),
        "cave": str(tmp_path / "id02test_eiger2_cave.h5"),
    }
    ewoks_inputs.extend(
        {"name": "processing_filename", "value": filename, "id": node_id}
        for node_id, filename in output_files.items()
    )
    _ = execute_ewoks(
        graph=workflow_norm_2scat_cave,
        inputs=ewoks_inputs,
    )

    references = {
        "norm": filename_processed_norm_reference,
        "2scat": filename_processed_2scat_full,
        "cave": filename_processed_cave_full,
    }
    for step, reference in references.items():
        produced = output_files[step]
        check_h5groups_equivalent(
            url_reference=f"silx://{reference}?path=/entry_0000/PyFAI/result_{step}",
            url_test=f"silx://{produced}?path=/entry_0000/PyFAI/result_{step}",
        )

        check_h5groups_common(
            filename_reference=reference,
            filename_test=produced,
        )
|
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
from ewoksid02.tasks.normalizationtask import NormalizationTask
|
|
2
|
+
|
|
3
|
+
from .utils import check_h5groups_common, check_h5groups_equivalent, execute_ewoks
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def test_eiger2_normalization_cython(
    inputs_task_generic,
    inputs_task_norm,
    tmp_path,
    filename_processed_norm_reference,
):
    """Run ``NormalizationTask`` with the default (cython) algorithm and
    compare the result group against the reference file."""
    output_file = str(tmp_path / "id02test_eiger2_norm_cython.h5")
    task_inputs = dict(inputs_task_generic)
    task_inputs.update(inputs_task_norm)
    task_inputs["processing_filename"] = output_file
    NormalizationTask(task_inputs).run()

    check_h5groups_equivalent(
        url_reference=f"silx://{filename_processed_norm_reference}?path=/entry_0000/PyFAI/result_norm/",
        url_test=f"silx://{output_file}?path=/entry_0000/PyFAI/result_norm/",
    )

    check_h5groups_common(
        filename_reference=filename_processed_norm_reference,
        filename_test=output_file,
    )
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def test_eiger2_normalization_cupy(
    cupy_available,
    inputs_task_generic,
    inputs_task_norm,
    tmp_path,
    filename_processed_norm_reference,
):
    """Run ``NormalizationTask`` with the cupy algorithm and compare the
    result group against the reference file."""
    if not cupy_available:
        # No GPU/cupy in this environment: nothing to test.
        return

    output_file = str(tmp_path / "id02test_eiger2_norm_cupy.h5")
    task_inputs = dict(inputs_task_generic)
    task_inputs.update(inputs_task_norm)
    task_inputs["processing_filename"] = output_file
    # Select the GPU implementation instead of the default.
    task_inputs["algorithm"] = "cupy"
    NormalizationTask(task_inputs).run()

    check_h5groups_equivalent(
        url_reference=f"silx://{filename_processed_norm_reference}?path=/entry_0000/PyFAI/result_norm/",
        url_test=f"silx://{output_file}?path=/entry_0000/PyFAI/result_norm/",
    )

    check_h5groups_common(
        filename_reference=filename_processed_norm_reference,
        filename_test=output_file,
    )
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def test_eiger2_normalization_numpy(
    cupy_available,
    inputs_task_generic,
    inputs_task_norm,
    tmp_path,
    filename_processed_norm_reference,
):
    """Run ``NormalizationTask`` with the numpy algorithm and compare the
    result group against the reference file.

    Bug fixes (copy-paste from the cupy test):
    - ``algorithm`` was set to ``"cupy"``, so the numpy code path was never
      exercised; it is now ``"numpy"``;
    - the test returned early when cupy was unavailable, although the numpy
      path needs no GPU; the guard is removed.
    The ``cupy_available`` fixture parameter is kept (unused) so the test
    signature stays backward-compatible.
    """
    processing_filename_numpy = str(tmp_path / "id02test_eiger2_norm_numpy.h5")
    inputs_numpy = {
        **inputs_task_generic,
        **inputs_task_norm,
        "processing_filename": processing_filename_numpy,
    }
    # Exercise the numpy implementation, not the cupy one.
    inputs_numpy["algorithm"] = "numpy"
    task_norm_numpy = NormalizationTask(inputs_numpy)
    task_norm_numpy.run()

    check_h5groups_equivalent(
        url_reference=f"silx://{filename_processed_norm_reference}?path=/entry_0000/PyFAI/result_norm/",
        url_test=f"silx://{processing_filename_numpy}?path=/entry_0000/PyFAI/result_norm/",
    )

    check_h5groups_common(
        filename_reference=filename_processed_norm_reference,
        filename_test=processing_filename_numpy,
    )
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def test_eiger2_normalization_workflow(
    workflow_norm,
    inputs_task_generic,
    inputs_task_norm,
    tmp_path,
    filename_processed_norm_reference,
):
    """Execute the single-node normalization workflow through ewoks and
    validate its output against the reference file."""
    ewoks_inputs = [
        {"name": key, "value": value, "all": True}
        for key, value in inputs_task_generic.items()
    ]
    ewoks_inputs.extend(
        {"name": key, "value": value, "id": "norm"}
        for key, value in inputs_task_norm.items()
    )
    out_norm = str(tmp_path / "id02test_eiger2_norm.h5")
    ewoks_inputs.append(
        {"name": "processing_filename", "value": out_norm, "id": "norm"}
    )
    _ = execute_ewoks(
        graph=workflow_norm,
        inputs=ewoks_inputs,
    )
    check_h5groups_equivalent(
        url_reference=f"silx://{filename_processed_norm_reference}?path=/entry_0000/PyFAI/result_norm/",
        url_test=f"silx://{out_norm}?path=/entry_0000/PyFAI/result_norm/",
    )

    check_h5groups_common(
        filename_reference=filename_processed_norm_reference,
        filename_test=out_norm,
    )
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
def test_eiger2_normalization_workflow_loop(
    workflow_norm,
    inputs_task_generic,
    inputs_task_norm,
    tmp_path,
    filename_processed_norm_reference,
):
    """Execute the normalization workflow with a self-loop (node "norm" feeds
    itself while ``continue_pipeline`` is true) and validate the output."""
    # Rewire the graph so the norm node iterates over the data itself.
    workflow_norm["links"] = [
        {
            "source": "norm",
            "target": "norm",
            "conditions": [{"source_output": "continue_pipeline", "value": True}],
            "map_all_data": True,
        }
    ]

    ewoks_inputs = [
        {"name": key, "value": value, "all": True}
        for key, value in inputs_task_generic.items()
    ]
    ewoks_inputs.extend(
        {"name": key, "value": value, "id": "norm"}
        for key, value in inputs_task_norm.items()
    )
    out_norm = str(tmp_path / "id02test_eiger2_norm.h5")
    ewoks_inputs.append(
        {"name": "processing_filename", "value": out_norm, "id": "norm"}
    )
    # The looping node reads its own input slices.
    ewoks_inputs.append({"name": "reading_node", "value": True, "id": "norm"})

    _ = execute_ewoks(
        graph=workflow_norm,
        inputs=ewoks_inputs,
    )
    check_h5groups_equivalent(
        url_reference=f"silx://{filename_processed_norm_reference}?path=/entry_0000/PyFAI/result_norm/",
        url_test=f"silx://{out_norm}?path=/entry_0000/PyFAI/result_norm/",
    )

    check_h5groups_common(
        filename_reference=filename_processed_norm_reference,
        filename_test=out_norm,
    )
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def test_eiger2_normalization_subscan2(
    inputs_task_generic_subscan2,
    inputs_task_norm,
    tmp_path,
    filename_processed_norm_dahu_subscan2,
):
    """Run ``NormalizationTask`` on the second sub-scan and compare against a
    slice of the dahu-processed reference file."""
    output_file = str(tmp_path / "id02test_eiger2_norm_subscan2.h5")
    task_inputs = dict(inputs_task_generic_subscan2)
    task_inputs.update(inputs_task_norm)
    task_inputs["processing_filename"] = output_file
    NormalizationTask(task_inputs).run()

    # The reference file holds more frames; compare only frames 0..2.
    check_h5groups_equivalent(
        url_reference=f"silx://{filename_processed_norm_dahu_subscan2}?path=/entry_0000/PyFAI/result_norm/&slice=0,2",
        url_test=f"silx://{output_file}?path=/entry_0000/PyFAI/result_norm/",
    )

    check_h5groups_common(
        filename_reference=filename_processed_norm_dahu_subscan2,
        filename_test=output_file,
    )
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
from .utils import check_h5groups_common, check_h5groups_equivalent, execute_ewoks
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def test_eiger2_saxs_loop(
    workflow_saxs_loop,
    inputs_task_generic,
    inputs_task_norm,
    inputs_task_2scat,
    inputs_task_cave,
    inputs_task_azim,
    inputs_task_ave,
    tmp_path,
    filename_processed_norm_reference,
    filename_processed_2scat_full,
    filename_processed_cave_full,
    filename_processed_azim_full,
    filename_processed_ave_full,
):
    """Execute the full looping SAXS workflow, processing one frame per
    iteration, and validate every step's output against its reference."""
    ewoks_inputs = []
    # Process the scan frame by frame over indices [0, 2).
    inputs_task_generic["max_slice_size"] = 1
    inputs_task_generic["range_index_read"] = [0, 2]
    ewoks_inputs.extend(
        {"name": key, "value": value, "all": True}
        for key, value in inputs_task_generic.items()
    )
    for node_id, node_inputs in (
        ("norm", inputs_task_norm),
        ("2scat", inputs_task_2scat),
        ("cave", inputs_task_cave),
        ("azim", inputs_task_azim),
        ("ave", inputs_task_ave),
    ):
        ewoks_inputs.extend(
            {"name": key, "value": value, "id": node_id}
            for key, value in node_inputs.items()
        )

    # One output file per node; scalers has no reference to compare against.
    output_files = {
        "norm": str(tmp_path / "id02test_eiger2_norm.h5"),
        "2scat": str(tmp_path / "id02test_eiger2_2scat.h5"),
        "cave": str(tmp_path / "id02test_eiger2_cave.h5"),
        "azim": str(tmp_path / "id02test_eiger2_azim.h5"),
        "ave": str(tmp_path / "id02test_eiger2_ave.h5"),
        "scalers": str(tmp_path / "id02test_eiger2_scalers.h5"),
    }
    ewoks_inputs.extend(
        {"name": "processing_filename", "value": filename, "id": node_id}
        for node_id, filename in output_files.items()
    )

    _ = execute_ewoks(
        graph=workflow_saxs_loop,
        inputs=ewoks_inputs,
    )
    references = {
        "norm": filename_processed_norm_reference,
        "2scat": filename_processed_2scat_full,
        "cave": filename_processed_cave_full,
        "azim": filename_processed_azim_full,
        "ave": filename_processed_ave_full,
    }
    for step, reference in references.items():
        produced = output_files[step]
        check_h5groups_equivalent(
            url_reference=f"silx://{reference}?path=/entry_0000/PyFAI/result_{step}",
            url_test=f"silx://{produced}?path=/entry_0000/PyFAI/result_{step}",
        )

        check_h5groups_common(
            filename_reference=reference,
            filename_test=produced,
        )
|