ewoksid02 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ewoksid02/__init__.py +0 -0
- ewoksid02/ocl/__init__.py +0 -0
- ewoksid02/resources/__init__.py +8 -0
- ewoksid02/resources/saxs_loop.json +96 -0
- ewoksid02/resources/template_saxs.yaml +37 -0
- ewoksid02/scripts/__init__.py +0 -0
- ewoksid02/scripts/__main__.py +70 -0
- ewoksid02/scripts/parsers.py +224 -0
- ewoksid02/scripts/saxs/__init__.py +0 -0
- ewoksid02/scripts/saxs/main.py +255 -0
- ewoksid02/scripts/saxs/slurm_python_post_script.py +3 -0
- ewoksid02/scripts/saxs/slurm_python_pre_script.py +5 -0
- ewoksid02/scripts/utils.py +21 -0
- ewoksid02/scripts/xpcs/__init__.py +0 -0
- ewoksid02/scripts/xpcs/__main__.py +3 -0
- ewoksid02/tasks/__init__.py +7 -0
- ewoksid02/tasks/averagetask.py +179 -0
- ewoksid02/tasks/azimuthaltask.py +272 -0
- ewoksid02/tasks/cavingtask.py +170 -0
- ewoksid02/tasks/dahuprocessingtask.py +71 -0
- ewoksid02/tasks/end.py +35 -0
- ewoksid02/tasks/id02processingtask.py +2582 -0
- ewoksid02/tasks/looptask.py +672 -0
- ewoksid02/tasks/metadatatask.py +879 -0
- ewoksid02/tasks/normalizationtask.py +204 -0
- ewoksid02/tasks/scalerstask.py +46 -0
- ewoksid02/tasks/secondaryscatteringtask.py +159 -0
- ewoksid02/tasks/sumtask.py +45 -0
- ewoksid02/tests/__init__.py +3 -0
- ewoksid02/tests/conftest.py +639 -0
- ewoksid02/tests/debug.py +64 -0
- ewoksid02/tests/test_2scat_node.py +119 -0
- ewoksid02/tests/test_ave_node.py +106 -0
- ewoksid02/tests/test_azim_node.py +89 -0
- ewoksid02/tests/test_cave_node.py +118 -0
- ewoksid02/tests/test_norm_node.py +190 -0
- ewoksid02/tests/test_saxs.py +69 -0
- ewoksid02/tests/test_sumtask.py +10 -0
- ewoksid02/tests/utils.py +514 -0
- ewoksid02/utils/__init__.py +22 -0
- ewoksid02/utils/average.py +158 -0
- ewoksid02/utils/blissdata.py +1157 -0
- ewoksid02/utils/caving.py +851 -0
- ewoksid02/utils/cupyutils.py +42 -0
- ewoksid02/utils/io.py +722 -0
- ewoksid02/utils/normalization.py +804 -0
- ewoksid02/utils/pyfai.py +424 -0
- ewoksid02/utils/secondaryscattering.py +597 -0
- ewoksid02-0.1.0.dist-info/METADATA +76 -0
- ewoksid02-0.1.0.dist-info/RECORD +54 -0
- ewoksid02-0.1.0.dist-info/WHEEL +5 -0
- ewoksid02-0.1.0.dist-info/entry_points.txt +5 -0
- ewoksid02-0.1.0.dist-info/licenses/LICENSE.md +20 -0
- ewoksid02-0.1.0.dist-info/top_level.txt +1 -0
ewoksid02/__init__.py
ADDED
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
{
|
|
2
|
+
"directed": true,
|
|
3
|
+
"multigraph": false,
|
|
4
|
+
"graph": {
|
|
5
|
+
"id": "saxs_loop",
|
|
6
|
+
"schema_version": "1.1"
|
|
7
|
+
},
|
|
8
|
+
"nodes": [
|
|
9
|
+
{
|
|
10
|
+
"task_type": "class",
|
|
11
|
+
"task_identifier": "ewoksid02.tasks.normalizationtask.NormalizationTask",
|
|
12
|
+
"force_start_node": true,
|
|
13
|
+
"default_inputs" : [{"name": "reading_node", "value": true}],
|
|
14
|
+
"id": "norm"
|
|
15
|
+
},
|
|
16
|
+
{
|
|
17
|
+
"task_type": "class",
|
|
18
|
+
"task_identifier": "ewoksid02.tasks.secondaryscatteringtask.SecondaryScatteringTask",
|
|
19
|
+
"id": "2scat"
|
|
20
|
+
},
|
|
21
|
+
{
|
|
22
|
+
"task_type": "class",
|
|
23
|
+
"task_identifier": "ewoksid02.tasks.cavingtask.CavingBeamstopTask",
|
|
24
|
+
"id": "cave"
|
|
25
|
+
},
|
|
26
|
+
{
|
|
27
|
+
"task_type": "class",
|
|
28
|
+
"task_identifier": "ewoksid02.tasks.azimuthaltask.AzimuthalTask",
|
|
29
|
+
"id": "azim"
|
|
30
|
+
},
|
|
31
|
+
{
|
|
32
|
+
"task_type": "class",
|
|
33
|
+
"task_identifier": "ewoksid02.tasks.averagetask.AverageTask",
|
|
34
|
+
"id": "ave"
|
|
35
|
+
},
|
|
36
|
+
{
|
|
37
|
+
"task_type": "class",
|
|
38
|
+
"task_identifier": "ewoksid02.tasks.scalerstask.ScalersTask",
|
|
39
|
+
"id": "scalers"
|
|
40
|
+
},
|
|
41
|
+
{
|
|
42
|
+
"task_type": "class",
|
|
43
|
+
"task_identifier": "ewoksid02.tasks.end.EndTask",
|
|
44
|
+
"id": "end"
|
|
45
|
+
}
|
|
46
|
+
],
|
|
47
|
+
"links": [
|
|
48
|
+
{
|
|
49
|
+
"map_all_data": true,
|
|
50
|
+
"source": "norm",
|
|
51
|
+
"target": "2scat",
|
|
52
|
+
"conditions": [
|
|
53
|
+
{
|
|
54
|
+
"source_output": "continue_pipeline",
|
|
55
|
+
"value": true
|
|
56
|
+
}
|
|
57
|
+
]
|
|
58
|
+
},
|
|
59
|
+
{
|
|
60
|
+
"map_all_data": true,
|
|
61
|
+
"source": "norm",
|
|
62
|
+
"target": "end",
|
|
63
|
+
"conditions": [
|
|
64
|
+
{
|
|
65
|
+
"source_output": "continue_pipeline",
|
|
66
|
+
"value": false
|
|
67
|
+
}
|
|
68
|
+
]
|
|
69
|
+
},
|
|
70
|
+
{
|
|
71
|
+
"map_all_data": true,
|
|
72
|
+
"source": "2scat",
|
|
73
|
+
"target": "cave"
|
|
74
|
+
},
|
|
75
|
+
{
|
|
76
|
+
"map_all_data": true,
|
|
77
|
+
"source": "cave",
|
|
78
|
+
"target": "azim"
|
|
79
|
+
},
|
|
80
|
+
{
|
|
81
|
+
"map_all_data": true,
|
|
82
|
+
"source": "azim",
|
|
83
|
+
"target": "ave"
|
|
84
|
+
},
|
|
85
|
+
{
|
|
86
|
+
"map_all_data": true,
|
|
87
|
+
"source": "ave",
|
|
88
|
+
"target": "scalers"
|
|
89
|
+
},
|
|
90
|
+
{
|
|
91
|
+
"map_all_data": true,
|
|
92
|
+
"source": "scalers",
|
|
93
|
+
"target": "norm"
|
|
94
|
+
}
|
|
95
|
+
]
|
|
96
|
+
}
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
###########################
|
|
2
|
+
## ewoksid02 template SAXS.yaml
|
|
3
|
+
###########################
|
|
4
|
+
|
|
5
|
+
bliss_filenames: # List of RAW_DATA files (proposal / sample / dataset files)
|
|
6
|
+
- /data/visitor/xxx/id02/xxx/RAW_DATA/xxx/.../file1.h5
|
|
7
|
+
#- /data/visitor/xxx/id02/xxx/RAW_DATA/xxx/.../file2.h5
|
|
8
|
+
|
|
9
|
+
detectors: # List of 2D detectors to integrate (Options: eiger2, waxs, eiger500k)
|
|
10
|
+
- eiger2
|
|
11
|
+
# - waxs
|
|
12
|
+
|
|
13
|
+
scans: # List of scan numbers (if empty, takes all)
|
|
14
|
+
- 1
|
|
15
|
+
# - 2
|
|
16
|
+
# - !slice 1:10 # Slice option: it will iterate between scans 1 and 10 (included)
|
|
17
|
+
|
|
18
|
+
tag: # Additional suffix to the output files (overwriting is forbidden)
|
|
19
|
+
output_root: # Root folder to save the output files (if empty, replace RAW_DATA -> PROCESSED_DATA)
|
|
20
|
+
|
|
21
|
+
# Processing parameters
|
|
22
|
+
to_process: "norm 2scat cave azim ave" # Include in this string which processing steps you want to perform (norm, 2scat, cave, azim, ave)
|
|
23
|
+
to_save: "norm 2scat cave azim ave" # Include in this string which processing results you want to save (norm, 2scat, cave, azim, ave)
|
|
24
|
+
|
|
25
|
+
# Correction parameters
|
|
26
|
+
filename_flatfield: # Path to the file with the flat field array (if empty, it will be taken from headers)
|
|
27
|
+
filename_darkcurrent: # Path to the file with the dark current array (if empty, it will be taken from headers)
|
|
28
|
+
filename_maskgaps: # Path to the file with the detector mask array (if empty, it will be taken from headers)
|
|
29
|
+
filename_maskbeamstop: # Path to the file with an additional detector mask, like beamstop (if empty, it will be taken from headers)
|
|
30
|
+
filename_window_wagon: # Path to the file with the WAXS pattern from the wagon window
|
|
31
|
+
WindowRoiSize: 120 # Size of the window used to convolute the WAXS pattern from the wagon window
|
|
32
|
+
|
|
33
|
+
# Submit parameters
|
|
34
|
+
submit: True # If False, it will save the instructions without submitting the job
|
|
35
|
+
slurm_job_parameters: # Some SLURM parameters to be tweaked if the job fails
|
|
36
|
+
time_limit: 01:00:00
|
|
37
|
+
memory_per_cpu: 50GB
|
|
File without changes
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
import os
|
|
3
|
+
import shutil
|
|
4
|
+
import sys
|
|
5
|
+
|
|
6
|
+
from ..utils import AVAILABLE_TEMPLATES, TEMPLATE_MESSAGE
|
|
7
|
+
from .parsers import add_saxs_arguments, add_xpcs_arguments
|
|
8
|
+
from .saxs.main import main as main_saxs
|
|
9
|
+
from .xpcs.__main__ import main as main_xpcs
|
|
10
|
+
|
|
11
|
+
TECHNIQUES = [
|
|
12
|
+
"saxs",
|
|
13
|
+
"xpcs",
|
|
14
|
+
]
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def main(argv=None):
    """Entry point for the ``ewoksid02`` offline processing scripts.

    Dispatches to the SAXS or XPCS pipeline, or copies the bundled
    workflow templates into the current directory.

    Inputs:
    - argv: command line arguments (defaults to ``sys.argv``)
    """
    if argv is None:
        argv = sys.argv

    parser = argparse.ArgumentParser(
        prog="ewoksid02",
        description="Run data processing pipelines for SAXS or XPCS.",
        usage="ewoksid02 {saxs | xpcs | templates} [...]",
    )
    subparsers = parser.add_subparsers(dest="command", required=True)

    subparser_saxs = subparsers.add_parser("saxs", help="Trigger SAXS pipelines")
    add_saxs_arguments(subparser_saxs)

    subparser_xpcs = subparsers.add_parser("xpcs", help="Trigger XPCS pipelines")
    add_xpcs_arguments(subparser_xpcs)

    subparsers.add_parser(
        "templates", help="Download available templates for ID02 offline pipelines"
    )

    if len(argv) == 1:
        parser.print_help()
        sys.exit(1)

    # "templates" takes no further arguments, so it is handled before
    # parse_args; this makes the old `elif args.command == "templates"`
    # branch unreachable, and it has been removed (the copy logic lives in
    # _copy_templates so it is written only once).
    if argv[1] == "templates":
        _copy_templates()
        return

    args = parser.parse_args(argv[1:])
    if args.command == "saxs":
        if len(args.FILES) == 0:
            subparser_saxs.print_help()
            sys.exit(1)
        main_saxs(args)
    elif args.command == "xpcs":
        main_xpcs(args)
    else:
        # Defensive fallback: argparse rejects unknown subcommands, so this
        # should never run with the current command set.
        parser.print_help()
        sys.exit(1)


def _copy_templates():
    """Copy every bundled template file to its target location."""
    print(TEMPLATE_MESSAGE)
    for template_info in AVAILABLE_TEMPLATES.values():
        os.makedirs(template_info["directory"], exist_ok=True)
        shutil.copy(template_info["path"], template_info["future_path"])
|
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
def add_celery_arguments(parser):
    """Attach the celery queue and job-submission options to *parser*."""
    parser.add_argument(
        "-q", "--queue", dest="queue", default=None,
        help="Queue name for the job submission (default: None)",
    )
    # store_false with default=None: the attribute stays None unless the
    # flag is given, in which case it becomes False.
    parser.add_argument(
        "--no-submit", dest="submit", default=None, action="store_false",
        help="Do not submit anything",
    )
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def add_workflow_flags(parser):
    """Attach the processing/saving step-selection flags to *parser*."""
    for flag_name, description in (
        ("to_process", "Steps to process"),
        ("to_save", "Steps to save"),
    ):
        parser.add_argument(
            f"--{flag_name}",
            dest=flag_name,
            default=None,
            help=description,
        )
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def add_main_arguments(parser):
    """Add to parser common arguments to run the offline pipeline.

    :param parser: argparse parser (or subparser) extended in place
    """
    parser.add_argument(
        "FILES",
        metavar="FILES",
        help="List of BLISS files read and reprocess",
        nargs="+",
    )
    parser.add_argument(
        "-n",
        "--scan",
        dest="scan_nb",
        nargs="+",
        help="Number of scan to process",
    )

    parser.add_argument(
        "-d",
        "--detector",
        dest="detector_name",
        default=None,
        help="Name of the Lima detector which data to process",
    )
    parser.add_argument(
        "-t",
        "--tag",
        dest="tag",
        default=None,
        help="Tag to be added on each processing filename. The workflow will not overwrite files",
    )
    parser.add_argument(
        # Bug fix: the short option was misspelled "--r"; "-r" is the
        # conventional form. "--r" is kept as an alias so existing command
        # lines remain valid.
        "-r",
        "--r",
        "--output-root",
        dest="output_root",
        help="Root directory for the output files",
        default=None,
    )
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def add_common_saxs_arguments(parser):
    """Attach the options shared by every SAXS/WAXS pipeline to *parser*."""
    # Declarative table of (option strings, add_argument keyword options).
    option_specs = (
        (("--metadata",),
         {"dest": "filename_metadata", "default": None,
          "help": "File with metadata used for processing"}),
        (("--max",),
         {"dest": "max_slice_size", "default": None, "type": int,
          "help": "Maximum number of slices to process at once. Default: 100"}),
        (("--dummy",),
         {"dest": "dummy", "default": None, "help": "Dummy value"}),
        (("--delta-dummy",),
         {"dest": "delta_dummy", "default": None, "help": "Delta dummy value"}),
        (("-l", "--log-level"),
         {"dest": "log_level", "default": None, "help": "Logging level."}),
    )
    for flags, options in option_specs:
        parser.add_argument(*flags, **options)
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def add_normalization_arguments(parser):
    """Add to parser arguments for NormalizationTask."""
    # Renamed from "help" to avoid shadowing the builtin help().
    help_prefix = "Normalization: "
    parser.add_argument(
        "--dark",
        dest="dark_current_filename",
        default=None,
        help=help_prefix + "Filename of the dark current file",
    )
    parser.add_argument(
        "--flat",
        dest="flat_field_filename",
        default=None,
        help=help_prefix + "Filename of the flat field file",
    )
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
def add_masking_arguments(parser):
    """Add to parser arguments for CavingTask."""
    # Renamed from "help" to avoid shadowing the builtin help().
    help_prefix = "Caving: "
    parser.add_argument(
        "--mask-gaps",
        dest="mask_gaps_filename",
        default=None,
        help=help_prefix + "Filename of the mask with detector gaps",
    )
    parser.add_argument(
        "--mask-beamstop",
        dest="mask_beamstop_filename",
        default=None,
        help=help_prefix + "Filename of the mask with the beamstop",
    )
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def add_secondary_scattering_arguments(parser):
    """Add to parser arguments for SecondaryScatteringTask."""
    # Renamed from "help" to avoid shadowing the builtin help().
    help_prefix = "Secondary Scattering: "
    parser.add_argument(
        "--window-pattern",
        dest="window_pattern_filename",
        default=None,
        help=help_prefix + "Filename of the window pattern file",
    )
    parser.add_argument(
        "--window-roi",
        dest="window_roi_size",
        default=None,
        help=help_prefix + "Subdata distance for secondary scattering correction",
    )
    # Both flip flags use store_true with default=None: the attribute stays
    # None unless the flag is given (so "unset" and "False" are distinct).
    parser.add_argument(
        "--flip-vertical",
        dest="flip_vertical",
        action="store_true",
        default=None,
        help=help_prefix + "Flip the image vertically (default: False)",
    )
    parser.add_argument(
        "--flip-horizontal",
        dest="flip_horizontal",
        action="store_true",
        default=None,
        help=help_prefix + "Flip the image horizontally (default: False)",
    )
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def add_azimuthal_arguments(parser):
    """Add to parser arguments for AzimuthalTask."""
    # Renamed from "help" to avoid shadowing the builtin help().
    help_prefix = "Azimuthal: "
    parser.add_argument(
        "--npt-rad",
        dest="npt_rad",
        default=None,
        help=help_prefix + "Number of radial bins",
    )
    parser.add_argument(
        "--npt-azim",
        dest="npt_azim",
        default=None,
        help=help_prefix + "Number of azimuthal bins",
    )
    parser.add_argument(
        "--unit",
        dest="unit",
        default=None,
        help=help_prefix + "Unit for azimuthal averaging",
    )
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
def add_average_arguments(parser):
    """Add to parser arguments for AverageTask."""
    parser.add_argument(
        "--azim-range",
        dest="azimuth_range",
        default=None,
        # The "Average: " prefix was inlined; the original local named
        # "help" shadowed the builtin help().
        help="Average: Azimuthal limits for the average",
    )
|
|
207
|
+
|
|
208
|
+
|
|
209
|
+
def add_saxs_arguments(parser):
    """Register every option group required by the SAXS/WAXS pipeline."""
    registrars = (
        add_main_arguments,
        add_common_saxs_arguments,
        add_celery_arguments,
        add_normalization_arguments,
        add_masking_arguments,
        add_secondary_scattering_arguments,
        add_azimuthal_arguments,
        add_average_arguments,
        add_workflow_flags,
    )
    for register in registrars:
        register(parser)
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
def add_xpcs_arguments(parser):
    """Register the options needed by the XPCS pipeline.

    Currently only the common main arguments are required.
    """
    add_main_arguments(parser)
|
|
File without changes
|
|
@@ -0,0 +1,255 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import time
|
|
3
|
+
from pprint import pprint
|
|
4
|
+
|
|
5
|
+
from ewokstools.reprocess import (
|
|
6
|
+
finish_queue,
|
|
7
|
+
generate_params_from_yaml_file,
|
|
8
|
+
get_datasets_list_id02,
|
|
9
|
+
get_params_from_cli,
|
|
10
|
+
save_and_execute,
|
|
11
|
+
validate_inputs_ewoks,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
from ...resources import WORKFLOW_SAXS_LOOP
|
|
15
|
+
from ...tasks.averagetask import AverageTask
|
|
16
|
+
from ...tasks.azimuthaltask import AzimuthalTask
|
|
17
|
+
from ...tasks.cavingtask import CavingBeamstopTask
|
|
18
|
+
from ...tasks.normalizationtask import NormalizationTask
|
|
19
|
+
from ...tasks.secondaryscatteringtask import SecondaryScatteringTask
|
|
20
|
+
from ...utils.blissdata import LIMA_URL_TEMPLATE_ID02, get_lima_url_template_args_id02
|
|
21
|
+
from ..utils import SLURM_JOB_PARAMETERS_SAXS
|
|
22
|
+
|
|
23
|
+
logger = logging.getLogger(__name__)
|
|
24
|
+
WORKER_MODULE = "scattering"
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def get_saxs_inputs(
    **kwargs,
) -> list:
    """Compile and return the list of inputs to be used on an ewoks SAXS/WAXS pipeline.

    Every entry is an ewoks input record ``{"name": ..., "value": ...}``,
    either broadcast to all nodes or targeted at a single workflow node
    through an ``"id"`` key (norm, 2scat, cave, azim, ave, scalers).
    """
    inputs = []

    ###########
    # Add all nodes inputs
    ###########
    inputs_dict_all_nodes = {
        "detector_name": kwargs.get("detector_name"),
        "filename_data": kwargs.get("dataset_filename"),
        "scan_nb": kwargs.get("scan_nb"),
        "filename_lima": kwargs.get("filename_scan"),
        "log_level": kwargs.get("log_level", "info"),
        "datatype": kwargs.get("datatype"),
        "save_variance": kwargs.get("save_variance"),
        "max_slice_size": kwargs.get("max_slice_size"),
        "lima_url_template": LIMA_URL_TEMPLATE_ID02,
        "lima_url_template_args": get_lima_url_template_args_id02(
            scan_number=kwargs.get("scan_nb"),
            detector_name=kwargs.get("detector_name"),
            collection_name=kwargs.get("collection_name"),
        ),
    }

    inputs += validate_inputs_ewoks(
        inputs=inputs_dict_all_nodes,
        all=True,
        ewoks_task=None,
        id=None,
    )

    #############
    # Add normalization inputs
    #############
    inputs_dict_norm = {
        "filename_mask": kwargs.get("filename_maskgaps"),
        "filename_dark": kwargs.get("filename_darkcurrent"),
        "filename_flat": kwargs.get("filename_flatfield"),
        "algorithm": kwargs.get("algorithm_norm", "cython"),
        "Dummy": kwargs.get("Dummy"),
        "DDummy": kwargs.get("DDummy"),
    }
    inputs += validate_inputs_ewoks(
        inputs=inputs_dict_norm,
        ewoks_task=NormalizationTask,
        id="norm",
    )

    #############
    # Add secondary scattering inputs
    #############
    inputs_dict_2scat = {
        "filename_window_wagon": kwargs.get("filename_window_wagon"),
        "WindowRoiSize": kwargs.get("WindowRoiSize"),
        "filename_mask_static": kwargs.get("filename_maskgaps"),
        "filename_mask_reference": kwargs.get("filename_maskbeamstop"),
        "algorithm": kwargs.get("algorithm_2scat", "numpy"),
    }
    inputs += validate_inputs_ewoks(
        inputs=inputs_dict_2scat,
        ewoks_task=SecondaryScatteringTask,
        id="2scat",
    )

    #############
    # Add caving inputs
    #############
    inputs_dict_cave = {
        "filename_mask_static": kwargs.get("filename_maskbeamstop"),
        "flip_caving": kwargs.get("flip_caving"),
        "algorithm": kwargs.get("algorithm_cave", "numpy"),
    }
    inputs += validate_inputs_ewoks(
        inputs=inputs_dict_cave,
        ewoks_task=CavingBeamstopTask,
        id="cave",
    )

    #############
    # Add azimuthal inputs
    #############
    inputs_dict_azim = {
        "filename_mask": kwargs.get("filename_maskbeamstop"),
        "npt_rad": kwargs.get("npt_rad"),
        "npt_azim": kwargs.get("npt_azim"),
        "unit": kwargs.get("unit"),
    }
    inputs += validate_inputs_ewoks(
        inputs=inputs_dict_azim,
        ewoks_task=AzimuthalTask,
        id="azim",
    )

    #############
    # Add average inputs
    #############
    inputs_dict_ave = {
        "azimuth_range": kwargs.get("azimuth_range"),
    }
    inputs += validate_inputs_ewoks(
        inputs=inputs_dict_ave,
        ewoks_task=AverageTask,
        id="ave",
    )

    #############
    # Add flag inputs
    #############
    # Bug fix: kwargs.get("to_process", "") still returns None when the key
    # is present with a None value (typical for an empty YAML field), which
    # used to crash on .split(). "or" maps both missing and None to "".
    to_process = (kwargs.get("to_process") or "").split(" ")
    to_save = (kwargs.get("to_save") or "").split(" ")
    nodes = ["norm", "2scat", "cave", "azim", "ave", "scalers"]

    inputs += [
        {"name": "do_process", "id": node, "value": node in to_process}
        for node in nodes
    ]
    inputs += [
        {"name": "do_save", "id": node, "value": node in to_save} for node in nodes
    ]

    ##############
    # Add processing filenames inputs
    ##############
    processing_filename_template = kwargs.get("processed_filename_scan")
    # Same None-guard as above for the optional tag.
    tag = kwargs.get("tag") or ""
    if tag:
        processing_filename_template = processing_filename_template.replace(
            ".h5", f"_{tag}.h5"
        )
    inputs_dict_filenames = {
        "norm": processing_filename_template.replace(".h5", "_norm.h5"),
        "2scat": processing_filename_template.replace(".h5", "_2scat.h5"),
        "cave": processing_filename_template.replace(".h5", "_cave.h5"),
        "azim": processing_filename_template.replace(".h5", "_azim.h5"),
        "ave": processing_filename_template.replace(".h5", "_ave.h5"),
        "scalers": processing_filename_template.replace(".h5", "_scalers.h5"),
    }
    inputs += [
        {"name": "processing_filename", "value": value, "id": task_id}
        for task_id, value in inputs_dict_filenames.items()
    ]

    return inputs
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
def main(args):
    """Main function to trigger the SAXS/WAXS pipeline.

    :param args: parsed command-line namespace (see ``add_saxs_arguments``)
    """
    saxs_parameters = {}
    saxs_dataset_parameters = {}

    # 1) Get parameters from .yaml files provided in the command line
    for saxs_parameters_from_yaml in generate_params_from_yaml_file(args.FILES):
        saxs_parameters.update(saxs_parameters_from_yaml)

    # 2) Add more bliss filenames from the command line
    # Bug fix: the += below used to raise KeyError (no YAML file given) or
    # TypeError (YAML field left empty -> None); normalize to a list first.
    saxs_parameters["bliss_filenames"] = list(
        saxs_parameters.get("bliss_filenames") or []
    )
    saxs_parameters["bliss_filenames"] += [
        file for file in args.FILES if file.endswith(".h5")
    ]

    # 3) Get parameters from the command line
    reprocess_parameters_from_cli = get_params_from_cli(args)
    saxs_parameters.update(reprocess_parameters_from_cli)

    # 4) If no input/pyfai parameter was provided, try through user input
    if not saxs_parameters.get("bliss_filenames"):
        saxs_parameters["bliss_filenames"] = (
            input(
                "No bliss filenames provided. Please enter the filenames (comma-separated): "
            )
            .strip()
            .split(",")
        )

    # 5) Iterate through the bliss saving objects
    dataset_list = get_datasets_list_id02(**saxs_parameters)
    nb_datasets = len(dataset_list)
    print(
        f"\033[92mFound {nb_datasets} datasets in {saxs_parameters['bliss_filenames']}\033[0m"
    )
    filenames_dataset = [
        dataset_info["dataset_filename"] for dataset_info in dataset_list
    ]
    print("\033[92m", end="")
    pprint(filenames_dataset)
    print("\033[0m", end="")

    if nb_datasets > 10:
        logger.warning(
            "More than 10 datasets found in this file. You have 10 seconds to cancel..."
        )
        time.sleep(10)

    for nb_submitted, dataset_info in enumerate(dataset_list, start=1):
        saxs_dataset_parameters = {
            **saxs_parameters,
            **dataset_info,
        }

        # A "dryrun" suffix marks non-submitted jobs in the output names.
        tag = saxs_dataset_parameters.get("tag", "")
        dryrun = "dryrun" if not saxs_dataset_parameters.get("submit") else ""
        tag = "_".join(filter(None, [tag, dryrun]))
        saxs_dataset_parameters["tag"] = tag

        # Take slurm parameters (dataset-level values override the defaults)
        slurm_job_parameters = {
            **SLURM_JOB_PARAMETERS_SAXS,
            **saxs_dataset_parameters.pop("slurm_job_parameters", {}),
        }

        save_and_execute(
            workflow=WORKFLOW_SAXS_LOOP,
            inputs=get_saxs_inputs(**saxs_dataset_parameters),
            slurm_job_parameters=slurm_job_parameters,
            processing_name=tag,
            **saxs_dataset_parameters,
            execution_kwargs={
                "engine": "ppf",
                "pool_type": "thread",
            },
            worker_module=WORKER_MODULE,
        )

        print(
            f"\033[92mSubmitted {nb_submitted}/{nb_datasets} datasets for reprocessing: {nb_submitted / nb_datasets * 100:.2f}%\033[0m"
        )

    finish_queue(**saxs_dataset_parameters)
|