ewoksid02 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ewoksid02/__init__.py +0 -0
- ewoksid02/ocl/__init__.py +0 -0
- ewoksid02/resources/__init__.py +8 -0
- ewoksid02/resources/saxs_loop.json +96 -0
- ewoksid02/resources/template_saxs.yaml +37 -0
- ewoksid02/scripts/__init__.py +0 -0
- ewoksid02/scripts/__main__.py +70 -0
- ewoksid02/scripts/parsers.py +224 -0
- ewoksid02/scripts/saxs/__init__.py +0 -0
- ewoksid02/scripts/saxs/main.py +255 -0
- ewoksid02/scripts/saxs/slurm_python_post_script.py +3 -0
- ewoksid02/scripts/saxs/slurm_python_pre_script.py +5 -0
- ewoksid02/scripts/utils.py +21 -0
- ewoksid02/scripts/xpcs/__init__.py +0 -0
- ewoksid02/scripts/xpcs/__main__.py +3 -0
- ewoksid02/tasks/__init__.py +7 -0
- ewoksid02/tasks/averagetask.py +179 -0
- ewoksid02/tasks/azimuthaltask.py +272 -0
- ewoksid02/tasks/cavingtask.py +170 -0
- ewoksid02/tasks/dahuprocessingtask.py +71 -0
- ewoksid02/tasks/end.py +35 -0
- ewoksid02/tasks/id02processingtask.py +2582 -0
- ewoksid02/tasks/looptask.py +672 -0
- ewoksid02/tasks/metadatatask.py +879 -0
- ewoksid02/tasks/normalizationtask.py +204 -0
- ewoksid02/tasks/scalerstask.py +46 -0
- ewoksid02/tasks/secondaryscatteringtask.py +159 -0
- ewoksid02/tasks/sumtask.py +45 -0
- ewoksid02/tests/__init__.py +3 -0
- ewoksid02/tests/conftest.py +639 -0
- ewoksid02/tests/debug.py +64 -0
- ewoksid02/tests/test_2scat_node.py +119 -0
- ewoksid02/tests/test_ave_node.py +106 -0
- ewoksid02/tests/test_azim_node.py +89 -0
- ewoksid02/tests/test_cave_node.py +118 -0
- ewoksid02/tests/test_norm_node.py +190 -0
- ewoksid02/tests/test_saxs.py +69 -0
- ewoksid02/tests/test_sumtask.py +10 -0
- ewoksid02/tests/utils.py +514 -0
- ewoksid02/utils/__init__.py +22 -0
- ewoksid02/utils/average.py +158 -0
- ewoksid02/utils/blissdata.py +1157 -0
- ewoksid02/utils/caving.py +851 -0
- ewoksid02/utils/cupyutils.py +42 -0
- ewoksid02/utils/io.py +722 -0
- ewoksid02/utils/normalization.py +804 -0
- ewoksid02/utils/pyfai.py +424 -0
- ewoksid02/utils/secondaryscattering.py +597 -0
- ewoksid02-0.1.0.dist-info/METADATA +76 -0
- ewoksid02-0.1.0.dist-info/RECORD +54 -0
- ewoksid02-0.1.0.dist-info/WHEEL +5 -0
- ewoksid02-0.1.0.dist-info/entry_points.txt +5 -0
- ewoksid02-0.1.0.dist-info/licenses/LICENSE.md +20 -0
- ewoksid02-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,879 @@
+# -*- coding: utf-8 -*-
+
+import json
+import logging
+import os
+import posixpath
+from collections.abc import Mapping
+from numbers import Integral
+from string import Formatter
+
+import h5py
+import numpy as np
+import pyFAI.io
+from blissdata.h5api import dynamic_hdf5, static_hdf5
+from ewoksxrpd.tasks.data_access import TaskWithDataAccess
+
+pyFAI.io.logger.setLevel(logging.ERROR)
+if pyFAI.version_info < (0, 20):
+    from .nexus import Nexus
+else:
+    from pyFAI.io import Nexus
+
+# Silence non-serious error messages, which are printed
+# because we use h5py in a new thread (not in the main one).
+# This bug seems to be fixed in newer versions:
+# https://github.com/h5py/h5py/issues/206
+
+try:
+    h5py._errors.silence_errors()
+except Exception:
+    pass
+
+StringTypes = (str, bytes)
+logger = logging.getLogger(__name__)
+
+__authors__ = ["Jérôme Kieffer"]
+__contact__ = "Jerome.Kieffer@ESRF.eu"
+__license__ = "MIT"
+__copyright__ = "European Synchrotron Radiation Facility, Grenoble, France"
+__date__ = "03/09/2020"
+__status__ = "development"
+__version__ = "1.0.0"
+
+# Deprecated, to be substituted by the Id02ProcessingTask children
+
+
+def ensure_str(junk):
+    "Return a unicode string, regardless of the input type"
+    if isinstance(junk, bytes):
+        return junk.decode()
+    else:
+        return str(junk)
+
+
+def channels_name_to_fullname(scan_info):
+    """
+    Return a dictionary mapping each channel name in the scan file to its
+    fullname (ensures consistency between scans: always use the fullname).
+    """
+
+    rd = {}
+
+    for sscan_i, k in enumerate(sorted(scan_info["acquisition_chain"].keys())):
+
+        sscan = sscan_i + 1
+
+        rd[sscan] = dict()
+
+        scal = (
+            scan_info["acquisition_chain"][k]["master"]["scalars"]
+            + scan_info["acquisition_chain"][k]["scalars"]
+        )
+        for countname in scal:
+
+            fullname = countname.replace(":", "_")
+            scnt = countname.split(":")
+
+            if len(scnt) == 2:
+                shortname = scnt[-1]
+            elif len(scnt) == 3:
+                shortname = f"{scnt[0]}_{scnt[-1]}"
+            else:
+                raise ValueError("Unknown shortname format")
+
+            if shortname not in rd[sscan]:
+                rd[sscan][shortname] = fullname
+            else:  # Name defined more than once in sscan
+                if (
+                    rd[sscan][shortname] is not None
+                ):  # First duplicate: re-register the previous entry under its fullname
+                    rd[sscan][rd[sscan][shortname]] = rd[sscan][
+                        shortname
+                    ]  # rd[fullname] = fullname
+                rd[sscan][shortname] = None  # Mark the ambiguous shortname for removal
+
+            if fullname in rd[sscan]:
+                raise ValueError("Fullname appears more than once in subscan")
+
+            rd[sscan][fullname] = fullname
+
+    # Remove the entries marked as ambiguous (None values)
+    to_delete = []
+
+    for sscan, dd in rd.items():
+        for k, v in dd.items():
+            if v is None:
+                to_delete += [
+                    (sscan, k),
+                ]
+
+    for s, k in to_delete:
+        del rd[s][k]
+
+    return rd
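
A sketch of the resulting mapping for a hypothetical Bliss `scan_info` fragment (illustrative channel names; not part of the wheel):

    scan_info = {"acquisition_chain": {"axis": {
        "master": {"scalars": ["timer:epoch"]},
        "scalars": ["mcs:ch1", "simu1:mcs:ch1"],
    }}}
    channels_name_to_fullname(scan_info)
    # -> {1: {"epoch": "timer_epoch", "timer_epoch": "timer_epoch",
    #         "ch1": "mcs_ch1", "mcs_ch1": "mcs_ch1",
    #         "simu1_ch1": "simu1_mcs_ch1", "simu1_mcs_ch1": "simu1_mcs_ch1"}}
    # Shortnames that collide between counters are dropped; the fullnames remain.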
+
+
+def get_bliss_scan_allcounters(
+    filename: str,
+    scan_nr: Integral,
+    nsubs: Integral,
+    **options,
+):
+    """Iterate over the data from one Bliss scan. The counters are assumed to have
+    as many data values as scan points.
+
+    :param str filename: the Bliss dataset filename
+    :param Integral scan_nr: the scan number in the dataset
+    :param Integral nsubs: the number of subscans in the scan
+    :param Number retry_timeout: give up when the data stays inaccessible for `retry_timeout` seconds
+    :param Number retry_period: interval in seconds between data access retries
+    :return dict: data
+    """
+
+    r = {}
+
+    # First ensure that all the subscans are finished (have end_time)
+    with dynamic_hdf5.File(filename, **options, locking=False) as root:
+
+        for subscan in range(1, nsubs + 1):
+            s = f"{scan_nr}.{subscan}"
+            _ = root[s]["end_time"]  # Blocks until the subscan has finished writing
+
+    with static_hdf5.File(filename, locking=False) as root:
+        for subscan in range(1, nsubs + 1):
+
+            fullscanname = f"{scan_nr}.{subscan}"
+
+            scan = root[fullscanname]
+            measurement = scan["measurement"]
+            r[int(subscan)] = {
+                n: np.array(measurement[n])
+                for n in measurement
+                if measurement[n].ndim == 1
+            }
+
+    return r
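
A usage sketch (the file name and scan number are illustrative; the keyword values match the call made in `MetadataTask.run` below):

    counters = get_bliss_scan_allcounters(
        "/data/id02/inhouse/sample/dataset.h5",
        scan_nr=42,
        nsubs=2,
        retry_timeout=1800,
        retry_period=10,
    )
    # counters[1]["elapsed_time"] -> 1D numpy array, one value per scan point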
+
+
+def safe_formatting(template, **fields):
+
+    r = ""
+
+    F = Formatter()
+
+    for lt, fn, fspec, conv in F.parse(template):
+        logger.debug("%s", (lt, fn, fspec, conv))
+        r += lt
+
+        val = fields.get(fn, None)
+
+        if val is not None:
+            try:
+                if conv is not None:
+                    val = F.convert_field(val, conv)
+
+                val = F.format_field(val, fspec)
+
+            except Exception:
+                if isinstance(val, str):
+                    val = f"!!!{val}!!!"
+                else:
+                    val = None
+
+        if val is None:
+            if fn is not None:
+                val = "{%s%s%s}" % (
+                    fn,
+                    "" if conv is None else f"!{conv}",
+                    "" if fspec is None or len(fspec) == 0 else f":{fspec}",
+                )
+            else:
+                val = ""
+
+        r += f"{val}"
+
+    return r
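
Unlike plain `str.format`, `safe_formatting` degrades gracefully on missing or unformattable fields, e.g. (illustrative values):

    safe_formatting("{sample}/{frame:04d}", sample="water")
    # -> 'water/{frame:04d}'  (missing field kept as a placeholder)
    safe_formatting("{sample:04d}", sample="water")
    # -> '!!!water!!!'        (string value that fails to format is flagged)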
+
+
+class MetadataTask(
+    TaskWithDataAccess,
+    input_names=["scan_info", "hdf5_filename", "headers"],
+    output_names=["input2", "counters"],
+    optional_input_names=[
+        "entry",
+        "instrument",
+        "raw_counter_names",
+        "val_counter_names",
+    ],
+):
+    """
+    Metadata task: reads the values of a specific scan and writes the scalers file
+    """
+
+    TO_SKIP = ("entry", "hdf5_filename", "plugin_name")
+
+    def __del__(self):
+        self.close()
+
+    def close(self):
+        if getattr(self, "nxs", None) is not None:
+            self.nxs.close()
+            self.nxs = None
+
+    def run(self):
+
+        # Read parameters and preprocess
+        self.hdf5_filename = self.inputs["hdf5_filename"]
+        self.entry = self.get_input_value("entry", "entry")
+        self.instrument = self.get_input_value("instrument", "TRUSAXS")
+        self.raw_counter_names = self.get_input_value("raw_counter_names", None)
+        self.val_counter_names = self.get_input_value("val_counter_names", None)
+
+        self.scan_info = self.inputs["scan_info"]
+
+        scan_nr = self.scan_info["scan_nb"]
+        nsubs = len(self.scan_info["acquisition_chain"])
+        filename = self.scan_info["filename"]
+
+        self.input2 = self.preproc(self.inputs["headers"])
+        self.outputs["input2"] = self.input2
+
+        # Create HDF5 output file (from DAHU sources)
+        self.create_hdf5()
+
+        # Read counters
+        self.counters = get_bliss_scan_allcounters(
+            filename, scan_nr, nsubs, retry_timeout=1800, retry_period=10
+        )
+
+        self.outputs["counters"] = self.counters
+
+        self.write_counters_to_file()
+
+        # Close file
+        self.close()
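
A hedged sketch of running the task standalone, assuming the standard ewokscore `Task` API (`execute()`, `get_output_values()`); all input values are illustrative:

    from ewoksid02.tasks.metadatatask import MetadataTask

    task = MetadataTask(
        inputs={
            "scan_info": scan_info,                   # Bliss scan_info dict
            "hdf5_filename": "/tmp/out/metadata.h5",  # illustrative output path
            "headers": headers,                       # HS32*/HSTime/HSI0/... metadata
            "raw_counter_names": {1: "mcs_ch1"},      # {1-based pin: counter name}
        }
    )
    task.execute()
    counters = task.get_output_values()["counters"]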
+
+    def _getPin(self, val):
+        try:
+            pin = int(val)
+        except ValueError:
+            if val in self.val_counter_names.values():
+                pin = int(
+                    list(self.val_counter_names.keys())[
+                        list(self.val_counter_names.values()).index(val)
+                    ]
+                )
+            else:
+                raise
+
+        return pin
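
`_getPin` resolves either a pin number or a counter name registered in `val_counter_names`, e.g. (illustrative mapping):

    # With self.val_counter_names = {2: "mcs_I0"}:
    task._getPin(2)         # -> 2
    task._getPin("mcs_I0")  # -> 2  (looked up by value, key returned)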
+
+    def write_counters_to_file(self):
+        """
+        Process and write the counters to the output file
+        """
+
+        fastscan = 1
+        cntcounts = dict()
+
+        # Merge both counter-name mappings to detect the fast scan
+        if self.val_counter_names is None:
+            fastscanchecker = self.raw_counter_names
+        else:
+            fastscanchecker = self.val_counter_names.copy()
+            if self.raw_counter_names is not None:
+                fastscanchecker.update(self.raw_counter_names)
+
+        if fastscanchecker is not None:
+            for subs in self.counters:
+                allthere = True
+                cntcounts[subs] = 0
+                for idx, name in fastscanchecker.items():
+                    if name not in self.counters[subs]:
+                        allthere = False
+                    else:
+                        cntcounts[subs] += 1
+
+                if allthere:
+                    fastscan = subs
+                    break
+            else:
+                maxc = 0
+                for subs, v in cntcounts.items():
+                    if maxc < v:
+                        maxc = v
+                        fastscan = subs
+
+                print(f"Fastscan not detected. Assuming subscan #{fastscan}.")
+        else:
+            print("No raw_counter_names nor val_counter_names option.")
+
+        nframes = None
+
+        if "npoints" in self.scan_info:
+            nframes = int(self.scan_info["npoints"])
+
+        if nframes is None:
+
+            for k in self.counters[fastscan]:
+                nframes = self.counters[fastscan][k].shape[0]
+                break
+
+        if nframes is None:
+            raise RuntimeError("Unable to determine the number of frames")
+
+        HS32C = -1 * np.ones((nframes, self.HS32Len), dtype=np.int64)
+        HS32V = np.zeros((nframes, self.HS32Len), dtype=np.float64)
+
+        # HS32C: raw counters
+        if self.raw_counter_names is not None:
+            if not isinstance(self.raw_counter_names, Mapping):
+                raise ValueError(
+                    "raw_counter_names must be an {index: counter name} mapping"
+                )
+
+            for idx, name in self.raw_counter_names.items():
+                try:
+                    counters_values = self.counters[fastscan][name].ravel()
+
+                    if counters_values.shape[0] > nframes:
+                        # More values than frames: truncate rather than fail
+                        counters_values = counters_values[:nframes]
+
+                    elif counters_values.shape[0] < nframes:
+                        logger.info(
+                            "There are fewer counter values than frames: padding with 0."
+                        )
+                        counters_values = np.append(
+                            counters_values,
+                            np.zeros(
+                                (nframes - counters_values.shape[0],),
+                                dtype=counters_values.dtype,
+                            ),
+                        )
+
+                    HS32C[:, int(idx) - 1] = counters_values
+                except KeyError:
+                    logger.error(f"{name} not in fastscan.")
+
+            ds = self.mcs_grp.create_dataset("HS32C", data=HS32C, dtype=HS32C.dtype)
+            ds.attrs["interpretation"] = "spectrum"
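
The `{index: name}` mappings are 1-based on the scaler pins, so each counter lands in column `idx - 1`; e.g. (illustrative names):

    raw_counter_names = {1: "mcs_ch1", 3: "mcs_ch3"}
    # -> HS32C[:, 0] holds mcs_ch1, HS32C[:, 2] holds mcs_ch3;
    #    untouched columns keep the -1 fill value.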
+
+        # HS32V: processed counters
+        if self.val_counter_names is not None:
+            if not isinstance(self.val_counter_names, Mapping):
+                raise ValueError(
+                    "val_counter_names must be an {index: counter name} mapping"
+                )
+
+            for idx, name in self.val_counter_names.items():
+                try:
+                    counters_values = self.counters[fastscan][name].ravel()
+
+                    if counters_values.shape[0] > nframes:
+                        raise RuntimeError(
+                            f"Counters have more values than nframes ({nframes}), this is not allowed!"
+                        )
+
+                    elif counters_values.shape[0] < nframes:
+                        logger.info(
+                            "There are fewer counter values than frames: padding with 0."
+                        )
+                        counters_values = np.append(
+                            counters_values,
+                            np.zeros(
+                                (nframes - counters_values.shape[0],),
+                                dtype=counters_values.dtype,
+                            ),
+                        )
+
+                    HS32V[:, int(idx) - 1] = counters_values
+                except KeyError:
+                    logger.error(f"{name} not in fastscan.")
+
+            ds = self.mcs_grp.create_dataset("HS32V", data=HS32V, dtype=HS32V.dtype)
+            ds.attrs["interpretation"] = "spectrum"
+
+        if "HSTime" in self.input2:
+
+            pin = self._getPin(self.input2["HSTime"])
+
+            if pin > self.HS32Len:
+                self.log_error("invalid pin number %s" % pin)
+            self.mcs_grp["HSTime"] = pin
+            self.mcs_grp["HSTime"].attrs["interpretation"] = "scalar"
+            self.mcs_grp["HSTime"].attrs["counter"] = "1-based pin number"
+            pin -= 1  # 1-based pin number to 0-based column index
+            time_counter = HS32C[:, pin]
+            if "HS32F" in self.mcs_grp:
+                factor = self.mcs_grp["HS32F"][pin]
+            else:
+                logger.info(
+                    "No factors provided for time measurement: defaulting to 1e-6"
+                )
+                factor = 1e-6
+            measured_time = time_counter * factor
+            self.mcs_grp["ExposureTime"] = measured_time
+            self.mcs_grp["ExposureTime"].attrs["interpretation"] = "scalar"
+
+            # I0/I1
+            sot = self.input2.get("ShutterOpeningTime", 0.0)
+            sct = self.input2.get("ShutterClosingTime", 0.0)
+            for name, value in (
+                ("ShutterOpeningTime", sot),
+                ("ShutterClosingTime", sct),
+            ):
+                self.mcs_grp[name] = value
+                self.mcs_grp[name].attrs["interpretation"] = "scalar"
+            correction_time = (measured_time - sot + sct) / (measured_time - sot)
+
+            for key in ("HSI0", "HSI1"):
+                if key in self.input2:
+                    dest = "Intensity" + key[-1]
+                    pin = self._getPin(self.input2[key])
+                    if pin > self.HS32Len:
+                        self.log_error("invalid pin number %s" % pin)
+                    self.mcs_grp[key] = pin
+                    self.mcs_grp[key].attrs["interpretation"] = "scalar"
+                    self.mcs_grp[key].attrs["counter"] = "1-based pin number"
+                    pin -= 1  # 1-based pin number to 0-based column index
+                    counter = HS32V[:, pin]
+                    # Alternative with dark-current correction (currently disabled):
+                    # factor = self.mcs_grp["HS32F"][pin]
+                    # zero = self.mcs_grp["HS32Z"][pin]
+                    # measured = (counter - measured_time * zero) * factor
+                    I_factor = float(self.input2.get(key + "Factor", 1.0))
+                    self.mcs_grp[key + "Factor"] = I_factor
+                    self.mcs_grp[key + "Factor"].attrs["interpretation"] = "scalar"
+                    measured = counter * I_factor
+                    self.mcs_grp[dest + "UnCor"] = measured
+                    self.mcs_grp[dest + "UnCor"].attrs["interpretation"] = "scalar"
+                    self.mcs_grp[dest + "ShutCor"] = measured * correction_time
+                    self.mcs_grp[dest + "ShutCor"].attrs["interpretation"] = "scalar"
+
+        else:
+            logger.error("No HSTime pin number")
+
+        # This is needed by SAXS utilities
+        for k in ("HMStartTime", "HMStartEpoch"):
+            if k in self.inputs.headers:
+                self.tfg_grp[k] = self.inputs.headers[k]
+
+        # Write all the raw and interpreted values (all counters)
+
+        translator = channels_name_to_fullname(self.scan_info)
+
+        raw_grp = self.mcs_grp["raw"]
+        for subs, v in self.counters.items():
+            subsk = f"subscan_{subs}"
+            raw_grp.require_group(subsk)
+
+            for k, c in v.items():
+                ds = raw_grp[subsk].create_dataset(k, data=c)
+                ds.attrs.update({"interpretation": "scalar"})
+
+        # Fast scan counters are not interpolated
+        for k in self.counters[fastscan]:
+
+            # FIXME: Soft links are better, but not supported by DAHU...
+            # fastscankey = f'subscan_{fastscan}'
+            # self.mcs_grp['interpreted'][k] = h5py.SoftLink(f'{raw_grp.name}/{fastscankey}/{k}')
+
+            counter_val = self.counters[fastscan][k]
+
+            if counter_val.shape[0] < nframes:
+                counter_val = np.append(
+                    counter_val, np.nan * np.ones((nframes - counter_val.shape[0],))
+                )
+
+            ds = self.mcs_grp["interpreted"].create_dataset(
+                translator[fastscan][k], data=counter_val
+            )
+            ds.attrs.update(
+                {
+                    "interpretation": "scalar",
+                    "operation": "",
+                    "subscan": fastscan,
+                    "original_name": k,
+                }
+            )
+
+        master_eltime_key = self.find_elapsed_time(fastscan)
+        if master_eltime_key is not None:
+            master_elapsed_time = self.counters[fastscan][master_eltime_key]
+
+            if master_elapsed_time.shape[0] < nframes:
+                _master_elapsed_time = np.append(
+                    master_elapsed_time,
+                    np.nan * np.ones((nframes - master_elapsed_time.shape[0],)),
+                )
+            else:
+                _master_elapsed_time = master_elapsed_time
+            # FIXME: Soft links are better, but not supported by DAHU...
+            # int_grp = self.mcs_grp['interpreted']
+            # self.tfg_grp['delta_time'] = h5py.SoftLink(f'{int_grp.name}/elapsed_time')
+            self.tfg_grp["delta_time"] = _master_elapsed_time
+
+        master_epoch_key = self.find_epoch(fastscan)
+        if master_epoch_key is not None:
+            master_epoch = self.counters[fastscan][master_epoch_key]
+
+            # The main epoch must be there: register it under the canonical
+            # name "epoch" when the master epoch counter is named differently.
+            if "epoch" not in self.mcs_grp["interpreted"]:
+                # FIXME: Soft links are better, but not supported by DAHU...
+                # self.mcs_grp['interpreted']['epoch'] = h5py.SoftLink(master_epoch_key)
+
+                if master_epoch.shape[0] < nframes:
+                    _master_epoch = np.append(
+                        master_epoch,
+                        np.nan * np.ones((nframes - master_epoch.shape[0],)),
+                    )
+                else:
+                    _master_epoch = master_epoch
+
+                self.mcs_grp["interpreted"]["epoch"] = _master_epoch
+
+                self.mcs_grp["interpreted"]["epoch"].attrs.update(
+                    {"interpretation": "scalar", "master_epoch": True}
+                )
+
+            # Interpolate and save values from the other subscans
+            for subs, v in self.counters.items():
+                if subs == fastscan:  # Skip fast scan interpolation
+                    continue
+
+                subsk = f"subscan_{subs}"
+                slave_epoch_key = self.find_epoch(subs)
+
+                if slave_epoch_key is not None:
+                    slave_epoch = self.counters[subs][slave_epoch_key]
+
+                    for k, c in v.items():
+                        # patch edgar 29-08-2025
+                        if len(slave_epoch) != len(self.counters[subs][k]):
+                            logger.error(
+                                f"Length of {slave_epoch_key} != length of counter"
+                            )
+                            continue
+
+                        interpdata = np.interp(
+                            master_epoch, slave_epoch, self.counters[subs][k]
+                        )
+
+                        if interpdata.shape[0] < nframes:
+                            interpdata = np.append(
+                                interpdata,
+                                np.nan * np.ones((nframes - interpdata.shape[0],)),
+                            )
+
+                        ds = self.mcs_grp["interpreted"].create_dataset(
+                            translator[subs][k], data=interpdata
+                        )
+                        ds.attrs.update(
+                            {
+                                "interpretation": "scalar",
+                                "operation": "interpolate",
+                                "subscan": subs,
+                                "original_name": k,
+                                "slave_epoch": slave_epoch_key,
+                                "master_epoch": master_epoch_key,
+                            }
+                        )
+
+                else:
+                    print(f"Unable to find an epoch time vector for subscan {subs}")
+
+        else:
+            print(
+                "Unable to find the master epoch time vector. Giving up on the interpolated values."
+            )
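
The resampling above is plain linear interpolation of the slow subscans onto the fast scan's epoch axis; a minimal illustration:

    master_epoch = np.array([0.0, 1.0, 2.0, 3.0])  # fast scan timestamps
    slave_epoch = np.array([0.0, 2.0])             # slow subscan timestamps
    slave_values = np.array([10.0, 30.0])
    np.interp(master_epoch, slave_epoch, slave_values)
    # -> array([10., 20., 30., 30.])  (clamped beyond the last slave point)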
+
+        # Fill TitleExtension if needed
+        if "TitleExtension" in self.inputs.headers:
+            template = str(self.inputs.headers["TitleExtension"])
+            cntnames = [
+                fn for _, fn, _, _ in Formatter().parse(template) if fn is not None
+            ]
+
+            if len(cntnames) > 0:
+
+                cntval = {}
+                maxl = 0
+
+                for cnt in cntnames:
+                    if cnt not in self.mcs_grp["interpreted"]:
+                        logger.error(f"{cnt} is not a valid counter.")
+                        cntval[cnt] = None
+                    else:
+                        cntval[cnt] = np.array(self.mcs_grp["interpreted"][cnt])
+                        if len(cntval[cnt]) > maxl:
+                            maxl = len(cntval[cnt])
+
+                if maxl > 0:
+                    t_ext = []
+                    for rval in self._counters_value_iter(cntval):
+                        # t_ext += [template.format(**rval)]  # would raise on missing counters
+                        t_ext += [safe_formatting(template, **rval)]
+
+                    if "TitleExtension" in self.info_grp:
+                        del self.info_grp["TitleExtension"]
+
+                    self.info_grp["TitleExtension"] = t_ext
+
+    def _counters_value_iter(self, cntval):
+
+        keys = cntval.keys()
+        maxl = max([0] + [len(v) for v in cntval.values() if v is not None])
+
+        for i in range(maxl):
+            frameval = {}
+            for k in keys:
+                if cntval[k] is None:
+                    frameval[k] = "CniS"  # Sentinel for an unavailable counter
+                else:
+                    try:
+                        frameval[k] = cntval[k][i]
+                    except IndexError:
+                        frameval[k] = "***"
+
+            yield frameval
+
+    def find_elapsed_time(self, subs):
+
+        cnts = self.counters[subs]
+
+        known_titles_in_priority_order = (
+            "elapsed_time",
+            "mcs_elapsed_time",
+            "timer_elapsed_time",
+        )
+
+        for t in known_titles_in_priority_order:
+            if t in cnts:
+                return t
+
+        # If the standard titles are not found, just look for *elapsed_time
+        for k in cnts:
+            if k.endswith("elapsed_time"):
+                return k
+
+        # Give up.
+        return None
+
+    def find_epoch(self, subs):
+
+        cnts = self.counters[subs]
+
+        known_titles_in_priority_order = ("epoch", "mcs_epoch", "timer_epoch")
+
+        for t in known_titles_in_priority_order:
+            if t in cnts:
+                return t
+
+        # If the standard titles are not found, just look for *epoch
+        for k in cnts:
+            if k.endswith("epoch"):
+                return k
+
+        # Give up.
+        return None
+
+    # From DAHU https://github.com/kif/dahu/blob/master/plugins/id02/metadata.py
+    def preproc(self, headers):
+        """Take a dict as input and form a metadata structure as output
+        @param headers: any dict
+        """
+        dd = headers.copy()
+        if "job_id" in dd:
+            dd.pop("job_id")
+        list_f = []
+        list_n = []
+        list_z = []
+        HS32Len = int(dd.get("HS32Len", 16))
+
+        self.HS32Len = HS32Len
+
+        HS32Depth = int(dd.get("HS32Depth", 32))
+        HSI0Factor = float(dd.get("HSI0Factor", 1))
+        HSI1Factor = float(dd.get("HSI1Factor", 1))
+        # Shutter times may come as strings like "0.005 s"
+        if "ShutterOpeningTime" in dd:
+            value = dd["ShutterOpeningTime"]
+            if isinstance(value, StringTypes):
+                ShutterOpeningTime = float(value.split()[0])
+            else:
+                ShutterOpeningTime = float(value)
+        else:
+            ShutterOpeningTime = 0
+        if "ShutterClosingTime" in dd:
+            value = dd["ShutterClosingTime"]
+            if isinstance(value, StringTypes):
+                ShutterClosingTime = float(value.split()[0])
+            else:
+                ShutterClosingTime = float(value)
+        else:
+            ShutterClosingTime = 0
+        for ind in (f"HS32F{x:02d}" for x in range(1, HS32Len + 1)):
+            list_f.append(float(dd[ind]))
+        self.HS32F = np.array(list_f)
+
+        for ind in (f"HS32N{x:02d}" for x in range(1, HS32Len + 1)):
+            list_n.append(dd[ind])
+        self.HS32N = list_n
+
+        for ind in (f"HS32Z{x:02d}" for x in range(1, HS32Len + 1)):
+            list_z.append(float(dd[ind]))
+        self.HS32Z = np.array(list_z)
+
+        info_dir = {}
+        for info_ind in dd:
+            if info_ind.startswith(("HS", "HM")):
+                continue
+            info_dir[info_ind] = dd[info_ind]
+
+        final_dir = {
+            "HS32Len": HS32Len,
+            "HS32Depth": HS32Depth,
+            "HSI0Factor": HSI0Factor,
+            "HSI1Factor": HSI1Factor,
+            "ShutterOpeningTime": ShutterOpeningTime,
+            "ShutterClosingTime": ShutterClosingTime,
+            "instrument": "id02",
+            "c216": "bliss",  # Device from which the metadata are read
+            "HS32F": list_f,
+            "HS32Z": list_z,
+            "HS32N": list_n,
+            "Info": info_dir,
+        }
+        for key in [
+            "HMStartEpoch",
+            "HMStartTime",
+            "hdf5_filename",
+            "entry",
+            "HSTime",
+            "HSI0",
+            "HSI1",
+        ]:
+            if key in dd:
+                final_dir[key] = dd[key]
+        return final_dir
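
A minimal sketch of the header dict `preproc` expects, with `HS32Len` shrunk to 2 to keep it short (all values illustrative; `task` is a `MetadataTask` instance):

    headers = {
        "HS32Len": 2,
        "HS32F01": "1e-6", "HS32F02": "1.0",  # per-pin scale factors
        "HS32N01": "Time", "HS32N02": "I0",   # per-pin names
        "HS32Z01": "0", "HS32Z02": "0",       # per-pin zero levels
        "HSTime": 1, "HSI0": 2,               # 1-based pin assignments
        "ShutterOpeningTime": "0.005 s",
    }
    meta = task.preproc(headers)
    # meta["HS32F"] == [1e-06, 1.0]; meta["ShutterOpeningTime"] == 0.005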
+
+    # From DAHU https://github.com/kif/dahu/blob/master/plugins/id02/metadata.py
+    def create_hdf5(self):
+        """
+        Create an HDF5 file and data structure
+        """
+
+        # Ensure the output directory exists
+        outdir = os.path.abspath(os.path.dirname(self.hdf5_filename))
+        print(f"Creating {outdir}")
+        os.makedirs(outdir, exist_ok=True)
+
+        try:
+            self.nxs = Nexus(self.hdf5_filename, mode="a", creator=__name__)
+        except IOError as error:
+            print(
+                "Unable to open %s: %s. Removing file and starting from scratch"
+                % (self.hdf5_filename, error)
+            )
+            os.unlink(self.hdf5_filename)
+            self.nxs = Nexus(self.hdf5_filename, mode="w", creator=__name__)
+
+        entry = self.nxs.new_entry(
+            self.entry, program_name=__name__, title="TFG metadata collection"
+        )
+        self.entry = entry.name
+        entry["program_name"].attrs["version"] = __version__
+
+        # Configuration
+        config_grp = self.nxs.new_class(entry, "configuration", "NXnote")
+        config_grp["type"] = "text/json"
+
+        inps = dict()
+        for k, v in self.inputs._container.items():
+            try:
+                inps[k] = v.serialize()
+            except Exception:
+                pass
+
+        config_grp["data"] = json.dumps(
+            inps, indent=2, separators=(",\r\n", ": "), default=str
+        )
+
+        # Instrument
+        instrument_grp = self.nxs.new_instrument(
+            entry=entry, instrument_name=self.instrument
+        )
+        instrument_grp["name"] = "TruSAXS"
+        self.instrument = instrument_grp.name
+
+        # TimeFrameGenerator
+        self.tfg_grp = self.nxs.new_class(instrument_grp, "TFG", "NXcollection")
+        self.tfg_grp["device"] = "bliss"
+
+        # MultiCounterScaler
+        self.mcs_grp = self.nxs.new_class(instrument_grp, "MCS", "NXcollection")
+        self.mcs_grp["device"] = "bliss"
+
+        # Static metadata
+        self.info_grp = self.nxs.h5.require_group(
+            posixpath.join(self.instrument, "parameters")
+        )
+        self.info_grp.attrs["NX_class"] = "NXcollection"
+
+        for field, value in self.input2.get("Info", {}).items():
+            if field not in self.TO_SKIP and not isinstance(value, dict):
+                try:
+                    value.encode("ascii")
+                except UnicodeEncodeError:
+                    print("Unicode Error in field %s: %s, skipping" % (field, value))
+                except AttributeError:
+                    self.info_grp[field] = str(value)
+                else:
+                    self.info_grp[field] = str(value)
+
+        # Factor
+        HS32F = self.input2.get("HS32F")
+        if HS32F is not None:
+            ds = self.mcs_grp.create_dataset("HS32F", data=HS32F)
+            ds.attrs["interpretation"] = "spectrum"
+        # Zero
+        HS32Z = self.input2.get("HS32Z")
+        if HS32Z is not None:
+            ds = self.mcs_grp.create_dataset("HS32Z", data=HS32Z)
+            ds.attrs["interpretation"] = "spectrum"
+        # Name
+        HS32N = self.input2.get("HS32N")
+        if HS32N is not None:
+            ds = self.mcs_grp.create_dataset("HS32N", data=[i.encode() for i in HS32N])
+            ds.attrs["interpretation"] = "spectrum"
+        # Mode
+        HS32M = self.input2.get("HS32M")
+        if HS32M is not None:
+            ds = self.mcs_grp.create_dataset("HS32M", data=HS32M)
+            ds.attrs["interpretation"] = "spectrum"
+
+        self.mcs_grp.require_group("interpreted")
+        self.mcs_grp.require_group("raw")