ChessAnalysisPipeline 0.0.17.dev3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHAP/TaskManager.py +216 -0
- CHAP/__init__.py +27 -0
- CHAP/common/__init__.py +57 -0
- CHAP/common/models/__init__.py +8 -0
- CHAP/common/models/common.py +124 -0
- CHAP/common/models/integration.py +659 -0
- CHAP/common/models/map.py +1291 -0
- CHAP/common/processor.py +2869 -0
- CHAP/common/reader.py +658 -0
- CHAP/common/utils.py +110 -0
- CHAP/common/writer.py +730 -0
- CHAP/edd/__init__.py +23 -0
- CHAP/edd/models.py +876 -0
- CHAP/edd/processor.py +3069 -0
- CHAP/edd/reader.py +1023 -0
- CHAP/edd/select_material_params_gui.py +348 -0
- CHAP/edd/utils.py +1572 -0
- CHAP/edd/writer.py +26 -0
- CHAP/foxden/__init__.py +19 -0
- CHAP/foxden/models.py +71 -0
- CHAP/foxden/processor.py +124 -0
- CHAP/foxden/reader.py +224 -0
- CHAP/foxden/utils.py +80 -0
- CHAP/foxden/writer.py +168 -0
- CHAP/giwaxs/__init__.py +11 -0
- CHAP/giwaxs/models.py +491 -0
- CHAP/giwaxs/processor.py +776 -0
- CHAP/giwaxs/reader.py +8 -0
- CHAP/giwaxs/writer.py +8 -0
- CHAP/inference/__init__.py +7 -0
- CHAP/inference/processor.py +69 -0
- CHAP/inference/reader.py +8 -0
- CHAP/inference/writer.py +8 -0
- CHAP/models.py +227 -0
- CHAP/pipeline.py +479 -0
- CHAP/processor.py +125 -0
- CHAP/reader.py +124 -0
- CHAP/runner.py +277 -0
- CHAP/saxswaxs/__init__.py +7 -0
- CHAP/saxswaxs/processor.py +8 -0
- CHAP/saxswaxs/reader.py +8 -0
- CHAP/saxswaxs/writer.py +8 -0
- CHAP/server.py +125 -0
- CHAP/sin2psi/__init__.py +7 -0
- CHAP/sin2psi/processor.py +8 -0
- CHAP/sin2psi/reader.py +8 -0
- CHAP/sin2psi/writer.py +8 -0
- CHAP/tomo/__init__.py +15 -0
- CHAP/tomo/models.py +210 -0
- CHAP/tomo/processor.py +3862 -0
- CHAP/tomo/reader.py +9 -0
- CHAP/tomo/writer.py +59 -0
- CHAP/utils/__init__.py +6 -0
- CHAP/utils/converters.py +188 -0
- CHAP/utils/fit.py +2947 -0
- CHAP/utils/general.py +2655 -0
- CHAP/utils/material.py +274 -0
- CHAP/utils/models.py +595 -0
- CHAP/utils/parfile.py +224 -0
- CHAP/writer.py +122 -0
- MLaaS/__init__.py +0 -0
- MLaaS/ktrain.py +205 -0
- MLaaS/mnist_img.py +83 -0
- MLaaS/tfaas_client.py +371 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/LICENSE +60 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/METADATA +29 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/RECORD +70 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/WHEEL +5 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/entry_points.txt +2 -0
- chessanalysispipeline-0.0.17.dev3.dist-info/top_level.txt +2 -0
CHAP/common/reader.py
ADDED
@@ -0,0 +1,658 @@
#!/usr/bin/env python
"""
File : reader.py
Author : Valentin Kuznetsov <vkuznet AT gmail dot com>
Description: Module for Readers used in multiple experiment-specific workflows.
"""

# System modules
from typing import (
    Optional,
    Union,
)

# Third party modules
import numpy as np
from pydantic import (
    PrivateAttr,
    conint,
    conlist,
    constr,
    model_validator,
)

# Local modules
from CHAP import Reader
from CHAP.reader import validate_reader_model
from CHAP.common.models.map import (
    DetectorConfig,
    SpecConfig,
)

def validate_model(model):
    if model.filename is not None:
        validate_reader_model(model)
    return model


class BinaryFileReader(Reader):
    """Reader for binary files."""
    def read(self):
        """Return the content of a given binary file.

        :return: File content.
        :rtype: binary
        """
        with open(self.filename, 'rb') as file:
            data = file.read()
        return data


class ConfigReader(Reader):
    """Reader for YAML files that optionally verifies the contents
    against its Pydantic configuration schema.
    """
    def read(self):
        """Return an optionally verified dictionary from the contents
        of a yaml file.
        """
        data = YAMLReader(**self.model_dump()).read()
        #print(f'\nConfigReader.read start data {type(data)}:')
        raise RuntimeError(
            'FIX ConfigReader downstream validators do not like a pydantic '
            'class as output of a reader, but returning data.model_dict() '
            'instead screws up default value identification')
        #pprint(data)
        if self.get_schema() is not None:
            data = self.get_config(config=data, schema=self.get_schema())
        self.status = 'read'
        #print(f'\nConfigReader.read end data {type(data)}:')
        #pprint(data)
        return data


class FabioImageReader(Reader):
    """Reader for images using the `fabio` python package.

    :ivar frame: Index of a specific frame to read from the file(s),
        defaults to `None`.
    :type frame: int, optional
    """
    frame: Optional[conint(ge=0)] = None

    def read(self):
        """Return the data from the image file(s) provided.

        :returns: Image data as a numpy array (or list of numpy
            arrays, if a glob pattern matching more than one file was
            provided).
        :rtype: Union[numpy.ndarray, list[numpy.ndarray]]
        """
        # Third party modules
        from glob import glob
        import fabio

        filenames = glob(self.filename)
        data = []
        for f in filenames:
            image = fabio.open(f, frame=self.frame)
            data.append(image.data)
            image.close()
        return data


class H5Reader(Reader):
    """Reader for h5 files.

    :ivar h5path: Path to a specific location in the h5 file to read
        data from, defaults to `'/'`.
    :type h5path: str, optional
    :ivar idx: Data slice to read from the object at the specified
        location in the h5 file.
    :type idx: list[int], optional

    """
    h5path: Optional[constr(strip_whitespace=True, min_length=1)] = '/'
    idx: Optional[conlist(min_length=1, max_length=3, item_type=int)] = None

    def read(self):
        """Return the data object stored at `h5path` in an h5-file.

        :return: Object indicated by `filename` and `h5path`.
        :rtype: object
        """
        # Third party modules
        from h5py import File

        data = File(self.filename, 'r')[self.h5path]
        if self.idx is not None:
            data = data[tuple(self.idx)]
        return data


class LinkamReader(Reader):
    """Reader for loading Linkam load frame .txt files as an
    `NXdata`.

    :ivar columns: Column names to read in, defaults to None
        (read in all columns).
    :type columns: list[str], optional
    """
    columns: Optional[conlist(
        item_type=constr(strip_whitespace=True, min_length=1))] = None

    def read(self):
        """Read specified columns from the given Linkam file.

        :returns: Linkam data represented in an `NXdata` object.
        :rtype: nexusformat.nexus.NXdata
        """
        # Third party modules
        from nexusformat.nexus import (
            NXdata,
            NXfield,
        )

        # Parse .txt file
        start_time, metadata, data = LinkamReader.parse_file(
            self.filename, self.logger)

        # Get list of actual data column names and corresponding
        # signal nxnames (same as user-supplied column names)
        signal_names = []
        if self.columns is None:
            signal_names = [(col, col) for col in data.keys() if col != 'Time']
        else:
            for col in self.columns:
                col_actual = col
                if col == 'Distance':
                    col_actual = 'Force V Distance_X'
                elif col == 'Force':
                    col_actual = 'Force V Distance_Y'
                elif not col in data:
                    if f'{col}_Y' in data:
                        # Always use the *_Y column if the user-supplied
                        # column name has both _X and _Y components
                        col_actual = f'{col}_Y'
                    else:
                        self.logger.warning(
                            f'{col} not present in {self.filename}')
                        continue
                signal_names.append((col_actual, col))
        self.logger.info(f'Using (column name, signal name): {signal_names}')

        nxdata = NXdata(
            axes=(NXfield(
                name='Time',
                value=np.array(data['Time']) + start_time,
                dtype='float64',
            ),),
            **{col: NXfield(
                name=col,
                value=data[col_actual],
                dtype='float32',
            ) for col_actual, col in signal_names},
            attrs=metadata
        )
        return nxdata

    @classmethod
    def parse_file(cls, filename, logger):
        """Return start time, metadata, and data stored in the
        provided Linkam .txt file.

        :returns: The start time, metadata, and data columns.
        :rtype: tuple(float, dict[str, str], dict[str, list[float]])
        """
        # System modules
        from datetime import datetime
        import os
        import re

        # Get t=0 from filename
        start_time = None
        basename = os.path.basename(filename)
        pattern = r'(\d{2}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}-\d{2})'
        match = re.search(pattern, basename)
        if match:
            datetime_str = match.group(1)
            dt = datetime.strptime(datetime_str, '%d-%m-%y_%H-%M-%S-%f')
            start_time = dt.timestamp()
        else:
            logger.warning(f'Datetime not found in {filename}')

        # Get data and metadata from file contents
        metadata = {}
        data = False
        with open(filename, 'r', encoding='utf-8') as inf:
            for line in inf:
                line = line.strip()
                if not line:
                    continue
                if data:
                    # If data dict has been initialized, remaining
                    # lines are all data values
                    values = line.replace(',', '').split('\t')
                    for val, col in zip(values, list(data.keys())):
                        try:
                            val = float(val)
                        except Exception as exc:
                            logger.warning(
                                f'Cannot convert {col} value to float: {val} '
                                f'({exc})')
                            continue
                        else:
                            data[col].append(val)
                if ':' in line:
                    # Metadata key: value pair kept on this line
                    _metadata = line.split(':', 1)
                    if len(_metadata) == 2:
                        key, val = _metadata
                    else:
                        continue
                        key, val = _metadata[0], None
                    metadata[key] = val
                if re.match(r'^([\w\s\w]+)(\t\t[\w\s\w]+)*$', line):
                    # Match found for start of data section -- this
                    # line and the next are column labels.
                    data_cols = []
                    # Get base quantity column names
                    base_cols = line.split('\t\t')
                    # Get Index, X and Y component columns
                    line = next(inf)
                    comp_cols = line.split('\t')
                    # Assemble actual column names
                    data_cols.append('Index')
                    comp_cols_count = int((len(comp_cols) - 1) / 2)
                    for i in range(comp_cols_count):
                        data_cols.extend(
                            [f'{base_cols[i]}_{comp}' for comp in ('X', 'Y')]
                        )
                    if len(base_cols) > comp_cols_count:
                        data_cols.extend(base_cols[comp_cols_count - 1:])
                    # First column (after 0th) is actually Time
                    data_cols[1] = 'Time'
                    # Start of data lines
                    data = {col: [] for col in data_cols}
                    logger.info(f'Found data columns: {data_cols}')

        return start_time, metadata, data


class NexusReader(Reader):
    """Reader for NeXus files.

    :ivar nxpath: Path to a specific location in the NeXus file tree
        to read from, defaults to `'/'`.
    :type nxpath: str, optional
    :ivar nxmemory: Maximum memory usage when reading NeXus files.
    :type nxmemory: int, optional
    """
    nxpath: Optional[constr(strip_whitespace=True, min_length=1)] = '/'
    nxmemory: Optional[conint(gt=0)] = None

    def read(self):
        """Return the NeXus object stored at `nxpath` in a NeXus file.

        :raises nexusformat.nexus.NeXusError: If `filename` is not a
            NeXus file or `nxpath` is not in its tree.
        :return: NeXus object indicated by `filename` and `nxpath`.
        :rtype: nexusformat.nexus.NXobject
        """
        # Third party modules
        from nexusformat.nexus import (
            nxload,
            nxsetconfig,
        )

        if self.nxmemory is not None:
            nxsetconfig(memory=self.nxmemory)
        return nxload(self.filename)[self.nxpath]


class NXdataReader(Reader):
    """Reader for constructing an NXdata object from components."""
    def read(self, name, nxfield_params, signal_name, axes_names, attrs=None):
        """Return a basic NXdata object constructed from components.

        :param name: NXdata group name.
        :type name: str
        :param nxfield_params: List of sets of parameters for
            `NXfieldReader` specifying the NXfields belonging to the
            NXdata.
        :type nxfield_params: list[dict]
        :param signal_name: Name of the signal for the NXdata (must be
            one of the names of the NXfields indicated in `nxfields`).
        :type signal_name: str
        :param axes_names: Name or names of the coordinate axes
            NXfields associated with the signal (must be names of
            NXfields indicated in `nxfields`).
        :type axes_names: Union[str, list[str]]
        :param attrs: Dictionary of additional attributes for the
            NXdata.
        :type attrs: dict, optional
        :returns: A new NXdata object.
        :rtype: nexusformat.nexus.NXdata
        """
        # Third party modules
        from nexusformat.nexus import NXdata

        # Read in NXfields
        nxfields = [NXfieldReader().read(**params, inputdir=self.inputdir)
                    for params in nxfield_params]
        nxfields = {nxfield.nxname: nxfield for nxfield in nxfields}

        # Get signal NXfield
        try:
            nxsignal = nxfields[signal_name]
        except Exception as exc:
            raise ValueError(
                '`signal_name` must be the name of one of the NXfields '
                'indicated in `nxfields`: ' + ', '.join(nxfields.keys())) from exc

        # Get axes NXfield(s)
        if isinstance(axes_names, str):
            axes_names = [axes_names]
        try:
            nxaxes = [nxfields[axis_name] for axis_name in axes_names]
        except Exception as exc:
            raise ValueError(
                '`axes_names` must contain only names of NXfields indicated '
                'in `nxfields`: ' + ', '.join(nxfields.keys())) from exc
        for i, nxaxis in enumerate(nxaxes):
            if len(nxaxis) != nxsignal.shape[i]:
                raise ValueError(
                    f'Shape mismatch on signal dimension {i}: signal '
                    + f'"{nxsignal.nxname}" has {nxsignal.shape[i]} values, '
                    + f'but axis "{nxaxis.nxname}" has {len(nxaxis)} values.')

        if attrs is None:
            attrs = {}
        result = NXdata(signal=nxsignal, axes=nxaxes, name=name, attrs=attrs,
                        **nxfields)
        self.logger.info(result.tree)
        return result


class NXfieldReader(Reader):
    """Reader for an NXfield with options to modify certain attributes.
    """
    def read(self, nxpath, nxname=None, update_attrs=None, slice_params=None):
        """Return a copy of the indicated NXfield from the file. Name
        and attributes of the returned copy may be modified with the
        `nxname` and `update_attrs` keyword arguments.

        :param nxpath: Path in `nxfile` pointing to the NXfield to
            read.
        :type nxpath: str
        :param nxname: New name for the returned NXfield.
        :type nxname: str, optional
        :param update_attrs: Optional dictionary used to add to /
            update the original NXfield's attributes.
        :type update_attrs: dict, optional
        :param slice_params: Parameters for returning just a slice of
            the full field data. Slice parameters are provided in a
            list of dictionaries with integer values for any / all of
            the following keys: `"start"`, `"end"`, `"step"`. Default
            values used are: `"start"` -- `0`, `"end"` -- `None`,
            `"step"` -- `1`. The order of the list must correspond to
            the order of the field's axes.
        :type slice_params: list[dict[str, int]], optional
        :returns: A copy of the indicated NXfield (with name and
            attributes optionally modified).
        :rtype: nexusformat.nexus.NXfield
        """
        # Third party modules
        from nexusformat.nexus import (
            NXfield,
            nxload,
        )

        nxroot = nxload(self.filename)
        nxfield = nxroot[nxpath]

        if nxname is None:
            nxname = nxfield.nxname

        attrs = nxfield.attrs
        if update_attrs is not None:
            attrs.update(update_attrs)

        if slice_params is None:
            value = nxfield.nxdata
        else:
            if len(slice_params) < nxfield.ndim:
                slice_params.extend([{}] * (nxfield.ndim - len(slice_params)))
            if len(slice_params) > nxfield.ndim:
                slice_params = slice_params[0:nxfield.ndim]
            slices = ()
            default_slice = {'start': 0, 'end': None, 'step': 1}
            for s in slice_params:
                for k, v in default_slice.items():
                    if k not in s:
                        s[k] = v
                slices = (*slices, slice(s['start'], s['end'], s['step']))
            value = nxfield.nxdata[slices]

        nxfield = NXfield(value=value, name=nxname, attrs=attrs)
        self.logger.debug(f'Result -- nxfield.tree =\n{nxfield.tree}')

        return nxfield


class SpecReader(Reader):
    """Reader for CHESS SPEC scans.

    :ivar config: SPEC configuration to be passed directly to the
        constructor of `CHAP.common.models.map.SpecConfig`.
    :type config: dict, optional
    :ivar detector_config: Detector configurations of the detectors to
        include raw data for in the returned NeXus NXroot object,
        defaults to None (only a valid input for EDD).
    :type detector_config: Union[
        dict, common.models.map.DetectorConfig], optional
    :ivar filename: Name of file to read from.
    :type filename: str, optional
    """
    config: Optional[Union[dict, SpecConfig]] = None
    detector_config: Optional[DetectorConfig] = None
    filename: Optional[str] = None

    _mapping_filename: PrivateAttr(default=None)

    _validate_filename = model_validator(mode="after")(validate_model)

    @model_validator(mode='after')
    def validate_specreader_after(self):
        """Validate the `SpecReader` configuration.

        :return: The validated configuration.
        :rtype: PipelineItem
        """
        if self.filename is not None:
            if self.config is not None:
                raise ValueError('Specify either filename or config in '
                                 'common.SpecReader, not both')
            self.config = YAMLReader(**self.model_dump()).read()
        self.config = self.get_config(
            config=self.config, schema='common.models.map.SpecConfig')
        if self.detector_config is None:
            if self.config.experiment_type != 'EDD':
                raise RuntimeError(
                    'Missing parameter detector_config for experiment type '
                    f'{self.config.experiment_type}')
        return self

    def read(self):
        """Take a SPEC configuration filename or dictionary and return
        the raw data as a NeXus NXroot object.

        :return: Data from the provided SPEC configuration.
        :rtype: nexusformat.nexus.NXroot
        """
        # Third party modules
        # pylint: disable=no-name-in-module
        from json import dumps
        from nexusformat.nexus import (
            NXcollection,
            NXdata,
            NXentry,
            NXfield,
            NXroot,
        )
        # pylint: enable=no-name-in-module

        # Local modules
        from CHAP.common.models.map import Detector

        #print(f'\n\nSpecReader.read\nself.config:')
        #pprint(self.config)
        #print(f'\n\ndetector_config:')
        #pprint(self.detector_config)
        #print(f'\n\n')

        # Create the NXroot object
        nxroot = NXroot()
        nxentry = NXentry(name=self.config.experiment_type)
        nxroot[nxentry.nxname] = nxentry
        nxentry.set_default()

        # Set up NXentry and add misc. CHESS-specific metadata as well
        # as all spec_motors, scan_columns, and smb_pars, and the
        # detector info and raw detector data
        nxentry.config = self.config.model_dump_json()
        nxentry.attrs['station'] = self.config.station
        nxentry.spec_scans = NXcollection()
        # nxpaths = []
        if self.config.experiment_type == 'EDD':
            detector_data_format = None
        for scans in self.config.spec_scans:
            nxscans = NXcollection()
            nxentry.spec_scans[f'{scans.scanparsers[0].scan_name}'] = nxscans
            nxscans.attrs['spec_file'] = str(scans.spec_file)
            nxscans.attrs['scan_numbers'] = scans.scan_numbers
            for scan_number in scans.scan_numbers:
                scanparser = scans.get_scanparser(scan_number)
                if self.config.experiment_type == 'EDD':
                    if detector_data_format is None:
                        detector_data_format = scanparser.detector_data_format
                    elif (scanparser.detector_data_format !=
                            detector_data_format):
                        raise NotImplementedError(
                            'Mixing `spec` and `h5` data formats')
                    if self.detector_config is None:
                        detectors_ids = None
                    elif detector_data_format == 'spec':
                        raise NotImplementedError(
                            'detector_data_format = "spec"')
                    else:
                        detectors_ids = [
                            int(d.get_id())
                            for d in self.detector_config.detectors]
                nxscans[scan_number] = NXcollection()
                try:
                    nxscans[scan_number].spec_motors = dumps(
                        {k:float(v) for k,v
                         in scanparser.spec_positioner_values.items()})
                except Exception:
                    pass
                try:
                    nxscans[scan_number].scan_columns = dumps(
                        {k:list(v) for k,v
                         in scanparser.spec_scan_data.items() if len(v)})
                except Exception:
                    pass
                try:
                    nxscans[scan_number].smb_pars = dumps(
                        {k:v for k,v in scanparser.pars.items()})
                except Exception:
                    pass
                try:
                    nxscans[scan_number].spec_scan_motor_mnes = dumps(
                        scanparser.spec_scan_motor_mnes)
                except Exception:
                    pass
                if self.config.experiment_type == 'EDD':
                    nxdata = NXdata()
                    nxscans[scan_number].data = nxdata
                    # nxpaths.append(
                    #     f'spec_scans/{nxscans.nxname}/{scan_number}/data')
                    nxdata.data = NXfield(
                        value=scanparser.get_detector_data(detectors_ids)[0])
                else:
                    if self.config.experiment_type == 'TOMO':
                        dtype = np.float32
                    else:
                        dtype = None
                    nxdata = NXdata()
                    nxscans[scan_number].data = nxdata
                    # nxpaths.append(
                    #     f'spec_scans/{nxscans.nxname}/{scan_number}/data')
                    for detector in self.detector_config.detectors:
                        nxdata[detector.get_id()] = NXfield(
                            value=scanparser.get_detector_data(
                                detector.get_id(), dtype=dtype))

        if (self.config.experiment_type == 'EDD' and
                self.detector_config is None):
            if detector_data_format == 'spec':
                raise NotImplementedError('detector_data_format = "spec"')
            self.detector_config = DetectorConfig(
                detectors=[
                    Detector(id=i) for i in range(nxdata.data.shape[1])])
            nxentry.detectors = self.detector_config.model_dump_json()

        #return nxroot, nxpaths
        return nxroot


class URLReader(Reader):
    """Reader for data available over HTTPS."""
    def read(self, url, headers=None, timeout=10):
        """Make an HTTPS request to the provided URL and return the
        results. Headers for the request are optional.

        :param url: URL to read.
        :type url: str
        :param headers: Headers to attach to the request.
        :type headers: dict, optional
        :param timeout: Timeout for the HTTPS request,
            defaults to `10`.
        :type timeout: int
        :return: Content of the response.
        :rtype: object
        """
        # System modules
        import requests

        if headers is None:
            headers = {}
        resp = requests.get(url, headers=headers, timeout=timeout)
        data = resp.content

        self.logger.debug(f'Response content: {data}')

        return data


class YAMLReader(Reader):
    """Reader for YAML files."""
    def read(self):
        """Return a dictionary from the contents of a yaml file.

        :return: Contents of the file.
        :rtype: dict
        """
        # Third party modules
        import yaml

        with open(self.filename) as f:
            data = yaml.safe_load(f)
        return data


if __name__ == '__main__':
    # Local modules
    from CHAP.reader import main

    main()
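
To make the filename-based start-time extraction in LinkamReader.parse_file concrete, here is a minimal standalone sketch; the basename below is a made-up example, but the regular expression and strptime format are the ones used in the module:

# Standalone check of the t=0 extraction used by LinkamReader.parse_file.
# The basename is a hypothetical example matching the expected pattern.
import re
from datetime import datetime

basename = 'sample_loadframe_01-02-24_13-45-10-50.txt'
pattern = r'(\d{2}-\d{2}-\d{2}_\d{2}-\d{2}-\d{2}-\d{2})'
match = re.search(pattern, basename)
if match:
    dt = datetime.strptime(match.group(1), '%d-%m-%y_%H-%M-%S-%f')
    print(dt.isoformat(), dt.timestamp())  # t=0 for the 'Time' axis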
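
Similarly, the slice_params convention documented in NXfieldReader.read can be illustrated with plain numpy; the build_slices helper below is hypothetical (not part of CHAP) and only mirrors the padding and defaulting logic shown above:

# Hypothetical helper mirroring NXfieldReader.read's slice_params handling:
# one dict per axis with optional 'start', 'end', 'step' keys, padded or
# truncated to the field's number of dimensions.
import numpy as np

def build_slices(slice_params, ndim):
    slice_params = list(slice_params)
    if len(slice_params) < ndim:
        slice_params.extend([{}] * (ndim - len(slice_params)))
    slice_params = slice_params[:ndim]
    return tuple(
        slice(s.get('start', 0), s.get('end', None), s.get('step', 1))
        for s in slice_params)

data = np.arange(24).reshape(2, 3, 4)
print(data[build_slices([{'start': 1}, {'end': 2}], data.ndim)].shape)  # (1, 2, 4)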