ChessAnalysisPipeline-0.0.4-py3-none-any.whl → ChessAnalysisPipeline-0.0.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- CHAP/TaskManager.py +214 -0
- CHAP/common/models/__init__.py +0 -2
- CHAP/common/models/integration.py +392 -249
- CHAP/common/models/map.py +350 -198
- CHAP/common/processor.py +229 -191
- CHAP/common/reader.py +52 -39
- CHAP/common/utils/__init__.py +0 -37
- CHAP/common/utils/fit.py +1197 -991
- CHAP/common/utils/general.py +629 -372
- CHAP/common/utils/material.py +158 -121
- CHAP/common/utils/scanparsers.py +735 -339
- CHAP/common/writer.py +31 -25
- CHAP/edd/models.py +65 -51
- CHAP/edd/processor.py +136 -113
- CHAP/edd/reader.py +1 -1
- CHAP/edd/writer.py +1 -1
- CHAP/inference/processor.py +35 -28
- CHAP/inference/reader.py +1 -1
- CHAP/inference/writer.py +1 -1
- CHAP/pipeline.py +14 -28
- CHAP/processor.py +44 -75
- CHAP/reader.py +49 -40
- CHAP/runner.py +73 -32
- CHAP/saxswaxs/processor.py +1 -1
- CHAP/saxswaxs/reader.py +1 -1
- CHAP/saxswaxs/writer.py +1 -1
- CHAP/server.py +130 -0
- CHAP/sin2psi/processor.py +1 -1
- CHAP/sin2psi/reader.py +1 -1
- CHAP/sin2psi/writer.py +1 -1
- CHAP/tomo/__init__.py +1 -4
- CHAP/tomo/models.py +53 -31
- CHAP/tomo/processor.py +1326 -902
- CHAP/tomo/reader.py +4 -2
- CHAP/tomo/writer.py +4 -2
- CHAP/writer.py +47 -41
- {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/METADATA +1 -1
- ChessAnalysisPipeline-0.0.6.dist-info/RECORD +52 -0
- ChessAnalysisPipeline-0.0.4.dist-info/RECORD +0 -50
- {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/LICENSE +0 -0
- {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/WHEEL +0 -0
- {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/entry_points.txt +0 -0
- {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/top_level.txt +0 -0
CHAP/common/processor.py
CHANGED
@@ -1,134 +1,139 @@
 #!/usr/bin/env python
 #-*- coding: utf-8 -*-
 #pylint: disable=
-
+"""
 File : processor.py
 Author : Valentin Kuznetsov <vkuznet AT gmail dot com>
-Description: Module for Processors used in multiple experiment-specific
-
+Description: Module for Processors used in multiple experiment-specific
+    workflows.
+"""
 
 # system modules
-import
-import logging
-import json
-import sys
+from json import dumps
 from time import time
 
 # local modules
 from CHAP import Processor
 
+
 class AsyncProcessor(Processor):
-
+    """A Processor to process multiple sets of input data via asyncio
+    module
 
     :ivar mgr: The `Processor` used to process every set of input data
    :type mgr: Processor
-
+    """
    def __init__(self, mgr):
        super().__init__()
        self.mgr = mgr
 
-    def _process(self,
-
-        `Processor`.
-
-        :param
+    def _process(self, data):
+        """Asynchronously process the input documents with the
+        `self.mgr` `Processor`.
+
+        :param data: input data documents to process
        :type docs: iterable
-
+        """
 
        import asyncio
 
        async def task(mgr, doc):
-
-
+            """Process given data using provided `Processor`
+
            :param mgr: the object that will process given data
            :type mgr: Processor
            :param doc: the data to process
            :type doc: object
            :return: processed data
            :rtype: object
-
+            """
            return mgr.process(doc)
 
-        async def
-
-
+        async def execute_tasks(mgr, docs):
+            """Process given set of documents using provided task
+            manager
+
            :param mgr: the object that will process all documents
            :type mgr: Processor
            :param docs: the set of data documents to process
            :type doc: iterable
-
-
-            await asyncio.gather(*
+            """
+            coroutines = [task(mgr, d) for d in docs]
+            await asyncio.gather(*coroutines)
+
+        asyncio.run(execute_tasks(self.mgr, data))
 
-        asyncio.run(executeTasks(self.mgr, docs))
 
 class IntegrationProcessor(Processor):
-
-    '''
+    """A processor for integrating 2D data with pyFAI"""
 
    def _process(self, data):
-
-        arguments supplied and return the results.
-
-        :param data: input data, including raw data, integration
-            keyword args for the integration method.
-        :type data: tuple[typing.Union[numpy.ndarray,
-            callable,
-            dict]
+        """Integrate the input data with the integration method and
+        keyword arguments supplied and return the results.
+
+        :param data: input data, including raw data, integration
+            method, and keyword args for the integration method.
+        :type data: tuple[typing.Union[numpy.ndarray,
+            list[numpy.ndarray]], callable, dict]
        :param integration_method: the method of a
            `pyFAI.azimuthalIntegrator.AzimuthalIntegrator` or
-            `pyFAI.multi_geometry.MultiGeometry` that returns the
-            integration results.
+            `pyFAI.multi_geometry.MultiGeometry` that returns the
+            desired integration results.
        :return: integrated raw data
        :rtype: pyFAI.containers.IntegrateResult
-
-
+        """
        detector_data, integration_method, integration_kwargs = data
 
-        return
+        return integration_method(detector_data, **integration_kwargs)
+
 
 class IntegrateMapProcessor(Processor):
-
-    configuration and returns a `nexusformat.nexus.NXprocess`
-    the integrated detector data requested.
-
+    """Class representing a process that takes a map and integration
+    configuration and returns a `nexusformat.nexus.NXprocess`
+    containing a map of the integrated detector data requested.
+    """
 
    def _process(self, data):
-
-        configuration and return a
-
-
-
-
-        the value `'
+        """Process the output of a `Reader` that contains a map and
+        integration configuration and return a
+        `nexusformat.nexus.NXprocess` containing a map of the
+        integrated detector data requested
+
+        :param data: Result of `Reader.read` where at least one item
+            has the value `'MapConfig'` for the `'schema'` key, and at
+            least one item has the value `'IntegrationConfig'` for the
+            `'schema'` key.
        :type data: list[dict[str,object]]
        :return: integrated data and process metadata
        :rtype: nexusformat.nexus.NXprocess
-
+        """
 
        map_config, integration_config = self.get_configs(data)
        nxprocess = self.get_nxprocess(map_config, integration_config)
 
-        return
+        return nxprocess
 
    def get_configs(self, data):
-
-        input supplied by
-
-
-
-        the value `'
+        """Return valid instances of `MapConfig` and
+        `IntegrationConfig` from the input supplied by
+        `MultipleReader`.
+
+        :param data: Result of `Reader.read` where at least one item
+            has the value `'MapConfig'` for the `'schema'` key, and at
+            least one item has the value `'IntegrationConfig'` for the
+            `'schema'` key.
        :type data: list[dict[str,object]]
-        :raises ValueError: if `data` cannot be parsed into map and
+        :raises ValueError: if `data` cannot be parsed into map and
+            integration configurations.
        :return: valid map and integration configuration objects.
        :rtype: tuple[MapConfig, IntegrationConfig]
-
+        """
 
        self.logger.debug('Getting configuration objects')
        t0 = time()
 
-        from CHAP.common.models import MapConfig
-        from CHAP.common.models import IntegrationConfig
+        from CHAP.common.models.map import MapConfig
+        from CHAP.common.models.integration import IntegrationConfig
 
        map_config = False
        integration_config = False
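The rewritten AsyncProcessor._process above fans self.mgr out over all input documents with asyncio.gather. Below is a minimal, self-contained sketch of the same pattern; the plain callable standing in for a CHAP Processor is an assumption made so the snippet runs without the package installed, and unlike the version above this sketch returns the gathered results so there is something to print.

import asyncio

async def task(mgr, doc):
    # One coroutine per input document, wrapping a synchronous call
    return mgr(doc)

async def execute_tasks(mgr, docs):
    # Schedule every document and gather the results, as _process does above
    coroutines = [task(mgr, d) for d in docs]
    return await asyncio.gather(*coroutines)

print(asyncio.run(execute_tasks(str.upper, ['foo', 'bar'])))  # ['FOO', 'BAR']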
@@ -142,29 +147,30 @@ class IntegrateMapProcessor(Processor):
                    integration_config = item.get('data')
 
        if not map_config:
-            raise
+            raise ValueError('No map configuration found')
        if not integration_config:
-            raise
+            raise ValueError('No integration configuration found')
 
        map_config = MapConfig(**map_config)
        integration_config = IntegrationConfig(**integration_config)
 
-        self.logger.debug(
+        self.logger.debug(
+            f'Got configuration objects in {time()-t0:.3f} seconds')
 
-        return
+        return map_config, integration_config
 
    def get_nxprocess(self, map_config, integration_config):
-
+        """Use a `MapConfig` and `IntegrationConfig` to construct a
        `nexusformat.nexus.NXprocess`
 
        :param map_config: a valid map configuration
        :type map_config: MapConfig
        :param integration_config: a valid integration configuration
        :type integration_config: IntegrationConfig
-        :return: the integrated detector data and metadata contained
-            structure
+        :return: the integrated detector data and metadata contained
+            in a NeXus structure
        :rtype: nexusformat.nexus.NXprocess
-
+        """
 
        self.logger.debug('Constructing NXprocess')
        t0 = time()
@@ -178,13 +184,13 @@ class IntegrateMapProcessor(Processor):
 
        nxprocess = NXprocess(name=integration_config.title)
 
-        nxprocess.map_config =
-        nxprocess.integration_config =
+        nxprocess.map_config = dumps(map_config.dict())
+        nxprocess.integration_config = dumps(integration_config.dict())
 
        nxprocess.program = 'pyFAI'
        nxprocess.version = pyFAI.version
 
-        for k,v in integration_config.dict().items():
+        for k, v in integration_config.dict().items():
            if k == 'detectors':
                continue
            nxprocess.attrs[k] = v
@@ -195,11 +201,13 @@ class IntegrateMapProcessor(Processor):
            nxdetector.local_name = detector.prefix
            nxdetector.distance = detector.azimuthal_integrator.dist
            nxdetector.distance.attrs['units'] = 'm'
-            nxdetector.calibration_wavelength =
+            nxdetector.calibration_wavelength = \
+                detector.azimuthal_integrator.wavelength
            nxdetector.calibration_wavelength.attrs['units'] = 'm'
            nxdetector.attrs['poni_file'] = str(detector.poni_file)
            nxdetector.attrs['mask_file'] = str(detector.mask_file)
-            nxdetector.raw_data_files = np.full(map_config.shape,
+            nxdetector.raw_data_files = np.full(map_config.shape,
+                                                '', dtype='|S256')
 
        nxprocess.data = NXdata()
 
@@ -207,7 +215,7 @@ class IntegrateMapProcessor(Processor):
            *map_config.dims,
            *integration_config.integrated_data_dims
        )
-        for i,dim in enumerate(map_config.independent_dimensions[::-1]):
+        for i, dim in enumerate(map_config.independent_dimensions[::-1]):
            nxprocess.data[dim.label] = NXfield(
                value=map_config.coords[dim.label],
                units=dim.units,
@@ -216,8 +224,8 @@ class IntegrateMapProcessor(Processor):
                       'local_name': dim.name})
            nxprocess.data.attrs[f'{dim.label}_indices'] = i
 
-        for i,(coord_name,coord_values) in
-
+        for i, (coord_name, coord_values) in enumerate(
+                integration_config.integrated_data_coordinates.items()):
            if coord_name == 'radial':
                type_ = pyFAI.units.RADIAL_UNITS
            elif coord_name == 'azimuthal':
@@ -226,19 +234,19 @@ class IntegrateMapProcessor(Processor):
                getattr(integration_config, f'{coord_name}_units'),
                type_=type_)
            nxprocess.data[coord_units.name] = coord_values
-            nxprocess.data.attrs[f'{coord_units.name}_indices'] = i+len(
+            nxprocess.data.attrs[f'{coord_units.name}_indices'] = i + len(
+                map_config.coords)
            nxprocess.data[coord_units.name].units = coord_units.unit_symbol
-            nxprocess.data[coord_units.name].attrs['long_name'] =
+            nxprocess.data[coord_units.name].attrs['long_name'] = \
+                coord_units.label
 
        nxprocess.data.attrs['signal'] = 'I'
        nxprocess.data.I = NXfield(
            value=np.empty(
                (*tuple(
-                    [len(coord_values) for coord_name,coord_values
-
-                    *integration_config.integrated_data_shape
-                )
-                ),
+                    [len(coord_values) for coord_name, coord_values
+                     in map_config.coords.items()][::-1]),
+                 *integration_config.integrated_data_shape)),
            units='a.u',
            attrs={'long_name':'Intensity (a.u)'})
 
@@ -246,13 +254,17 @@ class IntegrateMapProcessor(Processor):
        if integration_config.integration_type == 'azimuthal':
            integration_method = integrator.integrate1d
            integration_kwargs = {
-                'lst_mask': [detector.mask_array
+                'lst_mask': [detector.mask_array
+                             for detector
+                             in integration_config.detectors],
                'npt': integration_config.radial_npt
            }
        elif integration_config.integration_type == 'cake':
            integration_method = integrator.integrate2d
            integration_kwargs = {
-                'lst_mask': [detector.mask_array
+                'lst_mask': [detector.mask_array
+                             for detector
+                             in integration_config.detectors],
                'npt_rad': integration_config.radial_npt,
                'npt_azim': integration_config.azimuthal_npt,
                'method': 'bbox'
@@ -261,7 +273,6 @@ class IntegrateMapProcessor(Processor):
        integration_processor = IntegrationProcessor()
        integration_processor.logger.setLevel(self.logger.getEffectiveLevel())
        integration_processor.logger.addHandler(self.logger.handlers[0])
-        lst_args = []
        for scans in map_config.spec_scans:
            for scan_number in scans.scan_numbers:
                scanparser = scans.get_scanparser(scan_number)
@@ -275,53 +286,59 @@ class IntegrateMapProcessor(Processor):
                        scan_number,
                        scan_step_index)
                result = integration_processor.process(
-                    (detector_data,
+                    (detector_data,
+                     integration_method, integration_kwargs))
                nxprocess.data.I[map_index] = result.intensity
 
                for detector in integration_config.detectors:
-                    nxprocess[detector.prefix].raw_data_files[map_index]
+                    nxprocess[detector.prefix].raw_data_files[map_index] =\
                        scanparser.get_detector_data_file(
-                        detector.prefix,
-                        scan_step_index)
+                            detector.prefix, scan_step_index)
 
        self.logger.debug(f'Constructed NXprocess in {time()-t0:.3f} seconds')
 
-        return
+        return nxprocess
+
 
 class MapProcessor(Processor):
-
-    `nexusformat.nexus.NXentry` representing that map's metadata and
-    scalar-valued raw data requseted by the supplied map
-
+    """A Processor to take a map configuration and return a
+    `nexusformat.nexus.NXentry` representing that map's metadata and
+    any scalar-valued raw data requseted by the supplied map
+    configuration.
+    """
 
    def _process(self, data):
-
-        return a `nexusformat.nexus.NXentry`
+        """Process the output of a `Reader` that contains a map
+        configuration and return a `nexusformat.nexus.NXentry`
+        representing the map.
 
-        :param data: Result of `Reader.read` where at least one item
-            value `'MapConfig'` for the `'schema'` key.
+        :param data: Result of `Reader.read` where at least one item
+            has the value `'MapConfig'` for the `'schema'` key.
        :type data: list[dict[str,object]]
        :return: Map data & metadata
        :rtype: nexusformat.nexus.NXentry
-
+        """
 
        map_config = self.get_map_config(data)
        nxentry = self.__class__.get_nxentry(map_config)
 
-        return
+        return nxentry
 
    def get_map_config(self, data):
-
+        """Get an instance of `MapConfig` from a returned value of
+        `Reader.read`
 
-        :param data: Result of `Reader.read` where at least one item
-            value `'MapConfig'` for the `'schema'` key.
+        :param data: Result of `Reader.read` where at least one item
+            has the value `'MapConfig'` for the `'schema'` key.
        :type data: list[dict[str,object]]
-        :raises Exception: If a valid `MapConfig` cannot be
-
+        :raises Exception: If a valid `MapConfig` cannot be
+            constructed from `data`.
+        :return: a valid instance of `MapConfig` with field values
+            taken from `data`.
        :rtype: MapConfig
-
+        """
 
-        from .models.map import MapConfig
+        from CHAP.common.models.map import MapConfig
 
        map_config = False
        if isinstance(data, list):
@@ -332,19 +349,21 @@ class MapProcessor(Processor):
                        break
 
        if not map_config:
-            raise
+            raise ValueError('No map configuration found')
 
-        return
+        return MapConfig(**map_config)
 
    @staticmethod
    def get_nxentry(map_config):
-
+        """Use a `MapConfig` to construct a
+        `nexusformat.nexus.NXentry`
 
        :param map_config: a valid map configuration
        :type map_config: MapConfig
-        :return: the map's data and metadata contained in a NeXus
+        :return: the map's data and metadata contained in a NeXus
+            structure
        :rtype: nexusformat.nexus.NXentry
-
+        """
 
        from nexusformat.nexus import (NXcollection,
                                       NXdata,
@@ -355,7 +374,7 @@ class MapProcessor(Processor):
 
        nxentry = NXentry(name=map_config.title)
 
-        nxentry.map_config =
+        nxentry.map_config = dumps(map_config.dict())
 
        nxentry[map_config.sample.name] = NXsample(**map_config.sample.dict())
 
@@ -366,11 +385,11 @@ class MapProcessor(Processor):
            nxentry.spec_scans[scans.scanparsers[0].scan_name] = \
                NXfield(value=scans.scan_numbers,
                        dtype='int8',
-                        attrs={'spec_file':str(scans.spec_file)})
+                        attrs={'spec_file': str(scans.spec_file)})
 
        nxentry.data = NXdata()
        nxentry.data.attrs['axes'] = map_config.dims
-        for i,dim in enumerate(map_config.independent_dimensions[::-1]):
+        for i, dim in enumerate(map_config.independent_dimensions[::-1]):
            nxentry.data[dim.label] = NXfield(
                value=map_config.coords[dim.label],
                units=dim.units,
@@ -411,23 +430,25 @@ class MapProcessor(Processor):
                    scan_number,
                    scan_step_index)
 
-        return
+        return nxentry
+
 
 class NexusToNumpyProcessor(Processor):
-
-    an `numpy.ndarray`.
-
+    """A Processor to convert the default plottable data in an
+    `NXobject` into an `numpy.ndarray`.
+    """
 
    def _process(self, data):
-
+        """Return the default plottable data signal in `data` as an
        `numpy.ndarray`.
-
+
        :param data: input NeXus structure
        :type data: nexusformat.nexus.tree.NXobject
-        :raises ValueError: if `data` has no default plottable data
+        :raises ValueError: if `data` has no default plottable data
+            signal
        :return: default plottable data signal in `data`
        :rtype: numpy.ndarray
-
+        """
 
        default_data = data.plottable_data
 
@@ -435,31 +456,35 @@ class NexusToNumpyProcessor(Processor):
            default_data_path = data.attrs['default']
            default_data = data.get(default_data_path)
            if default_data is None:
-                raise
+                raise ValueError(
+                    f'The structure of {data} contains no default data')
 
        default_signal = default_data.attrs.get('signal')
        if default_signal is None:
-            raise
+            raise ValueError(f'The signal of {default_data} is unknown')
        default_signal = default_signal.nxdata
 
        np_data = default_data[default_signal].nxdata
 
-        return
+        return np_data
+
 
 class NexusToXarrayProcessor(Processor):
-
-    an `xarray.DataArray`.
+    """A Processor to convert the default plottable data in an
+    `NXobject` into an `xarray.DataArray`.
+    """
 
    def _process(self, data):
-
+        """Return the default plottable data signal in `data` as an
        `xarray.DataArray`.
-
+
        :param data: input NeXus structure
        :type data: nexusformat.nexus.tree.NXobject
-        :raises ValueError: if metadata for `xarray` is absent from
+        :raises ValueError: if metadata for `xarray` is absent from
+            `data`
        :return: default plottable data signal in `data`
        :rtype: xarray.DataArray
-
+        """
 
        from xarray import DataArray
 
@@ -469,11 +494,12 @@ class NexusToXarrayProcessor(Processor):
            default_data_path = data.attrs['default']
            default_data = data.get(default_data_path)
            if default_data is None:
-                raise
+                raise ValueError(
+                    f'The structure of {data} contains no default data')
 
        default_signal = default_data.attrs.get('signal')
        if default_signal is None:
-            raise
+            raise ValueError(f'The signal of {default_data} is unknown')
        default_signal = default_signal.nxdata
 
        signal_data = default_data[default_signal].nxdata
@@ -487,129 +513,139 @@ class NexusToXarrayProcessor(Processor):
                              axis.attrs)
 
        dims = tuple(axes)
-
        name = default_signal
-
        attrs = default_data[default_signal].attrs
 
-        return
+        return DataArray(data=signal_data,
                         coords=coords,
                         dims=dims,
                         name=name,
-                         attrs=attrs)
+                         attrs=attrs)
+
 
 class PrintProcessor(Processor):
-
-    original input data, unchanged in any way.
-
+    """A Processor to simply print the input data to stdout and return
+    the original input data, unchanged in any way.
+    """
 
    def _process(self, data):
-
+        """Print and return the input data.
 
        :param data: Input data
        :type data: object
        :return: `data`
        :rtype: object
-
+        """
 
        print(f'{self.__name__} data :')
 
        if callable(getattr(data, '_str_tree', None)):
-            # If data is likely an NXobject, print its tree
-            # (since NXobjects' str representations are
+            # If data is likely an NXobject, print its tree
+            # representation (since NXobjects' str representations are
+            # just their nxname)
            print(data._str_tree(attrs=True, recursive=True))
        else:
            print(str(data))
 
-        return
+        return data
+
 
 class StrainAnalysisProcessor(Processor):
-
-    1D detector data and analyzing the difference between
-    locations and expected peak locations for the sample
-
+    """A Processor to compute a map of sample strains by fitting bragg
+    peaks in 1D detector data and analyzing the difference between
+    measured peak locations and expected peak locations for the sample
+    measured.
+    """
 
    def _process(self, data):
-
-        analysis procedure, and return a map of sample strains.
+        """Process the input map detector data & configuration for the
+        strain analysis procedure, and return a map of sample strains.
 
-        :param data: results of `MutlipleReader.read` containing input
-            detector data and strain analysis configuration
+        :param data: results of `MutlipleReader.read` containing input
+            map detector data and strain analysis configuration
        :type data: dict[list[str,object]]
        :return: map of sample strains
        :rtype: xarray.Dataset
-
+        """
 
        strain_analysis_config = self.get_config(data)
 
-        return
+        return data
 
    def get_config(self, data):
-
+        """Get instances of the configuration objects needed by this
        `Processor` from a returned value of `Reader.read`
 
-        :param data: Result of `Reader.read` where at least one item
-            value `'StrainAnalysisConfig'` for the `'schema'`
+        :param data: Result of `Reader.read` where at least one item
+            has the value `'StrainAnalysisConfig'` for the `'schema'`
+            key.
        :type data: list[dict[str,object]]
-        :raises Exception: If valid config objects cannot be
-
-
+        :raises Exception: If valid config objects cannot be
+            constructed from `data`.
+        :return: valid instances of the configuration objects with
+            field values taken from `data`.
        :rtype: StrainAnalysisConfig
-
+        """
 
        strain_analysis_config = False
        if isinstance(data, list):
            for item in data:
                if isinstance(item, dict):
-                    schema = item.get('schema')
                    if item.get('schema') == 'StrainAnalysisConfig':
                        strain_analysis_config = item.get('data')
 
        if not strain_analysis_config:
-            raise
+            raise ValueError(
+                'No strain analysis configuration found in input data')
 
-        return
+        return strain_analysis_config
 
 
 class URLResponseProcessor(Processor):
-
+    """A Processor to decode and return data resulting from from
+    URLReader.read
+    """
+
    def _process(self, data):
-
-        of the content.
+        """Take data returned from URLReader.read and return a decoded
+        version of the content.
 
        :param data: input data (output of URLReader.read)
        :type data: list[dict]
        :return: decoded data contents
        :rtype: object
-
+        """
 
        data = data[0]
 
        content = data['data']
        encoding = data['encoding']
 
-        self.logger.debug(
+        self.logger.debug(
+            f'Decoding content of type {type(content)} with {encoding}')
 
        try:
            content = content.decode(encoding)
        except:
-            self.logger.warning(
+            self.logger.warning('Failed to decode content of type '
+                                f'{type(content)} with {encoding}')
+
+        return content
 
-        return(content)
 
 class XarrayToNexusProcessor(Processor):
-
+    """A Processor to convert the data in an `xarray` structure to an
    `nexusformat.nexus.NXdata`.
-
+    """
 
    def _process(self, data):
-
+        """Return `data` represented as an `nexusformat.nexus.NXdata`.
 
        :param data: The input `xarray` structure
        :type data: typing.Union[xarray.DataArray, xarray.Dataset]
        :return: The data and metadata in `data`
        :rtype: nexusformat.nexus.NXdata
-
+        """
 
        from nexusformat.nexus import NXdata, NXfield
 
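The NexusToXarrayProcessor rewrite above assembles its result with xarray.DataArray(data=..., coords=..., dims=..., name=..., attrs=...). A small, self-contained sketch of that constructor; the coordinate names and values here are hypothetical, not taken from the package:

import numpy as np
from xarray import DataArray

# 2x3 signal with one coordinate array per dimension (hypothetical values)
signal_data = np.arange(6.0).reshape(2, 3)
da = DataArray(data=signal_data,
               coords={'x': [0.0, 1.0], 'y': [0.0, 0.5, 1.0]},
               dims=('x', 'y'),
               name='I',
               attrs={'units': 'a.u'})
print(da)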
@@ -617,28 +653,30 @@ class XarrayToNexusProcessor(Processor):
 
        axes = []
        for name, coord in data.coords.items():
-            axes.append(
+            axes.append(
+                NXfield(value=coord.data, name=name, attrs=coord.attrs))
        axes = tuple(axes)
 
-        return
+        return NXdata(signal=signal, axes=axes)
+
 
 class XarrayToNumpyProcessor(Processor):
-
-    `numpy.ndarray`.
-
+    """A Processor to convert the data in an `xarray.DataArray`
+    structure to an `numpy.ndarray`.
+    """
 
    def _process(self, data):
-
-
+        """Return just the signal values contained in `data`.
+
        :param data: The input `xarray.DataArray`
        :type data: xarray.DataArray
        :return: The data in `data`
        :rtype: numpy.ndarray
-
+        """
+
+        return data.data
 
-        return(data.data)
 
 if __name__ == '__main__':
    from CHAP.processor import main
    main()
-
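Several processors in this file (IntegrateMapProcessor.get_configs, MapProcessor.get_map_config, StrainAnalysisProcessor.get_config) share one lookup pattern in 0.0.6: scan the list returned by Reader.read for an item whose 'schema' key names the wanted configuration, and raise a ValueError when none is found. A minimal, self-contained sketch of that pattern; the generic get_config helper and the example payload are hypothetical, not part of the package:

def get_config(data, schema):
    # Scan Reader.read output for the item carrying the requested schema
    config = False
    if isinstance(data, list):
        for item in data:
            if isinstance(item, dict) and item.get('schema') == schema:
                config = item.get('data')
    if not config:
        raise ValueError(f'No {schema} configuration found')
    return config

docs = [{'schema': 'MapConfig', 'data': {'title': 'example_map'}}]
print(get_config(docs, 'MapConfig'))  # {'title': 'example_map'}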