ChessAnalysisPipeline 0.0.2__py3-none-any.whl → 0.0.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ChessAnalysisPipeline might be problematic. Click here for more details.
- CHAP/__init__.py +3 -0
- CHAP/common/__init__.py +19 -0
- CHAP/common/models/__init__.py +2 -0
- CHAP/common/models/integration.py +515 -0
- CHAP/common/models/map.py +535 -0
- CHAP/common/processor.py +644 -0
- CHAP/common/reader.py +119 -0
- CHAP/common/utils/__init__.py +37 -0
- CHAP/common/utils/fit.py +2613 -0
- CHAP/common/utils/general.py +1225 -0
- CHAP/common/utils/material.py +231 -0
- CHAP/common/utils/scanparsers.py +785 -0
- CHAP/common/writer.py +96 -0
- CHAP/edd/__init__.py +7 -0
- CHAP/edd/models.py +215 -0
- CHAP/edd/processor.py +321 -0
- CHAP/edd/reader.py +5 -0
- CHAP/edd/writer.py +5 -0
- CHAP/inference/__init__.py +3 -0
- CHAP/inference/processor.py +68 -0
- CHAP/inference/reader.py +5 -0
- CHAP/inference/writer.py +5 -0
- CHAP/pipeline.py +1 -1
- CHAP/processor.py +11 -818
- CHAP/reader.py +18 -113
- CHAP/saxswaxs/__init__.py +6 -0
- CHAP/saxswaxs/processor.py +5 -0
- CHAP/saxswaxs/reader.py +5 -0
- CHAP/saxswaxs/writer.py +5 -0
- CHAP/sin2psi/__init__.py +7 -0
- CHAP/sin2psi/processor.py +5 -0
- CHAP/sin2psi/reader.py +5 -0
- CHAP/sin2psi/writer.py +5 -0
- CHAP/tomo/__init__.py +5 -0
- CHAP/tomo/models.py +125 -0
- CHAP/tomo/processor.py +2009 -0
- CHAP/tomo/reader.py +5 -0
- CHAP/tomo/writer.py +5 -0
- CHAP/writer.py +17 -167
- {ChessAnalysisPipeline-0.0.2.dist-info → ChessAnalysisPipeline-0.0.4.dist-info}/METADATA +1 -1
- ChessAnalysisPipeline-0.0.4.dist-info/RECORD +50 -0
- CHAP/async.py +0 -56
- ChessAnalysisPipeline-0.0.2.dist-info/RECORD +0 -17
- {ChessAnalysisPipeline-0.0.2.dist-info → ChessAnalysisPipeline-0.0.4.dist-info}/LICENSE +0 -0
- {ChessAnalysisPipeline-0.0.2.dist-info → ChessAnalysisPipeline-0.0.4.dist-info}/WHEEL +0 -0
- {ChessAnalysisPipeline-0.0.2.dist-info → ChessAnalysisPipeline-0.0.4.dist-info}/entry_points.txt +0 -0
- {ChessAnalysisPipeline-0.0.2.dist-info → ChessAnalysisPipeline-0.0.4.dist-info}/top_level.txt +0 -0
CHAP/common/processor.py
ADDED
|
@@ -0,0 +1,644 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
#-*- coding: utf-8 -*-
|
|
3
|
+
#pylint: disable=
|
|
4
|
+
'''
|
|
5
|
+
File : processor.py
|
|
6
|
+
Author : Valentin Kuznetsov <vkuznet AT gmail dot com>
|
|
7
|
+
Description: Module for Processors used in multiple experiment-specific workflows.
|
|
8
|
+
'''
|
|
9
|
+
|
|
10
|
+
# system modules
|
|
11
|
+
import argparse
|
|
12
|
+
import logging
|
|
13
|
+
import json
|
|
14
|
+
import sys
|
|
15
|
+
from time import time
|
|
16
|
+
|
|
17
|
+
# local modules
|
|
18
|
+
from CHAP import Processor
|
|
19
|
+
|
|
20
|
+
class AsyncProcessor(Processor):
    '''A Processor to process multiple sets of input data via asyncio module

    :ivar mgr: The `Processor` used to process every set of input data
    :type mgr: Processor
    '''
    def __init__(self, mgr):
        super().__init__()
        self.mgr = mgr

    def _process(self, docs):
        '''Asynchronously process the input documents with the `self.mgr`
        `Processor`.

        :param docs: input documents to process
        :type docs: iterable
        '''

        import asyncio

        async def task(mgr, doc):
            '''Process given data using provided `Processor`

            :param mgr: the object that will process given data
            :type mgr: Processor
            :param doc: the data to process
            :type doc: object
            :return: processed data
            :rtype: object
            '''
            # FIX: `mgr.process(doc)` is a plain synchronous call. The
            # previous implementation returned it directly from the
            # coroutine, so nothing was ever awaited and `asyncio.gather`
            # executed the tasks strictly one after another. Running the
            # blocking call in the default thread-pool executor lets the
            # documents actually be processed concurrently.
            loop = asyncio.get_running_loop()
            return await loop.run_in_executor(None, mgr.process, doc)

        async def executeTasks(mgr, docs):
            '''Process given set of documents using provided task manager

            :param mgr: the object that will process all documents
            :type mgr: Processor
            :param docs: the set of data documents to process
            :type docs: iterable
            '''
            coRoutines = [task(mgr, d) for d in docs]
            # Results are intentionally discarded: this Processor is used
            # for its side effects and, like before, returns None.
            await asyncio.gather(*coRoutines)

        asyncio.run(executeTasks(self.mgr, docs))
class IntegrationProcessor(Processor):
    '''A processor for integrating 2D data with pyFAI
    '''

    def _process(self, data):
        '''Integrate the input data with the integration method and keyword
        arguments supplied and return the results.

        :param data: input data, including raw data, integration method, and
            keyword args for the integration method.
        :type data: tuple[typing.Union[numpy.ndarray, list[numpy.ndarray]],
            callable,
            dict]
        :param integration_method: the method of a
            `pyFAI.azimuthalIntegrator.AzimuthalIntegrator` or
            `pyFAI.multi_geometry.MultiGeometry` that returns the desired
            integration results.
        :return: integrated raw data
        :rtype: pyFAI.containers.IntegrateResult
        '''
        # Unpack the three-tuple produced by the caller, then simply
        # delegate to the supplied pyFAI integration callable.
        raw_data, method, method_kwargs = data
        return method(raw_data, **method_kwargs)
class IntegrateMapProcessor(Processor):
    '''Class representing a process that takes a map and integration
    configuration and returns a `nexusformat.nexus.NXprocess` containing a map of
    the integrated detector data requested.
    '''

    def _process(self, data):
        '''Process the output of a `Reader` that contains a map and integration
        configuration and return a `nexusformat.nexus.NXprocess` containing a map
        of the integrated detector data requested

        :param data: Result of `Reader.read` where at least one item has the
            value `'MapConfig'` for the `'schema'` key, and at least one item has
            the value `'IntegrationConfig'` for the `'schema'` key.
        :type data: list[dict[str,object]]
        :return: integrated data and process metadata
        :rtype: nexusformat.nexus.NXprocess
        '''

        # Two-step pipeline: validate the configurations, then build the
        # NeXus structure holding the integrated data.
        map_config, integration_config = self.get_configs(data)
        nxprocess = self.get_nxprocess(map_config, integration_config)

        return(nxprocess)

    def get_configs(self, data):
        '''Return valid instances of `MapConfig` and `IntegrationConfig` from the
        input supplied by `MultipleReader`.

        :param data: Result of `Reader.read` where at least one item has the
            value `'MapConfig'` for the `'schema'` key, and at least one item has
            the value `'IntegrationConfig'` for the `'schema'` key.
        :type data: list[dict[str,object]]
        :raises ValueError: if `data` cannot be parsed into map and integration configurations.
        :return: valid map and integration configuration objects.
        :rtype: tuple[MapConfig, IntegrationConfig]
        '''

        self.logger.debug('Getting configuration objects')
        t0 = time()

        from CHAP.common.models import MapConfig
        from CHAP.common.models import IntegrationConfig

        # `False` doubles as the "not found" sentinel; if several items carry
        # the same schema, the last one wins.
        map_config = False
        integration_config = False
        if isinstance(data, list):
            for item in data:
                if isinstance(item, dict):
                    schema = item.get('schema')
                    if schema == 'MapConfig':
                        map_config = item.get('data')
                    elif schema == 'IntegrationConfig':
                        integration_config = item.get('data')

        if not map_config:
            raise(ValueError('No map configuration found'))
        if not integration_config:
            raise(ValueError('No integration configuration found'))

        # Constructing the model objects validates the raw dictionaries;
        # invalid field values raise here, not later.
        map_config = MapConfig(**map_config)
        integration_config = IntegrationConfig(**integration_config)

        self.logger.debug(f'Got configuration objects in {time()-t0:.3f} seconds')

        return(map_config, integration_config)

    def get_nxprocess(self, map_config, integration_config):
        '''Use a `MapConfig` and `IntegrationConfig` to construct a
        `nexusformat.nexus.NXprocess`

        :param map_config: a valid map configuration
        :type map_config: MapConfig
        :param integration_config: a valid integration configuration
        :type integration_config: IntegrationConfig
        :return: the integrated detector data and metadata contained in a NeXus
            structure
        :rtype: nexusformat.nexus.NXprocess
        '''

        self.logger.debug('Constructing NXprocess')
        t0 = time()

        from nexusformat.nexus import (NXdata,
                                       NXdetector,
                                       NXfield,
                                       NXprocess)
        import numpy as np
        import pyFAI

        nxprocess = NXprocess(name=integration_config.title)

        # Embed the full configurations as JSON so the output file is
        # self-describing / reproducible.
        nxprocess.map_config = json.dumps(map_config.dict())
        nxprocess.integration_config = json.dumps(integration_config.dict())

        nxprocess.program = 'pyFAI'
        nxprocess.version = pyFAI.version

        # Copy every scalar integration setting into the NXprocess attrs;
        # the 'detectors' entry is handled separately below.
        for k,v in integration_config.dict().items():
            if k == 'detectors':
                continue
            nxprocess.attrs[k] = v

        for detector in integration_config.detectors:
            nxprocess[detector.prefix] = NXdetector()
            nxdetector = nxprocess[detector.prefix]
            nxdetector.local_name = detector.prefix
            nxdetector.distance = detector.azimuthal_integrator.dist
            nxdetector.distance.attrs['units'] = 'm'
            nxdetector.calibration_wavelength = detector.azimuthal_integrator.wavelength
            nxdetector.calibration_wavelength.attrs['units'] = 'm'
            nxdetector.attrs['poni_file'] = str(detector.poni_file)
            nxdetector.attrs['mask_file'] = str(detector.mask_file)
            # Fixed-width byte strings (max 256 chars) pre-allocated over the
            # whole map shape; filled in during the integration loop below.
            nxdetector.raw_data_files = np.full(map_config.shape, '', dtype='|S256')

        nxprocess.data = NXdata()

        # Axes are the map's independent dimensions followed by the
        # integrated-data dimensions (radial and/or azimuthal).
        nxprocess.data.attrs['axes'] = (
            *map_config.dims,
            *integration_config.integrated_data_dims
        )
        # Reversed so the slowest-varying map dimension comes first --
        # matches the index order used by scans.get_index below.
        for i,dim in enumerate(map_config.independent_dimensions[::-1]):
            nxprocess.data[dim.label] = NXfield(
                value=map_config.coords[dim.label],
                units=dim.units,
                attrs={'long_name': f'{dim.label} ({dim.units})',
                       'data_type': dim.data_type,
                       'local_name': dim.name})
            nxprocess.data.attrs[f'{dim.label}_indices'] = i

        for i,(coord_name,coord_values) in \
                enumerate(integration_config.integrated_data_coordinates.items()):
            # NOTE(review): if coord_name is ever something other than
            # 'radial' or 'azimuthal', `type_` is left unbound and the
            # to_unit call below raises NameError -- presumably
            # integrated_data_coordinates only yields these two keys; confirm.
            if coord_name == 'radial':
                type_ = pyFAI.units.RADIAL_UNITS
            elif coord_name == 'azimuthal':
                type_ = pyFAI.units.AZIMUTHAL_UNITS
            coord_units = pyFAI.units.to_unit(
                getattr(integration_config, f'{coord_name}_units'),
                type_=type_)
            nxprocess.data[coord_units.name] = coord_values
            # Integrated-data axes come after all the map axes.
            nxprocess.data.attrs[f'{coord_units.name}_indices'] = i+len(map_config.coords)
            nxprocess.data[coord_units.name].units = coord_units.unit_symbol
            nxprocess.data[coord_units.name].attrs['long_name'] = coord_units.label

        nxprocess.data.attrs['signal'] = 'I'
        # Pre-allocate the full intensity array: map shape (reversed, to
        # match the axis ordering above) followed by the integrated shape.
        nxprocess.data.I = NXfield(
            value=np.empty(
                (*tuple(
                    [len(coord_values) for coord_name,coord_values in map_config.coords.items()][::-1]
                ),
                 *integration_config.integrated_data_shape
                )
            ),
            units='a.u',
            attrs={'long_name':'Intensity (a.u)'})

        integrator = integration_config.get_multi_geometry_integrator()
        if integration_config.integration_type == 'azimuthal':
            integration_method = integrator.integrate1d
            integration_kwargs = {
                'lst_mask': [detector.mask_array for detector in integration_config.detectors],
                'npt': integration_config.radial_npt
            }
        elif integration_config.integration_type == 'cake':
            integration_method = integrator.integrate2d
            integration_kwargs = {
                'lst_mask': [detector.mask_array for detector in integration_config.detectors],
                'npt_rad': integration_config.radial_npt,
                'npt_azim': integration_config.azimuthal_npt,
                'method': 'bbox'
            }

        # Delegate the per-point integration to IntegrationProcessor,
        # sharing this Processor's log level and first handler.
        integration_processor = IntegrationProcessor()
        integration_processor.logger.setLevel(self.logger.getEffectiveLevel())
        integration_processor.logger.addHandler(self.logger.handlers[0])
        # NOTE(review): lst_args is assigned but never used.
        lst_args = []
        for scans in map_config.spec_scans:
            for scan_number in scans.scan_numbers:
                scanparser = scans.get_scanparser(scan_number)
                for scan_step_index in range(scanparser.spec_scan_npts):
                    map_index = scans.get_index(
                        scan_number,
                        scan_step_index,
                        map_config)
                    detector_data = scans.get_detector_data(
                        integration_config.detectors,
                        scan_number,
                        scan_step_index)
                    result = integration_processor.process(
                        (detector_data, integration_method, integration_kwargs))
                    nxprocess.data.I[map_index] = result.intensity

                    # Record which raw file each map point came from.
                    for detector in integration_config.detectors:
                        nxprocess[detector.prefix].raw_data_files[map_index] = \
                            scanparser.get_detector_data_file(
                                detector.prefix,
                                scan_step_index)

        self.logger.debug(f'Constructed NXprocess in {time()-t0:.3f} seconds')

        return(nxprocess)
class MapProcessor(Processor):
    '''A Processor to take a map configuration and return a
    `nexusformat.nexus.NXentry` representing that map's metadata and any
    scalar-valued raw data requested by the supplied map configuration.
    '''

    def _process(self, data):
        '''Process the output of a `Reader` that contains a map configuration and
        return a `nexusformat.nexus.NXentry` representing the map.

        :param data: Result of `Reader.read` where at least one item has the
            value `'MapConfig'` for the `'schema'` key.
        :type data: list[dict[str,object]]
        :return: Map data & metadata
        :rtype: nexusformat.nexus.NXentry
        '''

        map_config = self.get_map_config(data)
        # get_nxentry is a staticmethod; dispatching through self.__class__
        # keeps overrides in subclasses effective.
        nxentry = self.__class__.get_nxentry(map_config)

        return(nxentry)

    def get_map_config(self, data):
        '''Get an instance of `MapConfig` from a returned value of `Reader.read`

        :param data: Result of `Reader.read` where at least one item has the
            value `'MapConfig'` for the `'schema'` key.
        :type data: list[dict[str,object]]
        :raises Exception: If a valid `MapConfig` cannot be constructed from `data`.
        :return: a valid instance of `MapConfig` with field values taken from `data`.
        :rtype: MapConfig
        '''

        from .models.map import MapConfig

        # Scan for the first item tagged with the 'MapConfig' schema;
        # `False` doubles as the "not found" sentinel.
        map_config = False
        if isinstance(data, list):
            for item in data:
                if isinstance(item, dict):
                    if item.get('schema') == 'MapConfig':
                        map_config = item.get('data')
                        break

        if not map_config:
            raise(ValueError('No map configuration found'))

        # Construction validates the raw dictionary; invalid fields raise here.
        return(MapConfig(**map_config))

    @staticmethod
    def get_nxentry(map_config):
        '''Use a `MapConfig` to construct a `nexusformat.nexus.NXentry`

        :param map_config: a valid map configuration
        :type map_config: MapConfig
        :return: the map's data and metadata contained in a NeXus structure
        :rtype: nexusformat.nexus.NXentry
        '''

        from nexusformat.nexus import (NXcollection,
                                       NXdata,
                                       NXentry,
                                       NXfield,
                                       NXsample)
        import numpy as np

        nxentry = NXentry(name=map_config.title)

        # Embed the full map configuration as JSON for reproducibility.
        nxentry.map_config = json.dumps(map_config.dict())

        nxentry[map_config.sample.name] = NXsample(**map_config.sample.dict())

        nxentry.attrs['station'] = map_config.station

        nxentry.spec_scans = NXcollection()
        for scans in map_config.spec_scans:
            # NOTE(review): dtype='int8' limits scan numbers to <= 127 --
            # larger scan numbers would overflow; confirm this is acceptable.
            nxentry.spec_scans[scans.scanparsers[0].scan_name] = \
                NXfield(value=scans.scan_numbers,
                        dtype='int8',
                        attrs={'spec_file':str(scans.spec_file)})

        nxentry.data = NXdata()
        nxentry.data.attrs['axes'] = map_config.dims
        # Reversed so the slowest-varying dimension comes first -- matches
        # the map indices produced by scans.get_index below.
        for i,dim in enumerate(map_config.independent_dimensions[::-1]):
            nxentry.data[dim.label] = NXfield(
                value=map_config.coords[dim.label],
                units=dim.units,
                attrs={'long_name': f'{dim.label} ({dim.units})',
                       'data_type': dim.data_type,
                       'local_name': dim.name})
            nxentry.data.attrs[f'{dim.label}_indices'] = i

        # First scalar dataset becomes the default signal; the rest are
        # recorded as auxiliary signals.
        signal = False
        auxilliary_signals = []
        for data in map_config.all_scalar_data:
            nxentry.data[data.label] = NXfield(
                value=np.empty(map_config.shape),
                units=data.units,
                attrs={'long_name': f'{data.label} ({data.units})',
                       'data_type': data.data_type,
                       'local_name': data.name})
            if not signal:
                signal = data.label
            else:
                auxilliary_signals.append(data.label)

        if signal:
            nxentry.data.attrs['signal'] = signal
            # NOTE(review): the NeXus-standard attribute name is
            # 'auxiliary_signals' (single 'l'); confirm downstream readers
            # expect this spelling before changing it.
            nxentry.data.attrs['auxilliary_signals'] = auxilliary_signals

        # Fill in the scalar data point by point from the SPEC scans.
        for scans in map_config.spec_scans:
            for scan_number in scans.scan_numbers:
                scanparser = scans.get_scanparser(scan_number)
                for scan_step_index in range(scanparser.spec_scan_npts):
                    map_index = scans.get_index(
                        scan_number,
                        scan_step_index,
                        map_config)
                    for data in map_config.all_scalar_data:
                        nxentry.data[data.label][map_index] = data.get_value(
                            scans,
                            scan_number,
                            scan_step_index)

        return(nxentry)
class NexusToNumpyProcessor(Processor):
    '''A Processor to convert the default plottable data in an `NXobject` into
    an `numpy.ndarray`.
    '''

    def _process(self, data):
        '''Return the default plottable data signal in `data` as an
        `numpy.ndarray`.

        :param data: input NeXus structure
        :type data: nexusformat.nexus.tree.NXobject
        :raises ValueError: if `data` has no default plottable data signal
        :return: default plottable data signal in `data`
        :rtype: numpy.ndarray
        '''

        # Locate the default plottable group, falling back to the path
        # named by the 'default' attribute when necessary.
        default_data = data.plottable_data
        if default_data is None:
            default_data = data.get(data.attrs['default'])
            if default_data is None:
                raise ValueError(f'The structure of {data} contains no default data')

        # The 'signal' attribute names the field holding the actual values.
        default_signal = default_data.attrs.get('signal')
        if default_signal is None:
            raise ValueError(f'The signal of {default_data} is unknown')
        default_signal = default_signal.nxdata

        # Extract the raw numpy array from the NXfield.
        return default_data[default_signal].nxdata
class NexusToXarrayProcessor(Processor):
    '''A Processor to convert the default plottable data in an `NXobject` into
    an `xarray.DataArray`.'''

    def _process(self, data):
        '''Return the default plottable data signal in `data` as an
        `xarray.DataArray`.

        :param data: input NeXus structure
        :type data: nexusformat.nexus.tree.NXobject
        :raises ValueError: if metadata for `xarray` is absent from `data`
        :return: default plottable data signal in `data`
        :rtype: xarray.DataArray
        '''

        from xarray import DataArray

        # Locate the default plottable group, falling back to the path
        # named by the 'default' attribute when necessary.
        default_data = data.plottable_data
        if default_data is None:
            default_data = data.get(data.attrs['default'])
            if default_data is None:
                raise ValueError(f'The structure of {data} contains no default data')

        # The 'signal' attribute names the field holding the actual values.
        default_signal = default_data.attrs.get('signal')
        if default_signal is None:
            raise ValueError(f'The signal of {default_data} is unknown')
        default_signal = default_signal.nxdata

        # Build xarray coordinates from the axis fields, preserving the
        # per-axis NeXus attributes.
        axes = default_data.attrs['axes']
        coords = {
            axis_name: (axis_name,
                        default_data[axis_name].nxdata,
                        default_data[axis_name].attrs)
            for axis_name in axes
        }

        return DataArray(data=default_data[default_signal].nxdata,
                         coords=coords,
                         dims=tuple(axes),
                         name=default_signal,
                         attrs=default_data[default_signal].attrs)
class PrintProcessor(Processor):
    '''A Processor to simply print the input data to stdout and return the
    original input data, unchanged in any way.
    '''

    def _process(self, data):
        '''Print and return the input data.

        :param data: Input data
        :type data: object
        :return: `data`
        :rtype: object
        '''

        # self.__name__ is assumed to be provided by the Processor base
        # class -- TODO confirm.
        print(f'{self.__name__} data :')

        # NXobjects' plain str() is just their nxname, so prefer their full
        # tree representation when available.
        tree_printer = getattr(data, '_str_tree', None)
        if callable(tree_printer):
            print(tree_printer(attrs=True, recursive=True))
        else:
            print(str(data))

        return data
class StrainAnalysisProcessor(Processor):
    '''A Processor to compute a map of sample strains by fitting bragg peaks in
    1D detector data and analyzing the difference between measured peak
    locations and expected peak locations for the sample measured.
    '''

    def _process(self, data):
        '''Process the input map detector data & configuration for the strain
        analysis procedure, and return a map of sample strains.

        :param data: results of `MutlipleReader.read` containing input map
            detector data and strain analysis configuration
        :type data: dict[list[str,object]]
        :return: map of sample strains
        :rtype: xarray.Dataset
        '''

        # Validate that a strain analysis configuration is present. The
        # actual strain-fitting step is not implemented yet, so the input
        # data is currently returned unchanged (placeholder behavior).
        strain_analysis_config = self.get_config(data)

        return(data)

    def get_config(self, data):
        '''Get instances of the configuration objects needed by this
        `Processor` from a returned value of `Reader.read`

        :param data: Result of `Reader.read` where at least one item has the
            value `'StrainAnalysisConfig'` for the `'schema'` key.
        :type data: list[dict[str,object]]
        :raises Exception: If valid config objects cannot be constructed from `data`.
        :return: valid instances of the configuration objects with field values
            taken from `data`.
        :rtype: StrainAnalysisConfig
        '''

        # `False` doubles as the "not found" sentinel; if several items
        # carry the schema, the last one wins.
        strain_analysis_config = False
        if isinstance(data, list):
            for item in data:
                if isinstance(item, dict):
                    # FIX: the schema was previously fetched into a local
                    # that was never used while the dict was queried a
                    # second time; use the single lookup.
                    schema = item.get('schema')
                    if schema == 'StrainAnalysisConfig':
                        strain_analysis_config = item.get('data')

        if not strain_analysis_config:
            raise(ValueError('No strain analysis configuration found in input data'))

        return(strain_analysis_config)
class URLResponseProcessor(Processor):
    '''A Processor to decode and return data resulting from URLReader.read'''

    def _process(self, data):
        '''Take data returned from URLReader.read and return a decoded version
        of the content.

        :param data: input data (output of URLReader.read)
        :type data: list[dict]
        :return: decoded data contents
        :rtype: object
        '''

        # Only the first reader result is used.
        data = data[0]

        content = data['data']
        encoding = data['encoding']

        self.logger.debug(f'Decoding content of type {type(content)} with {encoding}')

        try:
            content = content.decode(encoding)
        except (UnicodeDecodeError, LookupError, AttributeError, TypeError):
            # FIX: narrowed from a bare `except:`, which also swallowed
            # KeyboardInterrupt and SystemExit. These are the errors a
            # decode call can realistically raise: bad byte data, an
            # unknown codec name, or content that is not a bytes-like
            # object. On failure, the original (undecoded) content is
            # returned, preserving the previous best-effort behavior.
            self.logger.warning(f'Failed to decode content of type {type(content)} with {encoding}')

        return(content)
class XarrayToNexusProcessor(Processor):
    '''A Processor to convert the data in an `xarray` structure to an
    `nexusformat.nexus.NXdata`.
    '''

    def _process(self, data):
        '''Return `data` represented as an `nexusformat.nexus.NXdata`.

        :param data: The input `xarray` structure
        :type data: typing.Union[xarray.DataArray, xarray.Dataset]
        :return: The data and metadata in `data`
        :rtype: nexusformat.nexus.NXdata
        '''

        from nexusformat.nexus import NXdata, NXfield

        # The array's values become the NXdata signal, keeping its name
        # and attributes.
        signal = NXfield(value=data.data, name=data.name, attrs=data.attrs)

        # Each coordinate becomes one NXfield axis, in coordinate order.
        axes = tuple(
            NXfield(value=coord.data, name=name, attrs=coord.attrs)
            for name, coord in data.coords.items()
        )

        return NXdata(signal=signal, axes=axes)
class XarrayToNumpyProcessor(Processor):
    '''A Processor to convert the data in an `xarray.DataArray` structure to an
    `numpy.ndarray`.
    '''

    def _process(self, data):
        '''Return just the signal values contained in `data`.

        :param data: The input `xarray.DataArray`
        :type data: xarray.DataArray
        :return: The data in `data`
        :rtype: numpy.ndarray
        '''

        # Drop coordinates and metadata; keep only the backing array.
        return data.data
if __name__ == '__main__':
    # Allow running this module as a standalone script via the shared CLI
    # entry point defined in CHAP.processor.
    from CHAP.processor import main
    main()