ChessAnalysisPipeline 0.0.14-py3-none-any.whl → 0.0.15-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ChessAnalysisPipeline might be problematic.
- CHAP/__init__.py +1 -1
- CHAP/common/__init__.py +9 -0
- CHAP/common/models/map.py +295 -55
- CHAP/common/processor.py +846 -10
- CHAP/common/reader.py +171 -0
- CHAP/common/writer.py +181 -18
- CHAP/edd/__init__.py +10 -3
- CHAP/edd/models.py +822 -451
- CHAP/edd/processor.py +2221 -756
- CHAP/edd/reader.py +672 -0
- CHAP/edd/utils.py +846 -292
- CHAP/foxden/__init__.py +6 -0
- CHAP/foxden/processor.py +42 -0
- CHAP/foxden/writer.py +65 -0
- CHAP/pipeline.py +1 -1
- CHAP/runner.py +4 -4
- CHAP/tomo/models.py +7 -5
- CHAP/tomo/processor.py +118 -39
- CHAP/utils/__init__.py +1 -0
- CHAP/utils/fit.py +1292 -1315
- CHAP/utils/general.py +393 -53
- CHAP/utils/models.py +567 -0
- CHAP/utils/scanparsers.py +141 -28
- ChessAnalysisPipeline-0.0.15.dist-info/LICENSE +60 -0
- {ChessAnalysisPipeline-0.0.14.dist-info → ChessAnalysisPipeline-0.0.15.dist-info}/METADATA +1 -1
- {ChessAnalysisPipeline-0.0.14.dist-info → ChessAnalysisPipeline-0.0.15.dist-info}/RECORD +29 -25
- {ChessAnalysisPipeline-0.0.14.dist-info → ChessAnalysisPipeline-0.0.15.dist-info}/WHEEL +1 -1
- ChessAnalysisPipeline-0.0.14.dist-info/LICENSE +0 -21
- {ChessAnalysisPipeline-0.0.14.dist-info → ChessAnalysisPipeline-0.0.15.dist-info}/entry_points.txt +0 -0
- {ChessAnalysisPipeline-0.0.14.dist-info → ChessAnalysisPipeline-0.0.15.dist-info}/top_level.txt +0 -0
CHAP/common/reader.py
CHANGED
@@ -8,7 +8,9 @@ Description: Module for Writers used in multiple experiment-specific

 # System modules
 from os.path import (
+    isabs,
     isfile,
+    join,
     splitext,
 )
 from sys import modules
@@ -36,6 +38,35 @@ class BinaryFileReader(Reader):
         return data


+class FabioImageReader(Reader):
+    """Reader for images using the python package
+    [`fabio`](https://fabio.readthedocs.io/en/main/).
+    """
+    def read(self, filename, frame=None, inputdir='.'):
+        """Return the data from the image file(s) provided.
+
+        :param filename: The image filename, or glob pattern for image
+            filenames, to read.
+        :type filename: str
+        :param frame: The index of a specific frame to read from the
+            file(s), defaults to `None`.
+        :type frame: int, optional
+        :returns: Image data as a numpy array (or list of numpy
+            arrays, if a glob pattern matching more than one file was
+            provided).
+        """
+        from glob import glob
+        import fabio
+
+        filenames = glob(filename)
+        data = []
+        for f in filenames:
+            image = fabio.open(f, frame=frame)
+            data.append(image.data)
+            image.close()
+        return data
+
+
 class H5Reader(Reader):
     """Reader for h5 files.
     """
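For orientation, here is a minimal sketch of calling the new FabioImageReader directly, outside a full pipeline run; the glob pattern and frame index below are hypothetical, and the fabio package must be installed.

    # Hypothetical direct use of the new reader; the file pattern is made up.
    from CHAP.common.reader import FabioImageReader

    reader = FabioImageReader()
    # Read frame 0 from every file matching the glob pattern.
    images = reader.read(filename='scan_0001_*.cbf', frame=0)
    for image in images:
        print(image.shape, image.dtype)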
@@ -214,6 +245,146 @@ class NexusReader(Reader):
         return nxobject


+class NXdataReader(Reader):
+    """Reader for constructing an NXdata object from components"""
+    def read(self, name, nxfield_params, signal_name, axes_names, attrs={},
+             inputdir='.'):
+        """Return a basic NXdata object constructed from components.
+
+        :param name: The name of the NXdata group.
+        :type name: str
+        :param nxfield_params: List of sets of parameters for
+            `NXfieldReader` specifying the NXfields belonging to the
+            NXdata.
+        :type nxfield_params: list[dict]
+        :param signal_name: Name of the signal for the NXdata (must be
+            one of the names of the NXfields indicated in `nxfields`)
+        :type signal_name: str
+        :param axes_names: Name or names of the coordinate axes
+            NXfields associated with the signal (must be names of
+            NXfields indicated in `nxfields`)
+        :type axes_names: Union[str, list[str]]
+        :param attrs: Optional dictionary of additional attributes for
+            the NXdata
+        :type attrs: dict
+        :param inputdir: Input directory to use for `NXfieldReader`s,
+            defaults to `"."`
+        :type inputdir: str
+        :returns: A new NXdata object
+        :rtype: nexusformat.nexus.NXdata
+        """
+        from nexusformat.nexus import NXdata
+
+        # Read in NXfields
+        nxfields = [NXfieldReader().read(**params, inputdir=inputdir)
+                    for params in nxfield_params]
+        nxfields = {nxfield.nxname: nxfield for nxfield in nxfields}
+
+        # Get signal NXfield
+        try:
+            nxsignal = nxfields[signal_name]
+        except:
+            raise ValueError(
+                '`signal_name` must be the name of one of the NXfields '
+                + 'indicated in `nxfields`: '
+                + ', '.join(nxfields.keys())
+            )
+
+        # Get axes NXfield(s)
+        if isinstance(axes_names, str):
+            axes_names = [axes_names]
+        try:
+            nxaxes = [nxfields[axis_name] for axis_name in axes_names]
+        except:
+            raise ValueError(
+                '`axes_names` must contain only names of NXfields indicated '
+                + 'in `nxfields`: ' + ', '.join(nxfields.keys())
+            )
+        for i, nxaxis in enumerate(nxaxes):
+            if len(nxaxis) != nxsignal.shape[i]:
+                raise ValueError(
+                    f'Shape mismatch on signal dimension {i}: signal '
+                    + f'"{nxsignal.nxname}" has {nxsignal.shape[i]} values, '
+                    + f'but axis "{nxaxis.nxname}" has {len(nxaxis)} values.')
+
+        result = NXdata(signal=nxsignal, axes=nxaxes, name=name, attrs=attrs,
+                        **nxfields)
+        self.logger.info(result.tree)
+        return result
+
+
+class NXfieldReader(Reader):
+    """Reader for an NXfield with options to modify certain attributes."""
+    def read(self, filename, nxpath, nxname=None, update_attrs=None,
+             slice_params=None, inputdir='.'):
+        """Return a copy of the indicated NXfield from the file. Name
+        and attributes of the returned copy may be modified with the
+        `nxname` and `update_attrs` keyword arguments.
+
+        :param filename: Name of the NeXus file containing the NXfield to read.
+        :type filename: str
+        :param nxpath: Path in `nxfile` pointing to the NXfield to read.
+        :type nxpath: str
+        :param nxname: Optional new name for the returned NXfield,
+            defaults to None
+        :type nxname: str, optional
+        :param update_attrs: Optional dictionary used to add to /
+            update the original NXfield's attributes, defaults to None
+        :type update_attrs: dict, optional
+        :param slice_params: Parameters for returning just a slice of
+            the full field data. Slice parameters are provided in a
+            list of dictionaries with integer values for any / all of
+            the following keys: `"start"`, `"end"`, `"step"`. Default
+            values used are: `"start"` -- `0`, `"end"` -- `None`,
+            `"step"` -- `1`. The order of the list must correspond to
+            the order of the field's axes. Defaults to `None`.
+        :type slice_params: list[dict[str, int]], optional
+        :param inputdir: Directory containing `nxfile`, defaults to `"."`
+        :type inputdir: str
+        :returns: A copy of the indicated NXfield (with name and
+            attributes optionally modified).
+        :rtype: nexusformat.nexus.NXfield
+        """
+        # Third party modules
+        from nexusformat.nexus import nxload, NXfield
+
+        # Local modules
+        from CHAP.utils.general import nxcopy
+
+        if not isabs(filename):
+            filename = join(inputdir, filename)
+        nxroot = nxload(filename)
+        nxfield = nxroot[nxpath]
+
+        if nxname is None:
+            nxname = nxfield.nxname
+
+        attrs = nxfield.attrs
+        if update_attrs is not None:
+            attrs.update(update_attrs)
+
+        if slice_params is None:
+            value = nxfield.nxdata
+        else:
+            if len(slice_params) < nxfield.ndim:
+                slice_params.extend([{}] * (nxfield.ndim - len(slice_params)))
+            if len(slice_params) > nxfield.ndim:
+                slice_params = slice_params[0:nxfield.ndim]
+            slices = ()
+            default_slice = {'start': 0, 'end': None, 'step': 1}
+            for s in slice_params:
+                for k, v in default_slice.items():
+                    if k not in s:
+                        s[k] = v
+                slices = (*slices, slice(s['start'], s['end'], s['step']))
+            value = nxfield.nxdata[slices]
+
+        nxfield = NXfield(value=value, name=nxname, attrs=attrs)
+        self.logger.debug(f'Result -- nxfield.tree =\n{nxfield.tree}')
+
+        return nxfield
+
+
 class SpecReader(Reader):
     """Reader for CHESS SPEC scans"""
     def read(self, filename=None, spec_config=None, detector_names=[],
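The two new readers are meant to compose: NXdataReader pulls each component field in through NXfieldReader and assembles them into a single NXdata group. A rough sketch of a direct call follows; the file name and NeXus paths are made up for illustration.

    # Hypothetical use of the new NXdataReader; 'results.nxs' and the
    # nxpath values are illustrative only.
    from CHAP.common.reader import NXdataReader

    nxdata = NXdataReader().read(
        name='integrated',
        nxfield_params=[
            {'filename': 'results.nxs', 'nxpath': '/entry/data/q'},
            {'filename': 'results.nxs', 'nxpath': '/entry/data/intensity'},
        ],
        signal_name='intensity',   # must match one of the NXfield names
        axes_names='q',            # axis length must match the signal shape
    )
    print(nxdata.tree)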
CHAP/common/writer.py
CHANGED
@@ -89,13 +89,15 @@ def write_yaml(data, filename, force_overwrite=False):

 def write_filetree(data, outputdir, force_overwrite=False):
     # System modules
-    from os import
+    from os import makedirs

     # Third party modules
     from nexusformat.nexus import (
         NXentry,
+        NXsubentry,
         NXgroup,
         NXobject,
+        NXroot,
         NXsubentry,
     )

@@ -104,7 +106,7 @@ def write_filetree(data, outputdir, force_overwrite=False):
         f'{type(data).__name__} as a file tree to disk.')

     if not os_path.isdir(outputdir):
-
+        makedirs(outputdir)

     for k, v in data.items():
         if isinstance(v, NXsubentry) and 'schema' in v.attrs:
@@ -114,15 +116,28 @@ def write_filetree(data, outputdir, force_overwrite=False):
                 write_txt(list(v.data), filename, force_overwrite)
             elif schema == 'json':
                 write_txt(str(v.data), filename, force_overwrite)
+            elif schema == 'yml' or schema == 'yaml':
+                from json import loads
+                write_yaml(loads(v.data.nxdata), filename, force_overwrite)
             elif schema == 'tif' or schema == 'tiff':
                 write_tif(v.data, filename, force_overwrite)
             elif schema == 'h5':
-
+                if any(isinstance(vv, NXsubentry) for vv in v.values()):
+                    nxbase = NXroot()
+                else:
+                    nxbase = NXentry()
                 for kk, vv in v.attrs.items():
-
+                    if kk not in ('schema', 'filename'):
+                        nxbase.attrs[kk] = vv
                 for kk, vv in v.items():
-
-
+                    if isinstance(vv, NXsubentry):
+                        nxentry = NXentry()
+                        nxbase[vv.nxname] = nxentry
+                        for kkk, vvv in vv.items():
+                            nxentry[kkk] = vvv
+                    else:
+                        nxbase[kk] = vv
+                write_nexus(nxbase, filename, force_overwrite)
             else:
                 raise TypeError(f'Files of type {schema} not yet implemented')
         elif isinstance(v, NXgroup):
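The new branches of write_filetree key off NXsubentry groups that carry 'schema' and 'filename' attributes (the h5 branch explicitly skips both when copying attributes). A hedged sketch of a group the new 'yaml' branch would pick up; group, attribute values, and payload are invented, and the payload is stored as a JSON string because the branch json-loads v.data before dumping it to YAML.

    # Hypothetical NXsubentry layout for the new 'yaml' branch; names are
    # made up for illustration.
    from json import dumps
    from nexusformat.nexus import NXfield, NXsubentry

    calibration = NXsubentry(name='calibration')
    calibration.attrs['schema'] = 'yaml'
    calibration.attrs['filename'] = 'calibration.yaml'
    # Stored as a JSON string; write_filetree loads it and writes YAML.
    calibration['data'] = NXfield(dumps({'detector': 'mca1', 'tth': 7.5}))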
@@ -204,6 +219,44 @@ class FileTreeWriter(Writer):
         return data


+class H5Writer(Writer):
+    """Writer for H5 files from an `nexusformat.nexus.NXdata` object"""
+    def write(self, data, filename, force_overwrite=False):
+        """Write the NeXus object contained in `data` to hdf5 file.
+
+        :param data: The data to write to file.
+        :type data: CHAP.pipeline.PipelineData
+        :param filename: The name of the file to write to.
+        :param force_overwrite: Flag to allow data in `filename` to be
+            overwritten if it already exists, defaults to `False`.
+        :type force_overwrite: bool, optional
+        :raises RuntimeError: If `filename` already exists and
+            `force_overwrite` is `False`.
+        :return: The data written to file.
+        :rtype: nexusformat.nexus.NXobject
+        """
+        # Third party modules
+        from h5py import File
+        from nexusformat.nexus import NXdata
+
+        data = self.unwrap_pipelinedata(data)[-1]
+        if not isinstance(data, NXdata):
+            raise ValueError('Invalid data parameter {(data)}')
+
+        mode = 'w' if force_overwrite else 'w-'
+        with File(filename, mode) as f:
+            f[data.signal] = data.nxsignal
+            for i, axes in enumerate(data.attrs['axes']):
+                f[axes] = data[axes]
+                f[data.signal].dims[i].label = \
+                    f'{axes} ({data[axes].units})' \
+                    if 'units' in data[axes].attrs else axes
+                f[axes].make_scale(axes)
+                f[data.signal].dims[i].attach_scale(f[axes])
+
+        return data
+
+
 class MatplotlibAnimationWriter(Writer):
     """Writer for saving matplotlib animations."""
     def write(self, data, filename, fps=1):
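The new H5Writer expects an NXdata group with an 'axes' attribute and attaches each coordinate axis to the signal dataset as an HDF5 dimension scale. As a rough read-back sketch (file and dataset names are hypothetical), the scales then show up through h5py's dims interface:

    # Hypothetical read-back of a file produced by H5Writer; 'strain.h5'
    # and the dataset names are made up.
    import h5py

    with h5py.File('strain.h5', 'r') as f:
        signal = f['intensity']        # the NXdata signal dataset
        for i in range(signal.ndim):
            dim = signal.dims[i]       # one DimensionProxy per axis
            print(dim.label, [scale.name for scale in dim.values()])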
@@ -261,7 +314,7 @@ class MatplotlibFigureWriter(Writer):

 class NexusWriter(Writer):
     """Writer for NeXus files from `NXobject`-s"""
-    def write(self, data, filename, force_overwrite=False):
+    def write(self, data, filename, nxpath=None, force_overwrite=False):
         """Write the NeXus object contained in `data` to file.

         :param data: The data to write to file.
@@ -275,26 +328,130 @@ class NexusWriter(Writer):
         :return: The data written to file.
         :rtype: nexusformat.nexus.NXobject
         """
+        # Third party modules
         from nexusformat.nexus import (
             NXentry,
+            NXFile,
             NXroot,
         )
+        import os
         data = self.unwrap_pipelinedata(data)[-1]
-        nxclass = data.nxclass
         nxname = data.nxname
-        if
-
-
-
-
-
-
-
-
+        if not os.path.isfile(filename) and nxpath is not None:
+            self.logger.warning(
+                f'{filename} does not yet exist. Argument for nxpath ({nxpath}) '
+                + 'will be ignored.')
+            nxpath = None
+        if nxpath is None:
+            nxclass = data.nxclass
+            if nxclass == 'NXentry':
+                data = NXroot(data)
+                data[nxname].set_default()
+            elif nxclass != 'NXroot':
+                data = NXroot(NXentry(data))
+                if nxclass == 'NXdata':
+                    data.entry[nxname].set_default()
+                data.entry.set_default()
+            write_nexus(data, filename, force_overwrite)
+        else:
+            nxfile = NXFile(filename, 'rw')
+            root = nxfile.readfile()
+            if nxfile.get(nxpath) is None:
+                nxpath = root.NXentry[0].nxpath
+                self.logger.warning(
+                    f'Path "{nxpath}" not present in {filename}. '
+                    + f'Using {nxpath} instead.')
+            full_nxpath = os.path.join(nxpath, nxname)
+            self.logger.debug(f'Full path for object to write: {full_nxpath}')
+            if nxfile.get(full_nxpath) is not None:
+                self.logger.debug(
+                    f'{os.path.join(nxpath, nxname)} already exists in {filename}')
+                if force_overwrite:
+                    self.logger.warning(
+                        'Deleting existing NXobject at '
+                        + f'{os.path.join(nxpath, nxname)} in {filename}')
+                    del root[full_nxpath]
+            try:
+                root[full_nxpath] = data
+            except Exception as e:
+                nxfile.close()
+                raise e
+            nxfile.close()
         return data


+class PyfaiResultsWriter(Writer):
+    """Writer for results of one or more pyFAI integrations. Able to
+    handle multiple output formats. Currently supported formats are:
+    .npz, .nxs.
+    """
+    def write(self, data, filename, force_overwrite=False):
+        """Save pyFAI integration results to a file. Format is
+        determined automatically from the extension of `filename`.
+
+        :param data: Integration results to save.
+        :type data: Union[PipelineData,
+            list[pyFAI.containers.IntegrateResult]]
+        :param filename: Name of the file to which results will be
+            saved. Format of output is determined from the
+            extension. Currently supported formats are: `.npz`,
+            `.nxs`
+        :type filename: str
+        """
+        import os
+
+        from pyFAI.containers import Integrate1dResult, Integrate2dResult
+
+        try:
+            results = self.unwrap_pipelinedata(data)[0]
+        except:
+            results = data
+        if not isinstance(results, list):
+            results = [results]
+        if not all([isinstance(r, Integrate1dResult) for r in results]) \
+                and not all([isinstance(r, Integrate2dResult) for r in results]):
+            raise Exception(
+                'Bad input data: all items must have the same type -- either '
+                + 'all pyFAI.containers.Integrate1dResult, or all '
+                + 'pyFAI.containers.Integrate2dResult.')
+
+        if os.path.isfile(filename):
+            if force_overwrite:
+                self.logger.warning(f'Removing existing file {filename}')
+                os.remove(filename)
+            else:
+                raise Exception(f'{filename} already exists.')
+        _, ext = os.path.splitext(filename)
+        if ext.lower() == '.npz':
+            self.write_npz(results, filename)
+        elif ext.lower() == '.nxs':
+            self.write_nxs(results, filename)
+        else:
+            raise Exception(f'Unsupported file format: {ext}')
+        self.logger.info(f'Wrote to {filename}')
+        return results
+
+    def write_npz(self, results, filename):
+        """Save `results` to the .npz file, `filename`"""
+        import numpy as np
+
+        data = {'radial': results[0].radial,
+                'intensity': [r.intensity for r in results]}
+        if hasattr(results[0], 'azimuthal'):
+            # 2d results
+            data['azimuthal'] = results[0].azimuthal
+        if all([r.sigma for r in results]):
+            # errors were included
+            data['sigma'] = [r.sigma for r in results]
+
+        np.savez(filename, **data)
+
+    def write_nxs(results, filename):
+        """Save `results` to the .nxs file, `filename`"""
+        raise NotImplementedError
+
+
+
 class TXTWriter(Writer):
     """Writer for plain text files from string or tuples or lists of
     strings."""
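With the new nxpath argument, NexusWriter can graft an object into an existing NeXus file instead of always writing a fresh root. A hedged sketch of a direct call follows; the file name and path are invented, the target file must already exist (otherwise nxpath is ignored with a warning), and the PipelineData wrapping mirrors how a pipeline run would hand data to the writer.

    # Hypothetical direct use of the new nxpath argument; 'results.nxs'
    # and '/entry' are made up, and the file must already exist.
    from nexusformat.nexus import NXdata, NXfield
    from CHAP.common.writer import NexusWriter
    from CHAP.pipeline import PipelineData

    nxdata = NXdata(NXfield([1, 2, 3], name='counts'), name='new_result')
    NexusWriter().write(
        [PipelineData(name='example', data=nxdata)],
        filename='results.nxs',
        nxpath='/entry',
        force_overwrite=True,
    )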
@@ -345,6 +502,12 @@ class YAMLWriter(Writer):
         :rtype: dict
         """
         data = self.unwrap_pipelinedata(data)[-1]
+        try:
+            from pydantic import BaseModel
+            if isinstance(data, BaseModel):
+                data = data.dict()
+        except:
+            pass
         write_yaml(data, filename, force_overwrite)
         return data

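The YAMLWriter change simply converts pydantic models to plain dictionaries before dumping, so configuration models produced by processors can be written directly. A minimal illustration of that conversion; the Detector model here is invented.

    # Hypothetical pydantic model; YAMLWriter now calls .dict() on such
    # objects before writing YAML.
    from pydantic import BaseModel
    import yaml

    class Detector(BaseModel):
        name: str
        num_bins: int

    config = Detector(name='mca1', num_bins=2048)
    print(yaml.safe_dump(config.dict()))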
CHAP/edd/__init__.py
CHANGED
@@ -1,13 +1,20 @@
 """This subpackage contains `PipelineItems` unique to EDD data
 processing workflows.
 """
-
+from CHAP.edd.reader import (EddMapReader,
+                             ScanToMapReader,
+                             SetupNXdataReader,
+                             UpdateNXdataReader,
+                             NXdataSliceReader)
 from CHAP.edd.processor import (DiffractionVolumeLengthProcessor,
                                 LatticeParameterRefinementProcessor,
-
+                                MCAEnergyCalibrationProcessor,
+                                MCATthCalibrationProcessor,
                                 MCADataProcessor,
                                 MCAEnergyCalibrationProcessor,
-
+                                MCACalibratedDataPlotter,
+                                StrainAnalysisProcessor,
+                                CreateStrainAnalysisConfigProcessor)
 # from CHAP.edd.writer import

 from CHAP.common import MapProcessor