ChessAnalysisPipeline-0.0.12-py3-none-any.whl → ChessAnalysisPipeline-0.0.14-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ChessAnalysisPipeline might be problematic.
- CHAP/__init__.py +2 -0
- CHAP/common/__init__.py +7 -2
- CHAP/common/models/map.py +95 -70
- CHAP/common/processor.py +844 -153
- CHAP/common/reader.py +168 -131
- CHAP/common/writer.py +166 -96
- CHAP/edd/__init__.py +2 -0
- CHAP/edd/models.py +94 -48
- CHAP/edd/processor.py +625 -169
- CHAP/edd/utils.py +186 -6
- CHAP/pipeline.py +35 -3
- CHAP/runner.py +40 -13
- CHAP/tomo/models.py +18 -9
- CHAP/tomo/processor.py +1134 -902
- CHAP/utils/fit.py +98 -45
- CHAP/utils/general.py +196 -63
- CHAP/utils/scanparsers.py +403 -94
- {ChessAnalysisPipeline-0.0.12.dist-info → ChessAnalysisPipeline-0.0.14.dist-info}/METADATA +1 -1
- {ChessAnalysisPipeline-0.0.12.dist-info → ChessAnalysisPipeline-0.0.14.dist-info}/RECORD +23 -23
- {ChessAnalysisPipeline-0.0.12.dist-info → ChessAnalysisPipeline-0.0.14.dist-info}/WHEEL +1 -1
- {ChessAnalysisPipeline-0.0.12.dist-info → ChessAnalysisPipeline-0.0.14.dist-info}/LICENSE +0 -0
- {ChessAnalysisPipeline-0.0.12.dist-info → ChessAnalysisPipeline-0.0.14.dist-info}/entry_points.txt +0 -0
- {ChessAnalysisPipeline-0.0.12.dist-info → ChessAnalysisPipeline-0.0.14.dist-info}/top_level.txt +0 -0
CHAP/common/writer.py
CHANGED
@@ -5,11 +5,10 @@ Author : Valentin Kuznetsov <vkuznet AT gmail dot com>
 Description: Module for Writers used in multiple experiment-specific workflows.
 """

-#
-from os import mkdir
+# System modules
 from os import path as os_path

-#
+# Local modules
 from CHAP import Writer

 def write_matplotlibfigure(data, filename, force_overwrite=False):
@@ -89,6 +88,9 @@ def write_yaml(data, filename, force_overwrite=False):
         yaml.dump(data, f, sort_keys=False)

 def write_filetree(data, outputdir, force_overwrite=False):
+    # System modules
+    from os import mkdir
+
     # Third party modules
     from nexusformat.nexus import (
         NXentry,
@@ -130,15 +132,15 @@ def write_filetree(data, outputdir, force_overwrite=False):
 class ExtractArchiveWriter(Writer):
     """Writer for tar files from binary data"""
     def write(self, data, filename):
-        """Take a .tar archive represented as bytes in `data`
-        write the extracted archive to files.
+        """Take a .tar archive represented as bytes contained in `data`
+        and write the extracted archive to files.

-        :param data:
-        :type data:
-        :param filename:
-            files
+        :param data: The data to write to archive.
+        :type data: list[PipelineData]
+        :param filename: The name of the directory to write the archive
+            files to.
         :type filename: str
-        :return:
+        :return: The achived data
         :rtype: bytes
         """
         # System modules
@@ -153,86 +155,193 @@ class ExtractArchiveWriter(Writer):
         return data


+class FileTreeWriter(Writer):
+    """Writer for a file tree in NeXus format"""
+    def write(self, data, outputdir, force_overwrite=False):
+        """Write a NeXus format object contained in `data` to a
+        directory tree stuctured like the NeXus tree.
+
+        :param data: The data to write to disk.
+        :type data: list[PipelineData]
+        :param outputdir: The name of the directory to write to.
+        :type outputdir: str
+        :param force_overwrite: Flag to allow data to be overwritten
+            if it already exists, defaults to `False`.
+        :type force_overwrite: bool, optional
+        :raises RuntimeError: If `filename` already exists and
+            `force_overwrite` is `False`.
+        :return: The data written to disk.
+        :rtype: Union[nexusformat.nexus.NXroot,
+            nexusformat.nexus.NXentry]
+        """
+        # Third party modules
+        from nexusformat.nexus import (
+            NXentry,
+            NXroot,
+        )
+
+        data = self.unwrap_pipelinedata(data)[-1]
+        if isinstance(data, NXroot):
+            if 'default' in data.attrs:
+                nxentry = data[data.attrs['default']]
+            else:
+                nxentry = [v for v in data.values()
+                           if isinstance(data, NXentry)]
+                if len(nxentry) == 1:
+                    nxentry = nxentry[0]
+                else:
+                    raise TypeError('Cannot write object of type '
+                                    f'{type(data).__name__} as a file tree '
+                                    'to disk.')
+        elif isinstance(data, NXentry):
+            nxentry = data
+        else:
+            raise TypeError('Cannot write object of type '
+                            f'{type(data).__name__} as a file tree to disk.')
+
+        write_filetree(nxentry, outputdir, force_overwrite)
+
+        return data
+
+
+class MatplotlibAnimationWriter(Writer):
+    """Writer for saving matplotlib animations."""
+    def write(self, data, filename, fps=1):
+        """Write the matplotlib.animation.ArtistAnimation object
+        contained in `data` to file.
+
+        :param data: The matplotlib animation.
+        :type data: list[PipelineData]
+        :param filename: The name of the file to write to.
+        :type filename: str
+        :param fps: Movie frame rate (frames per second),
+            defaults to `1`
+        :type fps: int, optional
+        :return: The original animation.
+        :rtype: matplotlib.animation.ArtistAnimation
+        """
+        data = self.unwrap_pipelinedata(data)[-1]
+        extension = os_path.splitext(filename)[1]
+        if not extension:
+            data.save(f'{filename}.gif', fps=fps)
+        elif extension == '.gif':
+            data.save(filename, fps=fps)
+        elif extension == '.mp4':
+            data.save(filename, writer='ffmpeg', fps=fps)
+
+        return data
+
+
 class MatplotlibFigureWriter(Writer):
     """Writer for saving matplotlib figures to image files."""
     def write(self, data, filename, savefig_kw={}, force_overwrite=False):
-        """Write the matplotlib.
-
-
-        :param data:
-        :type data:
-        :param filename: name of the file to write to.
-        :
-
+        """Write the matplotlib.figure.Figure contained in `data` to
+        file.
+
+        :param data: The matplotlib figure
+        :type data: list[PipelineData]
+        :param filename: The name of the file to write to.
+        :type filename: str
+        :param savefig_kw: Keyword args to pass to
+            matplotlib.figure.Figure.savefig, defaults to {}.
         :type savefig_kw: dict, optional
-        :param force_overwrite:
-            overwritten
-        :
+        :param force_overwrite: Flag to allow data in `filename` to be
+            overwritten if it already exists, defaults to `False`.
+        :type force_overwrite: bool, optional
+        :raises RuntimeError: If `filename` already exists and
+            `force_overwrite` is `False`.
+        :return: The original figure object
+        :rtype: matplotlib.figure.Figure
         """
         data = self.unwrap_pipelinedata(data)[-1]
         write_matplotlibfigure(data, filename, force_overwrite)
+
         return data


 class NexusWriter(Writer):
     """Writer for NeXus files from `NXobject`-s"""
     def write(self, data, filename, force_overwrite=False):
-        """Write `data` to
-
-        :param data:
-        :type data:
-        :param filename: name of the file to write to.
-        :param force_overwrite:
-            overwritten
-        :
+        """Write the NeXus object contained in `data` to file.
+
+        :param data: The data to write to file.
+        :type data: list[PipelineData]
+        :param filename: The name of the file to write to.
+        :param force_overwrite: Flag to allow data in `filename` to be
+            overwritten if it already exists, defaults to `False`.
+        :type force_overwrite: bool, optional
+        :raises RuntimeError: If `filename` already exists and
+            `force_overwrite` is `False`.
+        :return: The data written to file.
+        :rtype: nexusformat.nexus.NXobject
         """
+        from nexusformat.nexus import (
+            NXentry,
+            NXroot,
+        )
         data = self.unwrap_pipelinedata(data)[-1]
+        nxclass = data.nxclass
+        nxname = data.nxname
+        if nxclass == 'NXentry':
+            data = NXroot(data)
+            data[nxname].set_default()
+        elif nxclass != 'NXroot':
+            data = NXroot(NXentry(data))
+            if nxclass == 'NXdata':
+                data.entry[nxname].set_default()
+            data.entry.set_default()
         write_nexus(data, filename, force_overwrite)
+
         return data


 class TXTWriter(Writer):
-    """Writer for plain text files from string or
-
-
-
-
-
-
-        :type data: str, tuple, list
-        :param filename: name of the file to write to.
+    """Writer for plain text files from string or tuples or lists of
+    strings."""
+    def write(self, data, filename, append=False, force_overwrite=False):
+        """Write a string or tuple or list of strings contained in
+        `data` to file.
+
+        :param data: The data to write to disk.
+        :type data: str, tuple[str], list[str]
+        :param filename: The name of the file to write to.
         :type filename: str
-        :param
-
-        :type
-        :
-            `
-        :
+        :param append: Flag to allow data in `filename` to be
+            be appended, defaults to `False`.
+        :type append: bool, optional
+        :param force_overwrite: Flag to allow data in `filename` to be
+            overwritten if it already exists, defaults to `False`.
+        :type force_overwrite: bool, optional
+        :raises TypeError: If the object contained in `data` is not a
+            `str`, `tuple[str]` or `list[str]`.
+        :raises RuntimeError: If `filename` already exists and
            `force_overwrite` is `False`.
-        :return:
-        :rtype: str, tuple, list
+        :return: The data written to file.
+        :rtype: str, tuple[str], list[str]
         """
         data = self.unwrap_pipelinedata(data)[-1]
         write_txt(data, filename, force_overwrite, append)
+
         return data


 class YAMLWriter(Writer):
     """Writer for YAML files from `dict`-s"""
     def write(self, data, filename, force_overwrite=False):
-        """
+        """Write the dictionary contained in `data` to file.

-        :param data:
+        :param data: The data to write to file.
         :type data: dict
-        :param filename: name of the file to write to.
+        :param filename: The name of the file to write to.
         :type filename: str
-        :param force_overwrite:
-            overwritten if it already exists
-        :type force_overwrite: bool
-        :raises TypeError:
-
+        :param force_overwrite: Flag to allow data in `filename` to be
+            overwritten if it already exists, defaults to `False`.
+        :type force_overwrite: bool, optional
+        :raises TypeError: If the object contained in `data` is not a
+            `dict`.
+        :raises RuntimeError: If `filename` already exists and
            `force_overwrite` is `False`.
-        :return:
+        :return: The data written to file.
         :rtype: dict
         """
         data = self.unwrap_pipelinedata(data)[-1]
@@ -240,47 +349,8 @@ class YAMLWriter(Writer):
         return data


-class FileTreeWriter(Writer):
-    """Writer for a file tree in NeXus format"""
-    def write(self, data, outputdir, force_overwrite=False):
-        """Write `data` to a NeXus file
-
-        :param data: the data to write to disk.
-        :type data: Union[nexusformat.nexus.NXroot,
-            nexusformat.nexus.NXentry]
-        :param force_overwrite: flag to allow data to be overwritten,
-            if it already exists.
-        :return: the original input data
-        """
-        # Third party modules
-        from nexusformat.nexus import (
-            NXentry,
-            NXroot,
-        )
-
-        data = self.unwrap_pipelinedata(data)[-1]
-        if isinstance(data, NXroot):
-            if 'default' in data.attrs:
-                nxentry = data[data.attrs['default']]
-            else:
-                nxentry = [v for v in data.values()
-                           if isinstance(data, NXentry)]
-                if len(nxentry) == 1:
-                    nxentry = nxentry[0]
-                else:
-                    raise TypeError('Cannot write object of type '
-                                    f'{type(data).__name__} as a file tree '
-                                    'to disk.')
-        elif isinstance(data, NXentry):
-            nxentry = data
-        else:
-            raise TypeError('Cannot write object of type '
-                            f'{type(data).__name__} as a file tree to disk.')
-
-        write_filetree(nxentry, outputdir, force_overwrite)
-        return data
-
-
 if __name__ == '__main__':
+    # Local modules
     from CHAP.writer import main
+
     main()
CHAP/edd/__init__.py
CHANGED
@@ -3,8 +3,10 @@ processing workflows.
 """
 # from CHAP.edd.reader import
 from CHAP.edd.processor import (DiffractionVolumeLengthProcessor,
+                                LatticeParameterRefinementProcessor,
                                 MCACeriaCalibrationProcessor,
                                 MCADataProcessor,
+                                MCAEnergyCalibrationProcessor,
                                 StrainAnalysisProcessor)
 # from CHAP.edd.writer import

CHAP/edd/models.py
CHANGED
@@ -38,43 +38,70 @@ class MCAElementConfig(BaseModel):
     :type detector_name: str
     :ivar num_bins: Number of MCA channels.
     :type num_bins: int, optional
-    :ivar
-        data should be included after applying a mask (the
-        inclusive), defaults to `[]`
-    :type
+    :ivar include_energy_ranges: List of MCA channel energy ranges
+        whose data should be included after applying a mask (the
+        bounds are inclusive), defaults to `[[50,150]]`
+    :type include_energy_ranges: list[[float, float]], optional
     """
     detector_name: constr(strip_whitespace=True, min_length=1) = 'mca1'
     num_bins: Optional[conint(gt=0)]
-
+    max_energy_kev: confloat(gt=0) = 200
+    include_energy_ranges: conlist(
         min_items=1,
         item_type=conlist(
-            item_type=
+            item_type=confloat(ge=0),
             min_items=2,
-            max_items=2)) = []
+            max_items=2)) = [[50,150]]

-    @validator('
-    def
-        """Ensure that no
+    @validator('include_energy_ranges', each_item=True)
+    def validate_include_energy_range(cls, value, values):
+        """Ensure that no energy ranges are outside the boundary of the
         detector.

-        :param value: Field value to validate (`
+        :param value: Field value to validate (`include_energy_ranges`).
         :type values: dict
         :param values: Dictionary of previously validated field values.
         :type values: dict
-        :return: The validated value of `
+        :return: The validated value of `include_energy_ranges`.
         :rtype: dict
         """
-
-
-
-
-            raise ValueError('Invalid bin range in include_bin_ranges '
-                             f'({value})')
+        max_energy_kev = values.get('max_energy_kev')
+        value.sort()
+        if value[1] > max_energy_kev:
+            value[1] = max_energy_kev
         return value

+    @property
+    def include_bin_ranges(self):
+        """Return the value of `include_energy_ranges` represented in
+        terms of channel indices instead of channel energies.
+        """
+        from CHAP.utils.general import index_nearest_down, index_nearest_upp
+
+        include_bin_ranges = []
+        energies = np.linspace(0, self.max_energy_kev, self.num_bins)
+        for e_min, e_max in self.include_energy_ranges:
+            include_bin_ranges.append(
+                [index_nearest_down(energies, e_min),
+                 index_nearest_upp(energies, e_max)])
+        return include_bin_ranges
+
+    def get_energy_ranges(self, bin_ranges):
+        """Given a list of channel index ranges, return the
+        correspongin list of channel energy ranges.
+
+        :param bin_ranges: A list of channel bin ranges to convert to
+            energy ranges.
+        :type bin_ranges: list[list[int]]
+        :returns: Energy ranges
+        :rtype: list[list[float]]
+        """
+        energies = np.linspace(0, self.max_energy_kev, self.num_bins)
+        return [[energies[i] for i in range_] for range_ in bin_ranges]
+
     def mca_mask(self):
         """Get a boolean mask array to use on this MCA element's data.
-        Note that the bounds of self.
+        Note that the bounds of self.include_energy_ranges are inclusive.

         :return: Boolean mask array.
         :rtype: numpy.ndarray
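The hunk above replaces the old bin-index input with energy-based include_energy_ranges (in keV) and adds an include_bin_ranges property that converts the ranges back to channel indices through CHAP.utils.general.index_nearest_down and index_nearest_upp. A hypothetical usage sketch with made-up detector values, assuming the package is installed:

# Hypothetical values; field and property names follow the diff above.
from CHAP.edd.models import MCAElementConfig

detector = MCAElementConfig(
    detector_name='mca1',
    num_bins=2048,
    max_energy_kev=200,
    include_energy_ranges=[[50, 100], [120, 150]],  # keV, inclusive bounds
)
print(detector.include_bin_ranges)     # the same ranges as channel indices
print(int(detector.mca_mask().sum()))  # number of channels kept by the mask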
@@ -94,9 +121,9 @@ class MCAElementConfig(BaseModel):
         :rtype: dict
         """
         d = super().dict(*args, **kwargs)
-        d['
-
-            for i in range(len(d['
+        d['include_energy_ranges'] = [
+            [float(energy) for energy in d['include_energy_ranges'][i]]
+            for i in range(len(d['include_energy_ranges']))]
         return d


@@ -172,7 +199,7 @@ class MCAScanDataConfig(BaseModel):
     def validate_detectors(cls, values):
         """Fill in values for _scanparser / _parfile (if applicable).
         Fill in each detector's num_bins field, if needed.
-        Check each detector's
+        Check each detector's include_energy_ranges field against the
         flux file, if available.

         :param values: Dictionary of previously validated field values.
@@ -212,20 +239,21 @@ class MCAScanDataConfig(BaseModel):
             )
             flux = np.loadtxt(flux_file)
             flux_file_energies = flux[:,0]/1.e3
-
+            flux_e_min = flux_file_energies.min()
+            flux_e_max = flux_file_energies.max()
             for detector in detectors:
                 mca_bin_energies = np.linspace(
                     0, detector.max_energy_kev, detector.num_bins)
-
-
-
-
-
-
-
-
-
-                detector.
+                for i, (det_e_min, det_e_max) in enumerate(
+                        deepcopy(detector.include_energy_ranges)):
+                    if det_e_min < flux_e_min or det_e_max > flux_e_max:
+                        energy_range = [min(det_e_min, flux_e_min),
+                                        max(det_e_max, flux_e_max)]
+                        print(
+                            f'WARNING: include_energy_ranges[{i}] out of range'
+                            f' ({detector.include_energy_ranges[i]}): adjusted'
+                            f' to {energy_range}')
+                        detector.include_energy_ranges[i] = energy_range

         return values

@@ -381,6 +409,8 @@ class MCAElementCalibrationConfig(MCAElementConfig):
     :ivar hkl_indices: List of unique HKL indices to fit peaks for in
         the calibration routine, defaults to `[]`.
     :type hkl_indices: list[int], optional
+    :ivar background: Background model for peak fitting.
+    :type background: str, list[str], optional
     :ivar tth_initial_guess: Initial guess for 2&theta,
         defaults to `5.0`.
     :type tth_initial_guess: float, optional
@@ -394,7 +424,7 @@ class MCAElementCalibrationConfig(MCAElementConfig):
     :type tth_calibrated: float, optional
     :ivar slope_calibrated: Calibrated value for detector channel
         energy correction linear slope.
-    :type
+    :type slope_calibrated: float, optional
     :ivar intercept_calibrated: Calibrated value for detector channel
         energy correction y-intercept.
     :type intercept_calibrated: float, optional
@@ -403,6 +433,7 @@ class MCAElementCalibrationConfig(MCAElementConfig):
     tth_max: confloat(gt=0, allow_inf_nan=False) = 90.0
     hkl_tth_tol: confloat(gt=0, allow_inf_nan=False) = 0.15
     hkl_indices: Optional[conlist(item_type=conint(ge=0), min_items=1)] = []
+    background: Optional[Union[str, list]]
     tth_initial_guess: confloat(gt=0, le=tth_max, allow_inf_nan=False) = 5.0
     slope_initial_guess: float = 1.0
     intercept_initial_guess: float = 0.0
@@ -435,10 +466,19 @@ class MCAElementDiffractionVolumeLengthConfig(MCAElementConfig):
     :ivar dvl_measured: Placeholder for the measured diffraction
         volume length before writing the data to file.
     :type dvl_measured: float, optional
+    :ivar fit_amplitude: Placeholder for amplitude of the gaussian fit.
+    :type fit_amplitude: float, optional
+    :ivar fit_center: Placeholder for center of the gaussian fit.
+    :type fit_center: float, optional
+    :ivar fit_sigma: Placeholder for sigma of the gaussian fit.
+    :type fit_sigma: float, optional
     """
     measurement_mode: Optional[Literal['manual', 'auto']] = 'auto'
     sigma_to_dvl_factor: Optional[Literal[3.5, 2.0, 4.0]] = 3.5
     dvl_measured: Optional[confloat(gt=0)] = None
+    fit_amplitude: Optional[float] = None
+    fit_center: Optional[float] = None
+    fit_sigma: Optional[float] = None

     def dict(self, *args, **kwargs):
         """Return a representation of this configuration in a
@@ -452,6 +492,8 @@ class MCAElementDiffractionVolumeLengthConfig(MCAElementConfig):
         d = super().dict(*args, **kwargs)
         if self.measurement_mode == 'manual':
             del d['sigma_to_dvl_factor']
+        for param in ('amplitude', 'center', 'sigma'):
+            d[f'fit_{param}'] = float(d[f'fit_{param}'])
         return d


@@ -460,10 +502,15 @@ class DiffractionVolumeLengthConfig(MCAScanDataConfig):
     volume length calculation for an EDD setup using a steel-foil
     raster scan.

+    :ivar sample_thickness: Thickness of scanned foil sample. Quantity
+        must be provided in the same units as the values of the
+        scanning motor.
+    :type sample_thickness: float
     :ivar detectors: Individual detector element DVL
         measurement configurations
     :type detectors: list[MCAElementDiffractionVolumeLengthConfig]
     """
+    sample_thickness: float
     detectors: conlist(min_items=1,
                        item_type=MCAElementDiffractionVolumeLengthConfig)

@@ -481,18 +528,6 @@ class DiffractionVolumeLengthConfig(MCAScanDataConfig):
                 scan_numbers=self._parfile.good_scan_numbers())
         return self.scanparser.spec_scan_motor_vals[0]

-    @property
-    def scanned_dim_lbl(self):
-        """Return a label for plot axes corresponding to the scanned
-        dimension.
-
-        :return: Name of scanned motor.
-        :rtype: str
-        """
-        if self._parfile is not None:
-            return self.scan_column
-        return self.scanparser.spec_scan_motor_mnes[0]
-

 class CeriaConfig(MaterialConfig):
     """Model for the sample material used in calibrations.
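The diffraction-volume-length changes above add a required sample_thickness field and fit_amplitude/fit_center/fit_sigma placeholders for the Gaussian fitted to the raster scan. The sketch below shows the kind of fit those placeholders record; it uses scipy directly, and the relation dvl = sigma_to_dvl_factor * sigma - sample_thickness is an assumption made for illustration, not code taken from the package.

# Standalone sketch (assumptions: scipy and numpy available; synthetic data).
import numpy as np
from scipy.optimize import curve_fit

def gaussian(x, amplitude, center, sigma):
    return amplitude * np.exp(-(x - center)**2 / (2 * sigma**2))

motor_vals = np.linspace(-2, 2, 81)            # raster-scan motor positions
signal = gaussian(motor_vals, 10.0, 0.1, 0.4)  # summed, masked MCA intensity
signal += np.random.default_rng(0).normal(0, 0.1, motor_vals.size)

(amplitude, center, sigma), _ = curve_fit(
    gaussian, motor_vals, signal, p0=[signal.max(), 0.0, 1.0])

sigma_to_dvl_factor = 3.5  # default in MCAElementDiffractionVolumeLengthConfig
sample_thickness = 0.25    # same units as the scanning motor (assumed value)
dvl = sigma_to_dvl_factor * abs(sigma) - sample_thickness
print(f'fit: amplitude={amplitude:.2f}, center={center:.2f}, '
      f'sigma={sigma:.2f}, dvl={dvl:.2f}')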
@@ -629,6 +664,8 @@ class MCAElementStrainAnalysisConfig(MCAElementConfig):
     :type hkl_indices: list[int], optional
     :ivar background: Background model for peak fitting.
     :type background: str, list[str], optional
+    :ivar num_proc: Number of processors used for peak fitting.
+    :type num_proc: int, optional
     :ivar peak_models: Peak model for peak fitting,
         defaults to `'gaussian'`.
     :type peak_models: Literal['gaussian', 'lorentzian']],
@@ -656,7 +693,8 @@ class MCAElementStrainAnalysisConfig(MCAElementConfig):
     tth_max: confloat(gt=0, allow_inf_nan=False) = 90.0
     hkl_tth_tol: confloat(gt=0, allow_inf_nan=False) = 0.15
     hkl_indices: Optional[conlist(item_type=conint(ge=0), min_items=1)] = []
-    background: Optional[str]
+    background: Optional[Union[str, list]]
+    num_proc: Optional[conint(gt=0)] = os.cpu_count()
     peak_models: Union[
         conlist(item_type=Literal['gaussian', 'lorentzian'], min_items=1),
         Literal['gaussian', 'lorentzian']] = 'gaussian'
@@ -667,6 +705,13 @@ class MCAElementStrainAnalysisConfig(MCAElementConfig):
     tth_calibrated: Optional[confloat(gt=0, allow_inf_nan=False)]
     slope_calibrated: Optional[confloat(allow_inf_nan=False)]
     intercept_calibrated: Optional[confloat(allow_inf_nan=False)]
+    calibration_bin_ranges: Optional[
+        conlist(
+            min_items=1,
+            item_type=conlist(
+                item_type=conint(ge=0),
+                min_items=2,
+                max_items=2))]
     tth_file: Optional[FilePath]
     tth_map: Optional[np.ndarray] = None

@@ -696,6 +741,7 @@ class MCAElementStrainAnalysisConfig(MCAElementConfig):
             'intercept_calibrated', 'num_bins', 'max_energy_kev']
         for field in add_fields:
             setattr(self, field, getattr(calibration, field))
+        self.calibration_bin_ranges = calibration.include_bin_ranges

     def get_tth_map(self, map_config):
         """Return a map of 2&theta values to use -- may vary at each