ChessAnalysisPipeline-0.0.14-py3-none-any.whl → ChessAnalysisPipeline-0.0.16-py3-none-any.whl
This diff shows the content of publicly released versions of the package as they appear in their respective public registries and is provided for informational purposes only.
- CHAP/__init__.py +1 -1
- CHAP/common/__init__.py +13 -0
- CHAP/common/models/integration.py +29 -26
- CHAP/common/models/map.py +395 -224
- CHAP/common/processor.py +1725 -93
- CHAP/common/reader.py +265 -28
- CHAP/common/writer.py +191 -18
- CHAP/edd/__init__.py +9 -2
- CHAP/edd/models.py +886 -665
- CHAP/edd/processor.py +2592 -936
- CHAP/edd/reader.py +889 -0
- CHAP/edd/utils.py +846 -292
- CHAP/foxden/__init__.py +6 -0
- CHAP/foxden/processor.py +42 -0
- CHAP/foxden/writer.py +65 -0
- CHAP/giwaxs/__init__.py +8 -0
- CHAP/giwaxs/models.py +100 -0
- CHAP/giwaxs/processor.py +520 -0
- CHAP/giwaxs/reader.py +5 -0
- CHAP/giwaxs/writer.py +5 -0
- CHAP/pipeline.py +48 -10
- CHAP/runner.py +161 -72
- CHAP/tomo/models.py +31 -29
- CHAP/tomo/processor.py +169 -118
- CHAP/utils/__init__.py +1 -0
- CHAP/utils/fit.py +1292 -1315
- CHAP/utils/general.py +411 -53
- CHAP/utils/models.py +594 -0
- CHAP/utils/parfile.py +10 -2
- ChessAnalysisPipeline-0.0.16.dist-info/LICENSE +60 -0
- {ChessAnalysisPipeline-0.0.14.dist-info → ChessAnalysisPipeline-0.0.16.dist-info}/METADATA +1 -1
- ChessAnalysisPipeline-0.0.16.dist-info/RECORD +62 -0
- {ChessAnalysisPipeline-0.0.14.dist-info → ChessAnalysisPipeline-0.0.16.dist-info}/WHEEL +1 -1
- CHAP/utils/scanparsers.py +0 -1431
- ChessAnalysisPipeline-0.0.14.dist-info/LICENSE +0 -21
- ChessAnalysisPipeline-0.0.14.dist-info/RECORD +0 -54
- {ChessAnalysisPipeline-0.0.14.dist-info → ChessAnalysisPipeline-0.0.16.dist-info}/entry_points.txt +0 -0
- {ChessAnalysisPipeline-0.0.14.dist-info → ChessAnalysisPipeline-0.0.16.dist-info}/top_level.txt +0 -0
CHAP/common/models/map.py
CHANGED
@@ -15,16 +15,17 @@ from typing import (
 import numpy as np
 from pydantic import (
     BaseModel,
+    Field,
+    FilePath,
+    PrivateAttr,
     conint,
     conlist,
     constr,
-
-
-    root_validator,
-    validator,
+    field_validator,
+    model_validator,
 )
 from pyspec.file.spec import FileSpec
-
+from typing_extensions import Annotated

 class Sample(BaseModel):
     """Class representing a sample metadata configuration.
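The import changes above are the core of the pydantic v1 → v2 migration in this module: `validator`/`root_validator` give way to `field_validator`/`model_validator`, `Optional` fields now need explicit defaults, and cross-field access moves from a `values` dict to a `ValidationInfo` object. The sketch below shows the general v2 idiom only; the model and method names (`Scans`, `finalize_paths`, `coerce_scan_numbers`) are illustrative, not CHAP code.

```python
# Minimal pydantic v2 validator sketch (illustrative, not CHAP code).
from typing import Optional
from pydantic import BaseModel, ValidationInfo, field_validator, model_validator

class Scans(BaseModel):
    spec_file: str
    scan_numbers: list[int]
    par_file: Optional[str] = None  # v2 requires an explicit default for Optional fields

    @model_validator(mode='before')
    @classmethod
    def finalize_paths(cls, data: dict) -> dict:
        # Runs on the raw input dict before any field validation.
        data.setdefault('scan_numbers', [1])
        return data

    @field_validator('scan_numbers', mode='before')
    @classmethod
    def coerce_scan_numbers(cls, value, info: ValidationInfo):
        # info.data holds the fields validated so far (here: spec_file).
        if isinstance(value, int):
            value = [value]
        return value

print(Scans(spec_file='samplename', scan_numbers=3).scan_numbers)  # [3]
```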
@@ -35,7 +36,7 @@ class Sample(BaseModel):
     :type description: str, optional
     """
     name: constr(min_length=1)
-    description: Optional[str]
+    description: Optional[str] = ''


 class SpecScans(BaseModel):
@@ -44,22 +45,23 @@ class SpecScans(BaseModel):
     :ivar spec_file: Path to the SPEC file.
     :type spec_file: str
     :ivar scan_numbers: List of scan numbers to use.
-    :type scan_numbers: list[int]
+    :type scan_numbers: Union(int, list[int], str)
     :ivar par_file: Path to a non-default SMB par file.
     :type par_file: str, optional
     """
     spec_file: FilePath
-    scan_numbers: conlist(item_type=conint(gt=0),
-    par_file: Optional[FilePath]
+    scan_numbers: conlist(item_type=conint(gt=0), min_length=1)
+    par_file: Optional[FilePath] = None

-    @
-
+    @field_validator('spec_file')
+    @classmethod
+    def validate_spec_file(cls, spec_file):
         """Validate the specified SPEC file.

         :param spec_file: Path to the SPEC file.
         :type spec_file: str
         :raises ValueError: If the SPEC file is invalid.
-        :return: Absolute path to the SPEC file
+        :return: Absolute path to the SPEC file.
         :rtype: str
         """
         try:
@@ -69,26 +71,29 @@ class SpecScans(BaseModel):
             raise ValueError(f'Invalid SPEC file {spec_file}')
         return spec_file

-    @
-
+    @field_validator('scan_numbers', mode='before')
+    @classmethod
+    def validate_scan_numbers(cls, scan_numbers, info):
         """Validate the specified list of scan numbers.

         :param scan_numbers: List of scan numbers.
-        :type scan_numbers: list
-        :param
-        :type
+        :type scan_numbers: Union(int, list[int], str)
+        :param info: Pydantic validator info object.
+        :type info: pydantic_core._pydantic_core.ValidationInfo
         :raises ValueError: If a specified scan number is not found in
             the SPEC file.
         :return: List of scan numbers.
-        :rtype: list
+        :rtype: list[int]
         """
-        if isinstance(scan_numbers,
+        if isinstance(scan_numbers, int):
+            scan_numbers = [scan_numbers]
+        elif isinstance(scan_numbers, str):
             # Local modules
             from CHAP.utils.general import string_to_list

             scan_numbers = string_to_list(scan_numbers)

-        spec_file =
+        spec_file = info.data.get('spec_file')
         if spec_file is not None:
             spec_scans = FileSpec(spec_file)
             for scan_number in scan_numbers:
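With this change `scan_numbers` may be supplied as a single integer, a list, or a string that `CHAP.utils.general.string_to_list` expands before the scan numbers are checked against the SPEC file. The exact grammar accepted by `string_to_list` is not shown in this diff; the helper below is a hypothetical stand-in that handles only comma-separated values and `start-end` ranges, purely to illustrate the coercion branches of `validate_scan_numbers`.

```python
# Hypothetical stand-in for CHAP.utils.general.string_to_list; the real
# helper may accept a richer grammar than shown here.
def string_to_list(text: str) -> list[int]:
    """Expand a string such as '1-3, 7' into [1, 2, 3, 7]."""
    numbers: list[int] = []
    for part in text.split(','):
        part = part.strip()
        if '-' in part:
            start, end = (int(p) for p in part.split('-', 1))
            numbers.extend(range(start, end + 1))
        elif part:
            numbers.append(int(part))
    return numbers

def normalize_scan_numbers(scan_numbers) -> list[int]:
    # Mirrors the validator's branches: int -> [int], str -> parsed list.
    if isinstance(scan_numbers, int):
        return [scan_numbers]
    if isinstance(scan_numbers, str):
        return string_to_list(scan_numbers)
    return list(scan_numbers)

print(normalize_scan_numbers('1-3, 7'))  # [1, 2, 3, 7]
print(normalize_scan_numbers(5))         # [5]
```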
@@ -98,14 +103,15 @@ class SpecScans(BaseModel):
                         f'No scan number {scan_number} in {spec_file}')
         return scan_numbers

-    @
-
+    @field_validator('par_file')
+    @classmethod
+    def validate_par_file(cls, par_file):
         """Validate the specified SMB par file.

         :param par_file: Path to a non-default SMB par file.
         :type par_file: str
         :raises ValueError: If the SMB par file is invalid.
-        :return: Absolute path to the SMB par file
+        :return: Absolute path to the SMB par file.
         :rtype: str
         """
         if par_file is None or not par_file:
@@ -157,7 +163,8 @@ class SpecScans(BaseModel):
             coordinate_index = list(
                 map_config.coords[independent_dimension.label]).index(
                     independent_dimension.get_value(
-                        self, scan_number, scan_step_index
+                        self, scan_number, scan_step_index,
+                        map_config.scalar_data))
             index = (coordinate_index, *index)
         return index

@@ -235,10 +242,12 @@ class PointByPointScanData(BaseModel):
     """
     label: constr(min_length=1)
     units: constr(strip_whitespace=True, min_length=1)
-    data_type: Literal['spec_motor', '
+    data_type: Literal['spec_motor', 'spec_motor_absolute', 'scan_column',
+                       'smb_par', 'expression']
     name: constr(strip_whitespace=True, min_length=1)

-    @
+    @field_validator('label')
+    @classmethod
     def validate_label(cls, label):
         """Validate that the supplied `label` does not conflict with
         any of the values for `label` reserved for certain data needed
@@ -247,8 +256,7 @@ class PointByPointScanData(BaseModel):
         :param label: The value of `label` to validate.
         :type label: str
         :raises ValueError: If `label` is one of the reserved values.
-        :return: The
-            allowed.
+        :return: The originally supplied value `label`.
         :rtype: str
         """
         if ((not issubclass(cls,CorrectionsData))
@@ -309,8 +317,49 @@ class PointByPointScanData(BaseModel):
                     f'for index {index} '
                     f'in spec file {scans.spec_file}')

-    def
-
+    def validate_for_scalar_data(self, scalar_data):
+        """Used for `PointByPointScanData` objects with a `data_type`
+        of `'expression'`. Validate that the `scalar_data` field of a
+        `MapConfig` object contains all the items necessary for
+        evaluating the expression.
+
+        :param scalar_data: the `scalar_data` field of a `MapConfig`
+            that this `PointByPointScanData` object will be validated
+            against
+        :type scalar_data: list[PointByPointScanData]
+        :raises ValueError: if `scalar_data` does not contain items
+            needed for evaluating the expression.
+        :return: None
+        """
+        # Third party modules
+        from ast import parse
+        from asteval import get_ast_names
+
+        labels = get_ast_names(parse(self.name))
+        for label in ('round', 'np', 'numpy'):
+            try:
+                labels.remove(label)
+            except:
+                pass
+        for l in labels:
+            if l == 'round':
+                symtable[l] = round
+                continue
+            if l in ('np', 'numpy'):
+                symtable[l] = np
+                continue
+            label_found = False
+            for s_d in scalar_data:
+                if s_d.label == l:
+                    label_found = True
+                    break
+            if not label_found:
+                raise ValueError(
+                    f'{l} is not the label of an item in scalar_data')
+
+    def get_value(
+            self, spec_scans:SpecScans, scan_number:int, scan_step_index:int=0,
+            scalar_data=[], relative=True, ndigits=None):
         """Return the value recorded for this instance of
         `PointByPointScanData` at a specific scan step.

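The new `validate_for_scalar_data` method checks an `'expression'`-type entry by pulling every name out of the expression with `ast.parse` plus `asteval.get_ast_names`, then requiring each name (other than the built-ins `round`, `np`, `numpy`) to match the label of another `scalar_data` item. A self-contained sketch of that name-extraction step, with an illustrative helper name of my own:

```python
# Sketch of the name check performed by validate_for_scalar_data.
from ast import parse
from asteval import get_ast_names

def check_expression_labels(expression: str, available_labels: set[str]) -> None:
    """Raise if the expression references a label that is not available."""
    names = set(get_ast_names(parse(expression)))
    # Names the evaluator provides itself.
    names -= {'round', 'np', 'numpy'}
    missing = names - available_labels
    if missing:
        raise ValueError(
            f'{", ".join(sorted(missing))} not found among scalar_data labels')

check_expression_labels('presample_intensity / dwell_time_actual',
                        {'presample_intensity', 'dwell_time_actual'})  # passes
```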
@@ -320,18 +369,32 @@ class PointByPointScanData(BaseModel):
         :param scan_number: The number of the scan in which the
             requested scan step occurs.
         :type scan_number: int
-        :param scan_step_index: The index of the requested scan step
-
+        :param scan_step_index: The index of the requested scan step,
+            defaults to `0`.
+        :type scan_step_index: int, optional
+        :param scalar_data: list of scalar data configurations used to
+            get values for `PointByPointScanData` objects with
+            `data_type == 'expression'`, defaults to `[]`.
+        :type scalar_data: list[PointByPointScanData], optional
+        :param relative: Whether to return a relative value or not,
+            defaults to `True` (only applies to SPEC motor values).
+        :type relative: bool, optional
+        :params ndigits: Round SPEC motor values to the specified
+            number of decimals if set, defaults to `None`.
+        :type ndigits: int, optional
         :return: The value recorded of the data represented by this
             instance of `PointByPointScanData` at the scan step
             requested.
         :rtype: float
         """
-        if self.data_type
+        if 'spec_motor' in self.data_type:
+            if 'absolute' in self.data_type:
+                relative = False
             return get_spec_motor_value(spec_scans.spec_file,
                                         scan_number,
                                         scan_step_index,
-                                        self.name
+                                        self.name,
+                                        relative, ndigits)
         if self.data_type == 'scan_column':
             return get_spec_counter_value(spec_scans.spec_file,
                                           scan_number,
@@ -341,12 +404,19 @@ class PointByPointScanData(BaseModel):
             return get_smb_par_value(spec_scans.spec_file,
                                      scan_number,
                                      self.name)
+        if self.data_type == 'expression':
+            return get_expression_value(spec_scans,
+                                        scan_number,
+                                        scan_step_index,
+                                        self.name,
+                                        scalar_data)
         return None


 @cache
 def get_spec_motor_value(spec_file:str, scan_number:int,
-                         scan_step_index:int, spec_mnemonic:str
+                         scan_step_index:int, spec_mnemonic:str,
+                         relative=True, ndigits=None):
     """Return the value recorded for a SPEC motor at a specific scan
     step.

@@ -360,6 +430,12 @@ def get_spec_motor_value(spec_file:str, scan_number:int,
     :type scan_step_index: int
     :param spec_mnemonic: The menmonic of a SPEC motor.
     :type spec_mnemonic: str
+    :param relative: Whether to return a relative value or not,
+        defaults to `True`.
+    :type relative: bool, optional
+    :params ndigits: Round SPEC motor values to the specified
+        number of decimals if set, defaults to `None`.
+    :type ndigits: int, optional
     :return: The value of the motor at the scan step requested.
     :rtype: float
     """
@@ -373,11 +449,15 @@ def get_spec_motor_value(spec_file:str, scan_number:int,
                 scanparser.spec_scan_shape,
                 order='F')
             motor_value = \
-                scanparser.
+                scanparser.get_spec_scan_motor_vals(
+                    relative)[motor_i][scan_step[motor_i]]
         else:
-            motor_value = scanparser.
+            motor_value = scanparser.get_spec_scan_motor_vals(
+                relative)[motor_i]
     else:
         motor_value = scanparser.get_spec_positioner_value(spec_mnemonic)
+    if ndigits is not None:
+        motor_value = round(motor_value, 3)
     return motor_value

@@ -426,24 +506,74 @@ def get_smb_par_value(spec_file:str, scan_number:int, par_name:str):
     return scanparser.pars[par_name]


-def
+def get_expression_value(spec_scans:SpecScans, scan_number:int,
+                         scan_step_index:int, expression:str,
+                         scalar_data:list[PointByPointScanData]):
+    """Return the value of an evaluated expression of other sources of
+    point-by-point scalar scan data for a single point.
+
+    :param spec_scans: An instance of `SpecScans` in which the
+        requested scan step occurs.
+    :type spec_scans: SpecScans
+    :param scan_number: The number of the scan in which the requested
+        scan step occurs.
+    :type scan_number: int
+    :param scan_step_index: The index of the requested scan step.
+    :type scan_step_index: int
+    :param expression: the string expression to evaluate
+    :type expression: str
+    :param scalar_data: the `scalar_data` field of a `MapConfig`
+        object (used to provide values for variables used in
+        `expression`)
+    :type scalar_data: list[PointByPointScanData]
+    :return: The value of the .par file value for the scan requested.
+    :rtype: float
+    """
+    # Third party modules
+    from ast import parse
+    from asteval import get_ast_names, Interpreter
+
+    labels = get_ast_names(parse(expression))
+    symtable = {}
+    for l in labels:
+        if l == 'round':
+            symtable[l] = round
+        for s_d in scalar_data:
+            if s_d.label == l:
+                symtable[l] = s_d.get_value(
+                    spec_scans, scan_number, scan_step_index, scalar_data)
+    aeval = Interpreter(symtable=symtable)
+    return aeval(expression)
+
+def validate_data_source_for_map_config(data_source, info):
     """Confirm that an instance of PointByPointScanData is valid for
     the station and scans provided by a map configuration dictionary.

     :param data_source: The input object to validate.
-    :type data_source:
-    :param
-    :type
-    :raises Exception: If `data_source` cannot be validated
-
-    :return: `data_source`, if it is valid.
+    :type data_source: PointByPointScanData
+    :param info: Pydantic validator info object.
+    :type info: pydantic_core._pydantic_core.ValidationInfo
+    :raises Exception: If `data_source` cannot be validated.
+    :return: the validated `data_source` instance.
     :rtype: PointByPointScanData
     """
-
-
-    data_source
-
-
+    def _validate_data_source_for_map_config(
+            data_source, info, parent_list=None):
+        if isinstance(data_source, list):
+            return [_validate_data_source_for_map_config(
+                d_s, info, parent_list=data_source) for d_s in data_source]
+        if data_source is not None:
+            values = info.data
+            if data_source.data_type == 'expression':
+                data_source.validate_for_scalar_data(values['scalar_data'])
+            else:
+                import_scanparser(
+                    values['station'], values['experiment_type'])
+                data_source.validate_for_station(values['station'])
+                data_source.validate_for_spec_scans(values['spec_scans'])
+        return data_source
+
+    return _validate_data_source_for_map_config(data_source, info)


 class IndependentDimension(PointByPointScanData):
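`get_expression_value` resolves each name in the expression to the corresponding scalar value at the requested scan step and then evaluates the expression with an `asteval.Interpreter` seeded with that symbol table. A standalone illustration of the evaluation step, using plain numbers in place of SPEC lookups (the labels and values here are made up):

```python
# Standalone illustration of the asteval evaluation used by get_expression_value.
from asteval import Interpreter

symtable = {
    'round': round,                   # expressions may call round()
    'presample_intensity': 1.25e6,    # per-point values looked up from scalar_data
    'dwell_time_actual': 0.5,
}
aeval = Interpreter(symtable=symtable)
# Same pattern as get_expression_value: evaluate the configured expression
# against the per-point symbol table.
print(aeval('round(presample_intensity * dwell_time_actual)'))  # 625000
```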
@@ -476,11 +606,20 @@ class IndependentDimension(PointByPointScanData):
     end: Optional[int] = None
     step: Optional[conint(gt=0)] = 1

-    # @
-    #
-    #
+    # @field_validator('step')
+    # @classmethod
+    # def validate_step(cls, step):
+    #     """Validate that the supplied value of `step`.
+    #
+    #     :param step: The value of `step` to validate.
+    #     :type step: str
+    #     :raises ValueError: If `step` is zero.
+    #     :return: The originally supplied value `step`.
+    #     :rtype: int
+    #     """
+    #     if step == 0 :
     #         raise ValueError('slice step cannot be zero')
-    #     return
+    #     return step


 class CorrectionsData(PointByPointScanData):
@@ -516,7 +655,7 @@ class CorrectionsData(PointByPointScanData):
         :return: A list of reserved labels.
         :rtype: list[str]
         """
-        return list(cls.
+        return list((*cls.model_fields['label'].annotation.__args__, 'round'))


 class PresampleIntensity(CorrectionsData):
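The reserved-label lookup now goes through pydantic v2's `model_fields` mapping: the `Literal[...]` choices of the `label` field are read from the field's `annotation`. The equivalent, slightly more explicit idiom using `typing.get_args` is shown below on a toy model (not the CHAP class):

```python
# Reading the Literal choices of a pydantic v2 field annotation.
from typing import Literal, get_args
from pydantic import BaseModel

class CorrectionsDataLike(BaseModel):
    label: Literal['presample_intensity', 'postsample_intensity',
                   'dwell_time_actual']

choices = get_args(CorrectionsDataLike.model_fields['label'].annotation)
reserved = [*choices, 'round']
print(reserved)
# ['presample_intensity', 'postsample_intensity', 'dwell_time_actual', 'round']
```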
@@ -588,52 +727,68 @@ class SpecConfig(BaseModel):
     :type spec_scans: list[SpecScans]
     """
     station: Literal['id1a3', 'id3a', 'id3b']
-    experiment_type: Literal['
-    spec_scans: conlist(item_type=SpecScans,
+    experiment_type: Literal['EDD', 'GIWAXS', 'SAXSWAXS', 'TOMO', 'XRF']
+    spec_scans: conlist(item_type=SpecScans, min_length=1)

-    @
-
+    @model_validator(mode='before')
+    @classmethod
+    def validate_config(cls, data):
         """Ensure that a valid configuration was provided and finalize
         spec_file filepaths.

-        :param
-        :type
-        :return: The validated list of
+        :param data: Pydantic validator data object.
+        :type data: SpecConfig, pydantic_core._pydantic_core.ValidationInfo
+        :return: The currently validated list of class properties.
         :rtype: dict
         """
-        inputdir =
+        inputdir = data.get('inputdir')
         if inputdir is not None:
-            spec_scans =
+            spec_scans = data.get('spec_scans')
             for i, scans in enumerate(deepcopy(spec_scans)):
-
-
-
-
-
-
-
-
-
-
+                if isinstance(scans, dict):
+                    spec_file = scans['spec_file']
+                    if not os.path.isabs(spec_file):
+                        spec_scans[i]['spec_file'] = os.path.join(
+                            inputdir, spec_file)
+                else:
+                    spec_file = scans.spec_file
+                    if not os.path.isabs(spec_file):
+                        spec_scans[i].spec_file = os.path.join(
+                            inputdir, spec_file)
+            data['spec_scans'] = spec_scans
+        return data
+
+    @field_validator('experiment_type')
+    @classmethod
+    def validate_experiment_type(cls, experiment_type, info):
         """Ensure values for the station and experiment_type fields are
         compatible
+
+        :param experiment_type: The value of `experiment_type` to
+            validate.
+        :type experiment_type: str
+        :param info: Pydantic validator info object.
+        :type info: pydantic_core._pydantic_core.ValidationInfo
+        :raises ValueError: Invalid experiment type.
+        :return: The validated field for `experiment_type`.
+        :rtype: str
         """
-        station =
+        station = info.data.get('station')
         if station == 'id1a3':
-            allowed_experiment_types = ['
+            allowed_experiment_types = ['EDD', 'SAXSWAXS', 'TOMO']
         elif station == 'id3a':
             allowed_experiment_types = ['EDD', 'TOMO']
         elif station == 'id3b':
-            allowed_experiment_types = ['
+            allowed_experiment_types = ['GIWAXS', 'SAXSWAXS', 'TOMO', 'XRF']
         else:
             allowed_experiment_types = []
-        if
+        if experiment_type not in allowed_experiment_types:
             raise ValueError(
                 f'For station {station}, allowed experiment types are '
                 f'{", ".join(allowed_experiment_types)}. '
-                f'Supplied experiment type {
-        import_scanparser(station,
-        return
+                f'Supplied experiment type {experiment_type} is not allowed.')
+        import_scanparser(station, experiment_type)
+        return experiment_type


 class MapConfig(BaseModel):
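`SpecConfig.validate_config` is a `mode='before'` model validator: it sees the raw input mapping, so it can pick up an `inputdir` entry that the pipeline passes alongside the model data and rewrite any relative `spec_file` path against it before field validation runs. The same pattern in isolation (hypothetical model names, real pydantic v2 behavior) is sketched below:

```python
# Sketch of a mode='before' model validator that resolves relative paths
# against an 'inputdir' key supplied alongside the model data.
import os
from pydantic import BaseModel, model_validator

class ScanEntry(BaseModel):
    spec_file: str

class SpecConfigSketch(BaseModel):
    spec_scans: list[ScanEntry]

    @model_validator(mode='before')
    @classmethod
    def resolve_paths(cls, data: dict) -> dict:
        inputdir = data.get('inputdir')  # extra key, ignored by field validation
        if inputdir is not None:
            for scans in data.get('spec_scans', []):
                if not os.path.isabs(scans['spec_file']):
                    scans['spec_file'] = os.path.join(inputdir, scans['spec_file'])
        return data

cfg = SpecConfigSketch(**{'inputdir': '/data/run1',
                          'spec_scans': [{'spec_file': 'samplename'}]})
print(cfg.spec_scans[0].spec_file)  # /data/run1/samplename
```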
@@ -659,144 +814,159 @@ class MapConfig(BaseModel):
         times for SPEC scans. Required when applying a
         CorrectionConfig tool.
     :type dwell_time_actual: DwellTimeActual, optional
-    :ivar
+    :ivar postsample_intensity: A source of point-by-point postsample
         beam intensity data. Required when applying a CorrectionConfig
         tool with `correction_type='flux_absorption'` or
         `correction_type='flux_absorption_background'`.
-    :type
+    :type postsample_intensity: PresampleIntensity, optional
     :ivar scalar_data: A list of the sources of data representing
         other scalar raw data values collected at each point on the
         map. In the NeXus file representation of the map, datasets for
         these values will be included, defaults to `[]`.
     :type scalar_data: list[PointByPointScanData], optional
-    :ivar map_type: Type of map, structured or unstructured,
-        defaults to `'structured'`.
-    :type map_type: Literal['structured', 'unstructured'], optional
     """
     title: constr(strip_whitespace=True, min_length=1)
     station: Literal['id1a3', 'id3a', 'id3b']
-    experiment_type: Literal['
+    experiment_type: Literal['EDD', 'GIWAXS', 'SAXSWAXS', 'TOMO', 'XRF']
     sample: Sample
-    spec_scans: conlist(item_type=SpecScans,
-    independent_dimensions: conlist(
-        item_type=IndependentDimension, min_items=1)
-    presample_intensity: Optional[PresampleIntensity]
-    dwell_time_actual: Optional[DwellTimeActual]
-    postsample_intensity: Optional[PostsampleIntensity]
+    spec_scans: conlist(item_type=SpecScans, min_length=1)
     scalar_data: Optional[list[PointByPointScanData]] = []
-
-
+    independent_dimensions: conlist(
+        item_type=IndependentDimension, min_length=1)
+    presample_intensity: Optional[PresampleIntensity] = None
+    dwell_time_actual: Optional[DwellTimeActual] = None
+    postsample_intensity: Optional[PostsampleIntensity] = None
+    attrs: Optional[Annotated[dict, Field(validate_default=True)]] = {}
+    # _coords: dict = PrivateAttr()
     _dims: tuple = PrivateAttr()
-    _scan_step_indices: list = PrivateAttr()
-    _shape: tuple = PrivateAttr()
-
-    _validate_independent_dimensions =
-        'independent_dimensions'
-
-
-        '
-
-
-
-
-
-
-
-
-        'scalar_data',
-        each_item=True,
-        allow_reuse=True)(validate_data_source_for_map_config)
-
-    @root_validator(pre=True)
-    def validate_config(cls, values):
+    # _scan_step_indices: list = PrivateAttr()
+    # _shape: tuple = PrivateAttr()
+
+    _validate_independent_dimensions = field_validator(
+        'independent_dimensions')(validate_data_source_for_map_config)
+    _validate_presample_intensity = field_validator(
+        'presample_intensity')(validate_data_source_for_map_config)
+    _validate_dwell_time_actual = field_validator(
+        'dwell_time_actual')(validate_data_source_for_map_config)
+    _validate_postsample_intensity = field_validator(
+        'postsample_intensity')(validate_data_source_for_map_config)
+    _validate_scalar_data = field_validator(
+        'scalar_data')(validate_data_source_for_map_config)
+
+    @model_validator(mode='before')
+    @classmethod
+    def validate_config(cls, data):
         """Ensure that a valid configuration was provided and finalize
         spec_file filepaths.

-        :param
-        :type
-
+        :param data: Pydantic validator data object.
+        :type data:
+            MapConfig, pydantic_core._pydantic_core.ValidationInfo
+        :return: The currently validated list of class properties.
         :rtype: dict
         """
-        inputdir =
+        inputdir = data.get('inputdir')
         if inputdir is not None:
-            spec_scans =
+            spec_scans = data.get('spec_scans')
             for i, scans in enumerate(deepcopy(spec_scans)):
                 spec_file = scans['spec_file']
                 if not os.path.isabs(spec_file):
                     spec_scans[i]['spec_file'] = os.path.join(
                         inputdir, spec_file)
-                spec_scans[i] = SpecScans(**spec_scans[i], **
-
-        return
-
-    @validator('map_type', pre=True, always=True)
-    def validate_map_type(cls, map_type, values):
-        """Validate the map_type field.
-
-        :param map_type: Type of map, structured or unstructured,
-            defaults to `'structured'`.
-        :type map_type: Literal['structured', 'unstructured']]
-        :param values: Dictionary of values for all fields of the model.
-        :type values: dict
-        :return: The validated value for map_type.
-        :rtype: str
-        """
-        dims = {}
-        spec_scans = values.get('spec_scans')
-        independent_dimensions = values.get('independent_dimensions')
-        import_scanparser(values.get('station'), values.get('experiment_type'))
-        for i, dim in enumerate(deepcopy(independent_dimensions)):
-            dims[dim.label] = []
-            for scans in spec_scans:
-                for scan_number in scans.scan_numbers:
-                    scanparser = scans.get_scanparser(scan_number)
-                    for scan_step_index in range(
-                            scanparser.spec_scan_npts):
-                        dims[dim.label].append(dim.get_value(
-                            scans, scan_number, scan_step_index))
-            dims[dim.label] = np.unique(dims[dim.label])
-            if dim.end is None:
-                dim.end = len(dims[dim.label])
-            dims[dim.label] = dims[dim.label][slice(
-                dim.start, dim.end, dim.step)]
-            independent_dimensions[i] = dim
-
-        coords = np.zeros([v.size for v in dims.values()], dtype=np.int64)
-        for scans in spec_scans:
-            for scan_number in scans.scan_numbers:
-                scanparser = scans.get_scanparser(scan_number)
-                for scan_step_index in range(scanparser.spec_scan_npts):
-                    coords[tuple([
-                        list(dims[dim.label]).index(
-                            dim.get_value(scans, scan_number, scan_step_index))
-                        for dim in independent_dimensions])] += 1
-        if any(True for v in coords.flatten() if v == 0 or v > 1):
-            return 'unstructured'
-        else:
-            return 'structured'
+                spec_scans[i] = SpecScans(**spec_scans[i], **data)
+            data['spec_scans'] = spec_scans
+        return data

-
-    @
-    def validate_experiment_type(cls,
+    @field_validator('experiment_type')
+    @classmethod
+    def validate_experiment_type(cls, experiment_type, info):
         """Ensure values for the station and experiment_type fields are
-        compatible
+        compatible.
+
+        :param experiment_type: The value of `experiment_type` to
+            validate.
+        :type experiment_type: dict
+        :param info: Pydantic validator info object.
+        :type info: pydantic_core._pydantic_core.ValidationInfo
+        :raises ValueError: Invalid experiment type.
+        :return: The validated field for `experiment_type`.
+        :rtype: str
         """
-        station =
+        station = info.data['station']
         if station == 'id1a3':
-            allowed_experiment_types = ['
+            allowed_experiment_types = ['EDD', 'SAXSWAXS', 'TOMO']
         elif station == 'id3a':
             allowed_experiment_types = ['EDD', 'TOMO']
         elif station == 'id3b':
-            allowed_experiment_types = ['
+            allowed_experiment_types = ['GIWAXS', 'SAXSWAXS', 'TOMO', 'XRF']
         else:
             allowed_experiment_types = []
-        if
+        if experiment_type not in allowed_experiment_types:
             raise ValueError(
                 f'For station {station}, allowed experiment types are '
                 f'{", ".join(allowed_experiment_types)}. '
-                f'Supplied experiment type {
-        return
+                f'Supplied experiment type {experiment_type} is not allowed.')
+        return experiment_type
+
+    #RV maybe better to use model_validator, see v2 docs?
+    @field_validator('attrs')
+    @classmethod
+    def validate_attrs(cls, attrs, info):
+        """Read any additional attributes depending on the values for
+        the station and experiment_type fields.
+
+        :param attrs: Any additional attributes to the MapConfig class.
+        :type attrs: dict
+        :param info: Pydantic validator info object.
+        :type info: pydantic_core._pydantic_core.ValidationInfo
+        :raises ValueError: Invalid attribute.
+        :return: The validated field for `attrs`.
+        :rtype: dict
+        """
+        # Get the map's scan_type for EDD experiments
+        values = info.data
+        station = values['station']
+        experiment_type = values['experiment_type']
+        if station in ['id1a3', 'id3a'] and experiment_type == 'EDD':
+            attrs['scan_type'] = cls.get_smb_par_attr(values, 'scan_type')
+            attrs['config_id'] = cls.get_smb_par_attr(values, 'config_id')
+            attrs['dataset_id'] = cls.get_smb_par_attr(values, 'dataset_id')
+            axes_labels = {1: 'fly_labx', 2: 'fly_laby', 3: 'fly_labz',
+                           4: 'fly_ometotal'}
+            if attrs['scan_type'] is None:
+                return attrs
+            if attrs['scan_type'] != 0:
+                attrs['fly_axis_labels'] = [
+                    axes_labels[cls.get_smb_par_attr(values, 'fly_axis0')]]
+                if attrs['scan_type'] in (2, 3, 5):
+                    attrs['fly_axis_labels'].append(
+                        axes_labels[cls.get_smb_par_attr(values, 'fly_axis1')])
+        return attrs
+
+    @staticmethod
+    def get_smb_par_attr(class_fields, label, units='-', name=None):
+        """Read an SMB par file attribute."""
+        if name is None:
+            name = label
+        scalar_data = PointByPointScanData(
+            label=label, data_type='smb_par', units=units, name=name)
+        values = []
+        for scans in class_fields.get('spec_scans'):
+            for scan_number in scans.scan_numbers:
+                scanparser = scans.get_scanparser(scan_number)
+                try:
+                    values.append(scanparser.pars[name])
+                except:
+                    print(
+                        f'Warning: No value found for .par file value "{name}"'
+                        f' on scan {scan_number} in spec file '
+                        f'{scans.spec_file}.')
+                    values.append(None)
+        values = list(set(values))
+        if len(values) != 1:
+            raise ValueError(f'More than one {name} in map not allowed '
+                             f'({values})')
+        return values[0]

     @property
     def all_scalar_data(self):
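The new `attrs` field is declared as `Optional[Annotated[dict, Field(validate_default=True)]] = {}`, so the `validate_attrs` field validator runs even when no `attrs` are supplied; that is what lets the EDD branch populate `scan_type`, `config_id`, `dataset_id`, and `fly_axis_labels` from the SMB .par file. A minimal demonstration of `validate_default=True` on a toy model (real pydantic behavior; declared here as `Field(default=..., validate_default=True)` rather than the Annotated spelling used in the diff):

```python
# validate_default=True forces a field validator to run on the default value too.
from pydantic import BaseModel, Field, ValidationInfo, field_validator

class Toy(BaseModel):
    station: str
    attrs: dict = Field(default={}, validate_default=True)

    @field_validator('attrs')
    @classmethod
    def fill_attrs(cls, attrs: dict, info: ValidationInfo) -> dict:
        # Runs even when attrs is the default {}; earlier fields are in info.data.
        return {'station': info.data['station'], **attrs}

print(Toy(station='id3a').attrs)  # {'station': 'id3a'}
```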
@@ -817,9 +987,18 @@ class MapConfig(BaseModel):
         """Return a dictionary of the values of each independent
         dimension across the map.
         """
+        raise RuntimeError(f'property coords not implemented')
         if not hasattr(self, '_coords'):
+            scan_type = self.attrs.get('scan_type', -1)
+            fly_axis_labels = self.attrs.get('fly_axis_labels', [])
             coords = {}
             for dim in self.independent_dimensions:
+                if dim.label in fly_axis_labels:
+                    relative = True
+                    ndigits = 3
+                else:
+                    relative = False
+                    ndigits = None
                 coords[dim.label] = []
                 for scans in self.spec_scans:
                     for scan_number in scans.scan_numbers:
@@ -827,7 +1006,8 @@ class MapConfig(BaseModel):
                         for scan_step_index in range(
                                 scanparser.spec_scan_npts):
                             coords[dim.label].append(dim.get_value(
-                                scans, scan_number, scan_step_index
+                                scans, scan_number, scan_step_index,
+                                self.scalar_data, relative, ndigits))
                 if self.map_type == 'structured':
                     coords[dim.label] = np.unique(coords[dim.label])
             self._coords = coords
@@ -839,8 +1019,7 @@ class MapConfig(BaseModel):
         map.
         """
         if not hasattr(self, '_dims'):
-            self._dims = [
-                dim.label for dim in self.independent_dimensions[::-1]]
+            self._dims = [dim.label for dim in self.independent_dimensions]
         return self._dims

     @property
@@ -849,6 +1028,7 @@ class MapConfig(BaseModel):
         object, the scan number, and scan step index for every point
         on the map.
         """
+        raise RuntimeError(f'property scan_step_indices not implemented')
         if not hasattr(self, '_scan_step_indices'):
             scan_step_indices = []
             for scans in self.spec_scans:
@@ -865,10 +1045,10 @@ class MapConfig(BaseModel):
         """Return the shape of the map -- a tuple representing the
         number of unique values of each dimension across the map.
         """
+        raise RuntimeError(f'property shape not implemented')
         if not hasattr(self, '_shape'):
             if self.map_type == 'structured':
-                self._shape = tuple(
-                    [len(v) for k, v in self.coords.items()][::-1])
+                self._shape = tuple([len(v) for k, v in self.coords.items()])
             else:
                 self._shape = (len(self.scan_step_indices),)
         return self._shape
@@ -882,7 +1062,16 @@ class MapConfig(BaseModel):
         :return: A list of coordinate values.
         :rtype: dict
         """
+        raise RuntimeError(f'get_coords not implemented')
         if self.map_type == 'structured':
+            scan_type = self.attrs.get('scan_type', -1)
+            fly_axis_labels = self.attrs.get('fly_axis_labels', [])
+            if (scan_type in (3, 5)
+                    and len(self.dims) ==
+                        len(map_index) + len(fly_axis_labels)):
+                dims = [dim for dim in self.dims if dim not in fly_axis_labels]
+                return {dim:self.coords[dim][i]
+                        for dim, i in zip(dims, map_index)}
             return {dim:self.coords[dim][i]
                     for dim, i in zip(self.dims, map_index)}
         else:
@@ -901,6 +1090,7 @@ class MapConfig(BaseModel):
         :return: One frame of raw detector data.
         :rtype: np.ndarray
         """
+        raise RuntimeError(f'get_detector_data not implemented')
         scans, scan_number, scan_step_index = \
             self.get_scan_step_index(map_index)
         scanparser = scans.get_scanparser(scan_number)
@@ -917,12 +1107,22 @@ class MapConfig(BaseModel):
             step index.
         :rtype: tuple[SpecScans, int, int]
         """
+        raise RuntimeError(f'get_scan_step_index not implemented')
+        scan_type = self.attrs.get('scan_type', -1)
+        fly_axis_labels = self.attrs.get('fly_axis_labels', [])
         if self.map_type == 'structured':
             map_coords = self.get_coords(map_index)
             for scans, scan_number, scan_step_index in self.scan_step_indices:
-                coords = {dim.label:
-
-
+                coords = {dim.label:(
+                    dim.get_value(
+                        scans, scan_number, scan_step_index,
+                        self.scalar_data, True, 3)
+                    if dim.label in fly_axis_labels
+                    else
+                    dim.get_value(
+                        scans, scan_number, scan_step_index,
+                        self.scalar_data))
+                    for dim in self.independent_dimensions}
                 if coords == map_coords:
                     return scans, scan_number, scan_step_index
             raise RuntimeError(f'Unable to match coordinates {coords}')
@@ -940,9 +1140,11 @@ class MapConfig(BaseModel):
         :type map_index: tuple
         :return: Raw data value.
         """
+        raise RuntimeError(f'get_value not implemented')
         scans, scan_number, scan_step_index = \
             self.get_scan_step_index(map_index)
-        return data.get_value(scans, scan_number, scan_step_index
+        return data.get_value(scans, scan_number, scan_step_index,
+                              self.scalar_data)


 def import_scanparser(station, experiment):
@@ -954,40 +1156,9 @@ def import_scanparser(station, experiment):
     :type station: str
     :param experiment: The experiment type.
     :type experiment: Literal[
-        '
+        'EDD', 'GIWAXS', 'SAXSWAXS', 'TOMO', 'XRF']
     """
-
-    station = station.lower()
-    experiment = experiment.lower()
-
     # Local modules
-
-        if experiment in ('saxswaxs', 'powder'):
-            from CHAP.utils.scanparsers \
-                import SMBLinearScanParser as ScanParser
-        elif experiment == 'edd':
-            from CHAP.utils.scanparsers \
-                import SMBMCAScanParser as ScanParser
-        elif experiment == 'tomo':
-            from CHAP.utils.scanparsers \
-                import SMBRotationScanParser as ScanParser
-        else:
-            raise ValueError(
-                f'Invalid experiment type for station {station}: {experiment}')
-    elif station == 'id3b':
-        if experiment == 'saxswaxs':
-            from CHAP.utils.scanparsers \
-                import FMBSAXSWAXSScanParser as ScanParser
-        elif experiment == 'tomo':
-            from CHAP.utils.scanparsers \
-                import FMBRotationScanParser as ScanParser
-        elif experiment == 'xrf':
-            from CHAP.utils.scanparsers \
-                import FMBXRFScanParser as ScanParser
-        else:
-            raise ValueError(
-                f'Invalid experiment type for station {station}: {experiment}')
-    else:
-        raise ValueError(f'Invalid station: {station}')
+    from chess_scanparsers import choose_scanparser

-    globals()['ScanParser'] =
+    globals()['ScanParser'] = choose_scanparser(station, experiment)