ChessAnalysisPipeline 0.0.17.dev3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. CHAP/TaskManager.py +216 -0
  2. CHAP/__init__.py +27 -0
  3. CHAP/common/__init__.py +57 -0
  4. CHAP/common/models/__init__.py +8 -0
  5. CHAP/common/models/common.py +124 -0
  6. CHAP/common/models/integration.py +659 -0
  7. CHAP/common/models/map.py +1291 -0
  8. CHAP/common/processor.py +2869 -0
  9. CHAP/common/reader.py +658 -0
  10. CHAP/common/utils.py +110 -0
  11. CHAP/common/writer.py +730 -0
  12. CHAP/edd/__init__.py +23 -0
  13. CHAP/edd/models.py +876 -0
  14. CHAP/edd/processor.py +3069 -0
  15. CHAP/edd/reader.py +1023 -0
  16. CHAP/edd/select_material_params_gui.py +348 -0
  17. CHAP/edd/utils.py +1572 -0
  18. CHAP/edd/writer.py +26 -0
  19. CHAP/foxden/__init__.py +19 -0
  20. CHAP/foxden/models.py +71 -0
  21. CHAP/foxden/processor.py +124 -0
  22. CHAP/foxden/reader.py +224 -0
  23. CHAP/foxden/utils.py +80 -0
  24. CHAP/foxden/writer.py +168 -0
  25. CHAP/giwaxs/__init__.py +11 -0
  26. CHAP/giwaxs/models.py +491 -0
  27. CHAP/giwaxs/processor.py +776 -0
  28. CHAP/giwaxs/reader.py +8 -0
  29. CHAP/giwaxs/writer.py +8 -0
  30. CHAP/inference/__init__.py +7 -0
  31. CHAP/inference/processor.py +69 -0
  32. CHAP/inference/reader.py +8 -0
  33. CHAP/inference/writer.py +8 -0
  34. CHAP/models.py +227 -0
  35. CHAP/pipeline.py +479 -0
  36. CHAP/processor.py +125 -0
  37. CHAP/reader.py +124 -0
  38. CHAP/runner.py +277 -0
  39. CHAP/saxswaxs/__init__.py +7 -0
  40. CHAP/saxswaxs/processor.py +8 -0
  41. CHAP/saxswaxs/reader.py +8 -0
  42. CHAP/saxswaxs/writer.py +8 -0
  43. CHAP/server.py +125 -0
  44. CHAP/sin2psi/__init__.py +7 -0
  45. CHAP/sin2psi/processor.py +8 -0
  46. CHAP/sin2psi/reader.py +8 -0
  47. CHAP/sin2psi/writer.py +8 -0
  48. CHAP/tomo/__init__.py +15 -0
  49. CHAP/tomo/models.py +210 -0
  50. CHAP/tomo/processor.py +3862 -0
  51. CHAP/tomo/reader.py +9 -0
  52. CHAP/tomo/writer.py +59 -0
  53. CHAP/utils/__init__.py +6 -0
  54. CHAP/utils/converters.py +188 -0
  55. CHAP/utils/fit.py +2947 -0
  56. CHAP/utils/general.py +2655 -0
  57. CHAP/utils/material.py +274 -0
  58. CHAP/utils/models.py +595 -0
  59. CHAP/utils/parfile.py +224 -0
  60. CHAP/writer.py +122 -0
  61. MLaaS/__init__.py +0 -0
  62. MLaaS/ktrain.py +205 -0
  63. MLaaS/mnist_img.py +83 -0
  64. MLaaS/tfaas_client.py +371 -0
  65. chessanalysispipeline-0.0.17.dev3.dist-info/LICENSE +60 -0
  66. chessanalysispipeline-0.0.17.dev3.dist-info/METADATA +29 -0
  67. chessanalysispipeline-0.0.17.dev3.dist-info/RECORD +70 -0
  68. chessanalysispipeline-0.0.17.dev3.dist-info/WHEEL +5 -0
  69. chessanalysispipeline-0.0.17.dev3.dist-info/entry_points.txt +2 -0
  70. chessanalysispipeline-0.0.17.dev3.dist-info/top_level.txt +2 -0
@@ -0,0 +1,1291 @@
1
+ """Map related Pydantic model classes."""
2
+
3
+ # System modules
4
+ from copy import deepcopy
5
+ from functools import (
6
+ cache,
7
+ lru_cache,
8
+ )
9
+ import os
10
+ from typing import (
11
+ Literal,
12
+ Optional,
13
+ )
14
+
15
+ # Third party modules
16
+ import numpy as np
17
+ from pydantic import (
18
+ Field,
19
+ FilePath,
20
+ PrivateAttr,
21
+ conint,
22
+ conlist,
23
+ constr,
24
+ field_validator,
25
+ model_validator,
26
+ )
27
+ from pyspec.file.spec import FileSpec
28
+ from typing_extensions import Annotated
29
+
30
+ # Local modules
31
+ from CHAP.models import CHAPBaseModel
32
+
33
+
34
+ class Detector(CHAPBaseModel):
35
+ """Class representing a single detector.
36
+
37
+ :ivar id: The detector id (e.g. name or channel index).
38
+ :type id: str
39
+ :ivar attrs: Additional detector configuration attributes.
40
+ :type attrs: dict, optional
41
+ """
42
+ id_: constr(min_length=1) = Field(alias='id')
43
+ attrs: Optional[Annotated[dict, Field(validate_default=True)]] = {}
44
+
45
+ @field_validator('id_', mode='before')
46
+ @classmethod
47
+ def validate_id(cls, id_):
48
+ """Validate the detector id.
49
+
50
+ :param id: The detector id (e.g. name or channel index).
51
+ :type id: int, str
52
+ :return: The detector id.
53
+ :rtype: str
54
+ """
55
+ if isinstance(id_, int):
56
+ return str(id_)
57
+ return id_
58
+
59
+ #RV maybe better to use model_validator, see v2 docs?
60
+ @field_validator('attrs')
61
+ @classmethod
62
+ def validate_attrs(cls, attrs):
63
+ """Validate any additional detector configuration attributes.
64
+
65
+ :param attrs: Any additional attributes to `Detector`.
66
+ :type attrs: dict
67
+ :raises ValueError: Invalid attribute.
68
+ :return: The validated field for `attrs`.
69
+ :rtype: dict
70
+ """
71
+ # RV FIX add eta
72
+ name = attrs.get('name')
73
+ if name is not None:
74
+ if isinstance(name, int):
75
+ attrs['name'] = str(name)
76
+ elif not isinstance(name, str):
77
+ raise ValueError
78
+ return attrs
79
+
80
+ def get_id(self):
81
+ return self.id_
82
+
83
+
84
+ class DetectorConfig(CHAPBaseModel):
85
+ """Class representing a detector configuration.
86
+
87
+ :ivar detectors: Detector list.
88
+ :type detectors: list[Detector]
89
+ """
90
+ detectors: conlist(item_type=Detector, min_length=1)
91
+
92
+
93
+ class Sample(CHAPBaseModel):
94
+ """Class representing a sample metadata configuration.
95
+
96
+ :ivar name: The name of the sample.
97
+ :type name: str
98
+ :ivar description: A description of the sample.
99
+ :type description: str, optional
100
+ """
101
+ name: constr(min_length=1)
102
+ description: Optional[str] = ''
103
+
104
+
105
+ class SpecScans(CHAPBaseModel):
106
+ """Class representing a set of scans from a single SPEC file.
107
+
108
+ :ivar spec_file: Path to the SPEC file.
109
+ :type spec_file: str
110
+ :ivar scan_numbers: List of scan numbers to use.
111
+ :type scan_numbers: Union[int, list[int], str]
112
+ :ivar par_file: Path to a non-default SMB par file.
113
+ :type par_file: str, optional
114
+ """
115
+ spec_file: FilePath
116
+ scan_numbers: conlist(item_type=conint(gt=0), min_length=1)
117
+ par_file: Optional[FilePath] = None
118
+
119
+ @field_validator('spec_file')
120
+ @classmethod
121
+ def validate_spec_file(cls, spec_file):
122
+ """Validate the specified SPEC file.
123
+
124
+ :param spec_file: Path to the SPEC file.
125
+ :type spec_file: str
126
+ :raises ValueError: If the SPEC file is invalid.
127
+ :return: Absolute path to the SPEC file.
128
+ :rtype: str
129
+ """
130
+ try:
131
+ spec_file = os.path.abspath(spec_file)
132
+ FileSpec(spec_file)
133
+ except Exception as exc:
134
+ raise ValueError(f'Invalid SPEC file {spec_file}') from exc
135
+ return spec_file
136
+
137
+ @field_validator('scan_numbers', mode='before')
138
+ @classmethod
139
+ def validate_scan_numbers(cls, scan_numbers, info):
140
+ """Validate the specified list of scan numbers.
141
+
142
+ :param scan_numbers: List of scan numbers.
143
+ :type scan_numbers: Union[int, list[int], str]
144
+ :param info: Pydantic validator info object.
145
+ :type info: pydantic_core._pydantic_core.ValidationInfo
146
+ :raises ValueError: If a specified scan number is not found in
147
+ the SPEC file.
148
+ :return: List of scan numbers.
149
+ :rtype: list[int]
150
+ """
151
+ if isinstance(scan_numbers, int):
152
+ scan_numbers = [scan_numbers]
153
+ elif isinstance(scan_numbers, str):
154
+ # Local modules
155
+ from CHAP.utils.general import string_to_list
156
+
157
+ scan_numbers = string_to_list(scan_numbers)
158
+
159
+ spec_file = info.data.get('spec_file')
160
+ if spec_file is not None:
161
+ spec_scans = FileSpec(spec_file)
162
+ for scan_number in scan_numbers:
163
+ scan = spec_scans.get_scan_by_number(scan_number)
164
+ if scan is None:
165
+ raise ValueError(
166
+ f'No scan number {scan_number} in {spec_file}')
167
+ return scan_numbers
168
+
169
+ @field_validator('par_file', mode='before')
170
+ @classmethod
171
+ def validate_par_file(cls, par_file):
172
+ """Validate the specified SMB par file.
173
+
174
+ :param par_file: Path to a non-default SMB par file.
175
+ :type par_file: str
176
+ :raises ValueError: If the SMB par file is invalid.
177
+ :return: Absolute path to the SMB par file.
178
+ :rtype: str
179
+ """
180
+ if par_file is None or not par_file:
181
+ return None
182
+ par_file = os.path.abspath(par_file)
183
+ if not os.path.isfile(par_file):
184
+ raise ValueError(f'Invalid SMB par file {par_file}')
185
+ return par_file
186
+
187
+ @property
188
+ def scanparsers(self):
189
+ """Returns the list of `ScanParser`s for each of the scans
190
+ specified by the SPEC file and scan numbers belonging to this
191
+ instance of `SpecScans`.
192
+ """
193
+ return [self.get_scanparser(scan_no) for scan_no in self.scan_numbers]
194
+
195
+ def get_scanparser(self, scan_number):
196
+ """Return a `ScanParser` for the specified scan number in the
197
+ specified SPEC file.
198
+
199
+ :param scan_number: Scan number to get a `ScanParser` for.
200
+ :type scan_number: int
201
+ :return: `ScanParser` for the specified scan number.
202
+ :rtype: ScanParser
203
+ """
204
+ if self.par_file is None:
205
+ return get_scanparser(self.spec_file, scan_number)
206
+ return get_scanparser(
207
+ self.spec_file, scan_number, par_file=self.par_file)
208
+
209
+ def get_index(self, scan_number, scan_step_index, map_config):
210
+ """Return a tuple representing the index of a specific step in
211
+ a specific SPEC scan within a map.
212
+
213
+ :param scan_number: Scan number to get index for.
214
+ :type scan_number: int
215
+ :param scan_step_index: Scan step index to get index for.
216
+ :type scan_step_index: int
217
+ :param map_config: Map configuration to get index for.
218
+ :type map_config: MapConfig
219
+ :return: Index for the specified scan number and scan step
220
+ index within the specified map configuration.
221
+ :rtype: tuple
222
+ """
223
+ index = ()
224
+ for independent_dimension in map_config.independent_dimensions:
225
+ coordinate_index = list(
226
+ map_config.coords[independent_dimension.label]).index(
227
+ independent_dimension.get_value(
228
+ self, scan_number, scan_step_index,
229
+ map_config.scalar_data))
230
+ index = (coordinate_index, *index)
231
+ return index
232
+
233
+ def get_detector_data(self, detectors, scan_number, scan_step_index):
234
+ """Return the raw data from the specified detectors at the
235
+ specified scan number and scan step index.
236
+
237
+ :param detectors: List of detector prefixes to get raw data
238
+ for.
239
+ :type detectors: list[str]
240
+ :param scan_number: Scan number to get data for.
241
+ :type scan_number: int
242
+ :param scan_step_index: Scan step index to get data for.
243
+ :type scan_step_index: int
244
+ :return: Data from the specified detectors for the specified
245
+ scan number and scan step index.
246
+ :rtype: list[np.ndarray]
247
+ """
248
+ return get_detector_data(
249
+ tuple([detector.prefix for detector in detectors]),
250
+ self.spec_file,
251
+ scan_number,
252
+ scan_step_index)
253
+
254
+
255
+ @cache
256
+ def get_available_scan_numbers(spec_file):
257
+ """Get the available scan numbers.
258
+
259
+ :param spec_file: Path to the SPEC file.
260
+ :type spec_file: str
261
+ """
262
+ return list(FileSpec(spec_file).scans.keys())
263
+
264
+
265
+ @cache
266
+ def get_scanparser(spec_file, scan_number, par_file=None):
267
+ """Get the scanparser.
268
+
269
+ :param spec_file: Path to the SPEC file.
270
+ :type spec_file: str
271
+ :param scan_number: Scan number to get data for.
272
+ :type scan_number: int
273
+ :param par_file: Path to a SMB par file.
274
+ :type par_file: str, optional
275
+ """
276
+ # pylint: disable=undefined-variable
277
+ if scan_number not in get_available_scan_numbers(spec_file):
278
+ return None
279
+ if par_file is None:
280
+ return ScanParser(spec_file, scan_number)
281
+ return ScanParser(spec_file, scan_number, par_file=par_file)
282
+
283
+
284
+ @lru_cache(maxsize=10)
285
+ def get_detector_data(
286
+ detector_prefixes, spec_file, scan_number, scan_step_index):
287
+ """Get the detector data.
288
+
289
+ :param detector_prefixes: The detector prefixes.
290
+ :type detector_prefixes: Union[tuple[str], list[str]]
291
+ :ivar spec_file: Path to the SPEC file.
292
+ :type spec_file: str
293
+ :param scan_number: Scan number to get data for.
294
+ :type scan_number: int
295
+ :param scan_step_index: The scan step index.
296
+ :type scan_step_index: int
297
+ """
298
+ detector_data = []
299
+ scanparser = get_scanparser(spec_file, scan_number)
300
+ for prefix in detector_prefixes:
301
+ image_data = scanparser.get_detector_data(prefix, scan_step_index)
302
+ detector_data.append(image_data)
303
+ return detector_data
304
+
305
+
306
+ class PointByPointScanData(CHAPBaseModel):
307
+ """Class representing a source of raw scalar-valued data for which
308
+ a value was recorded at every point in a `MapConfig`.
309
+
310
+ :ivar label: A user-defined label for referring to this data in
311
+ the NeXus file and in other tools.
312
+ :type label: str
313
+ :ivar units: The units in which the data were recorded.
314
+ :type units: str
315
+ :ivar data_type: Represents how these data were recorded at time
316
+ of data collection.
317
+ :type data_type: Literal['spec_motor', 'spec_motor_absolute',
318
+ 'scan_column', 'smb_par', 'expression', 'detector_log_timestamps']
319
+ :ivar name: Represents the name with which these raw data were
320
+ recorded at time of data collection.
321
+ :type name: str
322
+ """
323
+ label: constr(min_length=1)
324
+ units: constr(strip_whitespace=True, min_length=1)
325
+ data_type: Literal[
326
+ 'spec_motor', 'spec_motor_absolute', 'scan_column', 'smb_par',
327
+ 'expression', 'detector_log_timestamps']
328
+ name: constr(strip_whitespace=True, min_length=1)
329
+ ndigits: Optional[conint(ge=0)] = None
330
+
331
+ @field_validator('label')
332
+ @classmethod
333
+ def validate_label(cls, label):
334
+ """Validate that the supplied `label` does not conflict with
335
+ any of the values for `label` reserved for certain data needed
336
+ to perform corrections.
337
+
338
+ :param label: The value of `label` to validate.
339
+ :type label: str
340
+ :raises ValueError: If `label` is one of the reserved values.
341
+ :return: The originally supplied value `label`.
342
+ :rtype: str
343
+ """
344
+ if ((not issubclass(cls,CorrectionsData))
345
+ and label in CorrectionsData.reserved_labels()):
346
+ raise ValueError(
347
+ f'{cls.__name__}.label may not be any of the following '
348
+ f'reserved values: {CorrectionsData.reserved_labels()}')
349
+ return label
350
+
351
+ def validate_for_station(self, station):
352
+ """Validate this instance of `PointByPointScanData` for a
353
+ certain choice of station (beamline).
354
+
355
+ :param station: The name of the station (in 'idxx' format).
356
+ :type station: str
357
+ :raises TypeError: If the station is not compatible with the
358
+ value of the `data_type` attribute for this instance of
359
+ PointByPointScanData.
360
+ """
361
+ if (station.lower() not in ('id1a3', 'id3a')
362
+ and self.data_type == 'smb_par'):
363
+ raise TypeError(
364
+ f'{self.__class__.__name__}.data_type may not be "smb_par" '
365
+ f'when station is "{station}"')
366
+ if (not station.lower() == 'id3b'
367
+ and self.data_type == 'detector_log_timestamps'):
368
+ raise TypeError(
369
+ f'{self.__class__.__name__}.data_type may not be'
370
+ + f' "detector_log_timestamps" when station is "{station}"')
371
+
372
+ def validate_for_spec_scans(
373
+ self, spec_scans, scan_step_index='all'):
374
+ """Validate this instance of `PointByPointScanData` for a list
375
+ of `SpecScans`.
376
+
377
+ :param spec_scans: A list of `SpecScans` whose raw data will
378
+ be checked for the presence of the data represented by
379
+ this instance of `PointByPointScanData`.
380
+ :type spec_scans: list[SpecScans]
381
+ :param scan_step_index: A specific scan step index to validate,
382
+ defaults to `'all'`.
383
+ :type scan_step_index: Union[Literal['all'],int], optional
384
+ :raises RuntimeError: If the data represented by this instance
385
+ of `PointByPointScanData` is missing for the specified
386
+ scan steps.
387
+ """
388
+ for scans in spec_scans:
389
+ for scan_number in scans.scan_numbers:
390
+ scanparser = scans.get_scanparser(scan_number)
391
+ if scan_step_index == 'all':
392
+ scan_step_index_range = range(scanparser.spec_scan_npts)
393
+ else:
394
+ scan_step_index_range = range(
395
+ scan_step_index, 1+scan_step_index)
396
+ for index in scan_step_index_range:
397
+ try:
398
+ self.get_value(scans, scan_number, index)
399
+ except Exception as exc:
400
+ raise RuntimeError(
401
+ f'Could not find data for {self.name} '
402
+ f'(data_type "{self.data_type}") '
403
+ f'on scan number {scan_number} '
404
+ f'for index {index} '
405
+ f'in spec file {scans.spec_file}') from exc
406
+
407
+ def validate_for_scalar_data(self, scalar_data):
408
+ """Used for `PointByPointScanData` objects with a `data_type`
409
+ of `'expression'`. Validate that the `scalar_data` field of a
410
+ `MapConfig` object contains all the items necessary for
411
+ evaluating the expression.
412
+
413
+ :param scalar_data: the `scalar_data` field of a `MapConfig`
414
+ that this `PointByPointScanData` object will be validated
415
+ against.
416
+ :type scalar_data: list[PointByPointScanData]
417
+ :raises ValueError: if `scalar_data` does not contain items
418
+ needed for evaluating the expression.
419
+ """
420
+ # Third party modules
421
+ from ast import parse
422
+ from asteval import get_ast_names
423
+
424
+ labels = get_ast_names(parse(self.name))
425
+ for label in ('round', 'np', 'numpy'):
426
+ try:
427
+ labels.remove(label)
428
+ except Exception:
429
+ pass
430
+ for l in labels:
431
+ if l in ('round', 'np', 'numpy'):
432
+ continue
433
+ label_found = False
434
+ for s_d in scalar_data:
435
+ if s_d.label == l:
436
+ label_found = True
437
+ break
438
+ if not label_found:
439
+ raise ValueError(
440
+ f'{l} is not the label of an item in scalar_data')
441
+
442
+ def get_value(
443
+ self, spec_scans, scan_number, scan_step_index=0,
444
+ scalar_data=None, relative=True, ndigits=None):
445
+ """Return the value recorded for this instance of
446
+ `PointByPointScanData` at a specific scan step.
447
+
448
+ :param spec_scans: An instance of `SpecScans` in which the
449
+ requested scan step occurs.
450
+ :type spec_scans: SpecScans
451
+ :param scan_number: The number of the scan in which the
452
+ requested scan step occurs.
453
+ :type scan_number: int
454
+ :param scan_step_index: The index of the requested scan step,
455
+ defaults to `0`.
456
+ :type scan_step_index: int, optional
457
+ :param scalar_data: list of scalar data configurations used to
458
+ get values for `PointByPointScanData` objects with
459
+ `data_type == 'expression'`.
460
+ :type scalar_data: list[PointByPointScanData], optional
461
+ :param relative: Whether to return a relative value or not,
462
+ defaults to `True` (only applies to SPEC motor values).
463
+ :type relative: bool, optional
464
+ :param ndigits: Round SPEC motor values to the specified
465
+ number of decimals if set.
466
+ :type ndigits: int, optional
467
+ :return: The value recorded of the data represented by this
468
+ instance of `PointByPointScanData` at the scan step
469
+ requested.
470
+ :rtype: float
471
+ """
472
+ if 'spec_motor' in self.data_type:
473
+ if ndigits is None:
474
+ ndigits = self.ndigits
475
+ if 'absolute' in self.data_type:
476
+ relative = False
477
+ return get_spec_motor_value(
478
+ spec_scans.spec_file, scan_number, scan_step_index, self.name,
479
+ relative, ndigits)
480
+ if self.data_type == 'scan_column':
481
+ return get_spec_counter_value(
482
+ spec_scans.spec_file, scan_number, scan_step_index, self.name)
483
+ if self.data_type == 'smb_par':
484
+ return get_smb_par_value(
485
+ spec_scans.spec_file, scan_number, self.name)
486
+ if self.data_type == 'expression':
487
+ if scalar_data is None:
488
+ scalar_data = []
489
+ return get_expression_value(
490
+ spec_scans, scan_number, scan_step_index, self.name,
491
+ scalar_data)
492
+ if self.data_type == 'detector_log_timestamps':
493
+ timestamps = get_detector_log_timestamps(
494
+ spec_scans.spec_file, scan_number, self.name)
495
+ if scan_step_index >= 0:
496
+ return timestamps[scan_step_index]
497
+ return timestamps
498
+ return None
499
+
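# A minimal usage sketch of the model above, assuming hypothetical SPEC motor
# and counter names ('ome', 'ic3'); an 'expression' item refers to other
# scalar_data entries by their labels, and reserved correction labels such as
# 'presample_intensity' are rejected by validate_label.
omega = PointByPointScanData(
    label='omega', units='degrees', data_type='spec_motor', name='ome')
counts = PointByPointScanData(
    label='counts3', units='counts', data_type='scan_column', name='ic3')
half_counts = PointByPointScanData(
    label='half_counts', units='counts', data_type='expression',
    name='counts3 / 2')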
500
+ @cache
501
+ def get_spec_motor_value(
502
+ spec_file, scan_number, scan_step_index, spec_mnemonic,
503
+ relative=True, ndigits=None):
504
+ """Return the value recorded for a SPEC motor at a specific scan
505
+ step.
506
+
507
+ :param spec_file: Location of a SPEC file in which the requested
508
+ scan step occurs.
509
+ :type spec_file: str
510
+ :param scan_number: The number of the scan in which the requested
511
+ scan step occurs.
512
+ :type scan_number: int
513
+ :param scan_step_index: The index of the requested scan step.
514
+ :type scan_step_index: int
515
+ :param spec_mnemonic: The mnemonic of a SPEC motor.
516
+ :type spec_mnemonic: str
517
+ :param relative: Whether to return a relative value or not,
518
+ defaults to `True`.
519
+ :type relative: bool, optional
520
+ :param ndigits: Round SPEC motor values to the specified
521
+ number of decimals if set.
522
+ :type ndigits: int, optional
523
+ :return: The value of the motor at the scan step requested.
524
+ :rtype: float
525
+ """
526
+ scanparser = get_scanparser(spec_file, scan_number)
527
+ if (hasattr(scanparser, 'spec_scan_motor_mnes')
528
+ and spec_mnemonic in scanparser.spec_scan_motor_mnes):
529
+ motor_i = scanparser.spec_scan_motor_mnes.index(spec_mnemonic)
530
+ if scan_step_index >= 0:
531
+ scan_step = np.unravel_index(
532
+ scan_step_index,
533
+ scanparser.spec_scan_shape,
534
+ order='F')
535
+ motor_value = \
536
+ scanparser.get_spec_scan_motor_vals(
537
+ relative)[motor_i][scan_step[motor_i]]
538
+ else:
539
+ motor_value = scanparser.get_spec_scan_motor_vals(
540
+ relative)[motor_i]
541
+ if len(scanparser.spec_scan_shape) == 2:
542
+ if motor_i == 0:
543
+ motor_value = np.concatenate(
544
+ [motor_value] * scanparser.spec_scan_shape[1])
545
+ else:
546
+ motor_value = np.repeat(
547
+ motor_value, scanparser.spec_scan_shape[0])
548
+ else:
549
+ motor_value = scanparser.get_spec_positioner_value(spec_mnemonic)
550
+ if ndigits is not None:
551
+ motor_value = np.round(motor_value, ndigits)
552
+ return motor_value
553
+
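# Standalone check of the indexing convention used above: with order='F',
# np.unravel_index maps a flat scan step index onto the scan grid with the
# first (fastest) motor index varying most rapidly (hypothetical 4 x 3 scan).
import numpy as np
assert np.unravel_index(5, (4, 3), order='F') == (1, 1)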
554
+ @cache
555
+ def get_spec_counter_value(
556
+ spec_file, scan_number, scan_step_index, spec_column_label):
557
+ """Return the value recorded for a SPEC counter at a specific scan
558
+ step.
559
+
560
+ :param spec_file: Location of a SPEC file in which the requested
561
+ scan step occurs.
562
+ :type spec_file: str
563
+ :param scan_number: The number of the scan in which the requested
564
+ scan step occurs.
565
+ :type scan_number: int
566
+ :param scan_step_index: The index of the requested scan step.
567
+ :type scan_step_index: int
568
+ :param spec_column_label: The label of a SPEC data column.
569
+ :type spec_column_label: str
570
+ :return: The value of the counter at the scan step requested.
571
+ :rtype: float
572
+ """
573
+ scanparser = get_scanparser(spec_file, scan_number)
574
+ if scan_step_index >= 0:
575
+ return scanparser.spec_scan_data[spec_column_label][scan_step_index]
576
+ return scanparser.spec_scan_data[spec_column_label]
577
+
578
+ @cache
579
+ def get_smb_par_value(spec_file, scan_number, par_name):
580
+ """Return the value recorded for a specific scan in SMB-tyle .par
581
+ file.
582
+
583
+ :param spec_file: Location of a SPEC file in which the requested
584
+ scan step occurs.
585
+ :type spec_file: str
586
+ :param scan_number: The number of the scan in which the requested
587
+ scan step occurs.
588
+ :type scan_number: int
589
+ :param par_name: The name of the column in the .par file.
590
+ :type par_name: str
591
+ :return: The value from the .par file for the requested scan.
592
+ :rtype: float
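# A minimal usage sketch, assuming hypothetical detector names: the 'id' alias
# lets plain dicts be passed directly, and validate_id coerces integer ids to
# strings.
detector_config = DetectorConfig(detectors=[
    {'id': 0, 'attrs': {'name': 'PIL10'}},  # integer id is stored as '0'
    {'id': 'eiger'},
])
assert [d.get_id() for d in detector_config.detectors] == ['0', 'eiger']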
593
+ """
594
+ scanparser = get_scanparser(spec_file, scan_number)
595
+ return scanparser.pars[par_name]
596
+
597
+ def get_expression_value(
598
+ spec_scans, scan_number, scan_step_index, expression,
599
+ scalar_data):
600
+ """Return the value of an evaluated expression of other sources of
601
+ point-by-point scalar scan data for a single point.
602
+
603
+ :param spec_scans: An instance of `SpecScans` in which the
604
+ requested scan step occurs.
605
+ :type spec_scans: SpecScans
606
+ :param scan_number: The number of the scan in which the requested
607
+ scan step occurs.
608
+ :type scan_number: int
609
+ :param scan_step_index: The index of the requested scan step.
610
+ :type scan_step_index: int
611
+ :param expression: The string expression to evaluate.
612
+ :type expression: str
613
+ :param scalar_data: the `scalar_data` field of a `MapConfig`
614
+ object (used to provide values for variables used in
615
+ `expression`).
616
+ :type scalar_data: list[PointByPointScanData]
617
+ :return: The evaluated value of the expression at the requested scan step.
618
+ :rtype: float
619
+ """
620
+ # Third party modules
621
+ from ast import parse
622
+ from asteval import get_ast_names, Interpreter
623
+
624
+ labels = get_ast_names(parse(expression))
625
+ symtable = {}
626
+ for l in labels:
627
+ if l == 'round':
628
+ symtable[l] = round
629
+ for s_d in scalar_data:
630
+ if s_d.label == l:
631
+ symtable[l] = s_d.get_value(
632
+ spec_scans, scan_number, scan_step_index, scalar_data)
633
+ aeval = Interpreter(symtable=symtable)
634
+ return aeval(expression)
635
+
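# A self-contained sketch of the asteval mechanism used above, with
# hypothetical counter labels: every label appearing in the expression is
# bound in the interpreter's symbol table before evaluation.
from asteval import Interpreter
aeval = Interpreter(symtable={'mca1': 120.0, 'mca2': 80.0, 'round': round})
assert aeval('round((mca1 + mca2) / 2)') == 100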
636
+ @cache
637
+ def get_detector_log_timestamps(spec_file, scan_number, detector_prefix):
638
+ """Return the list of detector timestamps for the given scan &
639
+ detector prefix.
640
+
641
+ :param spec_file: Location of a SPEC file in which the requested
642
+ scan occurs.
643
+ :type spec_file: str
644
+ :param scan_number: The number of the scan for which to return
645
+ detector log timestamps.
646
+ :type scan_number: int
647
+ :param detector_prefix: The prefix of the detector whose log file
648
+ should be used.
649
+ :return: All detector log timestamps for the given scan.
650
+ :rtype: list[float]
651
+ """
652
+ sp = get_scanparser(spec_file, scan_number)
653
+ return sp.get_detector_log_timestamps(detector_prefix)
654
+
655
+ def validate_data_source_for_map_config(data_source, info):
656
+ """Confirm that an instance of PointByPointScanData is valid for
657
+ the station and scans provided by a map configuration dictionary.
658
+
659
+ :param data_source: The input object to validate.
660
+ :type data_source: PointByPointScanData
661
+ :param info: Pydantic validator info object.
662
+ :type info: pydantic_core._pydantic_core.ValidationInfo
663
+ :raises Exception: If `data_source` cannot be validated.
664
+ :return: The validated `data_source` instance.
665
+ :rtype: PointByPointScanData
666
+ """
667
+ def _validate_data_source_for_map_config(data_source, info):
668
+ if isinstance(data_source, list):
669
+ return [_validate_data_source_for_map_config(d_s, info)
670
+ for d_s in data_source]
671
+ if data_source is not None:
672
+ values = info.data
673
+ if data_source.data_type == 'expression':
674
+ data_source.validate_for_scalar_data(values['scalar_data'])
675
+ else:
676
+ import_scanparser(
677
+ values['station'], values['experiment_type'])
678
+ data_source.validate_for_station(values['station'])
679
+ data_source.validate_for_spec_scans(values['spec_scans'])
680
+ return data_source
681
+
682
+ return _validate_data_source_for_map_config(data_source, info)
683
+
684
+
685
+ class IndependentDimension(PointByPointScanData):
686
+ """Class representing the source of data to identify the
687
+ coordinate values along one dimension of a `MapConfig`.
688
+
689
+ :ivar start: Starting index for slicing all datasets of a
690
+ `MapConfig` along this axis, defaults to `0`.
691
+ :type start: int, optional
692
+ :ivar end: Ending index for slicing all datasets of a `MapConfig`
693
+ along this axis, defaults to the total number of unique values
694
+ along this axis in the associated `MapConfig`.
695
+ :type end: int, optional
696
+ :ivar step: Step for slicing all datasets of a `MapConfig` along
697
+ this axis, defaults to `1`.
698
+ :type step: int, optional
699
+ """
700
+ start: Optional[conint(ge=0)] = 0
701
+ end: Optional[int] = None
702
+ step: Optional[conint(gt=0)] = 1
703
+
704
+ # @field_validator('step')
705
+ # @classmethod
706
+ # def validate_step(cls, step):
707
+ # """Validate that the supplied value of `step`.
708
+ #
709
+ # :param step: The value of `step` to validate.
710
+ # :type step: str
711
+ # :raises ValueError: If `step` is zero.
712
+ # :return: The originally supplied value `step`.
713
+ # :rtype: int
714
+ # """
715
+ # if step == 0 :
716
+ # raise ValueError('slice step cannot be zero')
717
+ # return step
718
+
719
+
720
+ class CorrectionsData(PointByPointScanData):
721
+ """Class representing the special instances of
722
+ `PointByPointScanData` that are used by certain kinds of
723
+ `CorrectionConfig` tools.
724
+
725
+ :ivar label: One of the reserved values required by
726
+ `CorrectionConfig`.
727
+ :type label: Literal['presample_intensity', 'postsample_intensity',
728
+ 'dwell_time_actual']
729
+ :ivar data_type: Represents how these data were recorded at time
730
+ of data collection.
731
+ :type data_type: Literal['scan_column', 'smb_par']
732
+ """
733
+ label: Literal['presample_intensity', 'postsample_intensity',
734
+ 'dwell_time_actual']
735
+ data_type: Literal['scan_column','smb_par']
736
+
737
+ @classmethod
738
+ def reserved_labels(cls):
739
+ """Return a list of all the labels reserved for
740
+ corrections-related scalar data.
741
+
742
+ :return: A list of reserved labels.
743
+ :rtype: list[str]
744
+ """
745
+ return list((*cls.model_fields['label'].annotation.__args__, 'round'))
746
+
747
+
748
+ class PresampleIntensity(CorrectionsData):
749
+ """Class representing a source of raw data for the intensity of
750
+ the beam that is incident on the sample.
751
+
752
+ :ivar label: Must be `'presample_intensity"`.
753
+ :type label: Literal['presample_intensity']
754
+ :ivar units: Must be `'counts'`.
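# A minimal usage sketch with a hypothetical file path: spec_file must point
# to an existing SPEC file that pyspec can parse, and scan_numbers may be
# given as a single int, a list of ints, or a range string handled by
# string_to_list.
scans = SpecScans(
    spec_file='samplename_1/spec.log',  # resolved to an absolute path
    scan_numbers=1,                     # normalized to [1] by the validator
)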
755
+ :type units: Literal['counts']
756
+ """
757
+ label: Literal['presample_intensity'] = 'presample_intensity'
758
+ units: Literal['counts'] = 'counts'
759
+
760
+
761
+ class PostsampleIntensity(CorrectionsData):
762
+ """Class representing a source of raw data for the intensity of
763
+ the beam that has passed through the sample.
764
+
765
+ :ivar label: Must be `'postsample_intensity'`.
766
+ :type label: Literal['postsample_intensity']
767
+ :ivar units: Must be `'counts'`.
768
+ :type units: Literal['counts']
769
+ """
770
+ label: Literal['postsample_intensity'] = 'postsample_intensity'
771
+ units: Literal['counts'] = 'counts'
772
+
773
+
774
+ class DwellTimeActual(CorrectionsData):
775
+ """Class representing a source of raw data for the actual dwell
776
+ time at each scan point in SPEC (with some scan types, this value
777
+ can vary slightly point-to-point from the dwell time specified in
778
+ the command).
779
+
780
+ :ivar label: Must be `'dwell_time_actual'`.
781
+ :type label: Literal['dwell_time_actual']
782
+ :ivar units: Must be `'s'`.
783
+ :type units: Literal['s']
784
+ """
785
+ label: Literal['dwell_time_actual'] = 'dwell_time_actual'
786
+ units: Literal['s'] = 's'
787
+
788
+
789
+ class SpecConfig(CHAPBaseModel):
790
+ """Class representing the raw data for one or more SPEC scans.
791
+
792
+ :ivar station: The name of the station at which the data was
793
+ collected.
794
+ :type station: Literal['id1a3', 'id3a', 'id3b', 'id4b']
795
+ :ivar spec_scans: A list of the SPEC scans that compose the set.
796
+ :type spec_scans: list[SpecScans]
797
+ :ivar experiment_type: Experiment type.
798
+ :type experiment_type: Literal['EDD', 'GIWAXS', 'SAXSWAXS', 'TOMO',
799
+ 'XRF', 'HDRM']
800
+ """
801
+ station: Literal['id1a3', 'id3a', 'id3b', 'id4b']
802
+ experiment_type: Literal[
803
+ 'EDD', 'GIWAXS', 'SAXSWAXS', 'TOMO', 'XRF', 'HDRM']
804
+ spec_scans: conlist(item_type=SpecScans, min_length=1)
805
+
806
+ @model_validator(mode='before')
807
+ @classmethod
808
+ def validate_specconfig_before(cls, data):
809
+ """Ensure that a valid configuration was provided and finalize
810
+ spec_file filepaths.
811
+
812
+ :param data: Pydantic validator data object.
813
+ :type data: SpecConfig,
814
+ pydantic_core._pydantic_core.ValidationInfo
815
+ :return: The currently validated list of class properties.
816
+ :rtype: dict
817
+ """
818
+ inputdir = data.get('inputdir')
819
+ if inputdir is not None:
820
+ spec_scans = data.get('spec_scans')
821
+ for i, scans in enumerate(deepcopy(spec_scans)):
822
+ if isinstance(scans, dict):
823
+ spec_file = scans['spec_file']
824
+ if not os.path.isabs(spec_file):
825
+ spec_scans[i]['spec_file'] = os.path.join(
826
+ inputdir, spec_file)
827
+ else:
828
+ spec_file = scans.spec_file
829
+ if not os.path.isabs(spec_file):
830
+ spec_scans[i].spec_file = os.path.join(
831
+ inputdir, spec_file)
832
+ data['spec_scans'] = spec_scans
833
+ return data
834
+
835
+ @field_validator('experiment_type')
836
+ @classmethod
837
+ def validate_experiment_type(cls, experiment_type, info):
838
+ """Ensure values for the station and experiment_type fields are
839
+ compatible.
840
+
841
+ :param experiment_type: The value of `experiment_type` to
842
+ validate.
843
+ :type experiment_type: str
844
+ :param info: Pydantic validator info object.
845
+ :type info: pydantic_core._pydantic_core.ValidationInfo
846
+ :raises ValueError: Invalid experiment type.
847
+ :return: The validated field for `experiment_type`.
848
+ :rtype: str
849
+ """
850
+ station = info.data.get('station')
851
+ if station == 'id1a3':
852
+ allowed_experiment_types = ['EDD', 'SAXSWAXS', 'TOMO']
853
+ elif station == 'id3a':
854
+ allowed_experiment_types = ['EDD', 'TOMO']
855
+ elif station == 'id3b':
856
+ allowed_experiment_types = ['GIWAXS', 'SAXSWAXS', 'TOMO', 'XRF']
857
+ elif station == 'id4b':
858
+ allowed_experiment_types = ['HDRM']
859
+ else:
860
+ allowed_experiment_types = []
861
+ if experiment_type not in allowed_experiment_types:
862
+ raise ValueError(
863
+ f'For station {station}, allowed experiment types are '
864
+ f'{", ".join(allowed_experiment_types)}. '
865
+ f'Supplied experiment type {experiment_type} is not allowed.')
866
+ import_scanparser(station, experiment_type)
867
+ return experiment_type
868
+
869
+
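# A minimal usage sketch with a hypothetical SPEC file path: station and
# experiment_type must form a combination accepted by
# validate_experiment_type, e.g. station 'id3a' only allows 'EDD' and 'TOMO'.
spec_config = SpecConfig(
    station='id3a',
    experiment_type='TOMO',
    spec_scans=[{'spec_file': 'sample_1/spec.log', 'scan_numbers': [1, 2]}],
)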
870
+ class MapConfig(CHAPBaseModel):
871
+ """Class representing an experiment consisting of one or more SPEC
872
+ scans.
873
+
874
+ :ivar did: FOXDEN data identifier.
875
+ :type did: str, optional
876
+ :ivar title: The title for the map configuration.
877
+ :type title: str
878
+ :ivar station: The name of the station at which the map was
879
+ collected.
880
+ :type station: Literal['id1a3', 'id3a', 'id3b', 'id4b']
881
+ :ivar experiment_type: Experiment type.
882
+ :type experiment_type: Literal['EDD', 'GIWAXS', 'SAXSWAXS', 'TOMO',
883
+ 'XRF', 'HDRM']
884
+ :ivar sample: The sample metadata configuration.
885
+ :type sample: CHAP.common.models.map.Sample
886
+ :ivar spec_scans: A list of the SPEC scans that compose the map.
887
+ :type spec_scans: list[SpecScans]
888
+ :ivar scalar_data: A list of the sources of data representing
889
+ other scalar raw data values collected at each point on the
890
+ map. In the NeXus file representation of the map, datasets for
891
+ these values will be included, defaults to `[]`.
892
+ :type scalar_data: list[PointByPointScanData], optional
893
+ :ivar independent_dimensions: A list of the sources of data
894
+ representing the raw values of each independent dimension of
895
+ the map.
896
+ :type independent_dimensions: list[PointByPointScanData]
897
+ :ivar presample_intensity: A source of point-by-point presample
898
+ beam intensity data. Required when applying a CorrectionConfig
899
+ tool.
900
+ :type presample_intensity: PresampleIntensity, optional
901
+ :ivar dwell_time_actual: A source of point-by-point actual dwell
902
+ times for SPEC scans. Required when applying a
903
+ CorrectionConfig tool.
904
+ :type dwell_time_actual: DwellTimeActual, optional
905
+ :ivar postsample_intensity: A source of point-by-point postsample
906
+ beam intensity data. Required when applying a CorrectionConfig
907
+ tool with `correction_type='flux_absorption'` or
908
+ `correction_type='flux_absorption_background'`.
909
+ :type postsample_intensity: PostsampleIntensity, optional
910
+ :ivar attrs: Additional Map configuration attributes.
911
+ :type attrs: dict, optional
912
+ """
913
+ did: Optional[constr(strip_whitespace=True)] = None
914
+ title: constr(strip_whitespace=True, min_length=1)
915
+ station: Literal['id1a3', 'id3a', 'id3b', 'id4b']
916
+ experiment_type: Literal[
917
+ 'EDD', 'GIWAXS', 'SAXSWAXS', 'TOMO', 'XRF', 'HDRM']
918
+ sample: Sample
919
+ spec_scans: conlist(item_type=SpecScans, min_length=1)
920
+ scalar_data: Optional[conlist(item_type=PointByPointScanData)] = []
921
+ independent_dimensions: conlist(
922
+ item_type=IndependentDimension, min_length=1)
923
+ presample_intensity: Optional[PresampleIntensity] = None
924
+ dwell_time_actual: Optional[DwellTimeActual] = None
925
+ postsample_intensity: Optional[PostsampleIntensity] = None
926
+ attrs: Optional[Annotated[dict, Field(validate_default=True)]] = {}
927
+ # _coords: dict = PrivateAttr()
928
+ _dims: tuple = PrivateAttr()
929
+ # _scan_step_indices: list = PrivateAttr()
930
+ # _shape: tuple = PrivateAttr()
931
+
932
+ _validate_independent_dimensions = field_validator(
933
+ 'independent_dimensions')(validate_data_source_for_map_config)
934
+ _validate_presample_intensity = field_validator(
935
+ 'presample_intensity')(validate_data_source_for_map_config)
936
+ _validate_dwell_time_actual = field_validator(
937
+ 'dwell_time_actual')(validate_data_source_for_map_config)
938
+ _validate_postsample_intensity = field_validator(
939
+ 'postsample_intensity')(validate_data_source_for_map_config)
940
+ _validate_scalar_data = field_validator(
941
+ 'scalar_data')(validate_data_source_for_map_config)
942
+
943
+ @model_validator(mode='before')
944
+ @classmethod
945
+ def validate_mapconfig_before(cls, data, info):
946
+ """Ensure that a valid configuration was provided and finalize
947
+ spec_file filepaths.
948
+
949
+ :param data: Pydantic validator data object.
950
+ :type data:
951
+ MapConfig, pydantic_core._pydantic_core.ValidationInfo
952
+ :return: The currently validated list of class properties.
953
+ :rtype: dict
954
+ """
955
+ if 'spec_file' in data and 'scan_numbers' in data:
956
+ spec_file = data.pop('spec_file')
957
+ scan_numbers = data.pop('scan_numbers')
958
+ if 'par_file' in data:
959
+ par_file = data.pop('par_file')
960
+ else:
961
+ par_file = None
962
+ if 'spec_scans' in data:
963
+ raise ValueError(
964
+ f'Ambiguous SPEC scan information: spec_file={spec_file},'
965
+ f' scan_numbers={scan_numbers}, and '
966
+ f'spec_scans={data["spec_scans"]}')
967
+ if par_file is None:
968
+ data['spec_scans'] = [
969
+ {'spec_file': spec_file, 'scan_numbers': scan_numbers}]
970
+ else:
971
+ data['spec_scans'] = [
972
+ {'spec_file': spec_file, 'scan_numbers': scan_numbers,
973
+ 'par_file': par_file}]
974
+ else:
975
+ spec_scans = data.get('spec_scans')
976
+ if 'spec_scans' in data:
977
+ inputdir = data.get('inputdir')
978
+ if inputdir is None and info.data is not None:
979
+ inputdir = info.data.get('inputdir')
980
+ for i, scans in enumerate(deepcopy(spec_scans)):
981
+ if isinstance(scans, SpecScans):
982
+ scans = scans.model_dump()
983
+ spec_file = scans['spec_file']
984
+ if inputdir is not None and not os.path.isabs(spec_file):
985
+ scans['spec_file'] = os.path.join(inputdir, spec_file)
986
+ spec_scans[i] = SpecScans(**scans, **data)
987
+ data['spec_scans'] = spec_scans
988
+ return data
989
+
990
+ @field_validator('experiment_type')
991
+ @classmethod
992
+ def validate_experiment_type(cls, experiment_type, info):
993
+ """Ensure values for the station and experiment_type fields are
994
+ compatible.
995
+
996
+ :param experiment_type: The value of `experiment_type` to
997
+ validate.
998
+ :type experiment_type: dict
999
+ :param info: Pydantic validator info object.
1000
+ :type info: pydantic_core._pydantic_core.ValidationInfo
1001
+ :raises ValueError: Invalid experiment type.
1002
+ :return: The validated field for `experiment_type`.
1003
+ :rtype: str
1004
+ """
1005
+ station = info.data['station']
1006
+ if station == 'id1a3':
1007
+ allowed_experiment_types = ['EDD', 'SAXSWAXS', 'TOMO']
1008
+ elif station == 'id3a':
1009
+ allowed_experiment_types = ['EDD', 'TOMO']
1010
+ elif station == 'id3b':
1011
+ allowed_experiment_types = ['GIWAXS', 'SAXSWAXS', 'TOMO', 'XRF']
1012
+ elif station == 'id4b':
1013
+ allowed_experiment_types = ['HDRM']
1014
+ else:
1015
+ allowed_experiment_types = []
1016
+ if experiment_type not in allowed_experiment_types:
1017
+ raise ValueError(
1018
+ f'For station {station}, allowed experiment types are '
1019
+ f'{", ".join(allowed_experiment_types)}. '
1020
+ f'Supplied experiment type {experiment_type} is not allowed.')
1021
+ return experiment_type
1022
+
1023
+ #RV maybe better to use model_validator, see v2 docs?
1024
+ @field_validator('attrs')
1025
+ @classmethod
1026
+ def validate_attrs(cls, attrs, info):
1027
+ """Validate any additional attributes depending on the values
1028
+ for the station and experiment_type fields.
1029
+
1030
+ :param attrs: Any additional attributes to the MapConfig class.
1031
+ :type attrs: dict
1032
+ :param info: Pydantic validator info object.
1033
+ :type info: pydantic_core._pydantic_core.ValidationInfo
1034
+ :raises ValueError: Invalid attribute.
1035
+ :return: The validated field for `attrs`.
1036
+ :rtype: dict
1037
+ """
1038
+ # Get the map's scan_type for EDD experiments
1039
+ values = info.data
1040
+ station = values['station']
1041
+ experiment_type = values['experiment_type']
1042
+ if station in ['id1a3', 'id3a'] and experiment_type == 'EDD':
1043
+ scan_type = cls.get_smb_par_attr(values, 'scan_type')
1044
+ if scan_type is not None:
1045
+ attrs['scan_type'] = scan_type
1046
+ attrs['config_id'] = cls.get_smb_par_attr(values, 'config_id')
1047
+ dataset_id = cls.get_smb_par_attr(values, 'dataset_id')
1048
+ if dataset_id is not None:
1049
+ attrs['dataset_id'] = dataset_id
1050
+ if attrs.get('scan_type') is None:
1051
+ return attrs
1052
+ axes_labels = {1: 'fly_labx', 2: 'fly_laby', 3: 'fly_labz',
1053
+ 4: 'fly_ometotal'}
1054
+ if attrs['scan_type'] != 0:
1055
+ attrs['fly_axis_labels'] = [
1056
+ axes_labels[cls.get_smb_par_attr(values, 'fly_axis0')]]
1057
+ if attrs['scan_type'] in (2, 3, 5):
1058
+ attrs['fly_axis_labels'].append(
1059
+ axes_labels[cls.get_smb_par_attr(values, 'fly_axis1')])
1060
+ return attrs
1061
+
1062
+ @staticmethod
1063
+ def get_smb_par_attr(class_fields, label, units='-', name=None):
1064
+ """Read an SMB par file attribute.
1065
+
1066
+ :param class_fields: The Map configuration class fields.
1067
+ :type class_fields: Any
1068
+ :param label: An attrs field key, the user-defined label for
1069
+ referring to this data in the NeXus file and in other
1070
+ tools.
1071
+ :type label: str
1072
+ :param units: The attrs' field unit, defaults to `'-'`.
1073
+ :type units: str
1074
+ :param name: The attrs' field name, the name with which these
1075
+ raw data were recorded at time of data collection,
1076
+ defaults to `label`.
1077
+ :type name: str, optional.
1078
+ """
1079
+ if name is None:
1080
+ name = label
1081
+ PointByPointScanData(
1082
+ label=label, data_type='smb_par', units=units, name=name)
1083
+ values = []
1084
+ for scans in class_fields.get('spec_scans'):
1085
+ for scan_number in scans.scan_numbers:
1086
+ scanparser = scans.get_scanparser(scan_number)
1087
+ try:
1088
+ values.append(scanparser.pars[name])
1089
+ except Exception:
1090
+ # print(
1091
+ # f'Warning: No value found for .par file value "{name}"'
1092
+ # f' on scan {scan_number} in spec file '
1093
+ # f'{scans.spec_file}.')
1094
+ values.append(None)
1095
+ values = list(set(values))
1096
+ if len(values) != 1:
1097
+ raise ValueError(f'More than one {name} in map not allowed '
1098
+ f'({values})')
1099
+ return values[0]
1100
+
1101
+ @property
1102
+ def all_scalar_data(self):
1103
+ """Return a list of all instances of `PointByPointScanData`
1104
+ for which this map configuration will collect dataset-like
1105
+ data (as opposed to axes-like data).
1106
+
1107
+ This will be any and all of the items in the
1108
+ corrections-data-related fields, as well as any additional
1109
+ items in the optional `scalar_data` field.
1110
+ """
1111
+ return [getattr(self, label, None)
1112
+ for label in CorrectionsData.reserved_labels()
1113
+ if getattr(self, label, None) is not None] + self.scalar_data
1114
+
1115
+ @property
1116
+ def coords(self):
1117
+ """Return a dictionary of the values of each independent
1118
+ dimension across the map.
1119
+ """
1120
+ raise RuntimeError('property coords not implemented')
1121
+ if not hasattr(self, '_coords'):
1122
+ fly_axis_labels = self.attrs.get('fly_axis_labels', [])
1123
+ coords = {}
1124
+ for dim in self.independent_dimensions:
1125
+ if dim.label in fly_axis_labels:
1126
+ relative = True
1127
+ ndigits = 3
1128
+ else:
1129
+ relative = False
1130
+ ndigits = None
1131
+ coords[dim.label] = []
1132
+ for scans in self.spec_scans:
1133
+ for scan_number in scans.scan_numbers:
1134
+ scanparser = scans.get_scanparser(scan_number)
1135
+ for scan_step_index in range(
1136
+ scanparser.spec_scan_npts):
1137
+ coords[dim.label].append(dim.get_value(
1138
+ scans, scan_number, scan_step_index,
1139
+ self.scalar_data, relative, ndigits))
1140
+ if self.map_type == 'structured':
1141
+ coords[dim.label] = np.unique(coords[dim.label])
1142
+ self._coords = coords
1143
+ return self._coords
1144
+
1145
+ @property
1146
+ def dims(self):
1147
+ """Return a tuple of the independent dimension labels for the
1148
+ map.
1149
+ """
1150
+ if not hasattr(self, '_dims'):
1151
+ self._dims = tuple(dim.label for dim in self.independent_dimensions)
1152
+ return self._dims
1153
+
1154
+ @property
1155
+ def scan_step_indices(self):
1156
+ """Return an ordered list in which we can look up the SpecScans
1157
+ object, the scan number, and scan step index for every point
1158
+ on the map.
1159
+ """
1160
+ raise RuntimeError('property scan_step_indices not implemented')
1161
+ if not hasattr(self, '_scan_step_indices'):
1162
+ scan_step_indices = []
1163
+ for scans in self.spec_scans:
1164
+ for scan_number in scans.scan_numbers:
1165
+ scanparser = scans.get_scanparser(scan_number)
1166
+ for scan_step_index in range(scanparser.spec_scan_npts):
1167
+ scan_step_indices.append(
1168
+ (scans, scan_number, scan_step_index))
1169
+ self._scan_step_indices = scan_step_indices
1170
+ return self._scan_step_indices
1171
+
1172
+ @property
1173
+ def shape(self):
1174
+ """Return the shape of the map -- a tuple representing the
1175
+ number of unique values of each dimension across the map.
1176
+ """
1177
+ raise RuntimeError('property shape not implemented')
1178
+ if not hasattr(self, '_shape'):
1179
+ if self.map_type == 'structured':
1180
+ self._shape = tuple([len(v) for k, v in self.coords.items()])
1181
+ else:
1182
+ self._shape = (len(self.scan_step_indices),)
1183
+ return self._shape
1184
+
1185
+ def get_coords(self, map_index):
1186
+ """Return a dictionary of the coordinate names and values of
1187
+ each independent dimension for a given point on the map.
1188
+
1189
+ :param map_index: The map index to return coordinates for.
1190
+ :type map_index: tuple
1191
+ :return: A list of coordinate values.
1192
+ :rtype: dict
1193
+ """
1194
+ raise RuntimeError('get_coords not implemented')
1195
+ if self.map_type == 'structured':
1196
+ scan_type = self.attrs.get('scan_type', -1)
1197
+ fly_axis_labels = self.attrs.get('fly_axis_labels', [])
1198
+ if (scan_type in (3, 5)
1199
+ and len(self.dims) ==
1200
+ len(map_index) + len(fly_axis_labels)):
1201
+ dims = [dim for dim in self.dims if dim not in fly_axis_labels]
1202
+ return {dim:self.coords[dim][i]
1203
+ for dim, i in zip(dims, map_index)}
1204
+ return {dim:self.coords[dim][i]
1205
+ for dim, i in zip(self.dims, map_index)}
1206
+ return {dim:self.coords[dim][map_index[0]] for dim in self.dims}
1207
+
1208
+ def get_detector_data(self, detector_name, map_index):
1209
+ """Return detector data collected by this map for a given
1210
+ point on the map.
1211
+
1212
+ :param detector_name: Name of the detector for which to return
1213
+ data. Usually the value of the detector's EPICS
1214
+ areaDetector prefix macro, $P.
1215
+ :type detector_name: str
1216
+ :param map_index: The map index to return detector data for.
1217
+ :type map_index: tuple
1218
+ :return: One frame of raw detector data.
1219
+ :rtype: np.ndarray
1220
+ """
1221
+ raise RuntimeError('get_detector_data not implemented')
1222
+ scans, scan_number, scan_step_index = \
1223
+ self.get_scan_step_index(map_index)
1224
+ scanparser = scans.get_scanparser(scan_number)
1225
+ return scanparser.get_detector_data(detector_name, scan_step_index)
1226
+
1227
+ def get_scan_step_index(self, map_index):
1228
+ """Return parameters to identify a single SPEC scan step that
1229
+ corresponds to the map point at the index provided.
1230
+
1231
+ :param map_index: The index of a map point to identify as a
1232
+ specific SPEC scan step index.
1233
+ :type map_index: tuple
1234
+ :return: A `SpecScans` configuration, scan number, and scan
1235
+ step index.
1236
+ :rtype: tuple[SpecScans, int, int]
1237
+ """
1238
+ raise RuntimeError('get_scan_step_index not implemented')
1239
+ fly_axis_labels = self.attrs.get('fly_axis_labels', [])
1240
+ if self.map_type == 'structured':
1241
+ map_coords = self.get_coords(map_index)
1242
+ for scans, scan_number, scan_step_index in self.scan_step_indices:
1243
+ coords = {dim.label:(
1244
+ dim.get_value(
1245
+ scans, scan_number, scan_step_index,
1246
+ self.scalar_data, True, 3)
1247
+ if dim.label in fly_axis_labels
1248
+ else
1249
+ dim.get_value(
1250
+ scans, scan_number, scan_step_index,
1251
+ self.scalar_data))
1252
+ for dim in self.independent_dimensions}
1253
+ if coords == map_coords:
1254
+ return scans, scan_number, scan_step_index
1255
+ raise RuntimeError(f'Unable to match coordinates {coords}')
1256
+ return self.scan_step_indices[map_index[0]]
1257
+
1258
+ def get_value(self, data, map_index):
1259
+ """Return the raw data collected by a single device at a
1260
+ single point in the map.
1261
+
1262
+ :param data: The device configuration to return a value of raw
1263
+ data for.
1264
+ :type data: PointByPointScanData
1265
+ :param map_index: The map index to return raw data for.
1266
+ :type map_index: tuple
1267
+ :return: Raw data value.
1268
+ """
1269
+ raise RuntimeError('get_value not implemented')
1270
+ scans, scan_number, scan_step_index = \
1271
+ self.get_scan_step_index(map_index)
1272
+ return data.get_value(scans, scan_number, scan_step_index,
1273
+ self.scalar_data)
1274
+
1275
+
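# A minimal map configuration sketch built only from the fields defined above
# (all values hypothetical): MapConfig(**map_config_dict) would validate it,
# provided the SPEC file exists and the named motor is present in the scans.
map_config_dict = {
    'title': 'example_map',
    'station': 'id3a',
    'experiment_type': 'TOMO',
    'sample': {'name': 'sample_1'},
    'spec_scans': [{'spec_file': 'sample_1/spec.log', 'scan_numbers': [1]}],
    'independent_dimensions': [
        {'label': 'theta', 'units': 'degrees',
         'data_type': 'spec_motor', 'name': 'th'},
    ],
}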
1276
+ def import_scanparser(station, experiment):
1277
+ """Given the name of a CHESS station and experiment type, import
1278
+ the corresponding subclass of `ScanParser` as `ScanParser`.
1279
+
1280
+ :param station: The station name
1281
+ ('IDxx', not the beamline acronym).
1282
+ :type station: str
1283
+ :param experiment: The experiment type.
1284
+ :type experiment: Literal[
1285
+ 'EDD', 'GIWAXS', 'SAXSWAXS', 'TOMO', 'XRF', 'HDRM']
1286
+ """
1287
+ # Third party modules
1288
+ # pylint: disable=import-error
1289
+ from chess_scanparsers import choose_scanparser
1290
+
1291
+ globals()['ScanParser'] = choose_scanparser(station, experiment)
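# A usage sketch with a hypothetical path and scan number: import_scanparser
# must run first so that ScanParser is defined in this module's globals before
# get_scanparser or get_detector_data are called.
import_scanparser('id3a', 'TOMO')
scanparser = get_scanparser('sample_1/spec.log', 1)
if scanparser is not None:
    print(scanparser.spec_scan_npts)  # number of points in the scan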