ChessAnalysisPipeline 0.0.14__tar.gz → 0.0.15__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ChessAnalysisPipeline might be problematic.

Files changed (68)
  1. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/__init__.py +1 -1
  2. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/common/__init__.py +9 -0
  3. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/common/models/map.py +295 -55
  4. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/common/processor.py +846 -10
  5. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/common/reader.py +171 -0
  6. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/common/writer.py +181 -18
  7. chessanalysispipeline-0.0.15/CHAP/edd/__init__.py +20 -0
  8. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/edd/models.py +822 -451
  9. chessanalysispipeline-0.0.15/CHAP/edd/processor.py +3189 -0
  10. chessanalysispipeline-0.0.15/CHAP/edd/reader.py +677 -0
  11. chessanalysispipeline-0.0.15/CHAP/edd/utils.py +1609 -0
  12. chessanalysispipeline-0.0.15/CHAP/foxden/__init__.py +6 -0
  13. chessanalysispipeline-0.0.15/CHAP/foxden/processor.py +42 -0
  14. chessanalysispipeline-0.0.15/CHAP/foxden/writer.py +65 -0
  15. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/pipeline.py +1 -1
  16. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/runner.py +4 -4
  17. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/tomo/models.py +7 -5
  18. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/tomo/processor.py +118 -39
  19. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/utils/__init__.py +1 -0
  20. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/utils/fit.py +1292 -1315
  21. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/utils/general.py +393 -53
  22. chessanalysispipeline-0.0.15/CHAP/utils/models.py +567 -0
  23. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/utils/scanparsers.py +141 -28
  24. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15/ChessAnalysisPipeline.egg-info}/PKG-INFO +1 -1
  25. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/ChessAnalysisPipeline.egg-info/SOURCES.txt +4 -0
  26. chessanalysispipeline-0.0.15/LICENSE +60 -0
  27. {ChessAnalysisPipeline-0.0.14/ChessAnalysisPipeline.egg-info → chessanalysispipeline-0.0.15}/PKG-INFO +1 -1
  28. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/setup.py +3 -1
  29. ChessAnalysisPipeline-0.0.14/CHAP/edd/__init__.py +0 -13
  30. ChessAnalysisPipeline-0.0.14/CHAP/edd/processor.py +0 -1724
  31. ChessAnalysisPipeline-0.0.14/CHAP/edd/utils.py +0 -1055
  32. ChessAnalysisPipeline-0.0.14/CHAP/sin2psi/reader.py +0 -5
  33. ChessAnalysisPipeline-0.0.14/LICENSE +0 -21
  34. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/TaskManager.py +0 -0
  35. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/common/models/__init__.py +0 -0
  36. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/common/models/integration.py +0 -0
  37. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/edd/writer.py +0 -0
  38. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/inference/__init__.py +0 -0
  39. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/inference/processor.py +0 -0
  40. {ChessAnalysisPipeline-0.0.14/CHAP/edd → chessanalysispipeline-0.0.15/CHAP/inference}/reader.py +0 -0
  41. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/inference/writer.py +0 -0
  42. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/processor.py +0 -0
  43. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/reader.py +0 -0
  44. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/saxswaxs/__init__.py +0 -0
  45. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/saxswaxs/processor.py +0 -0
  46. {ChessAnalysisPipeline-0.0.14/CHAP/inference → chessanalysispipeline-0.0.15/CHAP/saxswaxs}/reader.py +0 -0
  47. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/saxswaxs/writer.py +0 -0
  48. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/server.py +0 -0
  49. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/sin2psi/__init__.py +0 -0
  50. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/sin2psi/processor.py +0 -0
  51. {ChessAnalysisPipeline-0.0.14/CHAP/saxswaxs → chessanalysispipeline-0.0.15/CHAP/sin2psi}/reader.py +0 -0
  52. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/sin2psi/writer.py +0 -0
  53. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/tomo/__init__.py +0 -0
  54. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/tomo/reader.py +0 -0
  55. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/tomo/writer.py +0 -0
  56. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/utils/material.py +0 -0
  57. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/utils/parfile.py +0 -0
  58. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/CHAP/writer.py +0 -0
  59. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/ChessAnalysisPipeline.egg-info/dependency_links.txt +0 -0
  60. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/ChessAnalysisPipeline.egg-info/entry_points.txt +0 -0
  61. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/ChessAnalysisPipeline.egg-info/requires.txt +0 -0
  62. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/ChessAnalysisPipeline.egg-info/top_level.txt +0 -0
  63. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/MLaaS/__init__.py +0 -0
  64. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/MLaaS/ktrain.py +0 -0
  65. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/MLaaS/mnist_img.py +0 -0
  66. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/MLaaS/tfaas_client.py +0 -0
  67. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/README.md +0 -0
  68. {ChessAnalysisPipeline-0.0.14 → chessanalysispipeline-0.0.15}/setup.cfg +0 -0
@@ -22,4 +22,4 @@ from CHAP.reader import Reader
 from CHAP.processor import Processor
 from CHAP.writer import Writer
 
-version = 'v0.0.14'
+version = 'v0.0.15'
@@ -9,9 +9,12 @@ validating input data in some `Processor`s.
 
 from CHAP.common.reader import (
     BinaryFileReader,
+    FabioImageReader,
     H5Reader,
     MapReader,
     NexusReader,
+    NXdataReader,
+    NXfieldReader,
     SpecReader,
     URLReader,
     YAMLReader,
@@ -27,17 +30,23 @@ from CHAP.common.processor import (
     NexusToNumpyProcessor,
     NexusToXarrayProcessor,
     PrintProcessor,
+    PyfaiAzimuthalIntegrationProcessor,
     RawDetectorDataMapProcessor,
     StrainAnalysisProcessor,
+    SetupNXdataProcessor,
+    UpdateNXdataProcessor,
+    NXdataToDataPointsProcessor,
     XarrayToNexusProcessor,
     XarrayToNumpyProcessor,
 )
 from CHAP.common.writer import (
     ExtractArchiveWriter,
     FileTreeWriter,
+    H5Writer,
     MatplotlibAnimationWriter,
     MatplotlibFigureWriter,
     NexusWriter,
+    PyfaiResultsWriter,
     YAMLWriter,
     TXTWriter,
 )
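
As a quick sanity check of the expanded public API (a hedged sketch: it assumes chessanalysispipeline 0.0.15 and any optional dependencies such as fabio and pyFAI are installed), the newly exported classes should be importable directly from CHAP.common:

# Smoke check of the classes added to CHAP/common/__init__.py in 0.0.15.
from CHAP.common import (
    FabioImageReader,
    NXdataReader,
    NXfieldReader,
    PyfaiAzimuthalIntegrationProcessor,
    SetupNXdataProcessor,
    UpdateNXdataProcessor,
    H5Writer,
    PyfaiResultsWriter,
)

print([cls.__name__ for cls in (FabioImageReader, H5Writer, PyfaiResultsWriter)])
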
@@ -58,6 +58,8 @@ class SpecScans(BaseModel):
 
         :param spec_file: Path to the SPEC file.
         :type spec_file: str
+        :param values: Dictionary of validated class field values.
+        :type values: dict
         :raises ValueError: If the SPEC file is invalid.
         :return: Absolute path to the SPEC file, if it is valid.
         :rtype: str
@@ -75,7 +77,7 @@ class SpecScans(BaseModel):
 
         :param scan_numbers: List of scan numbers.
         :type scan_numbers: list of int
-        :param values: Dictionary of values for all fields of the model.
+        :param values: Dictionary of validated class field values.
         :type values: dict
         :raises ValueError: If a specified scan number is not found in
             the SPEC file.
@@ -104,6 +106,8 @@ class SpecScans(BaseModel):
 
         :param par_file: Path to a non-default SMB par file.
         :type par_file: str
+        :param values: Dictionary of validated class field values.
+        :type values: dict
         :raises ValueError: If the SMB par file is invalid.
         :return: Absolute path to the SMB par file, if it is valid.
         :rtype: str
@@ -157,7 +161,8 @@ class SpecScans(BaseModel):
         coordinate_index = list(
             map_config.coords[independent_dimension.label]).index(
                 independent_dimension.get_value(
-                    self, scan_number, scan_step_index))
+                    self, scan_number, scan_step_index,
+                    map_config.scalar_data))
         index = (coordinate_index, *index)
         return index
 
@@ -235,7 +240,8 @@ class PointByPointScanData(BaseModel):
     """
     label: constr(min_length=1)
     units: constr(strip_whitespace=True, min_length=1)
-    data_type: Literal['spec_motor', 'scan_column', 'smb_par']
+    data_type: Literal['spec_motor', 'spec_motor_absolute', 'scan_column',
+                       'smb_par', 'expression']
     name: constr(strip_whitespace=True, min_length=1)
 
     @validator('label')
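
For illustration, a hedged sketch of what an `'expression'`-type entry might look like (field names are taken from the model above; `presample` and `postsample` are hypothetical labels of other `scalar_data` items, and the expression string itself is carried in the `name` field):

from CHAP.common.models.map import PointByPointScanData

# A derived scalar: transmission computed from two other scalar_data entries.
transmission = PointByPointScanData(
    label='transmission',
    units='-',
    data_type='expression',
    name='postsample/presample',
)
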
@@ -309,8 +315,48 @@ class PointByPointScanData(BaseModel):
                     f'for index {index} '
                     f'in spec file {scans.spec_file}')
 
-    def get_value(self, spec_scans:SpecScans,
-                  scan_number:int, scan_step_index:int=0):
+    def validate_for_scalar_data(self, scalar_data):
+        """Used for `PointByPointScanData` objects with a `data_type`
+        of `'expression'`. Validate that the `scalar_data` field of a
+        `MapConfig` object contains all the items necessary for
+        evaluating the expression.
+
+        :param scalar_data: the `scalar_data` field of a `MapConfig`
+            that this `PointByPointScanData` object will be validated
+            against
+        :type scalar_data: list[PointByPointScanData]
+        :raises ValueError: if `scalar_data` does not contain items
+            needed for evaluating the expression.
+        :return: None
+        """
+        from ast import parse
+        from asteval import get_ast_names
+
+        labels = get_ast_names(parse(self.name))
+        for label in ('round', 'np', 'numpy'):
+            try:
+                labels.remove(label)
+            except:
+                pass
+        for l in labels:
+            if l == 'round':
+                symtable[l] = round
+                continue
+            if l in ('np', 'numpy'):
+                symtable[l] = np
+                continue
+            label_found = False
+            for s_d in scalar_data:
+                if s_d.label == l:
+                    label_found = True
+                    break
+            if not label_found:
+                raise ValueError(
+                    f'{l} is not the label of an item in scalar_data')
+
+    def get_value(
+            self, spec_scans:SpecScans, scan_number:int, scan_step_index:int=0,
+            scalar_data=[], relative=True, ndigits=None):
         """Return the value recorded for this instance of
         `PointByPointScanData` at a specific scan step.
 
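
The label discovery that validate_for_scalar_data relies on comes from asteval: get_ast_names() collects the bare names used in an expression so they can be matched against the scalar_data labels ('round', 'np' and 'numpy' are exempted). A minimal stand-alone illustration, with hypothetical labels:

from ast import parse
from asteval import get_ast_names

# Extract the variable names referenced by an expression string.
names = get_ast_names(parse('round(postsample/presample, 3)'))
print(names)  # e.g. ['round', 'postsample', 'presample']
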
@@ -320,18 +366,32 @@ class PointByPointScanData(BaseModel):
         :param scan_number: The number of the scan in which the
             requested scan step occurs.
         :type scan_number: int
-        :param scan_step_index: The index of the requested scan step.
-        :type scan_step_index: int
+        :param scan_step_index: The index of the requested scan step,
+            defaults to `0`.
+        :type scan_step_index: int, optional
+        :param scalar_data: list of scalar data configurations used to
+            get values for `PointByPointScanData` objects with
+            `data_type == 'expression'`, defaults to `[]`.
+        :type scalar_data: list[PointByPointScanData], optional
+        :param relative: Whether to return a relative value or not,
+            defaults to `True` (only applies to SPEC motor values).
+        :type relative: bool, optional
+        :params ndigits: Round SPEC motor values to the specified
+            number of decimals if set, defaults to `None`.
+        :type ndigits: int, optional
         :return: The value recorded of the data represented by this
            instance of `PointByPointScanData` at the scan step
            requested.
         :rtype: float
         """
-        if self.data_type == 'spec_motor':
+        if 'spec_motor' in self.data_type:
+            if 'absolute' in self.data_type:
+                relative = False
             return get_spec_motor_value(spec_scans.spec_file,
                                         scan_number,
                                         scan_step_index,
-                                        self.name)
+                                        self.name,
+                                        relative, ndigits)
         if self.data_type == 'scan_column':
             return get_spec_counter_value(spec_scans.spec_file,
                                           scan_number,
@@ -341,12 +401,19 @@ class PointByPointScanData(BaseModel):
             return get_smb_par_value(spec_scans.spec_file,
                                      scan_number,
                                      self.name)
+        elif self.data_type == 'expression':
+            return get_expression_value(spec_scans,
+                                        scan_number,
+                                        scan_step_index,
+                                        self.name,
+                                        scalar_data)
         return None
 
 
 @cache
 def get_spec_motor_value(spec_file:str, scan_number:int,
-                         scan_step_index:int, spec_mnemonic:str):
+                         scan_step_index:int, spec_mnemonic:str,
+                         relative=True, ndigits=None):
     """Return the value recorded for a SPEC motor at a specific scan
     step.
 
@@ -360,6 +427,12 @@ def get_spec_motor_value(spec_file:str, scan_number:int,
     :type scan_step_index: int
     :param spec_mnemonic: The menmonic of a SPEC motor.
     :type spec_mnemonic: str
+    :param relative: Whether to return a relative value or not,
+        defaults to `True`.
+    :type relative: bool, optional
+    :params ndigits: Round SPEC motor values to the specified
+        number of decimals if set, defaults to `None`.
+    :type ndigits: int, optional
     :return: The value of the motor at the scan step requested.
     :rtype: float
     """
@@ -373,11 +446,15 @@ def get_spec_motor_value(spec_file:str, scan_number:int,
                 scanparser.spec_scan_shape,
                 order='F')
             motor_value = \
-                scanparser.spec_scan_motor_vals[motor_i][scan_step[motor_i]]
+                scanparser.get_spec_scan_motor_vals(
+                    relative)[motor_i][scan_step[motor_i]]
         else:
-            motor_value = scanparser.spec_scan_motor_vals[motor_i]
+            motor_value = scanparser.get_spec_scan_motor_vals(
+                relative)[motor_i]
     else:
         motor_value = scanparser.get_spec_positioner_value(spec_mnemonic)
+    if ndigits is not None:
+        motor_value = round(motor_value, 3)
     return motor_value
 
 
@@ -426,6 +503,43 @@ def get_smb_par_value(spec_file:str, scan_number:int, par_name:str):
     return scanparser.pars[par_name]
 
 
+def get_expression_value(spec_scans:SpecScans, scan_number:int,
+                         scan_step_index:int, expression:str,
+                         scalar_data:list[PointByPointScanData]):
+    """Return the value of an evaluated expression of other sources of
+    point-by-point scalar scan data for a single point.
+
+    :param spec_scans: An instance of `SpecScans` in which the
+        requested scan step occurs.
+    :type spec_scans: SpecScans
+    :param scan_number: The number of the scan in which the requested
+        scan step occurs.
+    :type scan_number: int
+    :param scan_step_index: The index of the requested scan step.
+    :type scan_step_index: int
+    :param expression: the string expression to evaluate
+    :type expression: str
+    :param scalar_data: the `scalar_data` field of a `MapConfig`
+        object (used to provide values for variables used in
+        `expression`)
+    :type scalar_data: list[PointByPointScanData]
+    :return: The value of the .par file value for the scan requested.
+    :rtype: float
+    """
+    from ast import parse
+    from asteval import get_ast_names, Interpreter
+    labels = get_ast_names(parse(expression))
+    symtable = {}
+    for l in labels:
+        if l == 'round':
+            symtable[l] = round
+        for s_d in scalar_data:
+            if s_d.label == l:
+                symtable[l] = s_d.get_value(
+                    spec_scans, scan_number, scan_step_index, scalar_data)
+    aeval = Interpreter(symtable=symtable)
+    return aeval(expression)
+
 def validate_data_source_for_map_config(data_source, values):
     """Confirm that an instance of PointByPointScanData is valid for
     the station and scans provided by a map configuration dictionary.
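
A minimal stand-alone illustration of the evaluation step inside get_expression_value(): each label referenced by the expression is bound in the interpreter's symbol table, then the expression string is evaluated (the labels and values here are made up):

from asteval import Interpreter

# Bind the expression's names to per-point values, then evaluate it.
symtable = {'round': round, 'presample': 1.25e6, 'postsample': 9.8e5}
aeval = Interpreter(symtable=symtable)
print(aeval('round(postsample/presample, 3)'))  # 0.784
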
@@ -439,11 +553,22 @@ def validate_data_source_for_map_config(data_source, values):
     :return: `data_source`, if it is valid.
     :rtype: PointByPointScanData
     """
-    if data_source is not None:
-        import_scanparser(values.get('station'), values.get('experiment_type'))
-        data_source.validate_for_station(values.get('station'))
-        data_source.validate_for_spec_scans(values.get('spec_scans'))
-    return data_source
+    def _validate_data_source_for_map_config(
+            data_source, values, parent_list=None):
+        if isinstance(data_source, list):
+            return [_validate_data_source_for_map_config(
+                d_s, values, parent_list=data_source) for d_s in data_source]
+        if data_source is not None:
+            if data_source.data_type == 'expression':
+                data_source.validate_for_scalar_data(
+                    values.get('scalar_data', parent_list))
+            else:
+                import_scanparser(
+                    values.get('station'), values.get('experiment_type'))
+                data_source.validate_for_station(values.get('station'))
+                data_source.validate_for_spec_scans(values.get('spec_scans'))
+        return(data_source)
+    return _validate_data_source_for_map_config(data_source, values)
 
 
 class IndependentDimension(PointByPointScanData):
@@ -516,7 +641,7 @@ class CorrectionsData(PointByPointScanData):
         :return: A list of reserved labels.
         :rtype: list[str]
         """
-        return list(cls.__fields__['label'].type_.__args__)
+        return list((*cls.__fields__['label'].type_.__args__, 'round'))
 
 
 class PresampleIntensity(CorrectionsData):
@@ -596,7 +721,7 @@ class SpecConfig(BaseModel):
         """Ensure that a valid configuration was provided and finalize
         spec_file filepaths.
 
-        :param values: Dictionary of class field values.
+        :param values: Dictionary of validated class field values.
         :type values: dict
         :return: The validated list of `values`.
         :rtype: dict
@@ -617,6 +742,14 @@ class SpecConfig(BaseModel):
     def validate_experiment_type(cls, value, values):
         """Ensure values for the station and experiment_type fields are
         compatible
+
+        :param value: Field value to validate (`experiment_type`).
+        :type value: str
+        :param values: Dictionary of validated class field values.
+        :type values: dict
+        :raises ValueError: Invalid experiment type.
+        :return: The validated field for `experiment_type`.
+        :rtype: str
         """
         station = values.get('station')
         if station == 'id1a3':
@@ -659,11 +792,11 @@ class MapConfig(BaseModel):
         times for SPEC scans. Required when applying a
         CorrectionConfig tool.
     :type dwell_time_actual: DwellTimeActual, optional
-    :ivar presample_intensity: A source of point-by-point postsample
+    :ivar postsample_intensity: A source of point-by-point postsample
         beam intensity data. Required when applying a CorrectionConfig
         tool with `correction_type='flux_absorption'` or
        `correction_type='flux_absorption_background'`.
-    :type presample_intensity: PresampleIntensity, optional
+    :type postsample_intensity: PresampleIntensity, optional
     :ivar scalar_data: A list of the sources of data representing
         other scalar raw data values collected at each point on the
         map. In the NeXus file representation of the map, datasets for
@@ -684,6 +817,7 @@ class MapConfig(BaseModel):
     dwell_time_actual: Optional[DwellTimeActual]
     postsample_intensity: Optional[PostsampleIntensity]
     scalar_data: Optional[list[PointByPointScanData]] = []
+    attrs: Optional[dict] = {}
     map_type: Optional[Literal['structured', 'unstructured']] = 'structured'
     _coords: dict = PrivateAttr()
     _dims: tuple = PrivateAttr()
@@ -705,7 +839,6 @@ class MapConfig(BaseModel):
         allow_reuse=True)(validate_data_source_for_map_config)
     _validate_scalar_data = validator(
         'scalar_data',
-        each_item=True,
         allow_reuse=True)(validate_data_source_for_map_config)
 
     @root_validator(pre=True)
@@ -713,7 +846,7 @@ class MapConfig(BaseModel):
         """Ensure that a valid configuration was provided and finalize
         spec_file filepaths.
 
-        :param values: Dictionary of class field values.
+        :param values: Dictionary of validated class field values.
         :type values: dict
         :return: The validated list of `values`.
         :rtype: dict
@@ -730,6 +863,67 @@ class MapConfig(BaseModel):
         values['spec_scans'] = spec_scans
         return values
 
+    @validator('experiment_type')
+    def validate_experiment_type(cls, value, values):
+        """Ensure values for the station and experiment_type fields are
+        compatible.
+
+        :param value: Field value to validate (`experiment_type`).
+        :type value: dict
+        :param values: Dictionary of validated class field values.
+        :type values: dict
+        :raises ValueError: Invalid experiment type.
+        :return: The validated field for `experiment_type`.
+        :rtype: str
+        """
+        station = values['station']
+        if station == 'id1a3':
+            allowed_experiment_types = ['SAXSWAXS', 'EDD', 'TOMO']
+        elif station == 'id3a':
+            allowed_experiment_types = ['EDD', 'TOMO']
+        elif station == 'id3b':
+            allowed_experiment_types = ['SAXSWAXS', 'XRF', 'TOMO']
+        else:
+            allowed_experiment_types = []
+        if value not in allowed_experiment_types:
+            raise ValueError(
+                f'For station {station}, allowed experiment types are '
+                f'{", ".join(allowed_experiment_types)}. '
+                f'Supplied experiment type {value} is not allowed.')
+        return value
+
+    @validator('attrs', always=True)
+    def validate_attrs(cls, value, values):
+        """Read any additional attributes depending on the values for
+        the station and experiment_type fields.
+
+        :param value: Field value to validate (`attrs`).
+        :type value: dict
+        :param values: Dictionary of validated class field values.
+        :type values: dict
+        :raises ValueError: Invalid attribute.
+        :return: The validated field for `attrs`.
+        :rtype: dict
+        """
+        # Get the map's scan_type for EDD experiments
+        station = values['station']
+        experiment_type = values['experiment_type']
+        if station in ['id1a3', 'id3a'] and experiment_type == 'EDD':
+            value['scan_type'] = cls.get_smb_par_attr(values, 'scan_type')
+            value['config_id'] = cls.get_smb_par_attr(values, 'config_id')
+            value['dataset_id'] = cls.get_smb_par_attr(values, 'dataset_id')
+            axes_labels = {1: 'fly_labx', 2: 'fly_laby', 3: 'fly_labz',
+                           4: 'fly_ometotal'}
+            if value['scan_type'] is None:
+                return value
+            if value['scan_type'] != 0:
+                value['fly_axis_labels'] = [
+                    axes_labels[cls.get_smb_par_attr(values, 'fly_axis0')]]
+                if value['scan_type'] in (2, 3, 5):
+                    value['fly_axis_labels'].append(
+                        axes_labels[cls.get_smb_par_attr(values, 'fly_axis1')])
+        return value
+
     @validator('map_type', pre=True, always=True)
     def validate_map_type(cls, map_type, values):
         """Validate the map_type field.
@@ -737,16 +931,26 @@ class MapConfig(BaseModel):
         :param map_type: Type of map, structured or unstructured,
             defaults to `'structured'`.
         :type map_type: Literal['structured', 'unstructured']]
-        :param values: Dictionary of values for all fields of the model.
+        :param values: Dictionary of validated class field values.
         :type values: dict
         :return: The validated value for map_type.
         :rtype: str
         """
         dims = {}
-        spec_scans = values.get('spec_scans')
-        independent_dimensions = values.get('independent_dimensions')
-        import_scanparser(values.get('station'), values.get('experiment_type'))
+        attrs = values.get('attrs', {})
+        scan_type = attrs.get('scan_type', -1)
+        fly_axis_labels = attrs.get('fly_axis_labels', [])
+        spec_scans = values['spec_scans']
+        independent_dimensions = values['independent_dimensions']
+        scalar_data = values['scalar_data']
+        import_scanparser(values['station'], values['experiment_type'])
         for i, dim in enumerate(deepcopy(independent_dimensions)):
+            if dim.label in fly_axis_labels:
+                relative = True
+                ndigits = 3
+            else:
+                relative = False
+                ndigits = None
             dims[dim.label] = []
             for scans in spec_scans:
                 for scan_number in scans.scan_numbers:
@@ -754,7 +958,8 @@ class MapConfig(BaseModel):
                     for scan_step_index in range(
                             scanparser.spec_scan_npts):
                         dims[dim.label].append(dim.get_value(
-                            scans, scan_number, scan_step_index))
+                            scans, scan_number, scan_step_index,
+                            scalar_data, relative, ndigits))
             dims[dim.label] = np.unique(dims[dim.label])
             if dim.end is None:
                 dim.end = len(dims[dim.label])
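
The effect of reading fly-axis values as relative and rounded (ndigits=3) before the np.unique() call above can be illustrated with made-up encoder readings that repeat across fly passes:

import numpy as np

# Nearly-equal fly-axis positions only collapse to shared coordinates
# once they are rounded.
raw = np.array([0.0001, 0.9999, 2.0002, 0.0002, 1.0001, 1.9998])
print(np.unique(raw).size)           # 6 distinct values -> looks unstructured
print(np.unique(raw.round(3)).size)  # 3 distinct values -> 3 map coordinates
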
@@ -769,34 +974,42 @@ class MapConfig(BaseModel):
                 for scan_step_index in range(scanparser.spec_scan_npts):
                     coords[tuple([
                         list(dims[dim.label]).index(
-                            dim.get_value(scans, scan_number, scan_step_index))
+                            dim.get_value(scans, scan_number, scan_step_index,
+                                          scalar_data, True, 3))
+                        if dim.label in fly_axis_labels else
+                        list(dims[dim.label]).index(
+                            dim.get_value(scans, scan_number, scan_step_index,
+                                          scalar_data))
                         for dim in independent_dimensions])] += 1
         if any(True for v in coords.flatten() if v == 0 or v > 1):
             return 'unstructured'
         else:
             return 'structured'
 
-
-    @validator('experiment_type')
-    def validate_experiment_type(cls, value, values):
-        """Ensure values for the station and experiment_type fields are
-        compatible
-        """
-        station = values.get('station')
-        if station == 'id1a3':
-            allowed_experiment_types = ['SAXSWAXS', 'EDD', 'TOMO']
-        elif station == 'id3a':
-            allowed_experiment_types = ['EDD', 'TOMO']
-        elif station == 'id3b':
-            allowed_experiment_types = ['SAXSWAXS', 'XRF', 'TOMO']
-        else:
-            allowed_experiment_types = []
-        if value not in allowed_experiment_types:
-            raise ValueError(
-                f'For station {station}, allowed experiment types are '
-                f'{", ".join(allowed_experiment_types)}. '
-                f'Supplied experiment type {value} is not allowed.')
-        return value
+    @staticmethod
+    def get_smb_par_attr(class_fields, label, units='-', name=None):
+        """Read an SMB par file attribute."""
+        if name is None:
+            name = label
+        scalar_data = PointByPointScanData(
+            label=label, data_type='smb_par', units=units, name=name)
+        values = []
+        for scans in class_fields.get('spec_scans'):
+            for scan_number in scans.scan_numbers:
+                scanparser = scans.get_scanparser(scan_number)
+                try:
+                    values.append(scanparser.pars[name])
+                except:
+                    print(
+                        f'Warning: No value found for .par file value "{name}"'
+                        + f' on scan {scan_number} in spec file '
+                        + f'{scans.spec_file}.')
+                    values.append(None)
+        values = list(set(values))
+        if len(values) != 1:
+            raise ValueError(f'More than one {name} in map not allowed '
+                             f'({values})')
+        return values[0]
 
     @property
     def all_scalar_data(self):
@@ -818,8 +1031,16 @@ class MapConfig(BaseModel):
             dimension across the map.
         """
         if not hasattr(self, '_coords'):
+            scan_type = self.attrs.get('scan_type', -1)
+            fly_axis_labels = self.attrs.get('fly_axis_labels', [])
             coords = {}
             for dim in self.independent_dimensions:
+                if dim.label in fly_axis_labels:
+                    relative = True
+                    ndigits = 3
+                else:
+                    relative = False
+                    ndigits = None
                 coords[dim.label] = []
                 for scans in self.spec_scans:
                     for scan_number in scans.scan_numbers:
@@ -827,7 +1048,8 @@ class MapConfig(BaseModel):
                         for scan_step_index in range(
                                 scanparser.spec_scan_npts):
                             coords[dim.label].append(dim.get_value(
-                                scans, scan_number, scan_step_index))
+                                scans, scan_number, scan_step_index,
+                                self.scalar_data, relative, ndigits))
                 if self.map_type == 'structured':
                     coords[dim.label] = np.unique(coords[dim.label])
             self._coords = coords
@@ -883,6 +1105,14 @@ class MapConfig(BaseModel):
         :rtype: dict
         """
         if self.map_type == 'structured':
+            scan_type = self.attrs.get('scan_type', -1)
+            fly_axis_labels = self.attrs.get('fly_axis_labels', [])
+            if (scan_type in (3, 5)
+                    and len(self.dims) ==
+                        len(map_index) + len(fly_axis_labels)):
+                dims = [dim for dim in self.dims if dim not in fly_axis_labels]
+                return {dim:self.coords[dim][i]
+                        for dim, i in zip(dims, map_index)}
             return {dim:self.coords[dim][i]
                     for dim, i in zip(self.dims, map_index)}
         else:
@@ -917,12 +1147,21 @@ class MapConfig(BaseModel):
             step index.
         :rtype: tuple[SpecScans, int, int]
         """
+        scan_type = self.attrs.get('scan_type', -1)
+        fly_axis_labels = self.attrs.get('fly_axis_labels', [])
         if self.map_type == 'structured':
             map_coords = self.get_coords(map_index)
             for scans, scan_number, scan_step_index in self.scan_step_indices:
-                coords = {dim.label:dim.get_value(
-                    scans, scan_number, scan_step_index)
-                    for dim in self.independent_dimensions}
+                coords = {dim.label:(
+                    dim.get_value(
+                        scans, scan_number, scan_step_index,
+                        self.scalar_data, True, 3)
+                    if dim.label in fly_axis_labels
+                    else
+                    dim.get_value(
+                        scans, scan_number, scan_step_index,
+                        self.scalar_data))
+                    for dim in self.independent_dimensions}
                 if coords == map_coords:
                     return scans, scan_number, scan_step_index
             raise RuntimeError(f'Unable to match coordinates {coords}')
@@ -942,7 +1181,8 @@ class MapConfig(BaseModel):
         """
         scans, scan_number, scan_step_index = \
             self.get_scan_step_index(map_index)
-        return data.get_value(scans, scan_number, scan_step_index)
+        return data.get_value(scans, scan_number, scan_step_index,
+                              self.scalar_data)
 
 
 def import_scanparser(station, experiment):