ChessAnalysisPipeline 0.0.15-py3-none-any.whl → 0.0.16-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ChessAnalysisPipeline has been flagged as potentially problematic; see the registry's advisory page for details.

CHAP/edd/models.py CHANGED
@@ -1,4 +1,5 @@
1
1
  # System modules
2
+ from copy import deepcopy
2
3
  import os
3
4
  from pathlib import PosixPath
4
5
  from typing import (
@@ -13,6 +14,7 @@ from hexrd.material import Material
13
14
  from pydantic import (
14
15
  BaseModel,
15
16
  DirectoryPath,
17
+ Field,
16
18
  FilePath,
17
19
  PrivateAttr,
18
20
  StrictBool,
@@ -20,16 +22,16 @@ from pydantic import (
20
22
  conint,
21
23
  conlist,
22
24
  constr,
23
- root_validator,
24
- validator,
25
+ field_validator,
26
+ model_validator,
25
27
  )
26
28
  from scipy.interpolate import interp1d
29
+ from typing_extensions import Annotated
27
30
 
28
31
  # Local modules
29
32
  from CHAP.common.models.map import MapConfig
30
33
  from CHAP.utils.parfile import ParFile
31
- from CHAP.utils.scanparsers import SMBMCAScanParser as ScanParser
32
-
34
+ from chess_scanparsers import SMBMCAScanParser as ScanParser
33
35
 
34
36
  # Baseline configuration class
35
37
 
@@ -65,33 +67,27 @@ class MaterialConfig(BaseModel):
65
67
  :ivar sgnum: Space group of the material.
66
68
  :type sgnum: int, optional
67
69
  """
68
- material_name: Optional[constr(strip_whitespace=True, min_length=1)]
70
+ material_name: Optional[constr(strip_whitespace=True, min_length=1)] = None
69
71
  lattice_parameters: Optional[Union[
70
72
  confloat(gt=0),
71
- conlist(item_type=confloat(gt=0), min_items=1, max_items=6)]]
72
- sgnum: Optional[conint(ge=0)]
73
+ conlist(min_length=1, max_length=6, item_type=confloat(gt=0))]] = None
74
+ sgnum: Optional[conint(ge=0)] = None
73
75
 
74
76
  _material: Optional[Material]
75
77
 
76
- class Config:
77
- underscore_attrs_are_private = False
78
-
79
- @root_validator
80
- def validate_material(cls, values):
78
+ @model_validator(mode='after')
79
+ def validate_material(self):
81
80
  """Create and validate the private attribute _material.
82
81
 
83
- :param values: Dictionary of previously validated field values.
84
- :type values: dict
85
- :return: The validated list of `values`.
82
+ :return: The validated list of class properties.
86
83
  :rtype: dict
87
84
  """
88
85
  # Local modules
89
86
  from CHAP.edd.utils import make_material
90
87
 
91
- values['_material'] = make_material(values.get('material_name'),
92
- values.get('sgnum'),
93
- values.get('lattice_parameters'))
94
- return values
88
+ self._material = make_material(
89
+ self.material_name, self.sgnum, self.lattice_parameters)
90
+ return self
95
91
 
96
92
  def unique_hkls_ds(self, tth_tol=0.15, tth_max=90.0):
97
93
  """Get a list of unique HKLs and their lattice spacings.
@@ -103,7 +99,7 @@ class MaterialConfig(BaseModel):
103
99
  defaults to `90.0`.
104
100
  :type tth_max: float, optional
105
101
  :return: Unique HKLs and their lattice spacings in angstroms.
106
- :rtype: np.ndarray, np.ndarray
102
+ :rtype: numpy.ndarray, numpy.ndarray
107
103
  """
108
104
  # Local modules
109
105
  from CHAP.edd.utils import get_unique_hkls_ds
@@ -118,7 +114,7 @@ class MaterialConfig(BaseModel):
118
114
  :rtype: dict
119
115
  """
120
116
  d = super().dict(*args, **kwargs)
121
- for k,v in d.items():
117
+ for k, v in d.items():
122
118
  if isinstance(v, PosixPath):
123
119
  d[k] = str(v)
124
120
  if '_material' in d:
@@ -139,7 +135,22 @@ class MCAElementConfig(BaseModel):
139
135
  :type num_bins: int, optional
140
136
  """
141
137
  detector_name: constr(strip_whitespace=True, min_length=1) = 'mca1'
142
- num_bins: Optional[conint(gt=0)]
138
+ num_bins: Optional[conint(gt=0)] = None
139
+
140
+ @field_validator('detector_name', mode='before')
141
+ @classmethod
142
+ def validate_detector_name(cls, detector_name):
143
+ """Validate the specified detector name.
144
+
145
+ :ivar detector_name: Name of the MCA detector element in the scan.
146
+ :type detector_name: Union(str, int)
147
+ :raises ValueError: Invalid detector_name.
148
+ :return: detector_name.
149
+ :rtype: str
150
+ """
151
+ if isinstance(detector_name, int):
152
+ return str(detector_name)
153
+ return detector_name
143
154
 
144
155
  def dict(self, *args, **kwargs):
145
156
  """Return a representation of this configuration in a
@@ -185,47 +196,55 @@ class MCAElementCalibrationConfig(MCAElementConfig):
185
196
  tth_max: confloat(gt=0, allow_inf_nan=False) = 90.0
186
197
  hkl_tth_tol: confloat(gt=0, allow_inf_nan=False) = 0.15
187
198
  energy_calibration_coeffs: conlist(
188
- min_items=3, max_items=3,
199
+ min_length=3, max_length=3,
189
200
  item_type=confloat(allow_inf_nan=False)) = [0, 0, 1]
190
- background: Optional[Union[str, list]]
201
+ background: Optional[Union[str, list]] = None
191
202
  baseline: Optional[Union[bool, BaselineConfig]] = False
192
203
  tth_initial_guess: confloat(gt=0, le=tth_max, allow_inf_nan=False) = 5.0
193
- tth_calibrated: Optional[confloat(gt=0, allow_inf_nan=False)]
194
- include_energy_ranges: conlist(
195
- min_items=1,
196
- item_type=conlist(
197
- item_type=confloat(ge=25),
198
- min_items=2,
199
- max_items=2)) = [[50, 150]]
204
+ tth_calibrated: Optional[confloat(gt=0, allow_inf_nan=False)] = None
205
+ include_energy_ranges: Annotated[
206
+ conlist(
207
+ min_length=1,
208
+ item_type=conlist(
209
+ min_length=2,
210
+ max_length=2,
211
+ item_type=confloat(ge=25))),
212
+ Field(validate_default=True)] = [[50, 150]]
200
213
 
201
214
  _hkl_indices: list = PrivateAttr()
202
215
 
203
- @validator('include_energy_ranges', each_item=True)
204
- def validate_include_energy_range(cls, value, values):
216
+ @field_validator('include_energy_ranges')
217
+ @classmethod
218
+ def validate_include_energy_range(cls, include_energy_ranges, info):
205
219
  """Ensure that no energy ranges are outside the boundary of the
206
220
  detector.
207
221
 
208
- :param value: Field value to validate (`include_energy_ranges`).
209
- :type values: dict
210
- :param values: Dictionary of previously validated field values.
211
- :type values: dict
212
- :return: The validated value of `include_energy_ranges`.
222
+ :param include_energy_ranges:
223
+ The value of `include_energy_ranges` to validate.
224
+ :type include_energy_ranges: dict
225
+ :param info: Pydantic validator info object.
226
+ :type info: pydantic_core._pydantic_core.ValidationInfo
213
227
  :rtype: dict
214
228
  """
215
- value.sort()
216
- n_max = values.get('num_bins')
229
+ n_max = info.data.get('num_bins')
230
+ for i in range(len(include_energy_ranges)):
231
+ include_energy_ranges[i].sort()
217
232
  if n_max is not None:
218
233
  n_max -= 1
219
- a, b, c = values.get('energy_calibration_coeffs')
234
+ a, b, c = info.data.get('energy_calibration_coeffs')
220
235
  e_max = (a*n_max + b)*n_max +c
221
- if value[0] < c or value[1] > e_max:
222
- newvalue = [float(max(value[0], c)),
223
- float(min(value[1], e_max))]
224
- print(
225
- f'WARNING: include_energy_range out of range'
226
- f' ({value}): adjusted to {newvalue}')
227
- value = newvalue
228
- return value
236
+ for i, include_energy_range in enumerate(
237
+ deepcopy(include_energy_ranges)):
238
+ if (include_energy_range[0] < c
239
+ or include_energy_range[1] > e_max):
240
+ include_energy_ranges[i] = [
241
+ float(max(include_energy_range[0], c)),
242
+ float(min(include_energy_range[1], e_max))]
243
+ print(
244
+ f'WARNING: include_energy_range out of range'
245
+ f' ({include_energy_range}): adjusted to '
246
+ f'{include_energy_ranges[i]}')
247
+ return include_energy_ranges
229
248
 
230
249
  @property
231
250
  def energies(self):
@@ -293,10 +312,17 @@ class MCAElementCalibrationConfig(MCAElementConfig):
293
312
  """Set the private attribute `hkl_indices`."""
294
313
  self._hkl_indices = hkl_indices
295
314
 
296
- #RV need def dict?
297
- # d['include_energy_ranges'] = [
298
- # [float(energy) for energy in d['include_energy_ranges'][i]]
299
- # for i in range(len(d['include_energy_ranges']))]
315
+ def dict(self, *args, **kwargs):
316
+ """Return a representation of this configuration in a
317
+ dictionary that is suitable for dumping to a YAML file.
318
+
319
+ :return: Dictionary representation of the configuration.
320
+ :rtype: dict
321
+ """
322
+ d = super().dict(*args, **kwargs)
323
+ if '_hkl_indices:' in d:
324
+ del d['_hkl_indices:']
325
+ return d
300
326
 
301
327
 
302
328
  class MCAElementDiffractionVolumeLengthConfig(MCAElementConfig):
@@ -327,11 +353,11 @@ class MCAElementDiffractionVolumeLengthConfig(MCAElementConfig):
327
353
  """
328
354
  include_bin_ranges: Optional[
329
355
  conlist(
330
- min_items=1,
356
+ min_length=1,
331
357
  item_type=conlist(
332
- item_type=conint(ge=0),
333
- min_items=2,
334
- max_items=2))]
358
+ min_length=2,
359
+ max_length=2,
360
+ item_type=conint(ge=0)))] = None
335
361
  measurement_mode: Optional[Literal['manual', 'auto']] = 'auto'
336
362
  sigma_to_dvl_factor: Optional[Literal[3.5, 2.0, 4.0]] = 3.5
337
363
  dvl_measured: Optional[confloat(gt=0)] = None
@@ -420,7 +446,7 @@ class MCAElementStrainAnalysisConfig(MCAElementConfig):
420
446
  :ivar tth_file: Path to the file with the 2&theta map.
421
447
  :type tth_file: FilePath, optional
422
448
  :ivar tth_map: Map of the 2&theta values.
423
- :type tth_map: np.ndarray, optional
449
+ :type tth_map: numpy.ndarray, optional
424
450
  :ivar include_energy_ranges: List of MCA channel energy ranges
425
451
  in keV whose data should be included after applying a mask
426
452
  (bounds are inclusive), defaults to `[[50, 150]]`
@@ -429,36 +455,36 @@ class MCAElementStrainAnalysisConfig(MCAElementConfig):
429
455
  tth_max: confloat(gt=0, allow_inf_nan=False) = 90.0
430
456
  hkl_tth_tol: confloat(gt=0, allow_inf_nan=False) = 0.15
431
457
  hkl_indices: Optional[conlist(item_type=conint(ge=0))] = []
432
- background: Optional[Union[str, list]]
458
+ background: Optional[Union[str, list]] = None
433
459
  baseline: Optional[Union[bool, BaselineConfig]] = False
434
460
  num_proc: Optional[conint(gt=0)] = os.cpu_count()
435
461
  peak_models: Union[
436
- conlist(item_type=Literal['gaussian', 'lorentzian'], min_items=1),
462
+ conlist(min_length=1, item_type=Literal['gaussian', 'lorentzian']),
437
463
  Literal['gaussian', 'lorentzian']] = 'gaussian'
438
464
  fwhm_min: confloat(gt=0, allow_inf_nan=False) = 0.25
439
465
  fwhm_max: confloat(gt=0, allow_inf_nan=False) = 2.0
440
466
  centers_range: confloat(gt=0, allow_inf_nan=False) = 2.0
441
- rel_height_cutoff: Optional[confloat(gt=0, lt=1.0, allow_inf_nan=False)]
442
-
443
- tth_calibrated: Optional[confloat(gt=0, allow_inf_nan=False)]
467
+ rel_height_cutoff: Optional[
468
+ confloat(gt=0, lt=1.0, allow_inf_nan=False)] = None
469
+ tth_calibrated: Optional[confloat(gt=0, allow_inf_nan=False)] = None
444
470
  energy_calibration_coeffs: conlist(
445
- min_items=3, max_items=3,
471
+ min_length=3, max_length=3,
446
472
  item_type=confloat(allow_inf_nan=False)) = [0, 0, 1]
447
473
  calibration_bin_ranges: Optional[
448
474
  conlist(
449
- min_items=1,
475
+ min_length=1,
450
476
  item_type=conlist(
451
- item_type=conint(ge=0),
452
- min_items=2,
453
- max_items=2))]
454
- tth_file: Optional[FilePath]
477
+ min_length=2,
478
+ max_length=2,
479
+ item_type=conint(ge=0)))] = None
480
+ tth_file: Optional[FilePath] = None
455
481
  tth_map: Optional[np.ndarray] = None
456
482
  include_energy_ranges: conlist(
457
- min_items=1,
483
+ min_length=1,
458
484
  item_type=conlist(
459
- item_type=confloat(ge=25),
460
- min_items=2,
461
- max_items=2)) = [[50, 150]]
485
+ min_length=2,
486
+ max_length=2,
487
+ item_type=confloat(ge=25))) = [[50, 150]]
462
488
 
463
489
  #RV lots of overlap with MCAElementCalibrationConfig (only missing
464
490
  # tth_initial_guess)
@@ -468,7 +494,8 @@ class MCAElementStrainAnalysisConfig(MCAElementConfig):
468
494
  # the unique fields tth_initial_guess added?
469
495
  # Revisit when we redo the detectors
470
496
 
471
- @validator('hkl_indices', pre=True)
497
+ @field_validator('hkl_indices', mode='before')
498
+ @classmethod
472
499
  def validate_hkl_indices(cls, hkl_indices):
473
500
  if isinstance(hkl_indices, str):
474
501
  # Local modules
@@ -555,7 +582,7 @@ class MCAElementStrainAnalysisConfig(MCAElementConfig):
555
582
 
556
583
  :param map_shape: The shape of the suplied 2&theta map.
557
584
  :return: Map of 2&theta values.
558
- :rtype: np.ndarray
585
+ :rtype: numpy.ndarray
559
586
  """
560
587
  if getattr(self, 'tth_map', None) is not None:
561
588
  if self.tth_map.shape != map_shape:
@@ -573,7 +600,7 @@ class MCAElementStrainAnalysisConfig(MCAElementConfig):
573
600
  :rtype: dict
574
601
  """
575
602
  d = super().dict(*args, **kwargs)
576
- for k,v in d.items():
603
+ for k, v in d.items():
577
604
  if isinstance(v, PosixPath):
578
605
  d[k] = str(v)
579
606
  if isinstance(v, np.ndarray):
@@ -588,7 +615,7 @@ class MCAScanDataConfig(BaseModel):
588
615
  a single scan and construct a mask for it.
589
616
 
590
617
  :ivar inputdir: Input directory, used only if any file in the
591
- configuration is not an absolute path.
618
+ configuration is not an absolute path.
592
619
  :type inputdir: str, optional
593
620
  :ivar spec_file: Path to the SPEC file containing the scan.
594
621
  :type spec_file: str, optional
@@ -602,86 +629,84 @@ class MCAScanDataConfig(BaseModel):
602
629
  configurations.
603
630
  :type detectors: list[MCAElementConfig]
604
631
  """
605
- inputdir: Optional[DirectoryPath]
606
- spec_file: Optional[FilePath]
607
- scan_number: Optional[conint(gt=0)]
608
- par_file: Optional[FilePath]
609
- scan_column: Optional[str]
610
- detectors: conlist(min_items=1, item_type=MCAElementConfig)
611
-
612
- _parfile: Optional[ParFile]
613
- _scanparser: Optional[ScanParser]
614
-
615
- class Config:
616
- underscore_attrs_are_private = False
617
-
618
- @root_validator(pre=True)
619
- def validate_scan(cls, values):
632
+ inputdir: Optional[DirectoryPath] = None
633
+ spec_file: Optional[FilePath] = None
634
+ scan_number: Optional[conint(gt=0)] = None
635
+ par_file: Optional[FilePath] = None
636
+ scan_column: Optional[str] = None
637
+ detectors: conlist(min_length=1, item_type=MCAElementConfig)
638
+
639
+ _parfile: Optional[ParFile] = None
640
+ _scanparser: Optional[ScanParser] = None
641
+
642
+ @model_validator(mode='before')
643
+ @classmethod
644
+ def validate_scan(cls, data):
620
645
  """Finalize file paths for spec_file and par_file.
621
646
 
622
- :param values: Dictionary of class field values.
623
- :type values: dict
647
+ :param data: Pydantic validator data object.
648
+ :type data: MCAScanDataConfig,
649
+ pydantic_core._pydantic_core.ValidationInfo
624
650
  :raises ValueError: Invalid SPEC or par file.
625
- :return: The validated list of `values`.
651
+ :return: The validated list of class properties.
626
652
  :rtype: dict
627
653
  """
628
- inputdir = values.get('inputdir')
629
- spec_file = values.get('spec_file')
630
- par_file = values.get('par_file')
654
+ inputdir = data.get('inputdir')
655
+ spec_file = data.get('spec_file')
656
+ par_file = data.get('par_file')
631
657
  if spec_file is not None and par_file is not None:
632
658
  raise ValueError('Use either spec_file or par_file, not both')
633
659
  elif spec_file is not None:
634
660
  if inputdir is not None and not os.path.isabs(spec_file):
635
- values['spec_file'] = os.path.join(inputdir, spec_file)
661
+ data['spec_file'] = os.path.join(inputdir, spec_file)
636
662
  elif par_file is not None:
637
663
  if inputdir is not None and not os.path.isabs(par_file):
638
- values['par_file'] = os.path.join(inputdir, par_file)
639
- if 'scan_column' not in values:
664
+ data['par_file'] = os.path.join(inputdir, par_file)
665
+ if 'scan_column' not in data:
640
666
  raise ValueError(
641
667
  'scan_column is required when par_file is used')
642
- if isinstance(values['scan_column'], str):
668
+ if isinstance(data['scan_column'], str):
643
669
  parfile = ParFile(par_file)
644
- if values['scan_column'] not in parfile.column_names:
670
+ if data['scan_column'] not in parfile.column_names:
645
671
  raise ValueError(
646
- f'No column named {values["scan_column"]} in '
647
- + '{values["par_file"]}. Options: '
672
+ f'No column named {data["scan_column"]} in '
673
+ + '{data["par_file"]}. Options: '
648
674
  + ', '.join(parfile.column_names))
649
675
  else:
650
676
  raise ValueError('Must use either spec_file or par_file')
651
677
 
652
- return values
678
+ return data
653
679
 
654
- @root_validator
655
- def validate_detectors(cls, values):
680
+ @model_validator(mode='after')
681
+ def validate_detectors(self):
656
682
  """Fill in values for _scanparser / _parfile (if applicable).
657
683
  Fill in each detector's num_bins field, if needed.
658
684
  Check each detector's include_energy_ranges field against the
659
685
  flux file, if available.
660
686
 
661
- :param values: Dictionary of previously validated field values.
662
- :type values: dict
663
687
  :raises ValueError: Unable to obtain a value for num_bins.
664
- :return: The validated list of `values`.
688
+ :return: The validated list of class properties.
665
689
  :rtype: dict
666
690
  """
667
- spec_file = values.get('spec_file')
668
- par_file = values.get('par_file')
669
- detectors = values.get('detectors')
670
- flux_file = values.get('flux_file')
691
+ spec_file = self.spec_file
692
+ par_file = self.par_file
693
+ detectors = self.detectors
694
+ flux_file = self.flux_file
671
695
  if spec_file is not None:
672
- values['_scanparser'] = ScanParser(
673
- spec_file, values.get('scan_number'))
674
- values['_parfile'] = None
696
+ self._scanparser = ScanParser(
697
+ spec_file, self.scan_number)
698
+ self._parfile = None
675
699
  elif par_file is not None:
676
- values['_parfile'] = ParFile(par_file)
677
- values['_scanparser'] = ScanParser(
678
- values['_parfile'].spec_file,
679
- values['_parfile'].good_scan_numbers()[0])
700
+ self._parfile = ParFile(par_file)
701
+ self._scanparser = ScanParser(
702
+ self._parfile.spec_file,
703
+ self._parfile.good_scan_numbers()[0])
680
704
  for detector in detectors:
681
705
  if detector.num_bins is None:
682
706
  try:
683
- detector.num_bins = values['_scanparser']\
684
- .get_detector_num_bins(detector.detector_name)
707
+ detector.num_bins = \
708
+ self._scanparser.get_detector_num_bins(
709
+ detector.detector_name)
685
710
  except Exception as e:
686
711
  raise ValueError('No value found for num_bins') from e
687
712
  if flux_file is not None:
@@ -704,7 +729,7 @@ class MCAScanDataConfig(BaseModel):
704
729
  f' to {energy_range}')
705
730
  detector.include_energy_ranges[i] = energy_range
706
731
 
707
- return values
732
+ return self
708
733
 
709
734
  @property
710
735
  def scanparser(self):
@@ -716,39 +741,6 @@ class MCAScanDataConfig(BaseModel):
716
741
  self._scanparser = scanparser
717
742
  return scanparser
718
743
 
719
- def mca_data(self, detector_config, scan_step_index=None):
720
- """Get the array of MCA data collected by the scan.
721
-
722
- :param detector_config: Detector for which data is returned.
723
- :type detector_config: MCAElementConfig
724
- :param scan_step_index: Only return the MCA spectrum for the
725
- given scan step index, defaults to `None`, which returns
726
- all the available MCA spectra.
727
- :type scan_step_index: int, optional
728
- :return: The current detectors's MCA data.
729
- :rtype: np.ndarray
730
- """
731
- detector_name = detector_config.detector_name
732
- if self._parfile is not None:
733
- if scan_step_index is None:
734
- data = np.asarray(
735
- [ScanParser(self._parfile.spec_file, scan_number)\
736
- .get_all_detector_data(detector_name)[0] \
737
- for scan_number in self._parfile.good_scan_numbers()])
738
- else:
739
- data = ScanParser(
740
- self._parfile.spec_file,
741
- self._parfile.good_scan_numbers()[scan_step_index])\
742
- .get_all_detector_data(detector_name)
743
- else:
744
- if scan_step_index is None:
745
- data = self.scanparser.get_all_detector_data(
746
- detector_name)
747
- else:
748
- data = self.scanparser.get_detector_data(
749
- detector_config.detector_name, scan_step_index)
750
- return data
751
-
752
744
  def dict(self, *args, **kwargs):
753
745
  """Return a representation of this configuration in a
754
746
  dictionary that is suitable for dumping to a YAML file.
@@ -757,7 +749,7 @@ class MCAScanDataConfig(BaseModel):
757
749
  :rtype: dict
758
750
  """
759
751
  d = super().dict(*args, **kwargs)
760
- for k,v in d.items():
752
+ for k, v in d.items():
761
753
  if isinstance(v, PosixPath):
762
754
  d[k] = str(v)
763
755
  if d.get('_parfile') is None:
@@ -786,8 +778,8 @@ class DiffractionVolumeLengthConfig(MCAScanDataConfig):
786
778
  :type detectors: list[MCAElementDiffractionVolumeLengthConfig]
787
779
  """
788
780
  sample_thickness: float
789
- detectors: conlist(min_items=1,
790
- item_type=MCAElementDiffractionVolumeLengthConfig)
781
+ detectors: conlist(
782
+ min_length=1, item_type=MCAElementDiffractionVolumeLengthConfig)
791
783
 
792
784
  @property
793
785
  def scanned_vals(self):
@@ -795,7 +787,7 @@ class DiffractionVolumeLengthConfig(MCAScanDataConfig):
795
787
  over the course of the raster scan.
796
788
 
797
789
  :return: List of scanned motor values
798
- :rtype: np.ndarray
790
+ :rtype: numpy.ndarray
799
791
  """
800
792
  if self._parfile is not None:
801
793
  return self._parfile.get_values(
@@ -804,18 +796,21 @@ class DiffractionVolumeLengthConfig(MCAScanDataConfig):
804
796
  return self.scanparser.spec_scan_motor_vals[0]
805
797
 
806
798
 
807
- class MCAEnergyCalibrationConfig(MCAScanDataConfig):
799
+ class MCAEnergyCalibrationConfig(BaseModel):
808
800
  """
809
801
  Class representing metadata required to perform an energy
810
802
  calibration for an MCA detector.
811
803
 
804
+ :ivar inputdir: Input directory, used only if any file in the
805
+ configuration is not an absolute path.
806
+ :type inputdir: str, optional
812
807
  :ivar scan_step_indices: Optional scan step indices to use for the
813
808
  calibration. If not specified, the calibration will be
814
809
  performed on the average of all MCA spectra for the scan.
815
810
  :type scan_step_indices: list[int], optional
816
811
  :ivar detectors: List of individual MCA detector element
817
812
  calibration configurations.
818
- :type detectors: list[MCAElementCalibrationConfig]
813
+ :type detectors: list[MCAElementCalibrationConfig], optional
819
814
  :ivar flux_file: File name of the csv flux file containing station
820
815
  beam energy in eV (column 0) versus flux (column 1).
821
816
  :type flux_file: str, optional
@@ -838,51 +833,54 @@ class MCAEnergyCalibrationConfig(MCAScanDataConfig):
838
833
  :type fit_index_ranges: list[[int, int]], optional
839
834
 
840
835
  """
841
- scan_step_indices: Optional[conlist(min_items=1, item_type=conint(ge=0))]
842
- detectors: conlist(min_items=1, item_type=MCAElementCalibrationConfig)
843
- flux_file: Optional[FilePath]
836
+ inputdir: Optional[DirectoryPath] = None
837
+ scan_step_indices: Optional[Annotated[conlist(
838
+ min_length=1, item_type=conint(ge=0)),
839
+ Field(validate_default=True)]] = None
840
+ detectors: Optional[conlist(item_type=MCAElementCalibrationConfig)] = None
841
+ flux_file: Optional[FilePath] = None
844
842
  material: Optional[MaterialConfig] = MaterialConfig(
845
843
  material_name='CeO2', lattice_parameters=5.41153, sgnum=225)
846
- peak_energies: conlist(item_type=confloat(gt=0), min_items=2)
844
+ peak_energies: conlist(min_length=2, item_type=confloat(gt=0))
847
845
  max_peak_index: conint(ge=0)
848
846
  fit_index_ranges: Optional[
849
847
  conlist(
850
- min_items=1,
848
+ min_length=1,
851
849
  item_type=conlist(
852
- item_type=conint(ge=0),
853
- min_items=2,
854
- max_items=2))]
850
+ min_length=2,
851
+ max_length=2,
852
+ item_type=conint(ge=0)))] = None
855
853
 
856
- @root_validator(pre=True)
857
- def validate_config(cls, values):
854
+ @model_validator(mode='before')
855
+ @classmethod
856
+ def validate_config(cls, data):
858
857
  """Ensure that a valid configuration was provided and finalize
859
858
  flux_file filepath.
860
859
 
861
- :param values: Dictionary of class field values.
862
- :type values: dict
863
- :return: The validated list of `values`.
860
+ :param data: Pydantic validator data object.
861
+ :type data: MCAEnergyCalibrationConfig,
862
+ pydantic_core._pydantic_core.ValidationInfo
863
+ :return: The currently validated list of class properties.
864
864
  :rtype: dict
865
865
  """
866
- inputdir = values.get('inputdir')
866
+ inputdir = data.get('inputdir')
867
867
  if inputdir is not None:
868
- flux_file = values.get('flux_file')
868
+ flux_file = data.get('flux_file')
869
869
  if flux_file is not None and not os.path.isabs(flux_file):
870
- values['flux_file'] = os.path.join(inputdir, flux_file)
870
+ data['flux_file'] = os.path.join(inputdir, flux_file)
871
871
 
872
- return values
872
+ return data
873
873
 
874
- @validator('scan_step_indices', pre=True, always=True)
875
- def validate_scan_step_indices(cls, scan_step_indices, values):
874
+ @field_validator('scan_step_indices', mode='before')
875
+ @classmethod
876
+ def validate_scan_step_indices(cls, scan_step_indices):
876
877
  """Validate the specified list of scan numbers.
877
878
 
878
879
  :ivar scan_step_indices: Optional scan step indices to use for the
879
880
  calibration. If not specified, the calibration will be
880
881
  performed on the average of all MCA spectra for the scan.
881
882
  :type scan_step_indices: list[int], optional
882
- :param values: Dictionary of validated class field values.
883
- :type values: dict
884
- :raises ValueError: If a specified scan number is not found in
885
- the SPEC file.
883
+ :raises ValueError: Invalid experiment type.
886
884
  :return: List of step indices.
887
885
  :rtype: list of int
888
886
  """
@@ -894,21 +892,22 @@ class MCAEnergyCalibrationConfig(MCAScanDataConfig):
894
892
  scan_step_indices, raise_error=True)
895
893
  return scan_step_indices
896
894
 
897
- @validator('max_peak_index')
898
- def validate_max_peak_index(cls, max_peak_index, values):
895
+ @field_validator('max_peak_index')
896
+ @classmethod
897
+ def validate_max_peak_index(cls, max_peak_index, info):
899
898
  """Validate the specified index of the XRF peak with the
900
899
  highest amplitude.
901
900
 
902
901
  :ivar max_peak_index: The index of the XRF peak with the
903
902
  highest amplitude.
904
903
  :type max_peak_index: int
905
- :param values: Dictionary of validated class field values.
906
- :type values: dict
904
+ :param info: Pydantic validator info object.
905
+ :type info: pydantic_core._pydantic_core.ValidationInfo
907
906
  :raises ValueError: Invalid max_peak_index.
908
907
  :return: The validated value of `max_peak_index`.
909
908
  :rtype: int
910
909
  """
911
- peak_energies = values.get('peak_energies')
910
+ peak_energies = info.data.get('peak_energies')
912
911
  if not 0 <= max_peak_index < len(peak_energies):
913
912
  raise ValueError('max_peak_index out of bounds')
914
913
  return max_peak_index
@@ -925,31 +924,6 @@ class MCAEnergyCalibrationConfig(MCAScanDataConfig):
925
924
  energies = flux[:,0]/1.e3
926
925
  return energies.min(), energies.max()
927
926
 
928
- def mca_data(self, detector_config):
929
- """Get the array of MCA data to use for calibration.
930
-
931
- :param detector_config: Detector for which data is returned.
932
- :type detector_config: MCAElementConfig
933
- :return: The current detectors's MCA data.
934
- :rtype: np.ndarray
935
- """
936
- if self.scan_step_indices is None:
937
- data = super().mca_data(detector_config)
938
- if self.scanparser.spec_scan_npts > 1:
939
- data = np.average(data, axis=0)
940
- else:
941
- data = data[0]
942
- elif len(self.scan_step_indices) == 1:
943
- data = super().mca_data(
944
- detector_config, scan_step_index=self.scan_step_indices[0])
945
- else:
946
- data = []
947
- for scan_step_index in self.scan_step_indices:
948
- data.append(super().mca_data(
949
- detector_config, scan_step_index=scan_step_index))
950
- data = np.average(data, axis=0)
951
- return data
952
-
953
927
  def flux_correction_interpolation_function(self):
954
928
  """
955
929
  Get an interpolation function to correct MCA data for the
@@ -966,6 +940,18 @@ class MCAEnergyCalibrationConfig(MCAScanDataConfig):
966
940
  interpolation_function = interp1d(energies, relative_intensities)
967
941
  return interpolation_function
968
942
 
943
+ def dict(self, *args, **kwargs):
944
+ """Return a representation of this configuration in a
945
+ dictionary that is suitable for dumping to a YAML file.
946
+
947
+ :return: Dictionary representation of the configuration.
948
+ :rtype: dict
949
+ """
950
+ d = super().dict(*args, **kwargs)
951
+ if 'inputdir' in d:
952
+ del d['inputdir']
953
+ return d
954
+
969
955
 
970
956
  class MCATthCalibrationConfig(MCAEnergyCalibrationConfig):
971
957
  """
@@ -1011,22 +997,11 @@ class StrainAnalysisConfig(BaseModel):
1011
997
  strain analysis.
1012
998
 
1013
999
  :ivar inputdir: Input directory, used only if any file in the
1014
- configuration is not an absolute path.
1000
+ configuration is not an absolute path.
1015
1001
  :type inputdir: str, optional
1016
- :ivar map_config: The map configuration for the MCA data on which
1017
- the strain analysis is performed.
1018
- :type map_config: CHAP.common.models.map.MapConfig, optional
1019
- :ivar par_file: Path to the par file associated with the scan.
1020
- :type par_file: str, optional
1021
- :ivar dataset_id: Integer ID of the SMB-style EDD dataset.
1022
- :type dataset_id: int, optional
1023
- :ivar par_dims: List of independent dimensions.
1024
- :type par_dims: list[dict[str,str]], optional
1025
- :ivar other_dims: List of other column names from `par_file`.
1026
- :type other_dims: list[dict[str,str]], optional
1027
1002
  :ivar detectors: List of individual detector element strain
1028
- analysis configurations
1029
- :type detectors: list[MCAElementStrainAnalysisConfig]
1003
+ analysis configurations, defaults to `None` (use all detectors).
1004
+ :type detectors: list[MCAElementStrainAnalysisConfig], optional
1030
1005
  :ivar materials: Sample material configurations.
1031
1006
  :type materials: list[MaterialConfig]
1032
1007
  :ivar flux_file: File name of the csv flux file containing station
@@ -1035,247 +1010,122 @@ class StrainAnalysisConfig(BaseModel):
1035
1010
  :ivar sum_axes: Whether to sum over the fly axis or not
1036
1011
  for EDD scan types not 0, defaults to `True`.
1037
1012
  :type sum_axes: bool, optional
1013
+ :ivar oversampling: FIX
1014
+ :type oversampling: FIX
1038
1015
  """
1039
- inputdir: Optional[DirectoryPath]
1040
- map_config: Optional[MapConfig]
1041
- par_file: Optional[FilePath]
1042
- dataset_id: Optional[int]
1043
- par_dims: Optional[list[dict[str,str]]]
1044
- other_dims: Optional[list[dict[str,str]]]
1045
- detectors: conlist(min_items=1, item_type=MCAElementStrainAnalysisConfig)
1016
+ inputdir: Optional[DirectoryPath] = None
1017
+ detectors: Optional[conlist(
1018
+ min_length=1, item_type=MCAElementStrainAnalysisConfig)] = None
1046
1019
  materials: list[MaterialConfig]
1047
- flux_file: Optional[FilePath]
1048
- sum_axes: Optional[list[str]]
1049
- oversampling: Optional[dict] = {'num': 10}
1050
-
1051
- _parfile: Optional[ParFile]
1052
-
1053
- @root_validator(pre=True)
1054
- def validate_config(cls, values):
1020
+ flux_file: Optional[FilePath] = None
1021
+ sum_axes: Optional[bool] = True
1022
+ oversampling: Optional[
1023
+ Annotated[dict, Field(validate_default=True)]] = {'num': 10}
1024
+
1025
+ @model_validator(mode='before')
1026
+ @classmethod
1027
+ def validate_config(cls, data):
1055
1028
  """Ensure that a valid configuration was provided and finalize
1056
- input filepaths.
1029
+ flux_file filepath.
1057
1030
 
1058
- :param values: Dictionary of class field values.
1059
- :type values: dict
1060
- :raises ValueError: Missing par_dims value.
1061
- :return: The validated list of `values`.
1031
+ :param data: Pydantic validator data object.
1032
+ :type data: MCAEnergyCalibrationConfig,
1033
+ pydantic_core._pydantic_core.ValidationInfo
1034
+ :return: The currently validated list of class properties.
1062
1035
  :rtype: dict
1063
1036
  """
1064
- inputdir = values.get('inputdir')
1065
- flux_file = values.get('flux_file')
1066
- par_file = values.get('par_file')
1067
- if (inputdir is not None and flux_file is not None
1068
- and not os.path.isabs(flux_file)):
1069
- values['flux_file'] = os.path.join(inputdir, flux_file)
1070
- if par_file is not None:
1071
- if inputdir is not None and not os.path.isabs(par_file):
1072
- values['par_file'] = os.path.join(inputdir, par_file)
1073
- if 'dataset_id' in values:
1074
- from CHAP.edd import EddMapReader
1075
- values['_parfile'] = ParFile(values['par_file'])
1076
- values['map_config'] = EddMapReader().read(
1077
- values['par_file'], values['dataset_id'])
1078
- elif 'par_dims' in values:
1079
- values['_parfile'] = ParFile(values['par_file'])
1080
- values['map_config'] = values['_parfile'].get_map(
1081
- 'EDD', 'id1a3', values['par_dims'],
1082
- other_dims=values.get('other_dims', []))
1083
- else:
1084
- raise ValueError(
1085
- 'dataset_id or par_dims is required when using par_file')
1086
- map_config = values.get('map_config')
1087
- if isinstance(map_config, dict):
1088
- for i, scans in enumerate(map_config.get('spec_scans')):
1089
- spec_file = scans.get('spec_file')
1090
- if inputdir is not None and not os.path.isabs(spec_file):
1091
- values['map_config']['spec_scans'][i]['spec_file'] = \
1092
- os.path.join(inputdir, spec_file)
1093
- return values
1094
-
1095
- @validator('detectors', pre=True, each_item=True)
1096
- def validate_tth_file(cls, detector, values):
1037
+ inputdir = data.get('inputdir')
1038
+ if inputdir is not None:
1039
+ flux_file = data.get('flux_file')
1040
+ if flux_file is not None and not os.path.isabs(flux_file):
1041
+ data['flux_file'] = os.path.join(inputdir, flux_file)
1042
+
1043
+ return data
1044
+
1045
+ @field_validator('detectors', mode='before')
1046
+ @classmethod
1047
+ def validate_tth_file(cls, detectors, info):
1097
1048
  """Finalize value for tth_file for each detector"""
1098
- inputdir = values.get('inputdir')
1099
- tth_file = detector.get('tth_file')
1100
- if tth_file:
1101
- if not os.path.isabs(tth_file):
1102
- detector['tth_file'] = os.path.join(inputdir, tth_file)
1103
- return detector
1104
-
1105
- @validator('detectors', each_item=True)
1106
- def validate_tth(cls, detector, values):
1107
- """Validate detector element tth_file field. It may only be
1108
- used if StrainAnalysisConfig used par_file.
1109
- """
1110
- if detector.tth_file is not None:
1111
- if not values.get('par_file'):
1112
- raise ValueError(
1113
- 'variable tth angles may only be used with a '
1114
- + 'StrainAnalysisConfig that uses par_file.')
1115
- else:
1116
- try:
1117
- detector.tth_map = ParFile(values['par_file']).map_values(
1118
- values['map_config'], np.loadtxt(detector.tth_file))
1119
- except Exception as e:
1120
- raise ValueError(
1121
- 'Could not get map of tth angles from '
1122
- + f'{detector.tth_file}') from e
1123
- return detector
1124
-
1125
- @validator('sum_axes', always=True)
1126
- def validate_sum_axes(cls, value, values):
1127
- """Validate the sum_axes field.
1128
-
1129
- :param value: Field value to validate (`sum_axes`).
1130
- :type value: bool
1131
- :param values: Dictionary of validated class field values.
1132
- :type values: dict
1133
- :return: The validated value for sum_axes.
1134
- :rtype: bool
1135
- """
1136
- if value is None:
1137
- map_config = values.get('map_config')
1138
- if map_config is not None:
1139
- if map_config.attrs['scan_type'] < 3:
1140
- value = value
1141
- else:
1142
- value = map_config.attrs.get('fly_axis_labels', [])
1143
- return value
1144
-
1145
- @validator('oversampling', always=True)
1146
- def validate_oversampling(cls, value, values):
1049
+ inputdir = info.data.get('inputdir')
1050
+ for detector in detectors:
1051
+ tth_file = detector.get('tth_file')
1052
+ if tth_file is not None:
1053
+ if not os.path.isabs(tth_file):
1054
+ detector['tth_file'] = os.path.join(inputdir, tth_file)
1055
+ return detectors
1056
+
1057
+ # FIX tth_file/tth_map not updated
1058
+ # @field_validator('detectors')
1059
+ # @classmethod
1060
+ # def validate_tth(cls, detectors, info):
1061
+ # """Validate detector element tth_file field. It may only be
1062
+ # used if StrainAnalysisConfig used par_file.
1063
+ # """
1064
+ # for detector in detectors:
1065
+ # tth_file = detector.tth_file
1066
+ # if tth_file is not None:
1067
+ # if not info.data.get('par_file'):
1068
+ # raise ValueError(
1069
+ # 'variable tth angles may only be used with a '
1070
+ # 'StrainAnalysisConfig that uses par_file.')
1071
+ # else:
1072
+ # try:
1073
+ # detector.tth_map = ParFile(
1074
+ # info.data['par_file']).map_values(
1075
+ # info.data['map_config'],
1076
+ # np.loadtxt(tth_file))
1077
+ # except Exception as e:
1078
+ # raise ValueError(
1079
+ # 'Could not get map of tth angles from '
1080
+ # f'{tth_file}') from e
1081
+ # return detectors
1082
+
1083
+ @field_validator('oversampling')
1084
+ @classmethod
1085
+ def validate_oversampling(cls, oversampling, info):
1147
1086
  """Validate the oversampling field.
1148
1087
 
1149
- :param value: Field value to validate (`oversampling`).
1150
- :type value: bool
1151
- :param values: Dictionary of validated class field values.
1152
- :type values: dict
1088
+ :param oversampling: The value of `oversampling` to validate.
1089
+ :type oversampling: dict
1090
+ :param info: Pydantic validator info object.
1091
+ :type info: StrainAnalysisConfig,
1092
+ pydantic_core._pydantic_core.ValidationInfo
1153
1093
  :return: The validated value for oversampling.
1154
1094
  :rtype: bool
1155
1095
  """
1156
1096
  # Local modules
1157
1097
  from CHAP.utils.general import is_int
1158
1098
 
1159
- map_config = values.get('map_config')
1099
+ raise ValueError('oversampling not updated yet')
1100
+ map_config = info.data.get('map_config')
1160
1101
  if map_config is None or map_config.attrs['scan_type'] < 3:
1161
1102
  return None
1162
- if value is None:
1103
+ if oversampling is None:
1163
1104
  return {'num': 10}
1164
- if 'start' in value and not is_int(value['start'], ge=0):
1105
+ if 'start' in oversampling and not is_int(oversampling['start'], ge=0):
1165
1106
  raise ValueError('Invalid "start" parameter in "oversampling" '
1166
- f'field ({value["start"]})')
1167
- if 'end' in value and not is_int(value['end'], gt=0):
1107
+ f'field ({oversampling["start"]})')
1108
+ if 'end' in oversampling and not is_int(oversampling['end'], gt=0):
1168
1109
  raise ValueError('Invalid "end" parameter in "oversampling" '
1169
- f'field ({value["end"]})')
1170
- if 'width' in value and not is_int(value['width'], gt=0):
1110
+ f'field ({oversampling["end"]})')
1111
+ if 'width' in oversampling and not is_int(oversampling['width'], gt=0):
1171
1112
  raise ValueError('Invalid "width" parameter in "oversampling" '
1172
- f'field ({value["width"]})')
1173
- if 'stride' in value and not is_int(value['stride'], gt=0):
1113
+ f'field ({oversampling["width"]})')
1114
+ if ('stride' in oversampling
1115
+ and not is_int(oversampling['stride'], gt=0)):
1174
1116
  raise ValueError('Invalid "stride" parameter in "oversampling" '
1175
- f'field ({value["stride"]})')
1176
- if 'num' in value and not is_int(value['num'], gt=0):
1117
+ f'field ({oversampling["stride"]})')
1118
+ if 'num' in oversampling and not is_int(oversampling['num'], gt=0):
1177
1119
  raise ValueError('Invalid "num" parameter in "oversampling" '
1178
- f'field ({value["num"]})')
1179
- if 'mode' in value and 'mode' not in ('valid', 'full'):
1120
+ f'field ({oversampling["num"]})')
1121
+ if 'mode' in oversampling and 'mode' not in ('valid', 'full'):
1180
1122
  raise ValueError('Invalid "mode" parameter in "oversampling" '
1181
- f'field ({value["mode"]})')
1182
- if not ('width' in value or 'stride' in value or 'num' in value):
1123
+ f'field ({oversampling["mode"]})')
1124
+ if not ('width' in oversampling or 'stride' in oversampling
1125
+ or 'num' in oversampling):
1183
1126
  raise ValueError('Invalid input parameters, specify at least one '
1184
1127
  'of "width", "stride" or "num"')
1185
- return value
1186
-
1187
- def mca_data(self, detector=None, map_index=None):
1188
- """Get MCA data for a single or multiple detector elements.
1189
-
1190
- :param detector: Detector(s) for which data is returned,
1191
- defaults to `None`, which return MCA data for all
1192
- detector elements.
1193
- :type detector: Union[int, MCAElementStrainAnalysisConfig],
1194
- optional
1195
- :param map_index: Index of a single point in the map, defaults
1196
- to `None`, which returns MCA data for each point in the map.
1197
- :type map_index: tuple, optional
1198
- :return: A single MCA spectrum.
1199
- :rtype: np.ndarray
1200
- """
1201
- if detector is None:
1202
- mca_data = []
1203
- for detector_config in self.detectors:
1204
- mca_data.append(
1205
- self.mca_data(detector_config, map_index))
1206
- return np.asarray(mca_data)
1207
- else:
1208
- if isinstance(detector, int):
1209
- detector_config = self.detectors[detector]
1210
- else:
1211
- if not isinstance(detector, MCAElementStrainAnalysisConfig):
1212
- raise ValueError('Invalid parameter detector ({detector})')
1213
- detector_config = detector
1214
- if map_index is None:
1215
- mca_data = []
1216
- for map_index in np.ndindex(self.map_config.shape):
1217
- mca_data.append(self.mca_data(
1218
- detector_config, map_index))
1219
- mca_data = np.reshape(
1220
- mca_data, (*self.map_config.shape, len(mca_data[0])))
1221
- if self.sum_axes:
1222
- scan_type = self.map_config.attrs['scan_type']
1223
- if self.map_config.map_type == 'structured':
1224
- sum_axis_indices = []
1225
- for axis in self.sum_axes:
1226
- sum_axis_indices.append(
1227
- self.map_config.dims.index(axis))
1228
- mca_data = np.sum(
1229
- mca_data, tuple(sorted(sum_axis_indices)))
1230
- if scan_type == 4:
1231
- raise NotImplementedError(
1232
- 'Oversampling scan types not tested yet.')
1233
- from CHAP.edd.utils import get_rolling_sum_spectra
1234
- mca_data = get_rolling_sum_spectra(
1235
- mca_data,
1236
- self.map_config.dims.index(fly_axis_labels[0]),
1237
- self.oversampling.get('start', 0),
1238
- self.oversampling.get('end'),
1239
- self.oversampling.get('width'),
1240
- self.oversampling.get('stride'),
1241
- self.oversampling.get('num'),
1242
- self.oversampling.get('mode', 'valid'))
1243
- elif scan_type not in (0, 1, 2, 3, 5):
1244
- raise ValueError(
1245
- f'scan_type {scan_type} not implemented yet '
1246
- 'in StrainAnalysisConfig.mca_data()')
1247
- else:
1248
- # Perform summing along axes of an unstructured map
1249
- map_dims = self.map_config.dims
1250
- map_coords = self.map_config.coords
1251
- map_length = len(map_coords[map_dims[0]])
1252
- for sum_axis in self.sum_axes:
1253
- axis_index = map_dims.index(sum_axis)
1254
- sum_map_indices = {}
1255
- for i in range(map_length):
1256
- coord = tuple(
1257
- v[i] for k, v in map_coords.items() \
1258
- if k != sum_axis)
1259
- if coord not in sum_map_indices:
1260
- sum_map_indices[coord] = []
1261
- sum_map_indices[coord].append(i)
1262
- map_dims = (*map_dims[:axis_index],
1263
- *map_dims[axis_index + 1:])
1264
- sum_indices_list = sum_map_indices.values()
1265
- map_coords = {
1266
- dim: [map_coords[dim][sum_indices[0]] \
1267
- for sum_indices in sum_indices_list] \
1268
- for dim in map_dims}
1269
- map_length = len(map_coords[map_dims[0]])
1270
- mca_data = np.asarray(
1271
- [np.sum(mca_data[sum_indices], axis=0) \
1272
- for sum_indices in sum_indices_list])
1273
- return mca_data
1274
- else:
1275
- return np.asarray(mca_data)
1276
- else:
1277
- return self.map_config.get_detector_data(
1278
- detector_config.detector_name, map_index)
1128
+ return oversampling
1279
1129
 
1280
1130
  def dict(self, *args, **kwargs):
1281
1131
  """Return a representation of this configuration in a
@@ -1285,9 +1135,9 @@ class StrainAnalysisConfig(BaseModel):
1285
1135
  :rtype: dict
1286
1136
  """
1287
1137
  d = super().dict(*args, **kwargs)
1288
- for k,v in d.items():
1138
+ for k, v in d.items():
1289
1139
  if isinstance(v, PosixPath):
1290
1140
  d[k] = str(v)
1291
- if '_scanparser' in d:
1292
- del d['_scanparser']
1141
+ if 'inputdir' in d:
1142
+ del d[k]
1293
1143
  return d