ChessAnalysisPipeline 0.0.4-py3-none-any.whl → 0.0.6-py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of ChessAnalysisPipeline might be problematic.

Files changed (43)
  1. CHAP/TaskManager.py +214 -0
  2. CHAP/common/models/__init__.py +0 -2
  3. CHAP/common/models/integration.py +392 -249
  4. CHAP/common/models/map.py +350 -198
  5. CHAP/common/processor.py +229 -191
  6. CHAP/common/reader.py +52 -39
  7. CHAP/common/utils/__init__.py +0 -37
  8. CHAP/common/utils/fit.py +1197 -991
  9. CHAP/common/utils/general.py +629 -372
  10. CHAP/common/utils/material.py +158 -121
  11. CHAP/common/utils/scanparsers.py +735 -339
  12. CHAP/common/writer.py +31 -25
  13. CHAP/edd/models.py +65 -51
  14. CHAP/edd/processor.py +136 -113
  15. CHAP/edd/reader.py +1 -1
  16. CHAP/edd/writer.py +1 -1
  17. CHAP/inference/processor.py +35 -28
  18. CHAP/inference/reader.py +1 -1
  19. CHAP/inference/writer.py +1 -1
  20. CHAP/pipeline.py +14 -28
  21. CHAP/processor.py +44 -75
  22. CHAP/reader.py +49 -40
  23. CHAP/runner.py +73 -32
  24. CHAP/saxswaxs/processor.py +1 -1
  25. CHAP/saxswaxs/reader.py +1 -1
  26. CHAP/saxswaxs/writer.py +1 -1
  27. CHAP/server.py +130 -0
  28. CHAP/sin2psi/processor.py +1 -1
  29. CHAP/sin2psi/reader.py +1 -1
  30. CHAP/sin2psi/writer.py +1 -1
  31. CHAP/tomo/__init__.py +1 -4
  32. CHAP/tomo/models.py +53 -31
  33. CHAP/tomo/processor.py +1326 -902
  34. CHAP/tomo/reader.py +4 -2
  35. CHAP/tomo/writer.py +4 -2
  36. CHAP/writer.py +47 -41
  37. {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/METADATA +1 -1
  38. ChessAnalysisPipeline-0.0.6.dist-info/RECORD +52 -0
  39. ChessAnalysisPipeline-0.0.4.dist-info/RECORD +0 -50
  40. {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/LICENSE +0 -0
  41. {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/WHEEL +0 -0
  42. {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/entry_points.txt +0 -0
  43. {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/top_level.txt +0 -0
CHAP/common/models/map.py CHANGED
@@ -6,17 +6,15 @@ import numpy as np
  from pydantic import (BaseModel,
  conint,
  conlist,
- confloat,
  constr,
  FilePath,
  PrivateAttr,
- ValidationError,
  validator)
  from pyspec.file.spec import FileSpec
 
+
  class Sample(BaseModel):
- """
- Class representing a sample metadata configuration.
+ """Class representing a sample metadata configuration.
 
  :ivar name: The name of the sample.
  :type name: str
@@ -26,9 +24,9 @@ class Sample(BaseModel):
  name: constr(min_length=1)
  description: Optional[str]
 
+
  class SpecScans(BaseModel):
- """
- Class representing a set of scans from a single SPEC file.
+ """Class representing a set of scans from a single SPEC file.
 
  :ivar spec_file: Path to the SPEC file.
  :type spec_file: str
@@ -37,10 +35,10 @@ class SpecScans(BaseModel):
  """
  spec_file: FilePath
  scan_numbers: conlist(item_type=conint(gt=0), min_items=1)
+
  @validator('spec_file', allow_reuse=True)
  def validate_spec_file(cls, spec_file):
- """
- Validate the specified SPEC file.
+ """Validate the specified SPEC file.
 
  :param spec_file: Path to the SPEC file.
  :type spec_file: str
@@ -52,19 +50,19 @@ class SpecScans(BaseModel):
  spec_file = os.path.abspath(spec_file)
  sspec_file = FileSpec(spec_file)
  except:
- raise(ValueError(f'Invalid SPEC file {spec_file}'))
- else:
- return(spec_file)
+ raise ValueError(f'Invalid SPEC file {spec_file}')
+ return spec_file
+
  @validator('scan_numbers', allow_reuse=True)
  def validate_scan_numbers(cls, scan_numbers, values):
- """
- Validate the specified list of scan numbers.
+ """Validate the specified list of scan numbers.
 
  :param scan_numbers: List of scan numbers.
  :type scan_numbers: list of int
  :param values: Dictionary of values for all fields of the model.
  :type values: dict
- :raises ValueError: If a specified scan number is not found in the SPEC file.
+ :raises ValueError: If a specified scan number is not found in
+ the SPEC file.
  :return: List of scan numbers.
  :rtype: list of int
  """
@@ -74,29 +72,32 @@ class SpecScans(BaseModel):
  for scan_number in scan_numbers:
  scan = spec_scans.get_scan_by_number(scan_number)
  if scan is None:
- raise(ValueError(f'There is no scan number {scan_number} in {spec_file}'))
- return(scan_numbers)
+ raise ValueError(
+ f'No scan number {scan_number} in {spec_file}')
+ return scan_numbers
 
  @property
  def scanparsers(self):
- '''A list of `ScanParser`s for each of the scans specified by the SPEC
- file and scan numbers belonging to this instance of `SpecScans`
- '''
- return([self.get_scanparser(scan_no) for scan_no in self.scan_numbers])
+ """A list of `ScanParser`s for each of the scans specified by
+ the SPEC file and scan numbers belonging to this instance of
+ `SpecScans`
+ """
+ return [self.get_scanparser(scan_no) for scan_no in self.scan_numbers]
 
  def get_scanparser(self, scan_number):
- """This method returns a `ScanParser` for the specified scan number in
- the specified SPEC file.
+ """This method returns a `ScanParser` for the specified scan
+ number in the specified SPEC file.
 
  :param scan_number: Scan number to get a `ScanParser` for
  :type scan_number: int
  :return: `ScanParser` for the specified scan number
  :rtype: ScanParser
  """
- return(get_scanparser(self.spec_file, scan_number))
+ return get_scanparser(self.spec_file, scan_number)
+
  def get_index(self, scan_number:int, scan_step_index:int, map_config):
- """This method returns a tuple representing the index of a specific step
- in a specific spec scan within a map.
+ """This method returns a tuple representing the index of a
+ specific step in a specific spec scan within a map.
 
  :param scan_number: Scan number to get index for
  :type scan_number: int
@@ -104,19 +105,25 @@ class SpecScans(BaseModel):
  :type scan_step_index: int
  :param map_config: Map configuration to get index for
  :type map_config: MapConfig
- :return: Index for the specified scan number and scan step index within
- the specified map configuration
+ :return: Index for the specified scan number and scan step
+ index within the specified map configuration
  :rtype: tuple
  """
  index = ()
  for independent_dimension in map_config.independent_dimensions:
- coordinate_index = list(map_config.coords[independent_dimension.label]).index(independent_dimension.get_value(self, scan_number, scan_step_index))
+ coordinate_index = list(
+ map_config.coords[independent_dimension.label]).index(
+ independent_dimension.get_value(
+ self, scan_number, scan_step_index))
  index = (coordinate_index, *index)
- return(index)
- def get_detector_data(self, detectors:list, scan_number:int, scan_step_index:int):
- """
- Return the raw data from the specified detectors at the specified scan
- number and scan step index.
+ return index
+
+ def get_detector_data(self,
+ detectors:list,
+ scan_number:int,
+ scan_step_index:int):
+ """Return the raw data from the specified detectors at the
+ specified scan number and scan step index.
 
  :param detectors: List of detector prefixes to get raw data for
  :type detectors: list[str]
@@ -124,90 +131,116 @@ class SpecScans(BaseModel):
  :type scan_number: int
  :param scan_step_index: Scan step index to get data for
  :type scan_step_index: int
- :return: Data from the specified detectors for the specified scan number
- and scan step index
+ :return: Data from the specified detectors for the specified
+ scan number and scan step index
  :rtype: list[np.ndarray]
  """
- return(get_detector_data(tuple([detector.prefix for detector in detectors]), self.spec_file, scan_number, scan_step_index))
+ return get_detector_data(
+ tuple([detector.prefix for detector in detectors]),
+ self.spec_file,
+ scan_number,
+ scan_step_index)
+
+
  @cache
  def get_available_scan_numbers(spec_file:str):
  scans = FileSpec(spec_file).scans
  scan_numbers = list(scans.keys())
- return(scan_numbers)
+ return scan_numbers
+
+
  @cache
  def get_scanparser(spec_file:str, scan_number:int):
  if scan_number not in get_available_scan_numbers(spec_file):
- return(None)
- else:
- return(ScanParser(spec_file, scan_number))
+ return None
+ return ScanParser(spec_file, scan_number)
+
+
  @lru_cache(maxsize=10)
- def get_detector_data(detector_prefixes:tuple, spec_file:str, scan_number:int, scan_step_index:int):
+ def get_detector_data(
+ detector_prefixes:tuple,
+ spec_file:str,
+ scan_number:int,
+ scan_step_index:int):
  detector_data = []
  scanparser = get_scanparser(spec_file, scan_number)
  for prefix in detector_prefixes:
  image_data = scanparser.get_detector_data(prefix, scan_step_index)
  detector_data.append(image_data)
- return(detector_data)
+ return detector_data
+
 
  class PointByPointScanData(BaseModel):
- """Class representing a source of raw scalar-valued data for which a value
- was recorded at every point in a `MapConfig`.
+ """Class representing a source of raw scalar-valued data for which
+ a value was recorded at every point in a `MapConfig`.
 
- :ivar label: A user-defined label for referring to this data in the NeXus
- file and in other tools.
+ :ivar label: A user-defined label for referring to this data in
+ the NeXus file and in other tools.
  :type label: str
  :ivar units: The units in which the data were recorded.
  :type units: str
- :ivar data_type: Represents how these data were recorded at time of data
- collection.
+ :ivar data_type: Represents how these data were recorded at time
+ of data collection.
  :type data_type: Literal['spec_motor', 'scan_column', 'smb_par']
- :ivar name: Represents the name with which these raw data were recorded at
- time of data collection.
+ :ivar name: Represents the name with which these raw data were
+ recorded at time of data collection.
  :type name: str
  """
  label: constr(min_length=1)
  units: constr(strip_whitespace=True, min_length=1)
  data_type: Literal['spec_motor', 'scan_column', 'smb_par']
  name: constr(strip_whitespace=True, min_length=1)
+
  @validator('label')
  def validate_label(cls, label):
- """Validate that the supplied `label` does not conflict with any of the
- values for `label` reserved for certain data needed to perform
- corrections.
-
+ """Validate that the supplied `label` does not conflict with
+ any of the values for `label` reserved for certain data needed
+ to perform corrections.
+
  :param label: The value of `label` to validate
  :type label: str
  :raises ValueError: If `label` is one of the reserved values.
- :return: The original supplied value `label`, if it is allowed.
+ :return: The original supplied value `label`, if it is
+ allowed.
  :rtype: str
  """
- #if (not issubclass(cls,CorrectionsData)) and label in CorrectionsData.__fields__['label'].type_.__args__:
- if (not issubclass(cls,CorrectionsData)) and label in CorrectionsData.reserved_labels():
- raise(ValueError(f'{cls.__name__}.label may not be any of the following reserved values: {CorrectionsData.reserved_labels()}'))
- return(label)
+ if ((not issubclass(cls,CorrectionsData))
+ and label in CorrectionsData.reserved_labels()):
+ raise ValueError(
+ f'{cls.__name__}.label may not be any of the following '
+ f'reserved values: {CorrectionsData.reserved_labels()}')
+ return label
+
  def validate_for_station(self, station:str):
- """Validate this instance of `PointByPointScanData` for a certain choice
- of station (beamline).
-
+ """Validate this instance of `PointByPointScanData` for a
+ certain choice of station (beamline).
+
  :param station: The name of the station (in 'idxx' format).
  :type station: str
- :raises TypeError: If the station is not compatible with the value of the
- `data_type` attribute for this instance of PointByPointScanData.
+ :raises TypeError: If the station is not compatible with the
+ value of the `data_type` attribute for this instance of
+ PointByPointScanData.
  :return: None
  :rtype: None
  """
- if station.lower() not in ('id1a3', 'id3a') and self.data_type == 'smb_par':
- raise(TypeError(f'{self.__class__.__name__}.data_type may not be "smb_par" when station is "{station}"'))
- def validate_for_spec_scans(self, spec_scans:list[SpecScans], scan_step_index:Union[Literal['all'],int]='all'):
- """Validate this instance of `PointByPointScanData` for a list of
- `SpecScans`.
-
- :param spec_scans: A list of `SpecScans` whose raw data will be checked
- for the presence of the data represented by this instance of
- `PointByPointScanData`
+ if (station.lower() not in ('id1a3', 'id3a')
+ and self.data_type == 'smb_par'):
+ raise TypeError(
+ f'{self.__class__.__name__}.data_type may not be "smb_par" '
+ f'when station is "{station}"')
+
+ def validate_for_spec_scans(
+ self, spec_scans:list[SpecScans],
+ scan_step_index:Union[Literal['all'],int] = 'all'):
+ """Validate this instance of `PointByPointScanData` for a list
+ of `SpecScans`.
+
+ :param spec_scans: A list of `SpecScans` whose raw data will
+ be checked for the presence of the data represented by
+ this instance of `PointByPointScanData`
  :type spec_scans: list[SpecScans]
- :param scan_step_index: A specific scan step index to validate, defaults
- to `'all'`.
+ :param scan_step_index: A specific scan step index to validate,
+ defaults to `'all'`.
  :type scan_step_index: Union[Literal['all'],int], optional
  :raises RuntimeError: If the data represented by this instance of
  `PointByPointScanData` is missing for the specified scan steps.
@@ -220,39 +253,65 @@ class PointByPointScanData(BaseModel):
  if scan_step_index == 'all':
  scan_step_index_range = range(scanparser.spec_scan_npts)
  else:
- scan_step_index_range = range(scan_step_index,scan_step_index+1)
+ scan_step_index_range = range(
+ scan_step_index, 1+scan_step_index)
  for index in scan_step_index_range:
  try:
  self.get_value(scans, scan_number, index)
  except:
- raise(RuntimeError(f'Could not find data for {self.name} (data_type "{self.data_type}") on scan number {scan_number} for index {index} in spec file {scans.spec_file}'))
- def get_value(self, spec_scans:SpecScans, scan_number:int, scan_step_index:int):
- """Return the value recorded for this instance of `PointByPointScanData`
- at a specific scan step.
-
- :param spec_scans: An instance of `SpecScans` in which the requested scan step occurs.
+ raise RuntimeError(
+ f'Could not find data for {self.name} '
+ f'(data_type "{self.data_type}") '
+ f'on scan number {scan_number} '
+ f'for index {index} '
+ f'in spec file {scans.spec_file}')
+
+ def get_value(self, spec_scans:SpecScans,
+ scan_number:int, scan_step_index:int):
+ """Return the value recorded for this instance of
+ `PointByPointScanData` at a specific scan step.
+
+ :param spec_scans: An instance of `SpecScans` in which the
+ requested scan step occurs.
  :type spec_scans: SpecScans
- :param scan_number: The number of the scan in which the requested scan step occurs.
+ :param scan_number: The number of the scan in which the
+ requested scan step occurs.
  :type scan_number: int
  :param scan_step_index: The index of the requested scan step.
  :type scan_step_index: int
- :return: The value recorded of the data represented by this instance of
- `PointByPointScanData` at the scan step requested
+ :return: The value recorded of the data represented by this
+ instance of `PointByPointScanData` at the scan step
+ requested
  :rtype: float
  """
  if self.data_type == 'spec_motor':
- return(get_spec_motor_value(spec_scans.spec_file, scan_number, scan_step_index, self.name))
- elif self.data_type == 'scan_column':
- return(get_spec_counter_value(spec_scans.spec_file, scan_number, scan_step_index, self.name))
- elif self.data_type == 'smb_par':
- return(get_smb_par_value(spec_scans.spec_file, scan_number, self.name))
+ return get_spec_motor_value(spec_scans.spec_file,
+ scan_number,
+ scan_step_index,
+ self.name)
+ if self.data_type == 'scan_column':
+ return get_spec_counter_value(spec_scans.spec_file,
+ scan_number,
+ scan_step_index,
+ self.name)
+ if self.data_type == 'smb_par':
+ return get_smb_par_value(spec_scans.spec_file,
+ scan_number,
+ self.name)
+ return None
+
+
  @cache
- def get_spec_motor_value(spec_file:str, scan_number:int, scan_step_index:int, spec_mnemonic:str):
- """Return the value recorded for a SPEC motor at a specific scan step.
+ def get_spec_motor_value(spec_file:str, scan_number:int,
+ scan_step_index:int, spec_mnemonic:str):
+ """Return the value recorded for a SPEC motor at a specific scan
+ step.
 
- :param spec_file: Location of a SPEC file in which the requested scan step occurs.
+ :param spec_file: Location of a SPEC file in which the requested
+ scan step occurs.
  :type spec_scans: str
- :param scan_number: The number of the scan in which the requested scan step occurs.
+ :param scan_number: The number of the scan in which the requested
+ scan step occurs.
  :type scan_number: int
  :param scan_step_index: The index of the requested scan step.
  :type scan_step_index: int
@@ -265,20 +324,30 @@ def get_spec_motor_value(spec_file:str, scan_number:int, scan_step_index:int, sp
  if spec_mnemonic in scanparser.spec_scan_motor_mnes:
  motor_i = scanparser.spec_scan_motor_mnes.index(spec_mnemonic)
  if scan_step_index >= 0:
- scan_step = np.unravel_index(scan_step_index, scanparser.spec_scan_shape, order='F')
- motor_value = scanparser.spec_scan_motor_vals[motor_i][scan_step[motor_i]]
+ scan_step = np.unravel_index(
+ scan_step_index,
+ scanparser.spec_scan_shape,
+ order='F')
+ motor_value = \
+ scanparser.spec_scan_motor_vals[motor_i][scan_step[motor_i]]
  else:
  motor_value = scanparser.spec_scan_motor_vals[motor_i]
  else:
  motor_value = scanparser.get_spec_positioner_value(spec_mnemonic)
- return(motor_value)
+ return motor_value
+
+
  @cache
- def get_spec_counter_value(spec_file:str, scan_number:int, scan_step_index:int, spec_column_label:str):
- """Return the value recorded for a SPEC counter at a specific scan step.
+ def get_spec_counter_value(spec_file:str, scan_number:int,
+ scan_step_index:int, spec_column_label:str):
+ """Return the value recorded for a SPEC counter at a specific scan
+ step.
 
- :param spec_file: Location of a SPEC file in which the requested scan step occurs.
+ :param spec_file: Location of a SPEC file in which the requested
+ scan step occurs.
  :type spec_scans: str
- :param scan_number: The number of the scan in which the requested scan step occurs.
+ :param scan_number: The number of the scan in which the requested
+ scan step occurs.
  :type scan_number: int
  :param scan_step_index: The index of the requested scan step.
  :type scan_step_index: int
@@ -289,16 +358,20 @@ def get_spec_counter_value(spec_file:str, scan_number:int, scan_step_index:int,
  """
  scanparser = get_scanparser(spec_file, scan_number)
  if scan_step_index >= 0:
- return(scanparser.spec_scan_data[spec_column_label][scan_step_index])
- else:
- return(scanparser.spec_scan_data[spec_column_label])
+ return scanparser.spec_scan_data[spec_column_label][scan_step_index]
+ return scanparser.spec_scan_data[spec_column_label]
+
+
  @cache
  def get_smb_par_value(spec_file:str, scan_number:int, par_name:str):
- """Return the value recorded for a specific scan in SMB-tyle .par file.
+ """Return the value recorded for a specific scan in SMB-tyle .par
+ file.
 
- :param spec_file: Location of a SPEC file in which the requested scan step occurs.
+ :param spec_file: Location of a SPEC file in which the requested
+ scan step occurs.
  :type spec_scans: str
- :param scan_number: The number of the scan in which the requested scan step occurs.
+ :param scan_number: The number of the scan in which the requested
+ scan step occurs.
  :type scan_number: int
  :param par_name: The name of the column in the .par file
  :type par_name: str
@@ -306,121 +379,156 @@ def get_smb_par_value(spec_file:str, scan_number:int, par_name:str):
  :rtype: float
  """
  scanparser = get_scanparser(spec_file, scan_number)
- return(scanparser.pars[par_name])
+ return scanparser.pars[par_name]
+
+
  def validate_data_source_for_map_config(data_source, values):
+ """Confirm that an instance of PointByPointScanData is valid for
+ the station and scans provided by a map configuration dictionary.
+
+ :param data_source: the input object to validate
+ :type data_source: PintByPointScanData
+ :param values: the map configuration dictionary
+ :type values: dict
+ :raises Exception: if `data_source` cannot be validated for
+ `values`.
+ :return: `data_source`, iff it is valid.
+ :rtype: PointByPointScanData
+ """
  if data_source is not None:
  import_scanparser(values.get('station'), values.get('experiment_type'))
  data_source.validate_for_station(values.get('station'))
  data_source.validate_for_spec_scans(values.get('spec_scans'))
- return(data_source)
+ return data_source
 
- class CorrectionsData(PointByPointScanData):
- """Class representing the special instances of `PointByPointScanData` that
- are used by certain kinds of `CorrectionConfig` tools.
 
- :ivar label: One of the reserved values required by `CorrectionConfig`,
- `'presample_intensity'`, `'postsample_intensity'`, or
- `'dwell_time_actual'`.
- :type label: Literal['presample_intensity','postsample_intensity','dwell_time_actual']
+ class CorrectionsData(PointByPointScanData):
+ """Class representing the special instances of
+ `PointByPointScanData` that are used by certain kinds of
+ `CorrectionConfig` tools.
+
+ :ivar label: One of the reserved values required by
+ `CorrectionConfig`, `'presample_intensity'`,
+ `'postsample_intensity'`, or `'dwell_time_actual'`.
+ :type label: Literal['presample_intensity',
+ 'postsample_intensity',
+ 'dwell_time_actual']
  :ivar units: The units in which the data were recorded.
  :type units: str
- :ivar data_type: Represents how these data were recorded at time of data
- collection.
+ :ivar data_type: Represents how these data were recorded at time
+ of data collection.
  :type data_type: Literal['scan_column', 'smb_par']
- :ivar name: Represents the name with which these raw data were recorded at
- time of data collection.
+ :ivar name: Represents the name with which these raw data were
+ recorded at time of data collection.
  :type name: str
  """
- label: Literal['presample_intensity','postsample_intensity','dwell_time_actual']
+ label: Literal['presample_intensity',
+ 'postsample_intensity',
+ 'dwell_time_actual']
  data_type: Literal['scan_column','smb_par']
+
  @classmethod
  def reserved_labels(cls):
- """Return a list of all the labels reserved for corrections-related
- scalar data.
+ """Return a list of all the labels reserved for
+ corrections-related scalar data.
 
  :return: A list of reserved labels
  :rtype: list[str]
  """
- return(list(cls.__fields__['label'].type_.__args__))
+ return list(cls.__fields__['label'].type_.__args__)
+
+
  class PresampleIntensity(CorrectionsData):
- """Class representing a source of raw data for the intensity of the beam that
- is incident on the sample.
+ """Class representing a source of raw data for the intensity of
+ the beam that is incident on the sample.
 
  :ivar label: Must be `"presample_intensity"`
  :type label: Literal["presample_intensity"]
  :ivar units: Must be `"counts"`
  :type units: Literal["counts"]
- :ivar data_type: Represents how these data were recorded at time of data
- collection.
+ :ivar data_type: Represents how these data were recorded at time
+ of data collection.
  :type data_type: Literal['scan_column', 'smb_par']
- :ivar name: Represents the name with which these raw data were recorded at
- time of data collection.
+ :ivar name: Represents the name with which these raw data were
+ recorded at time of data collection.
  :type name: str
  """
  label: Literal['presample_intensity'] = 'presample_intensity'
  units: Literal['counts'] = 'counts'
+
+
  class PostsampleIntensity(CorrectionsData):
- """Class representing a source of raw data for the intensity of the beam that
- has passed through the sample.
+ """Class representing a source of raw data for the intensity of
+ the beam that has passed through the sample.
 
  :ivar label: Must be `"postsample_intensity"`
  :type label: Literal["postsample_intensity"]
  :ivar units: Must be `"counts"`
  :type units: Literal["counts"]
- :ivar data_type: Represents how these data were recorded at time of data
- collection.
+ :ivar data_type: Represents how these data were recorded at time
+ of data collection.
  :type data_type: Literal['scan_column', 'smb_par']
- :ivar name: Represents the name with which these raw data were recorded at
- time of data collection.
+ :ivar name: Represents the name with which these raw data were
+ recorded at time of data collection.
  :type name: str
  """
  label: Literal['postsample_intensity'] = 'postsample_intensity'
  units: Literal['counts'] = 'counts'
+
+
  class DwellTimeActual(CorrectionsData):
- """Class representing a source of raw data for the actual dwell time at each
- scan point in SPEC (with some scan types, this value can vary slightly
- point-to-point from the dwell time specified in the command).
+ """Class representing a source of raw data for the actual dwell
+ time at each scan point in SPEC (with some scan types, this value
+ can vary slightly point-to-point from the dwell time specified in
+ the command).
 
  :ivar label: Must be `"dwell_time_actual"`
  :type label: Literal["dwell_time_actual"]
  :ivar units: Must be `"counts"`
  :type units: Literal["counts"]
- :ivar data_type: Represents how these data were recorded at time of data
- collection.
+ :ivar data_type: Represents how these data were recorded at time
+ of data collection.
  :type data_type: Literal['scan_column', 'smb_par']
- :ivar name: Represents the name with which these raw data were recorded at
- time of data collection.
+ :ivar name: Represents the name with which these raw data were
+ recorded at time of data collection.
  :type name: str
  """
  label: Literal['dwell_time_actual'] = 'dwell_time_actual'
  units: Literal['s'] = 's'
 
+
  class MapConfig(BaseModel):
- """Class representing an experiment consisting of one or more SPEC scans.
+ """Class representing an experiment consisting of one or more SPEC
+ scans.
 
  :ivar title: The title for the map configuration.
  :type title: str
- :ivar station: The name of the station at which the map was collected.
+ :ivar station: The name of the station at which the map was
+ collected.
  :type station: Literal['id1a3','id3a','id3b']
  :ivar spec_scans: A list of the spec scans that compose the map.
  :type spec_scans: list[SpecScans]
- :ivar independent_dimensions: A list of the sources of data representing the
- raw values of each independent dimension of the map.
+ :ivar independent_dimensions: A list of the sources of data
+ representing the raw values of each independent dimension of
+ the map.
  :type independent_dimensions: list[PointByPointScanData]
- :ivar presample_intensity: A source of point-by-point presample beam
- intensity data. Required when applying a CorrectionConfig tool.
+ :ivar presample_intensity: A source of point-by-point presample
+ beam intensity data. Required when applying a CorrectionConfig
+ tool.
  :type presample_intensity: Optional[PresampleIntensity]
- :ivar dwell_time_actual: A source of point-by-point actual dwell times for
- spec scans. Required when applying a CorrectionConfig tool.
+ :ivar dwell_time_actual: A source of point-by-point actual dwell
+ times for spec scans. Required when applying a
+ CorrectionConfig tool.
  :type dwell_time_actual: Optional[DwellTimeActual]
- :ivar presample_intensity: A source of point-by-point postsample beam
- intensity data. Required when applying a CorrectionConfig tool with
- `correction_type="flux_absorption"` or
+ :ivar presample_intensity: A source of point-by-point postsample
+ beam intensity data. Required when applying a CorrectionConfig
+ tool with `correction_type="flux_absorption"` or
  `correction_type="flux_absorption_background"`.
  :type presample_intensity: Optional[PresampleIntensity]
- :ivar scalar_data: A list of the sources of data representing other scalar
- raw data values collected at each point ion the map. In the NeXus file
- representation of the map, datasets for these values will be included.
+ :ivar scalar_data: A list of the sources of data representing
+ other scalar raw data values collected at each point ion the
+ map. In the NeXus file representation of the map, datasets for
+ these values will be included.
  :type scalar_values: Optional[list[PointByPointScanData]]
  """
  title: constr(strip_whitespace=True, min_length=1)
@@ -428,20 +536,37 @@ class MapConfig(BaseModel):
  experiment_type: Literal['SAXSWAXS', 'EDD', 'XRF', 'TOMO']
  sample: Sample
  spec_scans: conlist(item_type=SpecScans, min_items=1)
- independent_dimensions: conlist(item_type=PointByPointScanData, min_items=1)
+ independent_dimensions: conlist(item_type=PointByPointScanData,
+ min_items=1)
  presample_intensity: Optional[PresampleIntensity]
  dwell_time_actual: Optional[DwellTimeActual]
  postsample_intensity: Optional[PostsampleIntensity]
  scalar_data: Optional[list[PointByPointScanData]] = []
  _coords: dict = PrivateAttr()
- _validate_independent_dimensions = validator('independent_dimensions', each_item=True, allow_reuse=True)(validate_data_source_for_map_config)
- _validate_presample_intensity = validator('presample_intensity', allow_reuse=True)(validate_data_source_for_map_config)
- _validate_dwell_time_actual = validator('dwell_time_actual', allow_reuse=True)(validate_data_source_for_map_config)
- _validate_postsample_intensity = validator('postsample_intensity', allow_reuse=True)(validate_data_source_for_map_config)
- _validate_scalar_data = validator('scalar_data', each_item=True, allow_reuse=True)(validate_data_source_for_map_config)
+
+ _validate_independent_dimensions = validator(
+ 'independent_dimensions',
+ each_item=True,
+ allow_reuse=True)(validate_data_source_for_map_config)
+ _validate_presample_intensity = validator(
+ 'presample_intensity',
+ allow_reuse=True)(validate_data_source_for_map_config)
+ _validate_dwell_time_actual = validator(
+ 'dwell_time_actual',
+ allow_reuse=True)(validate_data_source_for_map_config)
+ _validate_postsample_intensity = validator(
+ 'postsample_intensity',
+ allow_reuse=True)(validate_data_source_for_map_config)
+ _validate_scalar_data = validator(
+ 'scalar_data',
+ each_item=True,
+ allow_reuse=True)(validate_data_source_for_map_config)
+
  @validator('experiment_type')
  def validate_experiment_type(cls, value, values):
- '''Ensure values for the station and experiment_type fields are compatible'''
+ """Ensure values for the station and experiment_type fields are
+ compatible
+ """
  station = values.get('station')
  if station == 'id1a3':
  allowed_experiment_types = ['SAXSWAXS', 'EDD', 'TOMO']
@@ -452,18 +577,22 @@ class MapConfig(BaseModel):
  else:
  allowed_experiment_types = []
  if value not in allowed_experiment_types:
- raise(ValueError(f'For station {station}, allowed experiment types are {allowed_experiment_types} (suuplied experiment type {value} is not allowed)'))
- return(value)
+ raise ValueError(
+ f'For station {station}, allowed experiment types are '
+ f'{", ".join(allowed_experiment_types)}. '
+ f'Supplied experiment type {value} is not allowed.')
+ return value
+
  @property
  def coords(self):
- """Return a dictionary of the values of each independent dimension across
- the map.
+ """Return a dictionary of the values of each independent
+ dimension across the map.
 
  :returns: A dictionary ofthe map's coordinate values.
  :rtype: dict[str,list[float]]
  """
  try:
- return(self._coords)
+ coords = self._coords
  except:
  coords = {}
  for independent_dimension in self.independent_dimensions:
@@ -471,65 +600,88 @@ class MapConfig(BaseModel):
  for scans in self.spec_scans:
  for scan_number in scans.scan_numbers:
  scanparser = scans.get_scanparser(scan_number)
- for scan_step_index in range(scanparser.spec_scan_npts):
- coords[independent_dimension.label].append(independent_dimension.get_value(scans, scan_number, scan_step_index))
- coords[independent_dimension.label] = np.unique(coords[independent_dimension.label])
- self._coords = coords
- return(self._coords)
+ for scan_step_index in range(
+ scanparser.spec_scan_npts):
+ coords[independent_dimension.label].append(
+ independent_dimension.get_value(
+ scans, scan_number, scan_step_index))
+ coords[independent_dimension.label] = np.unique(
+ coords[independent_dimension.label])
+ self._coords = coords
+ return coords
+
  @property
  def dims(self):
- """Return a tuple of the independent dimension labels for the map."""
- return([point_by_point_scan_data.label for point_by_point_scan_data in self.independent_dimensions[::-1]])
+ """Return a tuple of the independent dimension labels for the
+ map.
+ """
+ return [point_by_point_scan_data.label
+ for point_by_point_scan_data
+ in self.independent_dimensions[::-1]]
+
  @property
  def shape(self):
- """Return the shape of the map -- a tuple representing the number of
- unique values of each dimension across the map.
+ """Return the shape of the map -- a tuple representing the
+ number of unique values of each dimension across the map.
  """
- return(tuple([len(values) for key,values in self.coords.items()][::-1]))
+ return tuple([len(values) for key,values in self.coords.items()][::-1])
+
  @property
  def all_scalar_data(self):
- """Return a list of all instances of `PointByPointScanData` for which
- this map configuration will collect dataset-like data (as opposed to
- axes-like data).
+ """Return a list of all instances of `PointByPointScanData`
+ for which this map configuration will collect dataset-like
+ data (as opposed to axes-like data).
+
+ This will be any and all of the items in the
+ corrections-data-related fields, as well as any additional
+ items in the optional `scalar_data` field.
+ """
+ return [getattr(self, label, None)
+ for label in CorrectionsData.reserved_labels()
+ if getattr(self, label, None) is not None] + self.scalar_data
 
- This will be any and all of the items in the corrections-data-related
- fields, as well as any additional items in the optional `scalar_data`
- field."""
- return([getattr(self,l,None) for l in CorrectionsData.reserved_labels() if getattr(self,l,None) is not None] + self.scalar_data)
 
  def import_scanparser(station, experiment):
- '''Given the name of a CHESS station and experiment type, import the
- corresponding subclass of `ScanParser` as `ScanParser`.
+ """Given the name of a CHESS station and experiment type, import
+ the corresponding subclass of `ScanParser` as `ScanParser`.
 
  :param station: The station name ("IDxx", not the beamline acronym)
  :type station: str
  :param experiment: The experiment type
  :type experiment: Literal["SAXSWAXS","EDD","XRF","Tomo","Powder"]
  :return: None
- '''
+ """
 
  station = station.lower()
  experiment = experiment.lower()
 
  if station in ('id1a3', 'id3a'):
  if experiment in ('saxswaxs', 'powder'):
- from CHAP.common.utils.scanparsers import SMBLinearScanParser as ScanParser
+ from CHAP.common.utils.scanparsers \
+ import SMBLinearScanParser as ScanParser
  elif experiment == 'edd':
- from CHAP.common.utils.scanparsers import SMBMCAScanParser as ScanParser
+ from CHAP.common.utils.scanparsers \
+ import SMBMCAScanParser as ScanParser
  elif experiment == 'tomo':
- from CHAP.common.utils.scanparsers import SMBRotationScanParser as ScanParser
+ from CHAP.common.utils.scanparsers \
+ import SMBRotationScanParser as ScanParser
  else:
- raise(ValueError(f'Invalid experiment type for station {station}: {experiment}'))
+ raise ValueError(
+ f'Invalid experiment type for station {station}: {experiment}')
  elif station == 'id3b':
  if experiment == 'saxswaxs':
- from CHAP.common.utils.scanparsers import FMBSAXSWAXSScanParser as ScanParser
+ from CHAP.common.utils.scanparsers \
+ import FMBSAXSWAXSScanParser as ScanParser
  elif experiment == 'tomo':
- from CHAP.common.utils.scanparsers import FMBRotationScanParser as ScanParser
+ from CHAP.common.utils.scanparsers \
+ import FMBRotationScanParser as ScanParser
  elif experiment == 'xrf':
- from CHAP.common.utils.scanparsers import FMBXRFScanParser as ScanParser
+ from CHAP.common.utils.scanparsers \
+ import FMBXRFScanParser as ScanParser
  else:
- raise(ValueError(f'Invalid experiment type for station {station}: {experiment}'))
+ raise ValueError(
+ f'Invalid experiment type for station {station}: {experiment}')
  else:
- raise(ValueError(f'Invalid station: {station}'))
+ raise ValueError(f'Invalid station: {station}')
 
  globals()['ScanParser'] = ScanParser
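
For orientation, the refactored MapConfig model shown in this file is still constructed the same way as before the refactor: a nested dict (typically parsed from a CHAP pipeline YAML configuration) is passed to the pydantic model, and the field validators in this diff run on construction. The sketch below is illustrative only; the SPEC file path, scan numbers, and motor mnemonic are hypothetical placeholders, and validation succeeds only if the referenced SPEC file and scans actually exist.

    # Hedged usage sketch (not taken from the package docs); field names match
    # the MapConfig model in this diff, values are made-up placeholders.
    from CHAP.common.models.map import MapConfig

    map_config = MapConfig(
        title='example_map',
        station='id1a3',
        experiment_type='TOMO',
        sample={'name': 'example_sample'},
        spec_scans=[{'spec_file': '/path/to/spec.file',   # must exist on disk
                     'scan_numbers': [1, 2, 3]}],
        independent_dimensions=[{'label': 'x', 'units': 'mm',
                                 'data_type': 'spec_motor', 'name': 'samx'}],
    )
    # Derived views defined in the diff: axis labels and map shape
    print(map_config.dims, map_config.shape)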