ChessAnalysisPipeline 0.0.13-py3-none-any.whl → 0.0.15-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ChessAnalysisPipeline has been flagged as possibly problematic.

CHAP/common/reader.py CHANGED

@@ -8,11 +8,12 @@ Description: Module for Writers used in multiple experiment-specific
 
 # System modules
 from os.path import (
+    isabs,
     isfile,
+    join,
     splitext,
 )
 from sys import modules
-from time import time
 
 # Third party modules
 import numpy as np
@@ -37,10 +38,39 @@ class BinaryFileReader(Reader):
         return data
 
 
+class FabioImageReader(Reader):
+    """Reader for images using the python package
+    [`fabio`](https://fabio.readthedocs.io/en/main/).
+    """
+    def read(self, filename, frame=None, inputdir='.'):
+        """Return the data from the image file(s) provided.
+
+        :param filename: The image filename, or glob pattern for image
+            filenames, to read.
+        :type filename: str
+        :param frame: The index of a specific frame to read from the
+            file(s), defaults to `None`.
+        :type filename: int, optional
+        :returns: Image data as a numpy array (or list of numpy
+            arrays, if a glob pattern matching more than one file was
+            provided).
+        """
+        from glob import glob
+        import fabio
+
+        filenames = glob(filename)
+        data = []
+        for f in filenames:
+            image = fabio.open(f, frame=frame)
+            data.append(image.data)
+            image.close()
+        return data
+
+
 class H5Reader(Reader):
     """Reader for h5 files.
     """
-    def read(self, filename, h5path='/'):
+    def read(self, filename, h5path='/', idx=None):
         """Return the data object stored at `h5path` in an h5-file.
 
         :param filename: The name of the h5-file to read from.
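The two readers touched above can also be exercised directly, outside a pipeline. A minimal sketch, assuming the Reader subclasses can be instantiated with no arguments; the file names, glob pattern, and HDF5 path are invented for illustration:

    # Hypothetical usage of the readers shown above; the file names, glob
    # pattern, and HDF5 path are illustrative, not taken from the diff.
    from CHAP.common.reader import FabioImageReader, H5Reader

    # FabioImageReader.read() globs the pattern and returns a list of numpy
    # arrays, one per matched file (optionally a single frame from each).
    images = FabioImageReader().read(filename='raw/scan_0001_*.tif', frame=0)

    # H5Reader.read() now takes an optional `idx`; the sequence is cast to a
    # tuple and applied as an index, so idx=[0, 1] returns dataset[0, 1].
    spectrum = H5Reader().read(
        'scan_0001.h5', h5path='/entry/data/data', idx=[0, 1])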
@@ -55,6 +85,8 @@ class H5Reader(Reader):
         from h5py import File
 
         data = File(filename, 'r')[h5path]
+        if idx is not None:
+            data = data[tuple(idx)]
         return data
 
 
@@ -142,8 +174,8 @@ class MapReader(Reader):
                 attrs={'long_name': f'{dim.label} ({dim.units})',
                        'data_type': dim.data_type,
                        'local_name': dim.name})
-            if map_config.map_type == 'structured':
-                nxentry.data.attrs[f'{dim.label}_indices'] = i
+            # if map_config.map_type == 'structured':
+            #     nxentry.data.attrs[f'{dim.label}_indices'] = i
 
         # Create empty NXfields for all scalar data present in the
         # provided map configuration
@@ -167,15 +199,13 @@ class MapReader(Reader):
         # Create empty NXfields of appropriate shape for raw
         # detector data
         for detector_name in detector_names:
+            if not isinstance(detector_name, str):
+                detector_name = str(detector_name)
             detector_data = map_config.get_detector_data(
                 detector_name, (0,) * len(map_config.shape))
             nxentry.data[detector_name] = NXfield(value=np.zeros(
                 (*map_config.shape, *detector_data.shape)),
                 dtype=detector_data.dtype)
-            # data_shape = list(map_config.shape)+list(detector_data.shape)
-            # nxentry.data[detector_name] = NXfield(
-            #     value=np.zeros(data_shape), shape=data_shape,
-            #     dtype=detector_data.dtype)
 
         # Read and fill in maps of raw data
         if len(map_config.all_scalar_data) > 0 or len(detector_names) > 0:
@@ -184,6 +214,8 @@ class MapReader(Reader):
                 nxentry.data[data.label][map_index] = map_config.get_value(
                     data, map_index)
             for detector_name in detector_names:
+                if not isinstance(detector_name, str):
+                    detector_name = str(detector_name)
                 nxentry.data[detector_name][map_index] = \
                     map_config.get_detector_data(detector_name, map_index)
 
@@ -213,6 +245,146 @@ class NexusReader(Reader):
         return nxobject
 
 
+class NXdataReader(Reader):
+    """Reader for constructing an NXdata object from components"""
+    def read(self, name, nxfield_params, signal_name, axes_names, attrs={},
+             inputdir='.'):
+        """Return a basic NXdata object constructed from components.
+
+        :param name: The name of the NXdata group.
+        :type name: str
+        :param nxfield_params: List of sets of parameters for
+            `NXfieldReader` specifying the NXfields belonging to the
+            NXdata.
+        :type nxfield_params: list[dict]
+        :param signal_name: Name of the signal for the NXdata (must be
+            one of the names of the NXfields indicated in `nxfields`)
+        :type signal: str
+        :param axes_names: Name or names of the coordinate axes
+            NXfields associated with the signal (must be names of
+            NXfields indicated in `nxfields`)
+        :type axes_names: Union[str, list[str]]
+        :param attrs: Optional dictionary of additional attributes for
+            the NXdata
+        :type attrs: dict
+        :param inputdir: Input directory to use for `NXfieldReader`s,
+            defaults to `"."`
+        :type inputdir: str
+        :returns: A new NXdata object
+        :rtype: nexusformat.nexus.NXdata
+        """
+        from nexusformat.nexus import NXdata
+
+        # Read in NXfields
+        nxfields = [NXfieldReader().read(**params, inputdir=inputdir)
+                    for params in nxfield_params]
+        nxfields = {nxfield.nxname: nxfield for nxfield in nxfields}
+
+        # Get signal NXfield
+        try:
+            nxsignal = nxfields[signal_name]
+        except:
+            raise ValueError(
+                '`signal_name` must be the name of one of the NXfields '
+                + 'indicated in `nxfields`: '
+                + ', '.join(nxfields.keys())
+            )
+
+        # Get axes NXfield(s)
+        if isinstance(axes_names, str):
+            axes_names = [axes_names]
+        try:
+            nxaxes = [nxfields[axis_name] for axis_name in axes_names]
+        except:
+            raise ValueError(
+                '`axes_names` must contain only names of NXfields indicated '
+                + 'in `nxfields`: ' + ', '.join(nxfields.keys())
+            )
+        for i, nxaxis in enumerate(nxaxes):
+            if len(nxaxis) != nxsignal.shape[i]:
+                raise ValueError(
+                    f'Shape mismatch on signal dimension {i}: signal '
+                    + f'"{nxsignal.nxname}" has {nxsignal.shape[i]} values, '
+                    + f'but axis "{nxaxis.nxname}" has {len(nxaxis)} values.')
+
+        result = NXdata(signal=nxsignal, axes=nxaxes, name=name, attrs=attrs,
+                        **nxfields)
+        self.logger.info(result.tree)
+        return result
+
+
+class NXfieldReader(Reader):
+    """Reader for an NXfield with options to modify certain attributes."""
+    def read(self, filename, nxpath, nxname=None, update_attrs=None,
+             slice_params=None, inputdir='.'):
+        """Return a copy of the indicated NXfield from the file. Name
+        and attributes of the returned copy may be modified with the
+        `nxname` and `update_attrs` keyword arguments.
+
+        :param filename: Name of the NeXus file containing the NXfield to read.
+        :type filename: str
+        :param nxpath: Path in `nxfile` pointing to the NXfield to read.
+        :type nxpath: str
+        :param nxname: Optional new name for the returned NXfield,
+            defaults to None
+        :type nxname: str, optional
+        :param update_attrs: Optional dictonary used to add to /
+            update the original NXfield's attributes, defaults to None
+        :type update_attrs: dict, optional
+        :param slice_params: Parameters for returning just a slice of
+            the full field data. Slice parameters are provided in a
+            list dictionaries with integer values for any / all of the
+            following keys: `"start"`, `"end"`, `"step"`. Default
+            values used are: `"start"` - `0`, `"end"` -- `None`,
+            `"step"` -- `1`. The order of the list must correspond to
+            the order of the field's axes. Defaults to `None`.
+        :type slice_params: list[dict[str, int]], optional
+        :param inputdir: Directory containing `nxfile`, defaults to `"."`
+        :type inputdir: str
+        :returns: A copy of the indicated NXfield (with name and
+            attributes optionally modified).
+        :rtype: nexusformat.nexus.NXfield
+        """
+        # Third party modules
+        from nexusformat.nexus import nxload, NXfield
+
+        # Local modules
+        from CHAP.utils.general import nxcopy
+
+        if not isabs(filename):
+            filename = join(inputdir, filename)
+        nxroot = nxload(filename)
+        nxfield = nxroot[nxpath]
+
+        if nxname is None:
+            nxname = nxfield.nxname
+
+        attrs = nxfield.attrs
+        if update_attrs is not None:
+            attrs.update(update_attrs)
+
+        if slice_params is None:
+            value = nxfield.nxdata
+        else:
+            if len(slice_params) < nxfield.ndim:
+                slice_params.extend([{}] * (nxfield.ndim - len(slice_params)))
+            if len(slice_params) > nxfield.ndim:
+                slice_params = slice_params[0:nxfield.ndim]
+            slices = ()
+            default_slice = {'start': 0, 'end': None, 'step': 1}
+            for s in slice_params:
+                for k, v in default_slice.items():
+                    if k not in s:
+                        s[k] = v
+                slices = (*slices, slice(s['start'], s['end'], s['step']))
+            value = nxfield.nxdata[slices]
+
+        nxfield = NXfield(value=value, name=nxname, attrs=attrs)
+        self.logger.debug(f'Result -- nxfield.tree =\n{nxfield.tree}')
+
+        return nxfield
+
+
 class SpecReader(Reader):
     """Reader for CHESS SPEC scans"""
     def read(self, filename=None, spec_config=None, detector_names=[],
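NXfieldReader and NXdataReader are meant to be combined: the latter reads each of its component NXfields through the former. A hedged sketch of the call pattern, with invented file names, NeXus paths, and field names (the slice_params entries follow the start/end/step dictionaries described in the docstring above):

    # Illustrative only: file names, NeXus paths, and field names are invented.
    from CHAP.common.reader import NXdataReader, NXfieldReader

    # Read a slice of a single NXfield: one {'start', 'end', 'step'} dict per
    # axis; missing keys default to start=0, end=None, step=1, and missing
    # trailing axes are filled with full slices.
    counts = NXfieldReader().read(
        filename='map.nxs',
        nxpath='/entry/data/counts',
        slice_params=[{'start': 0, 'end': 10}],
        update_attrs={'units': 'counts'})

    # Assemble an NXdata group whose signal and axis are NXfields read the
    # same way; signal_name and axes_names must match the fields' nxnames.
    nxdata = NXdataReader().read(
        name='data',
        nxfield_params=[
            {'filename': 'map.nxs', 'nxpath': '/entry/data/counts'},
            {'filename': 'map.nxs', 'nxpath': '/entry/data/x'},
        ],
        signal_name='counts',
        axes_names='x')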

CHAP/common/writer.py CHANGED

@@ -89,13 +89,15 @@ def write_yaml(data, filename, force_overwrite=False):
 
 def write_filetree(data, outputdir, force_overwrite=False):
     # System modules
-    from os import mkdir
+    from os import makedirs
 
     # Third party modules
     from nexusformat.nexus import (
         NXentry,
+        NXsubentry,
         NXgroup,
         NXobject,
+        NXroot,
         NXsubentry,
     )
 
@@ -104,7 +106,7 @@ def write_filetree(data, outputdir, force_overwrite=False):
             f'{type(data).__name__} as a file tree to disk.')
 
     if not os_path.isdir(outputdir):
-        mkdir(outputdir)
+        makedirs(outputdir)
 
     for k, v in data.items():
         if isinstance(v, NXsubentry) and 'schema' in v.attrs:
@@ -114,15 +116,28 @@ def write_filetree(data, outputdir, force_overwrite=False):
                 write_txt(list(v.data), filename, force_overwrite)
             elif schema == 'json':
                 write_txt(str(v.data), filename, force_overwrite)
+            elif schema == 'yml' or schema == 'yaml':
+                from json import loads
+                write_yaml(loads(v.data.nxdata), filename, force_overwrite)
             elif schema == 'tif' or schema == 'tiff':
                 write_tif(v.data, filename, force_overwrite)
             elif schema == 'h5':
-                nxentry = NXentry()
+                if any(isinstance(vv, NXsubentry) for vv in v.values()):
+                    nxbase = NXroot()
+                else:
+                    nxbase = NXentry()
                 for kk, vv in v.attrs.items():
-                    nxentry.attrs[kk] = vv
+                    if kk not in ('schema', 'filename'):
+                        nxbase.attrs[kk] = vv
                 for kk, vv in v.items():
-                    nxentry[kk] = vv
-                write_nexus(nxentry, filename, force_overwrite)
+                    if isinstance(vv, NXsubentry):
+                        nxentry = NXentry()
+                        nxbase[vv.nxname] = nxentry
+                        for kkk, vvv in vv.items():
+                            nxentry[kkk] = vvv
+                    else:
+                        nxbase[kk] = vv
+                write_nexus(nxbase, filename, force_overwrite)
             else:
                 raise TypeError(f'Files of type {schema} not yet implemented')
         elif isinstance(v, NXgroup):
@@ -136,7 +151,7 @@ class ExtractArchiveWriter(Writer):
        and write the extracted archive to files.
 
        :param data: The data to write to archive.
-       :type data: CHAP.pipeline.PipelineData
+       :type data: list[PipelineData]
        :param filename: The name of the directory to write the archive
            files to.
        :type filename: str
@@ -162,7 +177,7 @@ class FileTreeWriter(Writer):
        directory tree stuctured like the NeXus tree.
 
        :param data: The data to write to disk.
-       :type data: CHAP.pipeline.PipelineData
+       :type data: list[PipelineData]
        :param outputdir: The name of the directory to write to.
        :type outputdir: str
        :param force_overwrite: Flag to allow data to be overwritten
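The write_filetree() changes above add a 'yml'/'yaml' branch (the subentry's data field holds a JSON string that is re-written as YAML) and let an 'h5' subentry that itself contains NXsubentry children be written as an NXroot with one NXentry per child. A rough sketch of a tree the updated function is meant to accept; the 'schema' attribute follows the code above, while the 'filename' attribute convention and the overall layout are assumptions:

    # Assumed input layout for write_filetree(); names and values are invented.
    from json import dumps
    from nexusformat.nexus import NXentry, NXfield, NXsubentry

    from CHAP.common.writer import write_filetree

    tree = NXentry()

    # A 'yaml' leaf: its 'data' field holds a JSON string, which the new
    # 'yml'/'yaml' branch parses with json.loads() and dumps as YAML.
    tree['calib'] = NXsubentry()
    tree['calib'].attrs['schema'] = 'yaml'
    tree['calib'].attrs['filename'] = 'calib.yaml'
    tree['calib']['data'] = NXfield(dumps({'energy': 80.0, 'detectors': [0, 1]}))

    # An 'h5' leaf whose children are NXsubentry groups: the updated branch
    # wraps them in an NXroot and writes one NXentry per child subentry.
    tree['maps'] = NXsubentry()
    tree['maps'].attrs['schema'] = 'h5'
    tree['maps'].attrs['filename'] = 'maps.h5'
    tree['maps']['scan_1'] = NXsubentry()
    tree['maps']['scan_1']['counts'] = NXfield([1, 2, 3])

    write_filetree(tree, 'output_dir', force_overwrite=True)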
@@ -204,6 +219,44 @@ class FileTreeWriter(Writer):
         return data
 
 
+class H5Writer(Writer):
+    """Writer for H5 files from an `nexusformat.nexus.NXdata` object"""
+    def write(self, data, filename, force_overwrite=False):
+        """Write the NeXus object contained in `data` to hdf5 file.
+
+        :param data: The data to write to file.
+        :type data: CHAP.pipeline.PipelineData
+        :param filename: The name of the file to write to.
+        :param force_overwrite: Flag to allow data in `filename` to be
+            overwritten if it already exists, defaults to `False`.
+        :type force_overwrite: bool, optional
+        :raises RuntimeError: If `filename` already exists and
+            `force_overwrite` is `False`.
+        :return: The data written to file.
+        :rtype: nexusformat.nexus.NXobject
+        """
+        # Third party modules
+        from h5py import File
+        from nexusformat.nexus import NXdata
+
+        data = self.unwrap_pipelinedata(data)[-1]
+        if not isinstance(data, NXdata):
+            raise ValueError('Invalid data parameter {(data)}')
+
+        mode = 'w' if force_overwrite else 'w-'
+        with File(filename, mode) as f:
+            f[data.signal] = data.nxsignal
+            for i, axes in enumerate(data.attrs['axes']):
+                f[axes] = data[axes]
+                f[data.signal].dims[i].label = \
+                    f'{axes} ({data[axes].units})' \
+                    if 'units' in data[axes].attrs else axes
+                f[axes].make_scale(axes)
+                f[data.signal].dims[i].attach_scale(f[axes])
+
+        return data
+
+
 class MatplotlibAnimationWriter(Writer):
     """Writer for saving matplotlib animations."""
     def write(self, data, filename, fps=1):
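H5Writer stores the NXdata signal and its axes as plain HDF5 datasets and attaches the axes as HDF5 dimension scales. The following standalone sketch shows just that underlying h5py pattern (make_scale / attach_scale) and how the result reads back; it is not CHAP code, and the dataset names are invented:

    # Standalone h5py sketch of the dimension-scale layout H5Writer produces.
    import numpy as np
    from h5py import File

    with File('sketch.h5', 'w') as f:
        f['intensity'] = np.random.rand(10)      # the NXdata signal
        f['x'] = np.arange(10.0)                 # one coordinate axis
        f['intensity'].dims[0].label = 'x (mm)'  # "axis (units)" style label
        f['x'].make_scale('x')
        f['intensity'].dims[0].attach_scale(f['x'])

    with File('sketch.h5', 'r') as f:
        print(f['intensity'].dims[0].label)      # -> 'x (mm)'
        print(f['intensity'].dims[0]['x'][:])    # values of the attached scale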
@@ -211,7 +264,7 @@ class MatplotlibAnimationWriter(Writer):
        contained in `data` to file.
 
        :param data: The matplotlib animation.
-       :type data: CHAP.pipeline.PipelineData
+       :type data: list[PipelineData]
        :param filename: The name of the file to write to.
        :type filename: str
        :param fps: Movie frame rate (frames per second),
@@ -224,7 +277,7 @@ class MatplotlibAnimationWriter(Writer):
        extension = os_path.splitext(filename)[1]
        if not extension:
            data.save(f'{filename}.gif', fps=fps)
-       elif extension in '.gif':
+       elif extension == '.gif':
            data.save(filename, fps=fps)
        elif extension == '.mp4':
            data.save(filename, writer='ffmpeg', fps=fps)
@@ -239,7 +292,7 @@ class MatplotlibFigureWriter(Writer):
        file.
 
        :param data: The matplotlib figure
-       :type data: CHAP.pipeline.PipelineData
+       :type data: list[PipelineData]
        :param filename: The name of the file to write to.
        :type filename: str
        :param savefig_kw: Keyword args to pass to
@@ -261,11 +314,11 @@ class MatplotlibFigureWriter(Writer):
 
 class NexusWriter(Writer):
     """Writer for NeXus files from `NXobject`-s"""
-    def write(self, data, filename, force_overwrite=False):
+    def write(self, data, filename, nxpath=None, force_overwrite=False):
         """Write the NeXus object contained in `data` to file.
 
         :param data: The data to write to file.
-        :type data: CHAP.pipeline.PipelineData
+        :type data: list[PipelineData]
         :param filename: The name of the file to write to.
         :param force_overwrite: Flag to allow data in `filename` to be
             overwritten if it already exists, defaults to `False`.
@@ -275,12 +328,130 @@ class NexusWriter(Writer):
         :return: The data written to file.
         :rtype: nexusformat.nexus.NXobject
         """
+        # Third party modules
+        from nexusformat.nexus import (
+            NXentry,
+            NXFile,
+            NXroot,
+        )
+        import os
         data = self.unwrap_pipelinedata(data)[-1]
-        write_nexus(data, filename, force_overwrite)
-
+        nxname = data.nxname
+        if not os.path.isfile(filename) and nxpath is not None:
+            self.logger.warning(
+                f'{filename} does not yet exist. Argument for nxpath ({nxpath}) '
+                + 'will be ignored.')
+            nxpath = None
+        if nxpath is None:
+            nxclass = data.nxclass
+            if nxclass == 'NXentry':
+                data = NXroot(data)
+                data[nxname].set_default()
+            elif nxclass != 'NXroot':
+                data = NXroot(NXentry(data))
+                if nxclass == 'NXdata':
+                    data.entry[nxname].set_default()
+                data.entry.set_default()
+            write_nexus(data, filename, force_overwrite)
+        else:
+            nxfile = NXFile(filename, 'rw')
+            root = nxfile.readfile()
+            if nxfile.get(nxpath) is None:
+                nxpath = root.NXentry[0].nxpath
+                self.logger.warning(
+                    f'Path "{nxpath}" not present in {filename}. '
+                    + f'Using {nxpath} instead.')
+            full_nxpath = os.path.join(nxpath, nxname)
+            self.logger.debug(f'Full path for object to write: {full_nxpath}')
+            if nxfile.get(full_nxpath) is not None:
+                self.logger.debug(
+                    f'{os.path.join(nxpath, nxname)} already exists in {filename}')
+                if force_overwrite:
+                    self.logger.warning(
+                        'Deleting existing NXobject at '
+                        + f'{os.path.join(nxpath, nxname)} in {filename}')
+                    del root[full_nxpath]
+            try:
+                root[full_nxpath] = data
+            except Exception as e:
+                nxfile.close()
+                raise e
+            nxfile.close()
         return data
 
 
+class PyfaiResultsWriter(Writer):
+    """Writer for results of one or more pyFAI integrations. Able to
+    handle multiple output formats. Currently supported formats are:
+    .npz, .nxs.
+    """
+    def write(self, data, filename, force_overwrite=False):
+        """Save pyFAI integration results to a file. Format is
+        determined automatically form the extension of `filename`.
+
+        :param data: Integration results to save.
+        :type data: Union[PipelineData,
+            list[pyFAI.containers.IntegrateResult]]
+        :param filename: Name of the file to which results will be
+            saved. Format of output is determined ffrom the
+            extension. Currently supported formats are: `.npz`,
+            `.nxs`
+        :type filename: str
+        """
+        import os
+
+        from pyFAI.containers import Integrate1dResult, Integrate2dResult
+
+        try:
+            results = self.unwrap_pipelinedata(data)[0]
+        except:
+            results = data
+        if not isinstance(results, list):
+            results = [results]
+        if not all([isinstance(r, Integrate1dResult) for r in results]) \
+                and not all([isinstance(r, Integrate2dResult) for r in results]):
+            raise Exception(
+                'Bad input data: all items must have the same type -- either '
+                + 'all pyFAI.containers.Integrate1dResult, or all '
+                + 'pyFAI.containers.Integrate2dResult.')
+
+        if os.path.isfile(filename):
+            if force_overwrite:
+                self.logger.warning(f'Removing existing file {filename}')
+                os.remove(filename)
+            else:
+                raise Exception(f'{filename} already exists.')
+        _, ext = os.path.splitext(filename)
+        if ext.lower() == '.npz':
+            self.write_npz(results, filename)
+        elif ext.lower() == '.nxs':
+            self.write_nxs(results, filename)
+        else:
+            raise Exception(f'Unsupported file format: {ext}')
+        self.logger.info(f'Wrote to {filename}')
+        return results
+
+    def write_npz(self, results, filename):
+        """Save `results` to the .npz file, `filename`"""
+        import numpy as np
+
+        data = {'radial': results[0].radial,
+                'intensity': [r.intensity for r in results]}
+        if hasattr(results[0], 'azimuthal'):
+            # 2d results
+            data['azimuthal'] = results[0].azimuthal
+        if all([r.sigma for r in results]):
+            # errors were included
+            data['sigma'] = [r.sigma for r in results]
+
+        np.savez(filename, **data)
+
+    def write_nxs(results, filename):
+        """Save `results` to the .nxs file, `filename`"""
+        raise NotImplementedError
+
+
+
 class TXTWriter(Writer):
     """Writer for plain text files from string or tuples or lists of
     strings."""
@@ -331,6 +502,12 @@ class YAMLWriter(Writer):
         :rtype: dict
         """
         data = self.unwrap_pipelinedata(data)[-1]
+        try:
+            from pydantic import BaseModel
+            if isinstance(data, BaseModel):
+                data = data.dict()
+        except:
+            pass
         write_yaml(data, filename, force_overwrite)
         return data
 
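Taken together, the remaining writer.py changes mean that NexusWriter.write() can now append an object under an existing `nxpath` when the target file already exists, YAMLWriter converts pydantic models with `.dict()` before dumping, and PyfaiResultsWriter.write_npz() stacks the integration results into a single .npz file. A small sketch of reading such an .npz back (the file name is hypothetical; the keys follow the dict built in write_npz above):

    # Hypothetical read-back of a file written by PyfaiResultsWriter.write_npz().
    import numpy as np

    with np.load('integrated.npz') as npz:
        radial = npz['radial']        # shared radial bins
        intensity = npz['intensity']  # one entry per integration result
        # 'sigma' (and 'azimuthal' for 2-d results) are only sometimes present
        sigma = npz['sigma'] if 'sigma' in npz else None
    print(radial.shape, np.asarray(intensity).shape)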

CHAP/edd/__init__.py CHANGED

@@ -1,11 +1,20 @@
 """This subpackage contains `PipelineItems` unique to EDD data
 processing workflows.
 """
-# from CHAP.edd.reader import
+from CHAP.edd.reader import (EddMapReader,
+                             ScanToMapReader,
+                             SetupNXdataReader,
+                             UpdateNXdataReader,
+                             NXdataSliceReader)
 from CHAP.edd.processor import (DiffractionVolumeLengthProcessor,
-                                MCACeriaCalibrationProcessor,
+                                LatticeParameterRefinementProcessor,
+                                MCAEnergyCalibrationProcessor,
+                                MCATthCalibrationProcessor,
                                 MCADataProcessor,
-                                StrainAnalysisProcessor)
+                                MCAEnergyCalibrationProcessor,
+                                MCACalibratedDataPlotter,
+                                StrainAnalysisProcessor,
+                                CreateStrainAnalysisConfigProcessor)
 # from CHAP.edd.writer import
 
 from CHAP.common import MapProcessor