ChessAnalysisPipeline 0.0.3.tar.gz → 0.0.4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of ChessAnalysisPipeline might be problematic.

Files changed (55)
  1. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/common/models/map.py +11 -10
  2. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/common/utils/fit.py +120 -83
  3. ChessAnalysisPipeline-0.0.4/CHAP/common/utils/general.py +1225 -0
  4. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/common/utils/scanparsers.py +19 -1
  5. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/edd/processor.py +0 -3
  6. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/pipeline.py +1 -1
  7. ChessAnalysisPipeline-0.0.4/CHAP/tomo/__init__.py +5 -0
  8. ChessAnalysisPipeline-0.0.4/CHAP/tomo/models.py +125 -0
  9. ChessAnalysisPipeline-0.0.4/CHAP/tomo/processor.py +2009 -0
  10. ChessAnalysisPipeline-0.0.4/CHAP/tomo/reader.py +5 -0
  11. ChessAnalysisPipeline-0.0.4/CHAP/tomo/writer.py +5 -0
  12. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4/ChessAnalysisPipeline.egg-info}/PKG-INFO +1 -1
  13. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/ChessAnalysisPipeline.egg-info/SOURCES.txt +6 -0
  14. {ChessAnalysisPipeline-0.0.3/ChessAnalysisPipeline.egg-info → ChessAnalysisPipeline-0.0.4}/PKG-INFO +1 -1
  15. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/setup.py +5 -1
  16. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/__init__.py +0 -0
  17. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/common/__init__.py +0 -0
  18. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/common/models/__init__.py +0 -0
  19. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/common/models/integration.py +0 -0
  20. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/common/processor.py +0 -0
  21. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/common/reader.py +0 -0
  22. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/common/utils/__init__.py +0 -0
  23. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/common/utils/material.py +0 -0
  24. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/common/writer.py +0 -0
  25. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/edd/__init__.py +0 -0
  26. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/edd/models.py +0 -0
  27. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/edd/reader.py +0 -0
  28. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/edd/writer.py +0 -0
  29. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/inference/__init__.py +0 -0
  30. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/inference/processor.py +0 -0
  31. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/inference/reader.py +0 -0
  32. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/inference/writer.py +0 -0
  33. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/processor.py +0 -0
  34. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/reader.py +0 -0
  35. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/runner.py +0 -0
  36. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/saxswaxs/__init__.py +0 -0
  37. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/saxswaxs/processor.py +0 -0
  38. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/saxswaxs/reader.py +0 -0
  39. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/saxswaxs/writer.py +0 -0
  40. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/sin2psi/__init__.py +0 -0
  41. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/sin2psi/processor.py +0 -0
  42. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/sin2psi/reader.py +0 -0
  43. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/sin2psi/writer.py +0 -0
  44. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/CHAP/writer.py +0 -0
  45. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/ChessAnalysisPipeline.egg-info/dependency_links.txt +0 -0
  46. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/ChessAnalysisPipeline.egg-info/entry_points.txt +0 -0
  47. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/ChessAnalysisPipeline.egg-info/requires.txt +0 -0
  48. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/ChessAnalysisPipeline.egg-info/top_level.txt +0 -0
  49. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/LICENSE +0 -0
  50. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/MLaaS/__init__.py +0 -0
  51. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/MLaaS/ktrain.py +0 -0
  52. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/MLaaS/mnist_img.py +0 -0
  53. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/MLaaS/tfaas_client.py +0 -0
  54. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/README.md +0 -0
  55. {ChessAnalysisPipeline-0.0.3 → ChessAnalysisPipeline-0.0.4}/setup.cfg +0 -0
CHAP/common/models/map.py
@@ -221,11 +221,11 @@ class PointByPointScanData(BaseModel):
  scan_step_index_range = range(scanparser.spec_scan_npts)
  else:
  scan_step_index_range = range(scan_step_index,scan_step_index+1)
- for scan_step_index in scan_step_index_range:
+ for index in scan_step_index_range:
  try:
- self.get_value(scans, scan_number, scan_step_index)
+ self.get_value(scans, scan_number, index)
  except:
- raise(RuntimeError(f'Could not find data for {self.name} (data_type "{self.data_type}") on scan number {scan_number} in spec file {scans.spec_file}'))
+ raise(RuntimeError(f'Could not find data for {self.name} (data_type "{self.data_type}") on scan number {scan_number} for index {index} in spec file {scans.spec_file}'))
  def get_value(self, spec_scans:SpecScans, scan_number:int, scan_step_index:int):
  """Return the value recorded for this instance of `PointByPointScanData`
  at a specific scan step.
@@ -308,9 +308,10 @@ def get_smb_par_value(spec_file:str, scan_number:int, par_name:str):
  scanparser = get_scanparser(spec_file, scan_number)
  return(scanparser.pars[par_name])
  def validate_data_source_for_map_config(data_source, values):
- import_scanparser(values.get('station'), values.get('experiment_type'))
- data_source.validate_for_station(values.get('station'))
- data_source.validate_for_spec_scans(values.get('spec_scans'))
+ if data_source is not None:
+ import_scanparser(values.get('station'), values.get('experiment_type'))
+ data_source.validate_for_station(values.get('station'))
+ data_source.validate_for_spec_scans(values.get('spec_scans'))
  return(data_source)

  class CorrectionsData(PointByPointScanData):
@@ -424,7 +425,7 @@ class MapConfig(BaseModel):
  """
  title: constr(strip_whitespace=True, min_length=1)
  station: Literal['id1a3','id3a','id3b']
- experiment_type: Literal['SAXSWAXS', 'EDD', 'XRF']
+ experiment_type: Literal['SAXSWAXS', 'EDD', 'XRF', 'TOMO']
  sample: Sample
  spec_scans: conlist(item_type=SpecScans, min_items=1)
  independent_dimensions: conlist(item_type=PointByPointScanData, min_items=1)
@@ -443,11 +444,11 @@ class MapConfig(BaseModel):
  '''Ensure values for the station and experiment_type fields are compatible'''
  station = values.get('station')
  if station == 'id1a3':
- allowed_experiment_types = ['SAXSWAXS', 'EDD']
+ allowed_experiment_types = ['SAXSWAXS', 'EDD', 'TOMO']
  elif station == 'id3a':
- allowed_experiment_types = ['EDD']
+ allowed_experiment_types = ['EDD', 'TOMO']
  elif station == 'id3b':
- allowed_experiment_types = ['SAXSWAXS', 'XRF']
+ allowed_experiment_types = ['SAXSWAXS', 'XRF', 'TOMO']
  else:
  allowed_experiment_types = []
  if value not in allowed_experiment_types:
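Note on the map.py hunks above: 'TOMO' is now an accepted experiment_type, and it is allowed at every station. The standalone sketch below merely restates the station/experiment_type compatibility rule encoded in the validator above; it is not part of the package API.

    # Station -> allowed experiment types in 0.0.4 (illustrative restatement).
    ALLOWED_EXPERIMENT_TYPES = {
        'id1a3': ['SAXSWAXS', 'EDD', 'TOMO'],
        'id3a': ['EDD', 'TOMO'],
        'id3b': ['SAXSWAXS', 'XRF', 'TOMO'],
    }

    def experiment_type_allowed(station: str, experiment_type: str) -> bool:
        """Return True if the experiment type is valid for the given station."""
        return experiment_type in ALLOWED_EXPERIMENT_TYPES.get(station, [])

    assert experiment_type_allowed('id3a', 'TOMO')
    assert not experiment_type_allowed('id3b', 'EDD')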
CHAP/common/utils/fit.py
@@ -9,13 +9,19 @@ Created on Mon Dec 6 15:36:22 2021

  import logging

- from asteval import Interpreter, get_ast_names
+ try:
+ from asteval import Interpreter, get_ast_names
+ except:
+ pass
  from copy import deepcopy
- from lmfit import Model, Parameters
- from lmfit.model import ModelResult
- from lmfit.models import ConstantModel, LinearModel, QuadraticModel, PolynomialModel,\
- ExponentialModel, StepModel, RectangleModel, ExpressionModel, GaussianModel,\
- LorentzianModel
+ try:
+ from lmfit import Model, Parameters
+ from lmfit.model import ModelResult
+ from lmfit.models import ConstantModel, LinearModel, QuadraticModel, PolynomialModel,\
+ ExponentialModel, StepModel, RectangleModel, ExpressionModel, GaussianModel,\
+ LorentzianModel
+ except:
+ pass
  import numpy as np
  from os import cpu_count, getpid, listdir, mkdir, path
  from re import compile, sub
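Note on the import changes above: the asteval and lmfit imports are now wrapped in bare try/except blocks, so fit.py can be imported even when those packages are missing; code paths that actually use them will still fail at call time. A minimal sketch of the same idiom with an explicit availability flag (an assumption for illustration, not what fit.py does):

    # Guarded optional import with a flag; fit.py itself silently passes on failure.
    try:
        from lmfit import Model, Parameters
        have_lmfit = True
    except ImportError:
        have_lmfit = False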
@@ -96,15 +102,16 @@ class Fit:
  if y is not None:
  if isinstance(y, (tuple, list, np.ndarray)):
  self._x = np.asarray(x)
+ self._y = np.asarray(y)
  elif have_xarray and isinstance(y, xr.DataArray):
  if x is not None:
  logging.warning('Ignoring superfluous input x ({x}) in Fit.__init__')
  if y.ndim != 1:
  illegal_value(y.ndim, 'DataArray dimensions', 'Fit:__init__', raise_error=True)
  self._x = np.asarray(y[y.dims[0]])
+ self._y = y
  else:
  illegal_value(y, 'y', 'Fit:__init__', raise_error=True)
- self._y = y
  if self._x.ndim != 1:
  raise ValueError(f'Invalid dimension for input x ({self._x.ndim})')
  if self._x.size != self._y.size:
@@ -172,6 +179,9 @@ class Fit:
  """
  if self._result is None:
  return(None)
+ if not have_xarray:
+ logging.warning('fit.best_results requires xarray in the conda environment')
+ return(None)
  if isinstance(self._y, xr.DataArray):
  best_results = self._y.to_dataset()
  dims = self._y.dims
@@ -272,9 +282,9 @@ class Fit:
  return(0.0)
  else:
  if self._result.init_params is not None:
- normalization_offset = self._result.init_params['tmp_normalization_offset_c']
+ normalization_offset = float(self._result.init_params['tmp_normalization_offset_c'])
  else:
- normalization_offset = self._result.params['tmp_normalization_offset_c']
+ normalization_offset = float(self._result.params['tmp_normalization_offset_c'])
  return(normalization_offset)

  @property
@@ -346,10 +356,10 @@ class Fit:
  if not isinstance(parameter, dict):
  raise ValueError(f'Invalid parameter ({parameter})')
  if parameter.get('expr') is not None:
- raise KeyError(f'Illegal "expr" key in parameter {parameter}')
+ raise KeyError(f'Invalid "expr" key in parameter {parameter}')
  name = parameter['name']
  if not isinstance(name, str):
- raise ValueError(f'Illegal "name" value ({name}) in parameter {parameter}')
+ raise ValueError(f'Invalid "name" value ({name}) in parameter {parameter}')
  if parameter.get('norm') is None:
  self._parameter_norms[name] = False
  else:
@@ -360,12 +370,12 @@ class Fit:
  self._parameter_norms[name] = False
  else:
  if not isinstance(norm, bool):
- raise ValueError(f'Illegal "norm" value ({norm}) in parameter {parameter}')
+ raise ValueError(f'Invalid "norm" value ({norm}) in parameter {parameter}')
  self._parameter_norms[name] = norm
  vary = parameter.get('vary')
  if vary is not None:
  if not isinstance(vary, bool):
- raise ValueError(f'Illegal "vary" value ({vary}) in parameter {parameter}')
+ raise ValueError(f'Invalid "vary" value ({vary}) in parameter {parameter}')
  if not vary:
  if 'min' in parameter:
  logging.warning(f'Ignoring min in parameter {name} in '+
@@ -418,29 +428,29 @@ class Fit:
  for par in parameters:
  name = par['name']
  if not isinstance(name, str):
- raise ValueError(f'Illegal "name" value ({name}) in input parameters')
+ raise ValueError(f'Invalid "name" value ({name}) in input parameters')
  if par.get('norm') is not None:
  norm = par.pop('norm')
  if not isinstance(norm, bool):
- raise ValueError(f'Illegal "norm" value ({norm}) in input parameters')
+ raise ValueError(f'Invalid "norm" value ({norm}) in input parameters')
  new_parameter_norms[f'{pprefix}{name}'] = norm
  else:
  for par in parameter_norms:
  name = par['name']
  if not isinstance(name, str):
- raise ValueError(f'Illegal "name" value ({name}) in input parameters')
+ raise ValueError(f'Invalid "name" value ({name}) in input parameters')
  norm = par.get('norm')
  if norm is None or not isinstance(norm, bool):
- raise ValueError(f'Illegal "norm" value ({norm}) in input parameters')
+ raise ValueError(f'Invalid "norm" value ({norm}) in input parameters')
  new_parameter_norms[f'{pprefix}{name}'] = norm
  if parameters is not None:
  for par in parameters:
  if par.get('expr') is not None:
- raise KeyError(f'Illegal "expr" key ({par.get("expr")}) in parameter '+
+ raise KeyError(f'Invalid "expr" key ({par.get("expr")}) in parameter '+
  f'{name} for a callable model {model}')
  name = par['name']
  if not isinstance(name, str):
- raise ValueError(f'Illegal "name" value ({name}) in input parameters')
+ raise ValueError(f'Invalid "name" value ({name}) in input parameters')
  # RV FIX callable model will need partial deriv functions for any linear pars to get the linearized matrix, so for now skip linear solution option
  newmodel = Model(model, prefix=prefix)
  elif isinstance(model, str):
@@ -462,6 +472,16 @@ class Fit:
  self._linear_parameters.append(f'{pprefix}a')
  self._linear_parameters.append(f'{pprefix}b')
  self._linear_parameters.append(f'{pprefix}c')
+ elif model == 'polynomial': # Par: c0, c1,..., c7
+ degree = kwargs.get('degree')
+ if degree is not None:
+ kwargs.pop('degree')
+ if degree is None or not is_int(degree, ge=0, le=7):
+ raise ValueError(f'Invalid parameter degree for build-in step model ({degree})')
+ newmodel = PolynomialModel(degree=degree, prefix=prefix)
+ for i in range(degree+1):
+ new_parameter_norms[f'{pprefix}c{i}'] = True
+ self._linear_parameters.append(f'{pprefix}c{i}')
  elif model == 'gaussian': # Par: amplitude, center, sigma (fwhm, height)
  newmodel = GaussianModel(prefix=prefix)
  new_parameter_norms[f'{pprefix}amplitude'] = True
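Note on the hunk above: add_model now supports a built-in 'polynomial' model backed by lmfit's PolynomialModel, with a required integer degree between 0 and 7; every coefficient c0..c<degree> is registered as a normalized, linear parameter. A hedged usage sketch, assuming the Fit constructor accepts (y, x=...) as suggested by the __init__ changes earlier in this diff:

    import numpy as np
    from CHAP.common.utils.fit import Fit

    x = np.linspace(0.0, 10.0, 101)
    y = 0.5 + 0.1*x - 0.02*x**2 + 0.001*x**3
    fit = Fit(y, x=x)
    fit.add_model('polynomial', degree=3)  # registers c0..c3 as linear parameters
    fit.fit()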
@@ -524,7 +544,7 @@ class Fit:
  elif model == 'expression': # Par: by expression
  expr = kwargs['expr']
  if not isinstance(expr, str):
- raise ValueError(f'Illegal "expr" value ({expr}) in {model}')
+ raise ValueError(f'Invalid "expr" value ({expr}) in {model}')
  kwargs.pop('expr')
  if parameter_norms is not None:
  logging.warning('Ignoring parameter_norms (normalization determined from '+
@@ -532,7 +552,7 @@ class Fit:
  if parameters is not None:
  for par in parameters:
  if par.get('expr') is not None:
- raise KeyError(f'Illegal "expr" key ({par.get("expr")}) in parameter '+
+ raise KeyError(f'Invalid "expr" key ({par.get("expr")}) in parameter '+
  f'({par}) for an expression model')
  if par.get('norm') is not None:
  logging.warning(f'Ignoring "norm" key in parameter ({par}) '+
@@ -540,7 +560,7 @@ class Fit:
  par.pop('norm')
  name = par['name']
  if not isinstance(name, str):
- raise ValueError(f'Illegal "name" value ({name}) in input parameters')
+ raise ValueError(f'Invalid "name" value ({name}) in input parameters')
  ast = Interpreter()
  expr_parameters = [name for name in get_ast_names(ast.parse(expr))
  if name != 'x' and name not in self._parameters
@@ -632,7 +652,7 @@ class Fit:
  for parameter in parameters:
  name = parameter['name']
  if not isinstance(name, str):
- raise ValueError(f'Illegal "name" value ({name}) in input parameters')
+ raise ValueError(f'Invalid "name" value ({name}) in input parameters')
  if name not in new_parameters:
  name = prefix+name
  parameter['name'] = name
@@ -721,6 +741,13 @@ class Fit:
  # print(f'\nat end add_model: newmodel:\n{newmodel.__dict__}\n')
  return(kwargs)

+ def eval(self, x, result=None):
+ if result is None:
+ result = self._result
+ if result is None:
+ return
+ return(result.eval(x=np.asarray(x))-self.normalization_offset)
+
  def fit(self, interactive=False, guess=False, **kwargs):
  # Check inputs
  if self._model is None:
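Note on the hunk above: 0.0.4 adds a Fit.eval method that evaluates a fit result on an arbitrary x grid and subtracts the internal normalization offset; it returns None when no result is available. A hedged, self-contained sketch (the constructor call and the prefix-less 'linear' model are assumptions based on the surrounding code):

    import numpy as np
    from CHAP.common.utils.fit import Fit

    x = np.linspace(0.0, 1.0, 51)
    y = 2.0*x + 1.0
    fit = Fit(y, x=x)
    fit.add_model('linear')
    fit.fit()
    x_fine = np.linspace(0.0, 1.0, 501)
    y_fine = fit.eval(x_fine)  # resample the best fit on the finer grid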
@@ -797,7 +824,7 @@ class Fit:
  raise ValueError(f'Unable to modify {name} parameter {par} (currently an '+
  'expression)')
  if par.get('expr') is not None:
- raise KeyError(f'Illegal "expr" key in {name} parameter {par}')
+ raise KeyError(f'Invalid "expr" key in {name} parameter {par}')
  self._parameters[name].set(vary=par.get('vary'))
  self._parameters[name].set(min=par.get('min'))
  self._parameters[name].set(max=par.get('max'))
@@ -901,8 +928,8 @@ class Fit:
  #RV self._parameters.pretty_print()
  # self.print_fit_report()

- def plot(self, y=None, y_title=None, result=None, skip_init=False, plot_comp_legends=False,
- plot_residual=False, plot_masked_data=True, **kwargs):
+ def plot(self, y=None, y_title=None, result=None, skip_init=False, plot_comp=True,
+ plot_comp_legends=False, plot_residual=False, plot_masked_data=True, **kwargs):
  if result is None:
  result = self._result
  if result is None:
@@ -932,34 +959,35 @@ class Fit:
  plots += [(self._x[mask], np.asarray(self._y)[mask], 'bx')]
  legend += ['masked data']
  if isinstance(plot_residual, bool) and plot_residual:
- plots += [(self._x[~mask], result.residual, 'k-')]
+ plots += [(self._x[~mask], result.residual, 'r-')]
  legend += ['residual']
  plots += [(self._x[~mask], result.best_fit, 'k-')]
  legend += ['best fit']
  if not skip_init and hasattr(result, 'init_fit'):
  plots += [(self._x[~mask], result.init_fit, 'g-')]
  legend += ['init']
- components = result.eval_components(x=self._x[~mask])
- num_components = len(components)
- if 'tmp_normalization_offset_' in components:
- num_components -= 1
- if num_components > 1:
- eval_index = 0
- for modelname, y in components.items():
- if modelname == 'tmp_normalization_offset_':
- continue
- if modelname == '_eval':
- modelname = f'eval{eval_index}'
- if len(modelname) > 20:
- modelname = f'{modelname[0:16]} ...'
- if isinstance(y, (int, float)):
- y *= np.ones(self._x[~mask].size)
- plots += [(self._x[~mask], y, '--')]
- if plot_comp_legends:
- if modelname[-1] == '_':
- legend.append(modelname[:-1])
- else:
- legend.append(modelname)
+ if plot_comp:
+ components = result.eval_components(x=self._x[~mask])
+ num_components = len(components)
+ if 'tmp_normalization_offset_' in components:
+ num_components -= 1
+ if num_components > 1:
+ eval_index = 0
+ for modelname, y in components.items():
+ if modelname == 'tmp_normalization_offset_':
+ continue
+ if modelname == '_eval':
+ modelname = f'eval{eval_index}'
+ if len(modelname) > 20:
+ modelname = f'{modelname[0:16]} ...'
+ if isinstance(y, (int, float)):
+ y *= np.ones(self._x[~mask].size)
+ plots += [(self._x[~mask], y, '--')]
+ if plot_comp_legends:
+ if modelname[-1] == '_':
+ legend.append(modelname[:-1])
+ else:
+ legend.append(modelname)
  title = kwargs.get('title')
  if title is not None:
  kwargs.pop('title')
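Note on the plot changes above: plot() gains a plot_comp flag (default True) that controls whether individual model components are drawn, and the residual trace is now drawn in red ('r-') instead of black. A hedged, self-contained usage sketch; the constructor call and the prefix-less built-in models are assumptions based on the code shown in this diff:

    import numpy as np
    from CHAP.common.utils.fit import Fit

    x = np.linspace(-5.0, 5.0, 201)
    y = 10.0*np.exp(-0.5*(x/0.8)**2) + 0.1
    fit = Fit(y, x=x)
    fit.add_model('constant')
    fit.add_model('gaussian')
    fit.fit()
    # Best fit, per-component curves with legends, and the residual (now red):
    fit.plot(skip_init=True, plot_comp=True, plot_comp_legends=True, plot_residual=True)
    # Best fit only, without the per-component curves:
    fit.plot(plot_comp=False)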
@@ -1487,28 +1515,33 @@ class FitMultipeak(Fit):

  @classmethod
  def fit_multipeak(cls, y, centers, x=None, normalize=True, peak_models='gaussian',
- center_exprs=None, fit_type=None, background_order=None, background_exp=False,
- fwhm_max=None, plot_components=False):
+ center_exprs=None, fit_type=None, background=None, fwhm_max=None,
+ print_report=False, plot=False, x_eval=None):
  """Make sure that centers and fwhm_max are in the correct units and consistent with expr
  for a uniform fit (fit_type == 'uniform')
  """
+ if x_eval is not None and not isinstance(x_eval, (tuple, list, np.ndarray)):
+ raise ValueError(f'Invalid parameter x_eval ({x_eval})')
  fit = cls(y, x=x, normalize=normalize)
  success = fit.fit(centers, fit_type=fit_type, peak_models=peak_models, fwhm_max=fwhm_max,
- center_exprs=center_exprs, background_order=background_order,
- background_exp=background_exp, plot_components=plot_components)
+ center_exprs=center_exprs, background=background, print_report=print_report,
+ plot=plot)
+ if x_eval is None:
+ best_fit = fit.best_fit
+ else:
+ best_fit = fit.eval(x_eval)
  if success:
- return(fit.best_fit, fit.residual, fit.best_values, fit.best_errors, fit.redchi, \
+ return(best_fit, fit.residual, fit.best_values, fit.best_errors, fit.redchi, \
  fit.success)
  else:
  return(np.array([]), np.array([]), {}, {}, float_max, False)

  def fit(self, centers, fit_type=None, peak_models=None, center_exprs=None, fwhm_max=None,
- background_order=None, background_exp=False, plot_components=False,
- param_constraint=False):
+ background=None, print_report=False, plot=True, param_constraint=False):
  self._fwhm_max = fwhm_max
  # Create the multipeak model
- self._create_model(centers, fit_type, peak_models, center_exprs, background_order,
- background_exp, param_constraint)
+ self._create_model(centers, fit_type, peak_models, center_exprs, background,
+ param_constraint)

  # RV: Obsolete Normalize the data and results
  # print('\nBefore fit before normalization in FitMultipeak:')
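Note on the hunk above: the fit_multipeak keyword arguments changed in 0.0.4: background_order/background_exp are replaced by a single background argument, plot_components is replaced by separate print_report and plot flags, and a new x_eval grid lets the returned best fit be evaluated on a different set of x values via Fit.eval. A hedged usage sketch with illustrative data and peak positions (the import path is assumed from the file layout):

    import numpy as np
    from CHAP.common.utils.fit import FitMultipeak

    x = np.linspace(0.0, 10.0, 501)
    y = (np.exp(-0.5*((x - 3.0)/0.2)**2)
         + 0.5*np.exp(-0.5*((x - 7.0)/0.3)**2)
         + 0.01*x)
    best_fit, residual, best_values, best_errors, redchi, success = \
        FitMultipeak.fit_multipeak(
            y, [3.0, 7.0], x=x, peak_models='gaussian', background='linear',
            fwhm_max=1.0, print_report=True, plot=False)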
@@ -1536,8 +1569,8 @@ class FitMultipeak(Fit):
  else:
  logging.info(' -> Retry fitting with constraints')
  self.fit(centers, fit_type, peak_models, center_exprs, fwhm_max=fwhm_max,
- background_order=background_order, background_exp=background_exp,
- plot_components=plot_components, param_constraint=True)
+ background=background, print_report=print_report, plot=plot,
+ param_constraint=True)
  else:
  # RV: Obsolete Renormalize the data and results
  # print('\nAfter fit before renormalization in FitMultipeak:')
@@ -1549,14 +1582,16 @@ class FitMultipeak(Fit):
  # self.print_fit_report()

  # Print report and plot components if requested
- if plot_components:
+ if print_report:
  self.print_fit_report()
- self.plot()
+ if plot:
+ self.plot(skip_init=True, plot_comp=True, plot_comp_legends=True,
+ plot_residual=True)

  return(success)

  def _create_model(self, centers, fit_type=None, peak_models=None, center_exprs=None,
- background_order=None, background_exp=False, param_constraint=False):
+ background=None, param_constraint=False):
  """Create the multipeak model
  """
  if isinstance(centers, (int, float)):
@@ -1564,8 +1599,10 @@ class FitMultipeak(Fit):
  num_peaks = len(centers)
  if peak_models is None:
  peak_models = num_peaks*['gaussian']
- elif isinstance(peak_models, str):
+ elif isinstance(peak_models, str) and peak_models in ('gaussian', 'lorentzian'):
  peak_models = num_peaks*[peak_models]
+ else:
+ raise ValueError(f'Invalid peak model parameter ({peak_models})')
  if len(peak_models) != num_peaks:
  raise ValueError(f'Inconsistent number of peaks in peak_models ({len(peak_models)} vs '+
  f'{num_peaks})')
@@ -1602,21 +1639,21 @@ class FitMultipeak(Fit):
  self._parameters = Parameters()
  self._result = None

- # Add background model
- if background_order is not None:
- if background_order == 0:
- self.add_model('constant', prefix='background', parameters=
- {'name': 'c', 'value': float_min, 'min': min_value})
- elif background_order == 1:
- self.add_model('linear', prefix='background', slope=0.0, intercept=0.0)
- elif background_order == 2:
- self.add_model('quadratic', prefix='background', a=0.0, b=0.0, c=0.0)
+ # Add background model(s)
+ if background is not None:
+ if isinstance(background, dict):
+ background = [background]
+ if isinstance(background, str):
+ self.add_model(background, prefix='bkgd_')
+ elif is_dict_series(background):
+ for model in deepcopy(background):
+ if 'model' not in model:
+ raise KeyError(f'Missing keyword "model" in model in background ({model})')
+ name = model.pop('model')
+ parameters=model.pop('parameters', None)
+ self.add_model(name, prefix=f'bkgd_{name}_', parameters=parameters, **model)
  else:
- raise ValueError(f'Invalid parameter background_order ({background_order})')
- if background_exp:
- self.add_model('exponential', prefix='background', parameters=(
- {'name': 'amplitude', 'value': float_min, 'min': min_value},
- {'name': 'decay', 'value': float_min, 'min': min_value}))
+ raise ValueError(f'Invalid parameter background ({background})')

  # Add peaks and guess initial fit parameters
  ast = Interpreter()
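Note on the hunk above: the new background argument accepts a single built-in model name, a single model specification, or a list of specifications; each specification is a dict with a required 'model' key, an optional 'parameters' entry, and any remaining keys passed through to add_model. A hedged sketch of the three forms (model and parameter names are illustrative):

    # 1. A single built-in model name:
    background = 'linear'
    # 2. A single model specification (internally wrapped in a list):
    background = {'model': 'exponential'}
    # 3. Several models, optionally with initial parameter settings:
    background = [
        {'model': 'constant',
         'parameters': [{'name': 'c', 'value': 0.0, 'min': 0.0}]},
        {'model': 'exponential'},
    ]
    # Passed through FitMultipeak.fit_multipeak(y, centers, x=x, background=background)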
@@ -1673,8 +1710,8 @@ class FitMultipeak(Fit):
  fit_failure = False
  index = compile(r'\d+')
  for name, par in self.best_parameters.items():
- if 'background' in name:
- # if ((name == 'backgroundc' and par['value'] <= 0.0) or
+ if 'bkgd' in name:
+ # if ((name == 'bkgd_c' and par['value'] <= 0.0) or
  # (name.endswith('amplitude') and par['value'] <= 0.0) or
  if ((name.endswith('amplitude') and par['value'] <= 0.0) or
  (name.endswith('decay') and par['value'] <= 0.0)):
@@ -1687,7 +1724,7 @@ class FitMultipeak(Fit):
  (name == 'scale_factor' and par['value'] <= 0.0)):
  logging.info(f'Invalid fit result for {name} ({par["value"]})')
  fit_failure = True
- if name.endswith('sigma') and self._sigma_max is not None:
+ if 'bkgd' not in name and name.endswith('sigma') and self._sigma_max is not None:
  if name == 'sigma':
  sigma_max = self._sigma_max[0]
  else:
@@ -1697,7 +1734,7 @@ class FitMultipeak(Fit):
  fit_failure = True
  elif par['value'] == sigma_max:
  logging.warning(f'Edge result on for {name} ({par["value"]})')
- if name.endswith('fwhm') and self._fwhm_max is not None:
+ if 'bkgd' not in name and name.endswith('fwhm') and self._fwhm_max is not None:
  if par['value'] > self._fwhm_max:
  logging.info(f'Invalid fit result for {name} ({par["value"]})')
  fit_failure = True
@@ -2016,7 +2053,7 @@ class FitMap(Fit):
  plots += [(self._x[~mask], self.best_fit[dims], 'k-')]
  legend += ['best fit']
  if plot_residual:
- plots += [(self._x[~mask], self.residual[dims], 'k--')]
+ plots += [(self._x[~mask], self.residual[dims], 'r--')]
  legend += ['residual']
  # Create current parameters
  parameters = deepcopy(self._parameters)
@@ -2128,7 +2165,7 @@ class FitMap(Fit):
  value = par.get('value')
  vary = par.get('vary')
  if par.get('expr') is not None:
- raise KeyError(f'Illegal "expr" key in {name} parameter {par}')
+ raise KeyError(f'Invalid "expr" key in {name} parameter {par}')
  self._parameters[name].set(value=value, vary=vary, min=par.get('min'),
  max=par.get('max'))
  # Overwrite existing best values for fixed parameters when a value is specified