pyTEMlib 0.2024.2.1__py2.py3-none-any.whl → 0.2024.6.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyTEMlib might be problematic. Click here for more details.

pyTEMlib/eels_dialog.py CHANGED
@@ -1,6 +1,4 @@
1
1
  """
2
- QT dialog window for EELS compositional analysis
3
-
4
2
  Author: Gerd Duscher
5
3
  """
6
4
 
@@ -93,7 +91,7 @@ class CurveVisualizer(object):
93
91
  legline.set_alpha(0.2)
94
92
  self.fig.canvas.draw()
95
93
 
96
- def get_sidebar():
94
+ def get_core_loss_sidebar():
97
95
  side_bar = ipywidgets.GridspecLayout(14, 3,width='auto', grid_gap="0px")
98
96
 
99
97
 
@@ -194,6 +192,7 @@ def get_sidebar():
194
192
  return side_bar
195
193
 
196
194
 
195
+
197
196
  class CompositionWidget(object):
198
197
  def __init__(self, datasets=None, key=None):
199
198
 
@@ -203,7 +202,7 @@ class CompositionWidget(object):
203
202
 
204
203
 
205
204
  self.model = []
206
- self.sidebar = get_sidebar()
205
+ self.sidebar = get_core_loss_sidebar()
207
206
 
208
207
  self.set_dataset(key)
209
208
 
@@ -312,11 +311,12 @@ class CompositionWidget(object):
312
311
  reference_list =[('None', -1)]
313
312
 
314
313
  for index, key in enumerate(self.datasets.keys()):
315
- if 'Reference' not in key:
316
- if 'SPECTR' in self.datasets[key].data_type.name:
317
- spectrum_list.append((f'{key}: {self.datasets[key].title}', index))
318
- self.spectrum_keys_list.append(key)
319
- reference_list.append((f'{key}: {self.datasets[key].title}', index))
314
+ if '_rel' not in key:
315
+ if 'Reference' not in key :
316
+ if 'SPECTR' in self.datasets[key].data_type.name:
317
+ spectrum_list.append((f'{key}: {self.datasets[key].title}', index))
318
+ self.spectrum_keys_list.append(key)
319
+ reference_list.append((f'{key}: {self.datasets[key].title}', index))
320
320
 
321
321
  if set_key in self.spectrum_keys_list:
322
322
  self.key = set_key
@@ -324,10 +324,11 @@ class CompositionWidget(object):
324
324
  self.key = self.spectrum_keys_list[-1]
325
325
  self.dataset = self.datasets[self.key]
326
326
 
327
- spec_dim = self.dataset.get_dimensions_by_type(sidpy.DimensionType.SPECTRAL)
328
- self.spec_dim = self.dataset._axes[spec_dim[0]]
327
+ self.spec_dim = self.dataset.get_spectral_dims(return_axis=True)[0]
329
328
 
330
329
  self.energy_scale = self.spec_dim.values
330
+ self.dd = (self.energy_scale[0], self.energy_scale[1])
331
+
331
332
  self.dataset.metadata['experiment']['offset'] = self.energy_scale[0]
332
333
  self.dataset.metadata['experiment']['dispersion'] = self.energy_scale[1] - self.energy_scale[0]
333
334
  if 'edges' not in self.dataset.metadata or self.dataset.metadata['edges'] == {}:
@@ -634,7 +635,7 @@ class CompositionWidget(object):
634
635
  raise ValueError('need a experiment parameter in metadata dictionary')
635
636
 
636
637
  eff_beta = eels.effective_collection_angle(self.energy_scale, alpha, beta, beam_kv)
637
-
638
+ eff_beta = beta
638
639
  self.low_loss = None
639
640
  if self.sidebar[12, 1].value:
640
641
  for key in self.datasets.keys():
@@ -1033,13 +1033,12 @@ class EdgesAtCursor(object):
1033
1033
  self.label.remove()
1034
1034
  self.line.remove()
1035
1035
  if event.button == 1:
1036
- self.label = self.ax.text(x, y_max, eels.find_major_edges(event.xdata, self.maximal_chemical_shift),
1036
+ self.label = self.ax.text(x, y_max, eels.find_all_edges(event.xdata, self.maximal_chemical_shift, major_edges_only=True),
1037
1037
  verticalalignment='top')
1038
1038
  self.line, = self.ax.plot([x, x], [y_min, y_max], color='black')
1039
1039
  if event.button == 3:
1040
1040
  self.line, = self.ax.plot([x, x], [y_min, y_max], color='black')
1041
- self.label = self.ax.text(x, y_max, eels.find_all_edges(event.xdata, self.maximal_chemical_shift),
1042
- verticalalignment='top')
1041
+ self.label = self.ax.text(x, y_max, eels.find_all_edges(event.xdata, self.maximal_chemical_shift), verticalalignment='top')
1043
1042
  self.ax.set_ylim(y_min, y_max)
1044
1043
 
1045
1044
  def mouse_move(self, event):
pyTEMlib/eels_tools.py CHANGED
@@ -373,15 +373,30 @@ def shift_energy(dataset: sidpy.Dataset, shifts: np.ndarray) -> sidpy.Dataset:
373
373
 
374
374
 
375
375
  def align_zero_loss(dataset: sidpy.Dataset) -> sidpy.Dataset:
376
+ """
377
+ Shifts the energy axis of the input dataset to be aligned with the zero-loss peak.
378
+
379
+ Parameters:
380
+ -----------
381
+ dataset : sidpy.Dataset
382
+ The input dataset containing the energy axis to be aligned.
376
383
 
384
+ Returns:
385
+ --------
386
+ sidpy.Dataset
387
+ The dataset with the energy axis shifted to align the zero-loss peak.
388
+
389
+ """
377
390
  shifts = get_zero_loss_energy(dataset)
378
- print(shifts, dataset)
391
+ # print(shifts, dataset)
379
392
  new_si = shift_energy(dataset, shifts)
380
393
  new_si.metadata.update({'zero_loss': {'shifted': shifts}})
381
394
  return new_si
382
395
 
383
396
 
384
- def get_resolution_functions(dset: sidpy.Dataset, startFitEnergy: float=-1, endFitEnergy: float=+1,
397
+
398
+
399
+ def get_resolution_functions(dataset: sidpy.Dataset, startFitEnergy: float=-1, endFitEnergy: float=+1,
385
400
  n_workers: int=1, n_threads: int=8):
386
401
  """
387
402
  Analyze and fit low-loss EELS data within a specified energy range to determine zero-loss peaks.
@@ -392,33 +407,35 @@ def get_resolution_functions(dset: sidpy.Dataset, startFitEnergy: float=-1, endF
392
407
  from the dataset. The function handles both 2D and 3D datasets.
393
408
 
394
409
  Parameters:
395
- dset: sidpy.Dataset
396
- The dataset containing TEM spectral data.
397
- startFitEnergy: float
398
- The start energy of the fitting window.
399
- endFitEnergy: float
400
- The end energy of the fitting window.
401
- n_workers: int, optional
402
- The number of workers for parallel processing (default is 1).
403
- n_threads: int, optional
404
- The number of threads for parallel processing (default is 8).
410
+ -----------
411
+ dataset (sidpy.Dataset): The dataset containing TEM spectral data.
412
+ startFitEnergy (float): The start energy of the fitting window.
413
+ endFitEnergy (float): The end energy of the fitting window.
414
+ n_workers (int, optional): The number of workers for parallel processing (default is 1).
415
+ n_threads (int, optional): The number of threads for parallel processing (default is 8).
405
416
 
406
417
  Returns:
418
+ --------
407
419
  tuple: A tuple containing:
408
420
  - z_loss_dset (sidpy.Dataset): The dataset with added zero-loss peak information.
409
421
  - z_loss_params (numpy.ndarray): Array of parameters used for the zero-loss peak fitting.
410
422
 
411
423
  Raises:
424
+ -------
412
425
  ValueError: If the input dataset does not have the expected dimensions or format.
413
426
 
414
427
  Notes:
428
+ ------
415
429
  - The function expects `dset` to have specific dimensionalities and will raise an error if they are not met.
416
430
  - Parallel processing is employed to enhance performance, particularly for large datasets.
417
431
  """
418
- energy = dset.get_spectral_dims(return_axis=True)[0].values
432
+ energy = dataset.get_spectral_dims(return_axis=True)[0].values
419
433
  start_fit_pixel = np.searchsorted(energy, startFitEnergy)
420
434
  end_fit_pixel = np.searchsorted(energy, endFitEnergy)
421
435
  guess_width = (endFitEnergy - startFitEnergy)/2
436
+ if end_fit_pixel - start_fit_pixel < 5:
437
+ start_fit_pixel -= 2
438
+ end_fit_pixel += 2
422
439
 
423
440
  def get_good_guess(zl_func, energy, spectrum):
424
441
  popt, pcov = curve_fit(zl_func, energy, spectrum,
@@ -428,21 +445,21 @@ def get_resolution_functions(dset: sidpy.Dataset, startFitEnergy: float=-1, endF
428
445
 
429
446
  fit_energy = energy[start_fit_pixel:end_fit_pixel]
430
447
  # get a good guess for the fit parameters
431
- if len(dset.shape) == 3:
432
- fit_dset = dset[:, :, start_fit_pixel:end_fit_pixel]
448
+ if len(dataset.shape) == 3:
449
+ fit_dset = dataset[:, :, start_fit_pixel:end_fit_pixel]
433
450
  guess_amplitude = np.sqrt(fit_dset.max())
434
451
  guess_params = get_good_guess(zl_func, fit_energy, fit_dset.sum(axis=(0, 1))/fit_dset.shape[0]/fit_dset.shape[1])
435
- elif len(dset.shape) == 2:
436
- fit_dset = dset[:, start_fit_pixel:end_fit_pixel]
452
+ elif len(dataset.shape) == 2:
453
+ fit_dset = dataset[:, start_fit_pixel:end_fit_pixel]
437
454
  fit_energy = energy[start_fit_pixel:end_fit_pixel]
438
455
  guess_amplitude = np.sqrt(fit_dset.max())
439
456
  guess_params = get_good_guess(zl_func, fit_energy, fit_dset.sum(axis=0)/fit_dset.shape[0])
440
- elif len(dset.shape) == 1:
441
- fit_dset = dset[start_fit_pixel:end_fit_pixel]
457
+ elif len(dataset.shape) == 1:
458
+ fit_dset = dataset[start_fit_pixel:end_fit_pixel]
442
459
  fit_energy = energy[start_fit_pixel:end_fit_pixel]
443
460
  guess_amplitude = np.sqrt(fit_dset.max())
444
461
  guess_params = get_good_guess(zl_func, fit_energy, fit_dset)
445
- z_loss_dset = dset.copy()
462
+ z_loss_dset = dataset.copy()
446
463
  z_loss_dset *= 0.0
447
464
  z_loss_dset += zl_func(energy, *guess_params)
448
465
  if 'zero_loss' not in z_loss_dset.metadata:
@@ -450,11 +467,11 @@ def get_resolution_functions(dset: sidpy.Dataset, startFitEnergy: float=-1, endF
450
467
  z_loss_dset.metadata['zero_loss'].update({'startFitEnergy': startFitEnergy,
451
468
  'endFitEnergy': endFitEnergy,
452
469
  'fit_parameter': guess_params,
453
- 'original_low_loss': dset.title})
470
+ 'original_low_loss': dataset.title})
454
471
  return z_loss_dset
455
472
  else:
456
473
  print('Error: need a spectrum or spectral image sidpy dataset')
457
- print('Not dset.shape = ', dset.shape)
474
+ print('Not dset.shape = ', dataset.shape)
458
475
  return None
459
476
 
460
477
  # define guess function for SidFitter
@@ -466,7 +483,7 @@ def get_resolution_functions(dset: sidpy.Dataset, startFitEnergy: float=-1, endF
466
483
  return_cov=False, return_fit=False, return_std=False, km_guess=False, num_fit_parms=6)
467
484
 
468
485
  [z_loss_params] = zero_loss_fitter.do_fit()
469
- z_loss_dset = dset.copy()
486
+ z_loss_dset = dataset.copy()
470
487
  z_loss_dset *= 0.0
471
488
 
472
489
  energy_grid = np.broadcast_to(energy.reshape((1, 1, -1)), (z_loss_dset.shape[0],
@@ -480,7 +497,7 @@ def get_resolution_functions(dset: sidpy.Dataset, startFitEnergy: float=-1, endF
480
497
  z_loss_dset.metadata['zero_loss'].update({'startFitEnergy': startFitEnergy,
481
498
  'endFitEnergy': endFitEnergy,
482
499
  'fit_parameter': z_loss_params,
483
- 'original_low_loss': dset.title})
500
+ 'original_low_loss': dataset.title})
484
501
 
485
502
 
486
503
  return z_loss_dset
@@ -503,7 +520,7 @@ def drude_lorentz(eps_inf, leng, ep, eb, gamma, e, amplitude):
503
520
  return eps
504
521
 
505
522
 
506
- def fit_plasmon(dataset, startFitEnergy, endFitEnergy, plot_result=False, number_workers=4, number_threads=8):
523
+ def fit_plasmon(dataset: Union[sidpy.Dataset, np.ndarray], startFitEnergy: float, endFitEnergy: float, plot_result: bool = False, number_workers: int = 4, number_threads: int = 8) -> Union[sidpy.Dataset, np.ndarray]:
507
524
  """
508
525
  Fit plasmon peak positions and widths in a TEM dataset using a Drude model.
509
526
 
@@ -540,7 +557,7 @@ def fit_plasmon(dataset, startFitEnergy, endFitEnergy, plot_result=False, number
540
557
  - If `plot_result` is True, the function plots Ep, Ew, and A as separate subplots.
541
558
  """
542
559
  # define Drude function for plasmon fitting
543
- def energy_loss_function(E, Ep, Ew, A):
560
+ def energy_loss_function(E: np.ndarray, Ep: float, Ew: float, A: float) -> np.ndarray:
544
561
  E = E/E.max()
545
562
  eps = 1 - Ep**2/(E**2+Ew**2) + 1j * Ew * Ep**2/E/(E**2+Ew**2)
546
563
  elf = (-1/eps).imag
@@ -592,7 +609,6 @@ def drude_simulation(dset, e, ep, ew, tnm, eb):
592
609
  Gives probabilities of dielectric function eps relative to zero-loss integral (i0 = 1) per eV
593
610
  Details in R.F.Egerton: EELS in the Electron Microscope, 3rd edition, Springer 2011
594
611
 
595
- # function drude(ep,ew,eb,epc,e0,beta,nn,tnm)
596
612
  # Given the plasmon energy (ep), plasmon fwhm (ew) and binding energy(eb),
597
613
  # this program generates:
598
614
  # EPS1, EPS2 from modified Eq. (3.40), ELF=Im(-1/EPS) from Eq. (3.42),
@@ -603,41 +619,10 @@ def drude_simulation(dset, e, ep, ew, tnm, eb):
603
619
  # Details in R.F.Egerton: EELS in the Electron Microscope, 3rd edition, Springer 2011
604
620
  # Version 10.11.26
605
621
 
606
-
607
- b.7 drude Simulation of a Low-Loss Spectrum
608
- The program DRUDE calculates a single-scattering plasmon-loss spectrum for
609
- a specimen of a given thickness tnm (in nm), recorded with electrons of a
610
- specified incident energy e0 by a spectrometer that accepts scattering up to a
611
- specified collection semi-angle beta. It is based on the extended drude model
612
- (Section 3.3.2), with a volume energy-loss function elf in accord with Eq. (3.64) and
613
- a surface-scattering energy-loss function srelf as in Eq. (4.31). Retardation effects
614
- and coupling between the two surface modes are not included. The surface term can
615
- be made negligible by entering a large specimen thickness (tnm > 1000).
616
- Surface intensity srfint and volume intensity volint are calculated from
617
- Eqs. (4.31) and (4.26), respectively. The total spectral intensity ssd is written to
618
- the file DRUDE.SSD, which can be used as input for KRAKRO. These intensities are
619
- all divided by i0, to give relative probabilities (per eV). The real and imaginary parts
620
- of the dielectric function are written to DRUDE.EPS and can be used for comparison
621
- with the results of Kramers–Kronig analysis (KRAKRO.DAT).
622
- Written output includes the surface-loss probability Ps, obtained by integrating
623
- srfint (a value that relates to two surfaces but includes the negative begrenzungs
624
- term), for comparison with the analytical integration represented by Eq. (3.77). The
625
- volume-loss probability p_v is obtained by integrating volint and is used to calculate
626
- the volume plasmon mean free path (lam = tnm/p_v). The latter is listed and
627
- compared with the MFP obtained from Eq. (3.44), which represents analytical integration
628
- assuming a zero-width plasmon peak. The total probability (Pt = p_v+Ps) is
629
- calculated and used to evaluate the thickness (lam.Pt) that would be given by the formula
630
- t/λ = ln(It/i0), ignoring the surface-loss probability. Note that p_v will exceed
631
- 1 for thicker specimens (t/λ > 1), since it represents the probability of plasmon
632
- scattering relative to that of no inelastic scattering.
633
- The command-line usage is drude(ep,ew,eb,epc,beta,e0,tnm,nn), where ep is the
634
- plasmon energy, ew the plasmon width, eb the binding energy of the electrons (0 for
635
- a metal), and nn is the number of channels in the output spectrum. An example of
636
- the output is shown in Fig. b.1a,b.
637
-
638
622
  """
639
-
640
- epc = dset.energy_scale[1] - dset.energy_scale[0] # input('ev per channel : ');
623
+ energy_scale = dset.get_spectral_dims(return_axis=True)[0].values
624
+
625
+ epc = energy_scale[1] - energy_scale[0] # input('ev per channel : ');
641
626
 
642
627
  b = dset.metadata['collection_angle'] / 1000. # rad
643
628
  epc = dset.energy_scale[1] - dset.energy_scale[0] # input('ev per channel : ');
@@ -1465,31 +1450,31 @@ def fit_edges2(spectrum, energy_scale, edges):
1465
1450
 
1466
1451
 
1467
1452
  def model(xx, pp):
1468
- yy = pp[0] + x**pp[1] + pp[2] + pp[3] * xx + pp[4] * xx * xx
1453
+ yy = pp[0] * xx**pp[1] + pp[2] + pp[3]* xx + pp[4] * xx * xx
1469
1454
  for i in range(number_of_edges):
1470
1455
  pp[i+5] = np.abs(pp[i+5])
1471
1456
  yy = yy + pp[i+5] * xsec[i, :]
1472
1457
  return yy
1473
1458
 
1474
1459
  def residuals(pp, xx, yy):
1475
- err = np.abs((yy - model(xx, pp)) * mask) # / np.sqrt(np.abs(y))
1460
+ err = np.abs((yy - model(xx, pp)) * mask) / np.sqrt(np.abs(y))
1476
1461
  return err
1477
1462
 
1478
1463
  scale = y[100]
1479
- pin = np.array([A,r, 10., 1., 0.00] + [scale/5] * number_of_edges)
1464
+ pin = np.array([A,-r, 10., 1., 0.00] + [scale/5] * number_of_edges)
1480
1465
  [p, _] = leastsq(residuals, pin, args=(x, y))
1481
1466
 
1482
1467
  for key in edges:
1483
1468
  if key.isdigit():
1484
1469
  edges[key]['areal_density'] = p[int(key)+5]
1485
-
1470
+ print(p)
1486
1471
  edges['model'] = {}
1487
- edges['model']['background'] = (background + p[6] + p[7] * x + p[8] * x * x)
1488
- edges['model']['background-poly_0'] = p[6]
1489
- edges['model']['background-poly_1'] = p[7]
1490
- edges['model']['background-poly_2'] = p[8]
1491
- edges['model']['background-A'] = A
1492
- edges['model']['background-r'] = r
1472
+ edges['model']['background'] = ( p[0] * np.power(x, -p[1])+ p[2]+ x**p[3] + p[4] * x * x)
1473
+ edges['model']['background-poly_0'] = p[2]
1474
+ edges['model']['background-poly_1'] = p[3]
1475
+ edges['model']['background-poly_2'] = p[4]
1476
+ edges['model']['background-A'] = p[0]
1477
+ edges['model']['background-r'] = p[1]
1493
1478
  edges['model']['spectrum'] = model(x, p)
1494
1479
  edges['model']['blurred'] = blurred
1495
1480
  edges['model']['mask'] = mask
pyTEMlib/file_tools.py CHANGED
@@ -82,11 +82,12 @@ class FileWidget(ipywidgets.DOMWidget):
82
82
 
83
83
  """
84
84
 
85
- def __init__(self, dir_name=None, extension=['*']):
85
+ def __init__(self, dir_name=None, extension=['*'], sum_frames=False):
86
86
  self.save_path = False
87
87
  self.dir_dictionary = {}
88
88
  self.dir_list = ['.', '..']
89
89
  self.display_list = ['.', '..']
90
+ self.sum_frames = sum_frames
90
91
 
91
92
  self.dir_name = '.'
92
93
  if dir_name is None:
@@ -148,7 +149,7 @@ class FileWidget(ipywidgets.DOMWidget):
148
149
  self.datasets = {}
149
150
  #self.loaded_datasets.value = self.dataset_list[0]
150
151
  self.dataset_list = []
151
- self.datasets = open_file(self.file_name)
152
+ self.datasets = open_file(self.file_name, sum_frames=self.sum_frames)
152
153
  self.dataset_list = []
153
154
  for key in self.datasets.keys():
154
155
  self.dataset_list.append(f'{key}: {self.datasets[key].title}')
@@ -593,7 +594,7 @@ def h5_group_to_dict(group, group_dict={}):
593
594
  return group_dict
594
595
 
595
596
 
596
- def open_file(filename=None, h5_group=None, write_hdf_file=False): # save_file=False,
597
+ def open_file(filename=None, h5_group=None, write_hdf_file=False, sum_frames=False): # save_file=False,
597
598
  """Opens a file if the extension is .hf5, .ndata, .dm3 or .dm4
598
599
 
599
600
  If no filename is provided the QT open_file windows opens (if QT_available==True)
@@ -680,7 +681,7 @@ def open_file(filename=None, h5_group=None, write_hdf_file=False): # save_file
680
681
  print('This file type needs hyperspy to be installed to be able to be read')
681
682
  return
682
683
  elif extension == '.emd':
683
- reader = SciFiReaders.EMDReader(filename)
684
+ reader = SciFiReaders.EMDReader(filename, sum_frames=sum_frames)
684
685
 
685
686
  elif 'edax' in extension.lower():
686
687
  if 'h5' in extension:
@@ -730,7 +731,13 @@ def open_file(filename=None, h5_group=None, write_hdf_file=False): # save_file
730
731
  read_essential_metadata(dset)
731
732
  dset.metadata['filename'] = filename
732
733
  dataset_dict = {'Channel_000': dset}
733
-
734
+
735
+ # Temporary Fix for dual eels spectra in dm files
736
+ # Todo: Fix in SciFiReaders
737
+ for dset in dataset_dict.values():
738
+ if 'single_exposure_time' in dset.metadata['experiment']:
739
+ dset.metadata['experiment']['exposure_time'] = dset.metadata['experiment']['number_of_frames'] * \
740
+ dset.metadata['experiment']['single_exposure_time']
734
741
  if write_hdf_file:
735
742
  h5_master_group = save_dataset(dataset_dict, filename=filename)
736
743