pyTEMlib 0.2023.4.0__py2.py3-none-any.whl → 0.2023.8.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyTEMlib might be problematic. Click here for more details.

@@ -12,7 +12,7 @@ try:
12
12
 
13
13
  except:
14
14
  Qt_available = False
15
- print('Qt dialogs are not available')
15
+ # print('Qt dialogs are not available')
16
16
 
17
17
  import sidpy
18
18
  import matplotlib.patches as patches
@@ -233,7 +233,7 @@ if Qt_available:
233
233
 
234
234
  self.selector = SpanSelector(self.axis, self.line_select_callback,
235
235
  direction="horizontal",
236
- span_stays=True,
236
+ interactive=True,
237
237
  props=dict(facecolor='blue', alpha=0.2))
238
238
  self.edit1.setText(f'{self.x_min:.3f}')
239
239
  self.edit2.setText(f'{self.x_max:.3f}')
@@ -364,8 +364,10 @@ class RegionSelector(object):
364
364
  self.xmin = 0
365
365
  self.width = 0
366
366
 
367
- self.span = SpanSelector(ax, self.on_select1, 'horizontal', useblit=True,
368
- rectprops=dict(alpha=0.5, facecolor='red'), span_stays=True)
367
+ self.span = SpanSelector(ax, self.on_select1,
368
+ direction="horizontal",
369
+ interactive=True,
370
+ props=dict(facecolor='blue', alpha=0.2))
369
371
  self.cid = ax.figure.canvas.mpl_connect('key_press_event', self.click)
370
372
  self.draw = ax.figure.canvas.mpl_connect('draw_event', self.onresize)
371
373
 
@@ -552,6 +554,8 @@ def get_likely_edges(energy_scale):
552
554
  return likely_edges
553
555
 
554
556
 
557
+
558
+
555
559
  def get_periodic_table_info():
556
560
  """Info for periodic table dialog"""
557
561
  pt_info = \
pyTEMlib/eels_dlg.py CHANGED
@@ -15,7 +15,7 @@ try:
15
15
  from PyQt5 import QtCore, QtGui, QtWidgets
16
16
  except:
17
17
  Qt_available = False
18
- print('Qt dialogs are not available')
18
+ # print('Qt dialogs are not available')
19
19
 
20
20
  if Qt_available:
21
21
  class UiDialog(object):
@@ -23,7 +23,7 @@ if Qt_available:
23
23
  def __init__(self, dialog):
24
24
  dialog.setObjectName('Fit Composition Input')
25
25
  dialog.resize(371, 184)
26
-
26
+ self.dialog = dialog
27
27
  valid_float = QtGui.QDoubleValidator()
28
28
  valid_int = QtGui.QIntValidator()
29
29
  valid_int.setBottom(0)
@@ -85,7 +85,7 @@ if Qt_available:
85
85
  self.separator2.setText("Elements")
86
86
  self.layout.addWidget(self.separator2, row, 0, 1, 4)
87
87
  ######################################################################
88
- # row += 1
88
+
89
89
  self.auto_id_button = QtWidgets.QPushButton('Auto ID', dialog)
90
90
  self.select_button = QtWidgets.QPushButton('Manual ID', dialog)
91
91
 
@@ -238,6 +238,7 @@ if Qt_available:
238
238
 
239
239
  self.do_fit_button = QtWidgets.QPushButton('Fit Composition', dialog)
240
240
  self.do_fit_button.setCheckable(True)
241
+ self.do_fit_button.setDefault(True)
241
242
 
242
243
  self.layout.addWidget(self.do_all_button, row, 0)
243
244
  self.layout.addWidget(self.progress, row, 1)
pyTEMlib/eels_tools.py CHANGED
@@ -28,11 +28,6 @@ from scipy.ndimage import gaussian_filter
28
28
 
29
29
  from scipy import constants
30
30
  import matplotlib.pyplot as plt
31
- # import matplotlib.patches as patches
32
-
33
- # from matplotlib.widgets import SpanSelector
34
- # import ipywidgets as widgets
35
- # from IPython.display import display
36
31
 
37
32
  import requests
38
33
 
@@ -44,6 +39,7 @@ import pickle # pkg_resources,
44
39
  import pyTEMlib.file_tools as ft
45
40
  from pyTEMlib.xrpa_x_sections import x_sections
46
41
 
42
+ import sidpy
47
43
  from sidpy.base.num_utils import get_slope
48
44
 
49
45
  major_edges = ['K1', 'L3', 'M5', 'N5']
@@ -186,6 +182,7 @@ def fit_peaks(spectrum, energy_scale, pin, start_fit, end_fit, only_positive_int
186
182
  # TODO: remove zero_loss_fit_width add absolute
187
183
 
188
184
  fit_energy = energy_scale[start_fit:end_fit]
185
+ spectrum = np.array(spectrum)
189
186
  fit_spectrum = spectrum[start_fit:end_fit]
190
187
 
191
188
  pin_flat = [item for sublist in pin for item in sublist]
@@ -939,6 +936,7 @@ def find_peaks(dataset, fit_start, fit_end, sensitivity=2):
939
936
 
940
937
  difference = np.array(spectrum)[start_channel:end_channel] - model
941
938
  fit = np.zeros(len(energy_scale))
939
+ p_out = []
942
940
  if len(peaks) > 0:
943
941
  p_in = np.ravel([[energy_scale[i], difference[i], .7] for i in peaks])
944
942
  [p_out, _] = scipy.optimize.leastsq(residuals_smooth, p_in, ftol=1e-3, args=(energy_scale,
@@ -1100,11 +1098,26 @@ def fit_model(x, y, pin, number_of_peaks, peak_shape, p_zl, restrict_pos=0, rest
1100
1098
  return p, peak_shape
1101
1099
 
1102
1100
 
1103
- def fix_energy_scale(spec, energy):
1104
- """Shift energy scale according to zero-loss peak position"""
1101
+ def fix_energy_scale(spec, energy=None):
1102
+ """Shift energy scale according to zero-loss peak position
1103
+
1104
+ This function assumes that the zero-loss peak is the maximum of the spectrum.
1105
+ """
1106
+
1105
1107
  # determine start and end fitting region in pixels
1106
- start = np.searchsorted(energy, -10)
1107
- end = np.searchsorted(energy, 10)
1108
+ if isinstance(spec, sidpy.Dataset):
1109
+ if energy is None:
1110
+ energy = spec.energy_loss.values
1111
+ spec = np.array(spec)
1112
+
1113
+ else:
1114
+ if energy is None:
1115
+ return
1116
+ if not isinstance(spec, np.ndarray):
1117
+ return
1118
+
1119
+ start = np.searchsorted(np.array(energy), -10)
1120
+ end = np.searchsorted(np.array(energy), 10)
1108
1121
  startx = np.argmax(spec[start:end]) + start
1109
1122
 
1110
1123
  end = startx + 3
@@ -1148,10 +1161,14 @@ def resolution_function2(dataset, width =0.3):
1148
1161
  [p_zl, _] = leastsq(zl2, p0, args=(y, x), maxfev=2000)
1149
1162
 
1150
1163
  z_loss = zl_func(p_zl, dataset.energy_loss)
1151
- z_loss = dataset.like_array(z_loss)
1164
+ z_loss = dataset.like_data(z_loss)
1152
1165
  z_loss.title = 'resolution_function'
1153
1166
  z_loss.metadata['zero_loss_parameter']=p_zl
1154
- return z_loss, p_zl
1167
+
1168
+ dataset.metadata['low_loss']['zero_loss'] = {'zero_loss_parameter': p_zl,
1169
+ 'zero_loss_fit': 'Product2Lorentzians'}
1170
+ zero_loss = dataset.like_array(z_loss)
1171
+ return zero_loss, p_zl
1155
1172
 
1156
1173
 
1157
1174
 
@@ -1217,26 +1234,69 @@ def resolution_function(energy_scale, spectrum, width, verbose=False):
1217
1234
  return z_loss, p_zl
1218
1235
 
1219
1236
 
1220
- def get_energy_shifts(spectrum_image, energy_scale, zero_loss_fit_width):
1221
- """get shift of spectrum form zero-loss peak position"""
1237
+ def get_energy_shifts(spectrum_image, energy_scale=None, zero_loss_fit_width=0.3):
1238
+ """ get shift of spectrum from zero-loss peak position
1239
+ better to use get_resolution_functions
1240
+ """
1241
+ resolution_functions = get_resolution_functions(spectrum_image, energy_scale=energy_scale, zero_loss_fit_width=zero_loss_fit_width)
1242
+ return resolution_functions.metadata['low_loss']['shifts'], resolution_functions.metadata['low_loss']['widths']
1243
+
1244
+ def get_resolution_functions(spectrum_image, energy_scale=None, zero_loss_fit_width=0.3):
1245
+ """get resolution_function and shift of spectra form zero-loss peak position"""
1246
+ if isinstance(spectrum_image, sidpy.Dataset):
1247
+ energy_dimension = spectrum_image.get_dimensions_by_type('spectral')
1248
+ if len(energy_dimension) != 1:
1249
+ raise TypeError('Dataset needs to have exactly one spectral dimension to analyze zero-loss peak')
1250
+ energy_dimension = spectrum_image.get_dimension_by_number(energy_dimension)[0]
1251
+ energy_scale = energy_dimension.values
1252
+ spatial_dimension = spectrum_image.get_dimensions_by_type('spatial')
1253
+ if len(spatial_dimension) == 0:
1254
+ fwhm, delta_e = fix_energy_scale(spectrum_image)
1255
+ z_loss, p_zl = resolution_function(energy_scale - delta_e, spectrum_image, zero_loss_fit_width)
1256
+ fwhm2, delta_e2 = fix_energy_scale(z_loss, energy_scale - delta_e)
1257
+ return delta_e + delta_e2, fwhm2
1258
+ elif len(spatial_dimension) != 2:
1259
+ return
1222
1260
  shifts = np.zeros(spectrum_image.shape[0:2])
1261
+ widths = np.zeros(spectrum_image.shape[0:2])
1262
+ resolution_functions = spectrum_image.copy()
1223
1263
  for x in range(spectrum_image.shape[0]):
1224
1264
  for y in range(spectrum_image.shape[1]):
1225
- spectrum = spectrum_image[x, y, :]
1265
+ spectrum = np.array(spectrum_image[x, y])
1226
1266
  fwhm, delta_e = fix_energy_scale(spectrum, energy_scale)
1227
1267
  z_loss, p_zl = resolution_function(energy_scale - delta_e, spectrum, zero_loss_fit_width)
1268
+ resolution_functions[x, y] = z_loss
1228
1269
  fwhm2, delta_e2 = fix_energy_scale(z_loss, energy_scale - delta_e)
1229
1270
  shifts[x, y] = delta_e + delta_e2
1230
- return shifts
1271
+ widths[x,y] = fwhm2
1231
1272
 
1273
+ resolution_functions.metadata['low_loss'] = {'shifts': shifts,
1274
+ 'widths': widths}
1275
+ return resolution_functions
1232
1276
 
1233
- def shift_on_same_scale(spectrum_image, shift, energy_scale, master_energy_scale):
1234
- """shift spectrum in energy"""
1235
1277
 
1236
- new_si = np.zeros(spectrum_image.shape)
1278
+ def shift_on_same_scale(spectrum_image, shifts=None, energy_scale=None, master_energy_scale=None):
1279
+ """shift spectrum in energy"""
1280
+ if isinstance(spectrum_image, sidpy.Dataset):
1281
+ if shifts is None:
1282
+ if 'low_loss' in spectrum_image.metadata:
1283
+ if 'shifts' in spectrum_image.metadata['low_loss']:
1284
+ shifts = spectrum_image.metadata['low_loss']['shifts']
1285
+ else:
1286
+ resolution_functions = get_resolution_functions(spectrum_image)
1287
+ shifts = resolution_functions.metadata['low_loss']['shifts']
1288
+ energy_dimension = spectrum_image.get_dimensions_by_type('spectral')
1289
+ if len(energy_dimension) != 1:
1290
+ raise TypeError('Dataset needs to have exactly one spectral dimension to analyze zero-loss peak')
1291
+ energy_dimension = spectrum_image.get_dimension_by_number(energy_dimension)[0]
1292
+ energy_scale = energy_dimension.values
1293
+ master_energy_scale = energy_scale.copy()
1294
+
1295
+ new_si = spectrum_image.copy()
1296
+ new_si *= 0.0
1237
1297
  for x in range(spectrum_image.shape[0]):
1238
1298
  for y in range(spectrum_image.shape[1]):
1239
- tck = interpolate.splrep(energy_scale - shift[x, y], spectrum_image[x, y, :], k=1, s=0)
1299
+ tck = interpolate.splrep(np.array(energy_scale - shifts[x, y]), np.array(spectrum_image[x, y]), k=1, s=0)
1240
1300
  new_si[x, y, :] = interpolate.splev(master_energy_scale, tck, der=0)
1241
1301
  return new_si
1242
1302
 
@@ -1458,29 +1518,27 @@ def drude_simulation(dset, e, ep, ew, tnm, eb):
1458
1518
  the output is shown in Fig. b.1a,b.
1459
1519
 
1460
1520
  """
1521
+
1461
1522
  epc = dset.energy_scale[1] - dset.energy_scale[0] # input('ev per channel : ');
1462
- e0 = dset.metadata['acceleration_voltage'] / 1000. # input('incident energy e0(kev) : ');
1463
- # tnm = input('thickness(nm) : ');
1464
-
1465
- b = dset.metadata['collection_angle'] # rad
1523
+
1524
+ b = dset.metadata['collection_angle']/ 1000. # rad
1466
1525
  epc = dset.energy_scale[1] - dset.energy_scale[0] # input('ev per channel : ');
1467
1526
  e0 = dset.metadata['acceleration_voltage'] / 1000. # input('incident energy e0(kev) : ');
1468
1527
 
1528
+ # effective kinetic energy: T = m_o v^2/2,
1469
1529
  t = 1000.0 * e0 * (1. + e0 / 1022.12) / (1.0 + e0 / 511.06) ** 2 # eV # equ.5.2a or Appendix E p 427
1530
+
1531
+ # 2 gamma T
1470
1532
  tgt = 1000 * e0 * (1022.12 + e0) / (511.06 + e0) # eV Appendix E p 427
1533
+
1471
1534
  rk0 = 2590 * (1.0 + e0 / 511.06) * np.sqrt(2.0 * t / 511060)
1535
+
1472
1536
  os = e[0]
1473
1537
  ew_mod = eb
1474
1538
  tags = dset.metadata
1475
- # eps = 1 - ep**2/(e**2-eb**2+2*e*ew*1j) # eq 3.64
1476
- # eps = 1 - ep**2/(e**2+2*e*ew*1j) # eq 3.64
1477
- # eps = 1 - (ep**2)/(e**2+e*ew*1j-ep**2) # Lorentz Term
1539
+
1478
1540
  eps = 1 - (ep ** 2 - ew_mod * e * 1j) / (e ** 2 + 2 * e * ew * 1j) # Mod drude term
1479
- # eps1 = np.real(eps);
1480
- # eps2 = np.imag(eps);
1481
- # eps1 = 1. - ep.^2./(e.^2+ew.^2); #eq 3.40
1482
- # eps2 = ew.*ep.^2./e./(e.^2+ew.^2);#eq 3.40
1483
- # elf = ep**2*e*ew/((e**2-ep**2)**2+(e*ew)**2); # eq 3.40?
1541
+
1484
1542
  eps[np.nonzero(eps == 0.0)] = 1e-19
1485
1543
  elf = np.imag(-1 / eps)
1486
1544
 
@@ -1490,16 +1548,16 @@ def drude_simulation(dset, e, ep, ew, tnm, eb):
1490
1548
  angdep = np.arctan(b / the) / the - b / (b * b + the * the)
1491
1549
  srfint = angdep * srfelf / (3.1416 * 0.05292 * rk0 * t) # probability per eV
1492
1550
  anglog = np.log(1.0 + b * b / the / the)
1493
- i0 = tags['spec'].sum() # *tags['counts2e']
1494
- # print('counts2e',1/tags['counts2e'])
1551
+ i0 = dset.sum() # *tags['counts2e']
1552
+
1495
1553
 
1496
1554
  # 2 * t = m_0 v**2 !!! a_0 = 0.05292 nm
1497
1555
  volint = abs(tnm / (np.pi * 0.05292 * t * 2.0) * elf * anglog) # S equ 4.26% probability per eV
1498
1556
  volint = volint * i0 / epc # S probability per channel
1499
1557
  ssd = volint # + srfint;
1500
1558
 
1501
- if os < -1.0:
1502
- xs = int(abs(-os / epc))
1559
+ if e[0] < -1.0:
1560
+ xs = int(abs(-e[0] / epc))
1503
1561
 
1504
1562
  ssd[0:xs] = 0.0
1505
1563
  volint[0:xs] = 0.0
@@ -1512,10 +1570,6 @@ def drude_simulation(dset, e, ep, ew, tnm, eb):
1512
1570
  lam = tnm / p_v # does NOT depend on free-electron approximation (no damping).
1513
1571
  lamfe = 4.0 * 0.05292 * t / ep / np.log(1 + (b * tgt / ep) ** 2) # Eq.(3.44) approximation
1514
1572
 
1515
- # print('p_s(2surfaces+begrenzung terms) =', p_s, 'p_v=t/lambda(beta)= ',p_v,'\n');
1516
- # print('Volume-plasmon MFP(nm) = ', lam,' Free-electron MFP(nm) = ',lamfe,'\n');
1517
- # print('--------------------------------\n');
1518
-
1519
1573
  tags['eps'] = eps
1520
1574
  tags['lam'] = lam
1521
1575
  tags['lamfe'] = lamfe
pyTEMlib/file_tools.py CHANGED
@@ -95,6 +95,8 @@ class FileWidget(object):
95
95
  self.dir_list = ['.']
96
96
  self.extensions = extension
97
97
  self.file_name = ''
98
+ self.datasets ={}
99
+ self.dataset = None
98
100
 
99
101
  self.select_files = widgets.Select(
100
102
  options=self.dir_list,
@@ -104,9 +106,56 @@ class FileWidget(object):
104
106
  rows=10,
105
107
  layout=widgets.Layout(width='70%')
106
108
  )
107
- display(self.select_files)
109
+
110
+ select_button = widgets.Button(description='Select Main',
111
+ layout=widgets.Layout(width='auto', grid_area='header'),
112
+ style=widgets.ButtonStyle(button_color='lightblue'))
113
+
114
+ add_button = widgets.Button(description='Add',
115
+ layout=widgets.Layout(width='auto', grid_area='header'),
116
+ style=widgets.ButtonStyle(button_color='lightblue'))
117
+
118
+ self.path_choice = widgets.Dropdown(options=['None'],
119
+ value='None',
120
+ description='directory:',
121
+ disabled=False,
122
+ button_style='',
123
+ layout=widgets.Layout(width='90%'))
124
+ self.dataset_list = ['None']
125
+ self.loaded_datasets = widgets.Dropdown(options=self.dataset_list,
126
+ value=self.dataset_list[0],
127
+ description='loaded datasets:',
128
+ disabled=False,
129
+ button_style='')
130
+
108
131
  self.set_options()
132
+ ui = widgets.VBox([self.path_choice, self.select_files, widgets.HBox([select_button, add_button, self.loaded_datasets])])
133
+ display(ui)
134
+
109
135
  self.select_files.observe(self.get_file_name, names='value')
136
+ self.path_choice.observe(self.set_dir, names='value')
137
+
138
+ select_button.on_click(self.select_main)
139
+ add_button.on_click(self.add_dataset)
140
+ self.loaded_datasets.observe(self.selected_dataset)
141
+
142
+ def select_main(self, value=0):
143
+ self.datasets = {}
144
+ self.loaded_datasets.value = self.dataset_list[0]
145
+ self.datasets = open_file(self.file_name)
146
+ self.dataset_list = []
147
+ for key in self.datasets.keys():
148
+ self.dataset_list.append(f'{key}: {self.datasets[key].title}')
149
+ self.loaded_datasets.options = self.dataset_list
150
+ self.loaded_datasets.value = self.dataset_list[0]
151
+ self.dataset = self.datasets[list(self.datasets.keys())[0]]
152
+ self.selected_dataset = self.dataset
153
+
154
+ def add_dataset(self, value=0):
155
+ key = add_dataset_from_file(self.datasets, self.file_name, 'Channel')
156
+ self.dataset_list.append(f'{key}: {self.datasets[key].title}')
157
+ self.loaded_datasets.options = self.dataset_list
158
+ self.loaded_datasets.value = self.dataset_list[-1]
110
159
 
111
160
  def get_directory(self, directory=None):
112
161
  self.dir_name = directory
@@ -114,6 +163,17 @@ class FileWidget(object):
114
163
  self.dir_list = []
115
164
  self.dir_list = ['.', '..'] + os.listdir(directory)
116
165
 
166
+ def set_dir(self, value=0):
167
+ self.dir_name = self.path_choice.value
168
+ self.select_files.index = 0
169
+ self.set_options()
170
+
171
+ def selected_dataset(self, value=0):
172
+
173
+ key = self.loaded_datasets.value.split(':')[0]
174
+ if key != 'None':
175
+ self.selected_dataset = self.datasets[key]
176
+
117
177
  def set_options(self):
118
178
  self.dir_name = os.path.abspath(os.path.join(self.dir_name, self.dir_list[self.select_files.index]))
119
179
  dir_list = os.listdir(self.dir_name)
@@ -137,6 +197,16 @@ class FileWidget(object):
137
197
 
138
198
  self.dir_label = os.path.split(self.dir_name)[-1] + ':'
139
199
  self.select_files.options = self.display_list
200
+
201
+ path = self.dir_name
202
+ old_path = ' '
203
+ path_list = []
204
+ while path != old_path:
205
+ path_list.append(path)
206
+ old_path = path
207
+ path = os.path.split(path)[0]
208
+ self.path_choice.options = path_list
209
+ self.path_choice.value = path_list[0]
140
210
 
141
211
  def get_file_name(self, b):
142
212
 
@@ -589,7 +659,7 @@ def open_file(filename=None, h5_group=None, write_hdf_file=False): # save_file
589
659
 
590
660
  # tags = open_file(filename)
591
661
  if extension in ['.dm3', '.dm4']:
592
- reader = SciFiReaders.DM3Reader(filename)
662
+ reader = SciFiReaders.DMReader(filename)
593
663
 
594
664
  elif extension in ['.emi']:
595
665
  try:
@@ -875,7 +945,7 @@ def log_results(h5_group, dataset=None, attributes=None):
875
945
  return log_group
876
946
 
877
947
 
878
- def add_dataset_from_file(datasets, filename=None, keyname='Log'):
948
+ def add_dataset_from_file(datasets, filename=None, key_name='Log', single_dataset=True):
879
949
  """Add dataset to datasets dictionary
880
950
 
881
951
  Parameters
@@ -884,23 +954,35 @@ def add_dataset_from_file(datasets, filename=None, keyname='Log'):
884
954
  dictionary to write to file
885
955
  filename: str, default: None,
886
956
 name of file to open, if None, a dialog will appear
887
- keyname: str, default: 'Log'
957
+ key_name: str, default: 'Log'
888
958
  name for key in dictionary with running number being added
889
959
 
890
-
891
960
  Returns
892
961
  -------
893
- nothing
962
+ key_name: str
963
+ actual last used name of dictionary key
894
964
  """
895
965
 
896
966
  datasets2 = open_file(filename=filename)
897
- index = 0
898
- for key in datasets.keys():
899
- if keyname in key:
900
- if int(key[-3:]) >= index:
901
- index = int(key[-3:])+1
902
- for dataset in datasets2.values():
903
- datasets[keyname+f'_{index:03}'] = dataset
967
+ first_dataset = datasets2[list(datasets2)[0]]
968
+ if isinstance(first_dataset, sidpy.Dataset):
969
+
970
+ index = 0
971
+ for key in datasets.keys():
972
+ if key_name in key:
973
+ if int(key[-3:]) >= index:
974
+ index = int(key[-3:])+1
975
+ if single_dataset:
976
+ datasets[key_name+f'_{index:03}'] = first_dataset
977
+ else:
978
+ for dataset in datasets2.values():
979
+ datasets[key_name+f'_{index:03}'] = dataset
980
+ index += 1
981
+ index -= 1
982
+ else:
983
+ return None
984
+
985
+ return f'{key_name}_{index:03}'
904
986
 
905
987
 
906
988
  # ##