pyTEMlib 0.2025.2.2__tar.gz → 0.2025.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyTEMlib might be problematic; consult the registry's advisory page for more details.

Files changed (45):
  1. {pytemlib-0.2025.2.2/pyTEMlib.egg-info → pytemlib-0.2025.4.0}/PKG-INFO +4 -3
  2. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/atom_tools.py +2 -0
  3. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/core_loss_widget.py +2 -4
  4. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/eels_dialog.py +1 -1
  5. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/eels_tools.py +11 -10
  6. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/file_tools.py +106 -16
  7. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/graph_tools.py +95 -35
  8. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/image_tools.py +78 -93
  9. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/probe_tools.py +200 -4
  10. pytemlib-0.2025.4.0/pyTEMlib/version.py +6 -0
  11. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0/pyTEMlib.egg-info}/PKG-INFO +4 -3
  12. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib.egg-info/requires.txt +1 -1
  13. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/setup.py +1 -1
  14. pytemlib-0.2025.2.2/pyTEMlib/version.py +0 -6
  15. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/LICENSE +0 -0
  16. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/MANIFEST.in +0 -0
  17. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/README.rst +0 -0
  18. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/__init__.py +0 -0
  19. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/animation.py +0 -0
  20. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/config_dir.py +0 -0
  21. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/crystal_tools.py +0 -0
  22. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/diffraction_plot.py +0 -0
  23. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/dynamic_scattering.py +0 -0
  24. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/eds_tools.py +0 -0
  25. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/eels_dialog_utilities.py +0 -0
  26. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/file_tools_qt.py +0 -0
  27. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/graph_viz.py +0 -0
  28. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/image_dialog.py +0 -0
  29. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/image_dlg.py +0 -0
  30. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/info_widget.py +0 -0
  31. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/info_widget3.py +0 -0
  32. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/interactive_image.py +0 -0
  33. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/kinematic_scattering.py +0 -0
  34. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/low_loss_widget.py +0 -0
  35. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/microscope.py +0 -0
  36. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/peak_dialog.py +0 -0
  37. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/peak_dlg.py +0 -0
  38. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/sidpy_tools.py +0 -0
  39. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/simulation_tools.py +0 -0
  40. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib/xrpa_x_sections.py +0 -0
  41. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib.egg-info/SOURCES.txt +0 -0
  42. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib.egg-info/dependency_links.txt +0 -0
  43. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib.egg-info/entry_points.txt +0 -0
  44. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/pyTEMlib.egg-info/top_level.txt +0 -0
  45. {pytemlib-0.2025.2.2 → pytemlib-0.2025.4.0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.2
1
+ Metadata-Version: 2.4
2
2
  Name: pyTEMlib
3
- Version: 0.2025.2.2
3
+ Version: 0.2025.4.0
4
4
  Summary: pyTEM: TEM Data Quantification library through a model-based approach
5
5
  Home-page: https://pycroscopy.github.io/pyTEMlib/about.html
6
6
  Author: Gerd Duscher
@@ -39,7 +39,7 @@ Requires-Dist: spglib
39
39
  Requires-Dist: scikit-image
40
40
  Requires-Dist: scikit-learn
41
41
  Requires-Dist: pyNSID>=0.0.7
42
- Requires-Dist: sidpy>=0.12.1
42
+ Requires-Dist: sidpy>=0.12.7
43
43
  Requires-Dist: SciFiReaders>=0.0.8
44
44
  Dynamic: author
45
45
  Dynamic: author-email
@@ -48,6 +48,7 @@ Dynamic: description
48
48
  Dynamic: home-page
49
49
  Dynamic: keywords
50
50
  Dynamic: license
51
+ Dynamic: license-file
51
52
  Dynamic: platform
52
53
  Dynamic: requires-dist
53
54
  Dynamic: summary
@@ -61,6 +61,8 @@ def find_atoms(image, atom_size=0.1, threshold=0.):
61
61
  threshold = np.std(im)
62
62
  atoms = blob_log(im, max_sigma=atom_size/scale_x, threshold=threshold)
63
63
 
64
+
65
+
64
66
  return atoms
65
67
 
66
68
 
@@ -500,8 +500,7 @@ class CoreLoss(object):
500
500
  self.core_loss_tab[11, 0].value = edge['areal_density']
501
501
  self.core_loss_tab[11, 2].value = 'a.u.'
502
502
  else:
503
- dispersion = self.parent.energy_scale[1] - \
504
- self.parent.energy_scale[0]
503
+ dispersion = self.parent.energy_scale.slope
505
504
  self.core_loss_tab[11, 0].value = np.round(
506
505
  edge['areal_density']/self.dataset.metadata['experiment']['flux_ppm']*1e-6, 2)
507
506
  self.core_loss_tab[11, 2].value = 'atoms/nm²'
@@ -663,8 +662,7 @@ class CoreLoss(object):
663
662
 
664
663
  edge['areal_density'] = self.core_loss_tab[11, 0].value
665
664
  if self.parent.y_scale != 1.0:
666
- dispersion = self.parent.energy_scale[1] - \
667
- self.parent.energy_scale[0]
665
+ dispersion = self.parent.energy_scale.slope
668
666
  edge['areal_density'] = self.core_loss_tab[11, 0].value * \
669
667
  self.dataset.metadata['experiment']['flux_ppm']/1e-6
670
668
  if 'model' in self.edges:
@@ -335,7 +335,7 @@ class CompositionWidget(object):
335
335
  self.dd = (self.energy_scale[0], self.energy_scale[1])
336
336
 
337
337
  self.dataset.metadata['experiment']['offset'] = self.energy_scale[0]
338
- self.dataset.metadata['experiment']['dispersion'] = self.energy_scale[1] - self.energy_scale[0]
338
+ self.dataset.metadata['experiment']['dispersion'] = self.spec_dim.slope
339
339
  if 'edges' not in self.dataset.metadata or self.dataset.metadata['edges'] == {}:
340
340
  self.dataset.metadata['edges'] = {'0': {}, 'model': {}, 'use_low_loss': False}
341
341
 
@@ -42,7 +42,6 @@ from pyTEMlib.xrpa_x_sections import x_sections
42
42
 
43
43
  import sidpy
44
44
  from sidpy.proc.fitter import SidFitter
45
- from sidpy.base.num_utils import get_slope
46
45
 
47
46
  # we have a function called find peaks - is it necessary?
48
47
  # or could we just use scipy.signal import find_peaks
@@ -604,7 +603,8 @@ def fit_plasmon(dataset: Union[sidpy.Dataset, np.ndarray], startFitEnergy: float
604
603
  guess_pos = np.argmax(fit_dset)
605
604
  guess_amplitude = fit_dset[guess_pos]
606
605
  guess_width =(endFitEnergy-startFitEnergy)/4
607
- guess_pos = energy[guess_pos]
606
+ guess_pos = energy[start_fit_pixel+guess_pos]
607
+
608
608
  if guess_width >8:
609
609
  guess_width=8
610
610
  try:
@@ -618,7 +618,7 @@ def fit_plasmon(dataset: Union[sidpy.Dataset, np.ndarray], startFitEnergy: float
618
618
  p0=[guess_pos, guess_width, guess_amplitude])
619
619
  except:
620
620
  popt=[0,0,0]
621
-
621
+
622
622
  plasmon = dataset.like_data(energy_loss_function(energy, popt[0], popt[1], popt[2]))
623
623
  plasmon *= anglog
624
624
  start_plasmon = np.searchsorted(energy, 0)+1
@@ -652,16 +652,16 @@ def fit_plasmon(dataset: Union[sidpy.Dataset, np.ndarray], startFitEnergy: float
652
652
  def angle_correction(spectrum):
653
653
 
654
654
  acceleration_voltage = spectrum.metadata['experiment']['acceleration_voltage']
655
- energy_scale = spectrum.get_spectral_dims(return_axis=True)[0].values
655
+ energy_scale = spectrum.get_spectral_dims(return_axis=True)[0]
656
656
  # eff_beta = effective_collection_angle(energy_scale, spectrum.metadata['experiment']['convergence_angle'],
657
657
  # spectrum.metadata['experiment']['collection_angle'],acceleration_voltage)
658
658
 
659
659
 
660
- epc = energy_scale[1] - energy_scale[0] # input('ev per channel : ');
660
+ epc = energy_scale.slope # input('ev per channel : ');
661
661
 
662
662
  alpha = spectrum.metadata['experiment']['convergence_angle'] # input('Alpha (mrad) : ');
663
663
  beta = spectrum.metadata['experiment']['collection_angle']# input('Beta (mrad) : ');
664
- e = energy_scale
664
+ e = energy_scale.values
665
665
  e0 = acceleration_voltage/1000 # input('E0 (keV) : ');
666
666
 
667
667
  T = 1000.0*e0*(1.+e0/1022.12)/(1.0+e0/511.06)**2 # %eV # equ.5.2a or Appendix E p 427
@@ -1261,7 +1261,7 @@ def second_derivative(dataset: sidpy.Dataset, sensitivity: float=2.5) -> None:
1261
1261
  """Calculates second derivative of a sidpy.dataset"""
1262
1262
 
1263
1263
  dim = dataset.get_spectral_dims()
1264
- energy_scale = dataset.get_spectral_dims(return_axis=True)[0].values
1264
+ energy_scale = dataset.get_spectral_dims(return_axis=True)[0]
1265
1265
  if dataset.data_type.name == 'SPECTRAL_IMAGE':
1266
1266
  spectrum = dataset.view.get_spectrum()
1267
1267
  else:
@@ -1269,7 +1269,7 @@ def second_derivative(dataset: sidpy.Dataset, sensitivity: float=2.5) -> None:
1269
1269
 
1270
1270
  spec = scipy.ndimage.gaussian_filter(spectrum, 3)
1271
1271
 
1272
- dispersion = get_slope(energy_scale)
1272
+ dispersion = energy_scale.slope
1273
1273
  second_dif = np.roll(spec, -3) - 2 * spec + np.roll(spec, +3)
1274
1274
  second_dif[:3] = 0
1275
1275
  second_dif[-3:] = 0
@@ -1405,8 +1405,9 @@ def identify_edges(dataset: sidpy.Dataset, noise_level: float=2.0):
1405
1405
 
1406
1406
  """
1407
1407
  dim = dataset.get_spectral_dims()
1408
- energy_scale = dataset.get_spectral_dims(return_axis=True)[0].values
1409
- dispersion = get_slope(energy_scale)
1408
+ energy_scale = dataset.get_spectral_dims(return_axis=True)[0]
1409
+ dispersion = energy_scale.slope
1410
+
1410
1411
  spec = scipy.ndimage.gaussian_filter(dataset, 3/dispersion) # smooth with 3eV wideGaussian
1411
1412
 
1412
1413
  first_derivative = spec - np.roll(spec, +2)
@@ -23,6 +23,9 @@ import ase.io
23
23
  import SciFiReaders
24
24
  import pyNSID
25
25
  import sidpy
26
+ import sidpy
27
+ import xml.etree.ElementTree as ET
28
+ import collections
26
29
  import ipywidgets as widgets
27
30
  from IPython.display import display
28
31
 
@@ -33,8 +36,6 @@ import pyTEMlib.crystal_tools
33
36
  from pyTEMlib.config_dir import config_path
34
37
  from pyTEMlib.sidpy_tools import *
35
38
 
36
- from pyTEMlib.sidpy_tools import *
37
-
38
39
  Qt_available = True
39
40
  try:
40
41
  from PyQt5 import QtCore, QtWidgets, QtGui
@@ -44,8 +45,6 @@ except ModuleNotFoundError:
44
45
 
45
46
  Dimension = sidpy.Dimension
46
47
 
47
- # Austin commented the line below - it is not used anywhere in the code, and it gives import errors 9-14-2024
48
- # get_slope = sidpy.base.num_utils.get_slopes
49
48
  __version__ = '2024.9.14'
50
49
 
51
50
  from traitlets import Unicode, Bool, validate, TraitError
@@ -116,7 +115,6 @@ class FileWidget2(ipywidgets.DOMWidget):
116
115
  value='None',
117
116
  description='directory:',
118
117
  disabled=False,
119
- button_style='',
120
118
  layout=widgets.Layout(width='90%'))
121
119
 
122
120
 
@@ -203,8 +201,7 @@ class FileWidget3(FileWidget2):
203
201
  self.loaded_datasets = widgets.Dropdown(options=self.dataset_list,
204
202
  value=self.dataset_list[0],
205
203
  description='loaded datasets:',
206
- disabled=False,
207
- button_style='')
204
+ disabled=False)
208
205
 
209
206
  ui = widgets.HBox([select_button, add_button, self.loaded_datasets])
210
207
  display(ui)
@@ -313,14 +310,12 @@ class FileWidget(ipywidgets.DOMWidget):
313
310
  value='None',
314
311
  description='directory:',
315
312
  disabled=False,
316
- button_style='',
317
313
  layout=widgets.Layout(width='90%'))
318
314
  self.dataset_list = ['None']
319
315
  self.loaded_datasets = widgets.Dropdown(options=self.dataset_list,
320
316
  value=self.dataset_list[0],
321
317
  description='loaded datasets:',
322
- disabled=False,
323
- button_style='')
318
+ disabled=False)
324
319
 
325
320
  self.set_options()
326
321
  ui = widgets.VBox([self.path_choice, self.select_files, widgets.HBox([select_button, add_button,
@@ -446,8 +441,7 @@ class ChooseDataset(object):
446
441
  self.select_image = widgets.Dropdown(options=self.dataset_list,
447
442
  value=self.dataset_list[0],
448
443
  description='select dataset:',
449
- disabled=False,
450
- button_style='')
444
+ disabled=False)
451
445
  if show_dialog:
452
446
  display(self.select_image)
453
447
 
@@ -484,7 +478,7 @@ class ChooseDataset(object):
484
478
  self.dataset = self.datasets[self.key]
485
479
  self.dataset.title = self.dataset.title.split('/')[-1]
486
480
  self.dataset.title = self.dataset.title.split('/')[-1]
487
-
481
+
488
482
 
489
483
  def add_to_dict(file_dict, name):
490
484
  full_name = os.path.join(file_dict['directory'], name)
@@ -884,7 +878,6 @@ def open_file(filename=None, h5_group=None, write_hdf_file=False, sum_frames=Fa
884
878
  else:
885
879
  if isinstance(datasets, dict):
886
880
  dataset_dict = datasets
887
-
888
881
  else:
889
882
  dataset_dict = {}
890
883
  for index, dataset in enumerate(datasets):
@@ -926,23 +919,28 @@ def open_file(filename=None, h5_group=None, write_hdf_file=False, sum_frames=Fa
926
919
  dset.title = dset.title + '_SI'
927
920
  dset = dset.T
928
921
  dset.title = dset.title[11:]
922
+ dset.add_provenance('pyTEMlib', 'open_file', version=pyTEMlib.__version__, linked_data='emi_converted_by_hyperspy')
929
923
  dataset_dict[f'Channel_{index:03d}'] = dset
924
+
930
925
  return dataset_dict
931
926
  except ImportError:
932
927
  print('This file type needs hyperspy to be installed to be able to be read')
933
928
  return
934
929
  elif extension == '.emd':
935
930
  reader = SciFiReaders.EMDReader(filename, sum_frames=sum_frames)
936
-
931
+ provenance = 'SciFiReader.EMDReader'
937
932
  elif 'edax' in extension.lower():
938
933
  if 'h5' in extension:
939
934
  reader = SciFiReaders.EDAXReader(filename)
935
+ provenance = 'SciFiReader.EDAXReader'
940
936
 
941
937
  elif extension in ['.ndata', '.h5']:
942
938
  reader = SciFiReaders.NionReader(filename)
943
-
939
+ provenance = 'SciFiReader.NionReader'
940
+
944
941
  elif extension in ['.mrc']:
945
942
  reader = SciFiReaders.MRCReader(filename)
943
+ provenance = 'SciFiReader.MRCReader'
946
944
 
947
945
  else:
948
946
  raise NotImplementedError('extension not supported')
@@ -994,6 +992,7 @@ def open_file(filename=None, h5_group=None, write_hdf_file=False, sum_frames=Fa
994
992
  if isinstance(dset, dict):
995
993
  dataset_dict = dset
996
994
  for dataset in dataset_dict.values():
995
+ dataset.add_provenance('pyTEMlib', 'open_file', version=pyTEMlib.__version__, linked_data = 'SciFiReader')
997
996
  dataset.metadata['filename'] = filename
998
997
 
999
998
  elif isinstance(dset, list):
@@ -1431,6 +1430,97 @@ def h5_get_crystal_structure(structure_group):
1431
1430
  # ToDo: Read all of info dictionary
1432
1431
  return atoms
1433
1432
 
1433
+ import collections
1434
+ def etree_to_dict(element):
1435
+ """Recursively converts an ElementTree object into a nested dictionary."""
1436
+ d = {element.tag: {} if element.attrib else None}
1437
+ children = list(element)
1438
+ if children:
1439
+ dd = collections.defaultdict(list)
1440
+ for dc in map(etree_to_dict, children):
1441
+ for k, v in dc.items():
1442
+ dd[k].append(v)
1443
+ d = {element.tag: {k: v[0] if len(v) == 1 else v for k, v in dd.items()}}
1444
+ if element.attrib:
1445
+ d[element.tag].update(('@' + k, v) for k, v in element.attrib.items())
1446
+ if element.text:
1447
+ text = element.text.strip()
1448
+ if children or element.attrib:
1449
+ if text:
1450
+ d[element.tag]['#text'] = text
1451
+ else:
1452
+ d[element.tag] = text
1453
+ return d
1454
+
1455
+ def read_adorned_metadata(image):
1456
+ xml_str = image.metadata.metadata_as_xml
1457
+ root = ET.fromstring(xml_str)
1458
+ metadata_dict = etree_to_dict(root)
1459
+ detector = 'detector'
1460
+ if 'Detectors' in metadata_dict['Metadata']['Detectors']['ScanningDetector']:
1461
+ if 'ScanningDetector' in metadata_dict['Metadata']['Detectors']['ScanningDetector']:
1462
+ detector = metadata_dict['Metadata']['Detectors']['ScanningDetector']['DetectorName']
1463
+
1464
+ segment = ''
1465
+ if 'CustomPropertyGroup' in metadata_dict['Metadata']:
1466
+ if 'CustomProperties' in metadata_dict['Metadata']['CustomPropertyGroup']:
1467
+ for list_item in metadata_dict['Metadata']['CustomPropertyGroup']['CustomProperties']:
1468
+
1469
+ if isinstance(list_item, dict):
1470
+ for key in list_item:
1471
+ for item in list_item[key]:
1472
+ if '@name' in item:
1473
+ if item['@name']== 'DetectorCommercialName':
1474
+ detector = item['@value']
1475
+ if item['@name']== 'StemSegment':
1476
+ segment = '_'+item['@value']
1477
+ return detector+segment, metadata_dict['Metadata']
1478
+
1479
+ def adorned_to_sidpy(images):
1480
+ """
1481
+ Convert a list of adorned images to a dictionary of Sidpy datasets.
1482
+ Each dataset is created from the image data and adorned metadata.
1483
+ The datasets are stored in a dictionary with keys 'Channel_000', 'Channel_001', etc.
1484
+ The dimensions of the datasets are set based on the image data shape and pixel sizes.
1485
+ The original metadata is also stored in the dataset.
1486
+ Args:
1487
+ images (list or object): A list of adorned images or a single adorned image.
1488
+ Returns:
1489
+ dict: A dictionary of Sidpy datasets, where each dataset corresponds to an image.
1490
+ """
1491
+
1492
+ data_sets = {}
1493
+ if not isinstance(images, list):
1494
+ images = [images]
1495
+ for index, image in enumerate(images):
1496
+ name, original_metadata = read_adorned_metadata(image)
1497
+ data_sets[f'Channel_{index:03}'] = sidpy.Dataset.from_array(image.data.T, title=name)
1498
+ ds = data_sets[f'Channel_{index:03}']
1499
+
1500
+
1501
+ ds.original_metadata = original_metadata
1502
+
1503
+ pixel_size_x_m = float(ds.original_metadata['BinaryResult']['PixelSize']['X']['#text'])
1504
+ pixel_size_y_m = float(ds.original_metadata['BinaryResult']['PixelSize']['Y']['#text'])
1505
+ pixel_size_x_nm = pixel_size_x_m * 1e9
1506
+ pixel_size_y_nm = pixel_size_y_m * 1e9
1507
+ if image.data.ndim == 3:
1508
+ ds.data_type = 'image_stack'
1509
+ ds.set_dimension(0, sidpy.Dimension(np.arange(image.data.shape[0]),
1510
+ name='frame', units='frame', quantity='Length', dimension_type='temporal'))
1511
+ ds.set_dimension(1, sidpy.Dimension(np.arange(image.data.shape[1]) * pixel_size_y_nm,
1512
+ name='y', units='nm', quantity='Length', dimension_type='spatial'))
1513
+ ds.set_dimension(2, sidpy.Dimension(np.arange(image.data.shape[2]) * pixel_size_x_nm,
1514
+ name='x', units='nm', quantity='Length', dimension_type='spatial'))
1515
+ else:
1516
+ ds.data_type = 'image'
1517
+ ds.set_dimension(0, sidpy.Dimension(np.arange(image.data.shape[0]) * pixel_size_y_nm,
1518
+ name='y', units='nm', quantity='Length', dimension_type='spatial'))
1519
+ ds.set_dimension(1, sidpy.Dimension(np.arange(image.data.shape[1]) * pixel_size_x_nm,
1520
+ name='x', units='nm', quantity='Length', dimension_type='spatial'))
1521
+
1522
+ return data_sets
1523
+
1434
1524
 
1435
1525
  ###############################################
1436
1526
  # Support old pyTEM file format
@@ -5,7 +5,6 @@ import numpy as np
5
5
  # import ase
6
6
  import sys
7
7
 
8
- # from scipy.spatial import cKDTree, Voronoi, ConvexHull
9
8
  import scipy.spatial
10
9
  import scipy.optimize
11
10
  import scipy.interpolate
@@ -20,7 +19,7 @@ import pyTEMlib.crystal_tools
20
19
  from tqdm.auto import tqdm, trange
21
20
 
22
21
  from .graph_viz import *
23
-
22
+ QT_available = False
24
23
 
25
24
  ###########################################################################
26
25
  # utility functions
@@ -212,6 +211,14 @@ def get_voronoi(tetrahedra, atoms, bond_radii=None, optimize=True):
212
211
  """
213
212
 
214
213
  extent = atoms.cell.lengths()
214
+ print('extent', extent)
215
+
216
+ if np.abs(atoms.positions[:, 2]).sum() <= 0.01:
217
+ positions = atoms.positions[:, :2]
218
+ extent = extent[:2]
219
+ else:
220
+ positions = atoms.positions
221
+
215
222
  if atoms.info is None:
216
223
  atoms.info = {}
217
224
 
@@ -232,8 +239,8 @@ def get_voronoi(tetrahedra, atoms, bond_radii=None, optimize=True):
232
239
  r_a = []
233
240
  for vert in vertices:
234
241
  r_a.append(bond_radii[vert])
235
- voronoi, radius = interstitial_sphere_center(atoms.positions[vertices], r_a, optimize=optimize)
236
-
242
+ voronoi, radius = interstitial_sphere_center(positions[vertices], r_a, optimize=optimize)
243
+
237
244
  r_a = np.average(r_a) # np.min(r_a)
238
245
  r_aa.append(r_a)
239
246
 
@@ -247,7 +254,7 @@ def get_voronoi(tetrahedra, atoms, bond_radii=None, optimize=True):
247
254
  def find_overlapping_spheres(voronoi_vertices, r_vv, r_a, cheat=1.):
248
255
  """Find overlapping spheres"""
249
256
 
250
- vertex_tree = scipy.spatial.cKDTree(voronoi_vertices)
257
+ vertex_tree = scipy.spatial.KDTree(voronoi_vertices)
251
258
 
252
259
  pairs = vertex_tree.query_pairs(r=r_a * 2)
253
260
 
@@ -424,7 +431,7 @@ def get_non_periodic_supercell(super_cell):
424
431
  return atoms
425
432
 
426
433
  def get_connectivity_matrix(crystal, atoms, polyhedra):
427
- crystal_tree = scipy.spatial.cKDTree(crystal.positions)
434
+ crystal_tree = scipy.spatial.KDTree(crystal.positions)
428
435
 
429
436
 
430
437
  connectivity_matrix = np.zeros([len(atoms),len(atoms)], dtype=int)
@@ -476,8 +483,8 @@ def get_bonds(crystal, shift= 0., verbose = False, cheat=1.0):
476
483
  other = []
477
484
  super_cell_atoms =[]
478
485
 
479
- atoms_tree = scipy.spatial.cKDTree(atoms.positions-crystal.cell.lengths())
480
- crystal_tree = scipy.spatial.cKDTree(crystal.positions)
486
+ atoms_tree = scipy.spatial.KDTree(atoms.positions-crystal.cell.lengths())
487
+ crystal_tree = scipy.spatial.KDTree(crystal.positions)
481
488
  connectivity_matrix = np.zeros([len(atoms),len(atoms)], dtype=float)
482
489
 
483
490
  for polyhedron in polyhedra.values():
@@ -699,18 +706,21 @@ def find_polyhedra(atoms, optimize=True, cheat=1.0, bond_radii=None):
699
706
  raise TypeError('This function needs an ase.Atoms object')
700
707
 
701
708
  if np.abs(atoms.positions[:, 2]).sum() <= 0.01:
702
- tetrahedra = scipy.spatial.Delaunay(atoms.positions[:, :2])
709
+ positions = atoms.positions[:, :2]
710
+ print('2D')
703
711
  else:
704
- tetrahedra = scipy.spatial.Delaunay(atoms.positions)
712
+ positions = atoms.positions
713
+ tetrahedra = scipy.spatial.Delaunay(positions)
705
714
 
706
715
  voronoi_vertices, voronoi_tetrahedrons, r_vv, r_a = get_voronoi(tetrahedra, atoms, optimize=optimize, bond_radii=bond_radii)
707
- if np.abs(atoms.positions[:, 2]).sum() <= 0.01:
708
- r_vv = np.array(r_vv)*3.
716
+
717
+ if positions.shape[1] < 3:
718
+ r_vv = np.array(r_vv)*1.
709
719
  overlapping_pairs = find_overlapping_spheres(voronoi_vertices, r_vv, r_a, cheat=cheat)
710
720
 
711
721
  clusters, visited_all = find_interstitial_clusters(overlapping_pairs)
712
722
 
713
- if np.abs(atoms.positions[:, 2]).sum() <= 0.01:
723
+ if positions.shape[1] < 3:
714
724
  rings = get_polygons(atoms, clusters, voronoi_tetrahedrons)
715
725
  return rings
716
726
  else:
@@ -770,7 +780,7 @@ def sort_polyhedra_by_vertices(polyhedra, visible=range(4, 100), z_lim=[0, 100],
770
780
  ##########################
771
781
  # New Graph Stuff
772
782
  ##########################
773
- def breadth_first_search(graph, initial, projected_crystal):
783
+ def breadth_first_search2(graph, initial, projected_crystal):
774
784
  """ breadth first search of atoms viewed as a graph
775
785
 
776
786
  the projection dictionary has to contain the following items
@@ -794,15 +804,23 @@ def breadth_first_search(graph, initial, projected_crystal):
794
804
  """
795
805
 
796
806
  projection_tags = projected_crystal.info['projection']
797
-
798
- # get lattice vectors to hopp along through graph
799
- projected_unit_cell = projected_crystal.cell[:2, :2]
800
- a_lattice_vector = projected_unit_cell[0]
801
- b_lattice_vector = projected_unit_cell[1]
802
- main = np.array([a_lattice_vector, -a_lattice_vector, b_lattice_vector, -b_lattice_vector]) # vectors of unit cell
803
- near = np.append(main, projection_tags['near_base'], axis=0) # all nearest atoms
807
+ if 'lattice_vector' in projection_tags:
808
+ a_lattice_vector = projection_tags['lattice_vector']['a']
809
+ b_lattice_vector = projection_tags['lattice_vector']['b']
810
+ main = np.array([a_lattice_vector, -a_lattice_vector, b_lattice_vector, -b_lattice_vector]) # vectors of unit cell
811
+ near = []
812
+ else:
813
+ # get lattice vectors to hopp along through graph
814
+ projected_unit_cell = projected_crystal.cell[:2, :2]
815
+ a_lattice_vector = projected_unit_cell[0]
816
+ b_lattice_vector = projected_unit_cell[1]
817
+ main = np.array([a_lattice_vector, -a_lattice_vector, b_lattice_vector, -b_lattice_vector]) # vectors of unit cell
818
+ near = projection_tags['near_base'] # all nearest atoms
804
819
  # get k next nearest neighbours for each node
805
- neighbour_tree = scipy.spatial.cKDTree(graph)
820
+ main = np.array([a_lattice_vector, -a_lattice_vector, b_lattice_vector, -b_lattice_vector]) # vectors of unit cell
821
+ near = np.append(main, near, axis=0)
822
+
823
+ neighbour_tree = scipy.spatial.KDTree(graph)
806
824
  distances, indices = neighbour_tree.query(graph, # let's get all neighbours
807
825
  k=50) # projection_tags['number_of_nearest_neighbours']*2 + 1)
808
826
  # print(projection_tags['number_of_nearest_neighbours'] * 2 + 1)
@@ -835,6 +853,53 @@ def breadth_first_search(graph, initial, projected_crystal):
835
853
 
836
854
  return graph[visited], ideal
837
855
 
856
+
857
+
858
+ def breadth_first_search_felxible(graph, initial, lattice_parameter, tolerance=1):
859
+ """ breadth first search of atoms viewed as a graph
860
+ This is a rotational invariant search of atoms in a lattice, and returns the angles of unit cells.
861
+ We only use the ideal lattice parameter to determine the lattice.
862
+ """
863
+ if isinstance(lattice_parameter, ase.Atoms):
864
+ lattice_parameter = lattice_parameter.cell.lengths()[:2]
865
+ elif isinstance(lattice_parameter, float):
866
+ lattice_parameter = [lattice_parameter]
867
+ lattice_parameter = np.array(lattice_parameter)
868
+
869
+ neighbour_tree = scipy.spatial.KDTree(graph)
870
+ distances, indices = neighbour_tree.query(graph, # let's get all neighbours
871
+ k=50) # projection_tags['number_of_nearest_neighbours']*2 + 1)
872
+ visited = [] # the atoms we visited
873
+ angles = [] # atoms at ideal lattice
874
+ sub_lattice = [] # atoms in base and disregarded
875
+ queue = [initial]
876
+ queue_angles=[0]
877
+
878
+ while queue:
879
+ node = queue.pop(0)
880
+ angle = queue_angles.pop(0)
881
+ if node not in visited:
882
+ visited.append(node)
883
+ angles.append(angle)
884
+ neighbors = indices[node]
885
+ for i, neighbour in enumerate(neighbors):
886
+ if neighbour not in visited:
887
+ hopp = graph[node] - graph[neighbour]
888
+ distance_to_ideal = np.linalg.norm(hopp)
889
+ if np.min(np.abs(distance_to_ideal - lattice_parameter)) < tolerance:
890
+ queue.append(neighbour)
891
+ queue_angles.append(np.arctan2(hopp[1], hopp[0]))
892
+ angles[0] = angles[1]
893
+ out_atoms = np.stack([graph[visited][:, 0], graph[visited][:, 1], angles])
894
+ return out_atoms.T, visited
895
+
896
+ def delete_rim_atoms(atoms, extent, rim_distance):
897
+ rim = np.where(atoms[:, :2] - extent > -rim_distance)[0]
898
+ middle_atoms = np.delete(atoms, rim, axis=0)
899
+ rim = np.where(middle_atoms[:, :2].min(axis=1)<rim_distance)[0]
900
+ middle_atoms = np.delete(middle_atoms, rim, axis=0)
901
+ return middle_atoms
902
+
838
903
  ####################
839
904
  # Distortion Matrix
840
905
  ####################
@@ -992,7 +1057,7 @@ def get_significant_vertices(vertices, distance=3):
992
1057
  list of points that are all a minimum of 3 apart.
993
1058
  """
994
1059
 
995
- tt = scipy.spatial.cKDTree(np.array(vertices))
1060
+ tt = scipy.spatial.KDTree(np.array(vertices))
996
1061
  near = tt.query_ball_point(vertices, distance)
997
1062
  ideal_vertices = []
998
1063
  for indices in near:
@@ -1146,21 +1211,16 @@ def undistort_stack(distortion_matrix, data):
1146
1211
  nimages = data.shape[0]
1147
1212
  done = 0
1148
1213
 
1149
- if QT_available:
1150
- progress = ft.ProgressDialog("Correct Scan Distortions", nimages)
1214
+
1151
1215
  for i in range(nimages):
1152
- if QT_available:
1153
- progress.set_value(i)
1154
- elif done < int((i + 1) / nimages * 50):
1155
- done = int((i + 1) / nimages * 50)
1156
- sys.stdout.write('\r')
1157
- # progress output :
1158
- sys.stdout.write("[%-50s] %d%%" % ('=' * done, 2 * done))
1159
- sys.stdout.flush()
1216
+ done = int((i + 1) / nimages * 50)
1217
+ sys.stdout.write('\r')
1218
+ # progress output :
1219
+ sys.stdout.write("[%-50s] %d%%" % ('=' * done, 2 * done))
1220
+ sys.stdout.flush()
1160
1221
 
1161
1222
  interpolated[i, :, :] = griddata(corrected, intensity_values[i, :], (grid_x, grid_y), method='linear')
1162
- if QT_available:
1163
- progress.set_value(nimages)
1223
+
1164
1224
  print(':-)')
1165
1225
  print('You have successfully completed undistortion of image stack')
1166
1226
  return interpolated