pyTEMlib 0.2020.11.0__py3-none-any.whl → 0.2024.8.4__py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.

Potentially problematic release: this version of pyTEMlib might be problematic.

Files changed (59)
  1. pyTEMlib/__init__.py +11 -11
  2. pyTEMlib/animation.py +631 -0
  3. pyTEMlib/atom_tools.py +240 -222
  4. pyTEMlib/config_dir.py +57 -29
  5. pyTEMlib/core_loss_widget.py +658 -0
  6. pyTEMlib/crystal_tools.py +1255 -0
  7. pyTEMlib/diffraction_plot.py +756 -0
  8. pyTEMlib/dynamic_scattering.py +293 -0
  9. pyTEMlib/eds_tools.py +609 -0
  10. pyTEMlib/eels_dialog.py +749 -486
  11. pyTEMlib/{interactive_eels.py → eels_dialog_utilities.py} +1199 -1524
  12. pyTEMlib/eels_tools.py +2031 -1731
  13. pyTEMlib/file_tools.py +1276 -491
  14. pyTEMlib/file_tools_qt.py +193 -0
  15. pyTEMlib/graph_tools.py +1166 -450
  16. pyTEMlib/graph_viz.py +449 -0
  17. pyTEMlib/image_dialog.py +158 -0
  18. pyTEMlib/image_dlg.py +146 -0
  19. pyTEMlib/image_tools.py +1399 -956
  20. pyTEMlib/info_widget.py +933 -0
  21. pyTEMlib/interactive_image.py +1 -0
  22. pyTEMlib/kinematic_scattering.py +1196 -0
  23. pyTEMlib/low_loss_widget.py +176 -0
  24. pyTEMlib/microscope.py +61 -78
  25. pyTEMlib/peak_dialog.py +1047 -350
  26. pyTEMlib/peak_dlg.py +286 -248
  27. pyTEMlib/probe_tools.py +653 -202
  28. pyTEMlib/sidpy_tools.py +153 -129
  29. pyTEMlib/simulation_tools.py +104 -87
  30. pyTEMlib/version.py +6 -3
  31. pyTEMlib/xrpa_x_sections.py +20972 -0
  32. {pyTEMlib-0.2020.11.0.dist-info → pyTEMlib-0.2024.8.4.dist-info}/LICENSE +21 -21
  33. pyTEMlib-0.2024.8.4.dist-info/METADATA +93 -0
  34. pyTEMlib-0.2024.8.4.dist-info/RECORD +37 -0
  35. {pyTEMlib-0.2020.11.0.dist-info → pyTEMlib-0.2024.8.4.dist-info}/WHEEL +6 -5
  36. {pyTEMlib-0.2020.11.0.dist-info → pyTEMlib-0.2024.8.4.dist-info}/entry_points.txt +0 -1
  37. pyTEMlib/KinsCat.py +0 -2685
  38. pyTEMlib/__version__.py +0 -2
  39. pyTEMlib/data/TEMlibrc +0 -68
  40. pyTEMlib/data/edges_db.csv +0 -189
  41. pyTEMlib/data/edges_db.pkl +0 -0
  42. pyTEMlib/data/fparam.txt +0 -103
  43. pyTEMlib/data/microscopes.csv +0 -7
  44. pyTEMlib/data/microscopes.xml +0 -167
  45. pyTEMlib/data/path.txt +0 -1
  46. pyTEMlib/defaults_parser.py +0 -86
  47. pyTEMlib/dm3_reader.py +0 -609
  48. pyTEMlib/edges_db.py +0 -76
  49. pyTEMlib/eels_dlg.py +0 -240
  50. pyTEMlib/hdf_utils.py +0 -481
  51. pyTEMlib/image_tools1.py +0 -2194
  52. pyTEMlib/info_dialog.py +0 -227
  53. pyTEMlib/info_dlg.py +0 -205
  54. pyTEMlib/nion_reader.py +0 -293
  55. pyTEMlib/nsi_reader.py +0 -165
  56. pyTEMlib/structure_tools.py +0 -316
  57. pyTEMlib-0.2020.11.0.dist-info/METADATA +0 -20
  58. pyTEMlib-0.2020.11.0.dist-info/RECORD +0 -42
  59. {pyTEMlib-0.2020.11.0.dist-info → pyTEMlib-0.2024.8.4.dist-info}/top_level.txt +0 -0
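
The list above shows a substantial reorganization of the package: one module was renamed in place (interactive_eels.py became eels_dialog_utilities.py), while several others were deleted and appear to be replaced by new ones (KinsCat.py by kinematic_scattering.py, structure_tools.py by crystal_tools.py). A minimal sketch of how imports might need to change when upgrading; the old-to-new mapping is an assumption inferred from the file names above and is not verified against the new API:

    # Hypothetical import migration from pyTEMlib 0.2020.11.0 to 0.2024.8.4.
    # Module names are taken from the file list above; the mapping between
    # old and new modules is inferred and should be checked against the release.

    # pyTEMlib 0.2020.11.0:
    # import pyTEMlib.KinsCat as ks        # deleted in 0.2024.8.4
    # import pyTEMlib.interactive_eels     # renamed in 0.2024.8.4
    # import pyTEMlib.structure_tools      # deleted in 0.2024.8.4

    # pyTEMlib 0.2024.8.4:
    import pyTEMlib.kinematic_scattering as ks   # presumed successor of KinsCat.py
    import pyTEMlib.eels_dialog_utilities        # renamed from interactive_eels.py
    import pyTEMlib.crystal_tools                # presumed successor of structure_tools.py
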
pyTEMlib/nion_reader.py DELETED
@@ -1,293 +0,0 @@
- #!/usr/bin/env python
- # -*- coding: iso-8859-1 -*-
-
- ################################################################################
- # Python class for reading Nion Swift files into sidpy Dataset
- # and extracting all metadata
- #
- # Written by Gerd Duscher, UTK 2020
- #
- # Works for python 3
- #
- ################################################################################
- from __future__ import division, print_function, absolute_import, unicode_literals
-
- import json
- import struct
- import h5py
- from warnings import warn
- import sys
- import numpy as np
- import os
-
- import sidpy
-
- __all__ = ["NionReader", "version"]
-
- version = '0.1beta'
-
- debugLevel = 0  # 0=none, 1-3=basic, 4-5=simple, 6-10 verbose
-
- if sys.version_info.major == 3:
-     unicode = str
-
-
- # ### utility functions ###
- def parse_zip(fp):
-     """
-     Parse the zip file headers at fp
-
-     :param fp: the file pointer from which to parse the zip file
-     :return: A tuple of local files, directory headers, and end of central directory
-
-     The local files are a dictionary where the keys are the local file offsets and the
-     values are each a tuple consisting of the name, data position, data length, and crc32.
-     The directory headers are a dictionary where the keys are the names of the files
-     and the values are a tuple consisting of the directory header position and the
-     associated local file position.
-     The end of central directory is a tuple consisting of the location of the end of
-     central directory header and the location of the first directory header.
-     This method will seek to location 0 of fp and leave fp at end of file.
-
-     This function is copied from nionswift/nion/swift/model/NDataHandler.py
-     """
-     local_files = {}
-     dir_files = {}
-     eocd = None
-     fp.seek(0)
-     while True:
-         pos = fp.tell()
-         signature = struct.unpack('I', fp.read(4))[0]
-         if signature == 0x04034b50:  # local file header
-             fp.seek(pos + 14)
-             crc32 = struct.unpack('I', fp.read(4))[0]
-             fp.seek(pos + 18)
-             data_len = struct.unpack('I', fp.read(4))[0]
-             fp.seek(pos + 26)
-             name_len = struct.unpack('H', fp.read(2))[0]
-             extra_len = struct.unpack('H', fp.read(2))[0]
-             name_bytes = fp.read(name_len)
-             fp.seek(extra_len, os.SEEK_CUR)
-             data_pos = fp.tell()
-             fp.seek(data_len, os.SEEK_CUR)
-             local_files[pos] = (name_bytes, data_pos, data_len, crc32)
-         elif signature == 0x02014b50:  # central directory header
-             fp.seek(pos + 28)
-             name_len = struct.unpack('H', fp.read(2))[0]
-             extra_len = struct.unpack('H', fp.read(2))[0]
-             comment_len = struct.unpack('H', fp.read(2))[0]
-             fp.seek(pos + 42)
-             pos2 = struct.unpack('I', fp.read(4))[0]
-             name_bytes = fp.read(name_len)
-             fp.seek(pos + 46 + name_len + extra_len + comment_len)
-             dir_files[name_bytes] = (pos, pos2)
-         elif signature == 0x06054b50:  # end of central directory
-             fp.seek(pos + 16)
-             pos2 = struct.unpack('I', fp.read(4))[0]
-             eocd = (pos, pos2)
-             break
-         else:
-             raise IOError()
-     return local_files, dir_files, eocd
-
-
- class NionReader(sidpy.Reader):
-
-     def __init__(self, file_path, verbose=False):
-         """
-         file_path: filepath to a Nion Swift file (.ndata or .h5).
-         """
-
-         super().__init__(file_path)
-
-         # initialize variables
-         self.verbose = verbose
-         self.__filename = file_path
-
-         path, file_name = os.path.split(self.__filename)
-         self.basename, self.extension = os.path.splitext(file_name)
-         self.data_cube = None
-         self.original_metadata = {}
-         self.dimensions = []
-         if self.extension == '.ndata':
-             # open file for reading
-             try:
-                 self.__f = open(self.__filename, "rb")
-             except FileNotFoundError:
-                 raise FileNotFoundError('File not found')
-             try:
-                 local_files, dir_files, eocd = parse_zip(self.__f)
-             except IOError:
-                 raise IOError("File {} does not seem to be of Nion's .ndata format".format(self.__filename))
-             self.__f.close()
-         elif self.extension == '.h5':
-             try:
-                 fp = h5py.File(self.__filename, mode='a')
-                 if 'data' not in fp:
-                     raise IOError("File {} does not seem to be of Nion's .h5 format".format(self.__filename))
-                 fp.close()
-             except IOError:
-                 raise IOError("File {} does not seem to be of Nion's .h5 format".format(self.__filename))
-
-     def read(self):
-         if self.extension == '.ndata':
-             try:
-                 self.__f = open(self.__filename, "rb")
-             except FileNotFoundError:
-                 raise FileNotFoundError('File not found')
-             local_files, dir_files, eocd = parse_zip(self.__f)
-
-             contains_data = b"data.npy" in dir_files
-             contains_metadata = b"metadata.json" in dir_files
-             file_count = contains_data + contains_metadata  # use fact that True is 1, False is 0
-
-             self.__f.seek(local_files[dir_files[b"data.npy"][1]][1])
-             self.data_cube = np.load(self.__f)
-
-             json_pos = local_files[dir_files[b"metadata.json"][1]][1]
-             json_len = local_files[dir_files[b"metadata.json"][1]][2]
-             self.__f.seek(json_pos)
-             json_properties = self.__f.read(json_len)
-
-             self.original_metadata = json.loads(json_properties.decode("utf-8"))
-             self.__f.close()
-         elif self.extension == '.h5':
-             # TODO: use lazy load for large datasets
-             self.__f = h5py.File(self.__filename, 'a')
-             if 'data' in self.__f:
-                 json_properties = self.__f['data'].attrs.get("properties", "")
-                 self.data_cube = self.__f['data'][:]
-                 self.original_metadata = json.loads(json_properties)
-
-         self.get_dimensions()
-
-         # Need to switch image dimensions in Nion format
-         image_dims = []
-         for dim, axis in enumerate(self.dimensions):
-             if axis.dimension_type == sidpy.DimensionTypes.SPATIAL:
-                 image_dims.append(dim)
-         if len(image_dims) == 2:
-             self.data_cube = np.swapaxes(self.data_cube, image_dims[0], image_dims[1])
-             temp = self.dimensions[image_dims[0]].copy()
-             self.dimensions[image_dims[0]] = self.dimensions[image_dims[1]].copy()
-             self.dimensions[image_dims[1]] = temp
-
-         dataset = sidpy.Dataset.from_array(self.data_cube)
-
-         for dim, axis in enumerate(self.dimensions):
-             dataset.set_dimension(dim, axis)
-
-         dataset.original_metadata = self.original_metadata
-         if 'dimensional_calibrations' in dataset.original_metadata:
-             for dim in dataset.original_metadata['dimensional_calibrations']:
-                 if dim['units'] == '':
-                     dim['units'] = 'pixels'
-
-         dataset.quantity = 'intensity'
-         dataset.units = 'counts'
-         if 'description' in dataset.original_metadata:
-             dataset.title = dataset.original_metadata['description']['title']
-         elif 'title' in dataset.original_metadata:
-             dataset.title = dataset.original_metadata['title']
-         else:
-             path, file_name = os.path.split(self.__filename)
-             basename, extension = os.path.splitext(file_name)
-             dataset.title = basename
-
-         if 'data_source' in dataset.original_metadata:
-             dataset.source = dataset.original_metadata['data_source']
-         else:
-             dataset.source = 'NionReader'
-
-         self.set_data_type(dataset)
-         dataset.modality = 'STEM data'
-         dataset.h5_dataset = None
-
-         return dataset
-
-     def set_data_type(self, dataset):
-
-         spectral_dim = False
-         for axis in dataset._axes.values():
-             if axis.dimension_type == sidpy.DimensionTypes.SPECTRAL:
-                 spectral_dim = True
-
-         if len(dataset.shape) > 3:
-             raise NotImplementedError('Data_type not implemented yet')
-         elif len(dataset.shape) == 3:
-             if spectral_dim:
-                 dataset.data_type = 'spectrum_image'
-             else:
-                 dataset.data_type = 'IMAGE_STACK'
-                 for dim, axis in dataset._axes.items():
-                     if axis.dimension_type != sidpy.DimensionTypes.SPATIAL:
-                         dataset.set_dimension(dim, sidpy.Dimension(axis.values,
-                                                                    name='frame',
-                                                                    units='frame',
-                                                                    quantity='stack',
-                                                                    dimension_type=sidpy.DimensionTypes.TEMPORAL))
-                         break
-         elif len(dataset.shape) == 2:
-             if spectral_dim:
-                 dataset.data_type = sidpy.DataTypes.SPECTRAL_IMAGE
-             else:
-                 dataset.data_type = sidpy.DataTypes.IMAGE
-         elif len(dataset.shape) == 1:
-             if spectral_dim:
-                 dataset.data_type = sidpy.DataTypes.SPECTRUM
-             else:
-                 dataset.data_type = sidpy.DataTypes.LINE_PLOT
-
-     def get_dimensions(self):
-         dic = self.original_metadata
-
-         reciprocal_name = 'u'
-         spatial_name = 'x'
-
-         if 'dimensional_calibrations' in dic:
-             for dim in range(len(dic['dimensional_calibrations'])):
-                 dimension_tags = dic['dimensional_calibrations'][dim]
-                 units = dimension_tags['units']
-                 values = (np.arange(self.data_cube.shape[int(dim)]) - dimension_tags['offset']) * dimension_tags['scale']
-
-                 if 'eV' == units:
-                     self.dimensions.append(sidpy.Dimension(values, name='energy_loss', units=units,
-                                                            quantity='energy-loss', dimension_type='spectral'))
-                 elif 'eV' in units:
-                     self.dimensions.append(sidpy.Dimension(values, name='energy', units=units,
-                                                            quantity='energy', dimension_type='spectral'))
-                 elif '1/' in units or units in ['mrad', 'rad']:
-                     self.dimensions.append(sidpy.Dimension(values, name=reciprocal_name, units=units,
-                                                            quantity='reciprocal distance', dimension_type='reciprocal'))
-                     reciprocal_name = chr(ord(reciprocal_name) + 1)
-                 elif 'nm' in units:
-                     self.dimensions.append(sidpy.Dimension(values, name=spatial_name, units=units,
-                                                            quantity='distance', dimension_type='spatial'))
-                     spatial_name = chr(ord(spatial_name) + 1)
-                 else:
-                     self.dimensions.append(sidpy.Dimension(values, name='generic', units='generic',
-                                                            quantity='generic', dimension_type='UNKNOWN'))
-
-     def get_filename(self):
-         return self.__filename
-
-     filename = property(get_filename)
-
-     def get_raw(self):
-         return self.data_cube
-
-     data = property(get_raw)
-
-     def get_tags(self):
-         return self.original_metadata
-
-     tags = property(get_tags)
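
For context, the deleted NionReader was a self-contained sidpy.Reader: the constructor validated the file and read() returned a fully populated sidpy.Dataset. A minimal usage sketch based on the code above; the file name is hypothetical:

    import pyTEMlib.nion_reader as nion_reader   # module only present up to 0.2020.11.0

    # Open a Nion Swift file (.ndata zip archive or .h5) and read it into a
    # sidpy.Dataset with dimensions, data_type, and the full Swift metadata.
    reader = nion_reader.NionReader('example_spectrum.ndata')   # hypothetical file
    dataset = reader.read()

    print(dataset.data_type)          # e.g. SPECTRUM, IMAGE, or SPECTRAL_IMAGE
    print(dataset.original_metadata)  # JSON metadata extracted from the file
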
pyTEMlib/nsi_reader.py DELETED
@@ -1,165 +0,0 @@
- # -*- coding: utf-8 -*-
- """
- Reader capable of reading one or all NSID datasets present in a given HDF5 file
-
- Created on Fri May 22 16:29:25 2020
-
- @author: Gerd Duscher, Suhas Somnath
- """
- from __future__ import division, print_function, absolute_import, unicode_literals
- from warnings import warn
- import sys
- import h5py
- import numpy as np
-
- import sidpy
-
- # pyNSID-style helper function
- from pyTEMlib.hdf_utils import get_all_main
-
- if sys.version_info.major == 3:
-     unicode = str
-
-
- class NSIDReader(sidpy.Reader):
-
-     def __init__(self, h5_object):
-         """
-         h5_object - h5py.Dataset or h5py.Group
-             specific main dataset that needs to be read into a sidpy Dataset,
-             or a group from which all available NSID main datasets are read.
-         """
-
-         if not isinstance(h5_object.file, h5py.File):
-             raise TypeError('we can only read h5py datasets')
-
-         super(NSIDReader, self).__init__(file_path=h5_object.file.name)
-
-         self.dset = None
-         self.main_datasets = []
-         if isinstance(h5_object, h5py.Dataset):
-             self.dset = h5_object
-             self.h5_group = self.dset.parent
-         elif isinstance(h5_object, h5py.Group):
-             self.h5_group = h5_object
-         else:
-             raise TypeError('we can only read h5py datasets')
-
-         # Finding all main datasets is done in read(), as the file may change between readings.
-         # DO NOT close the HDF5 file. Dask arrays will fail if you do so.
-         # TODO: sidpy.Dataset may need the ability to close a HDF5 file.
-         # Perhaps this would be done by reading all contents into memory.
-
-     @staticmethod
-     def read_h5py_dataset(dset):
-
-         if not isinstance(dset, h5py.Dataset):
-             raise TypeError('can only read a single Dataset; use the read_all_in_group or read_all function instead')
-         # create vanilla dask array
-         dataset = sidpy.Dataset.from_array(np.array(dset))
-
-         if 'title' in dset.attrs:
-             dataset.title = dset.attrs['title']
-         else:
-             dataset.title = dset.name
-
-         if 'units' in dset.attrs:
-             dataset.units = dset.attrs['units']
-         else:
-             dataset.units = 'generic'
-
-         if 'quantity' in dset.attrs:
-             dataset.quantity = dset.attrs['quantity']
-         else:
-             dataset.quantity = 'generic'
-
-         if 'data_type' in dset.attrs:
-             dataset.data_type = dset.attrs['data_type']
-         else:
-             dataset.data_type = 'generic'
-
-         if 'modality' in dset.attrs:
-             dataset.modality = dset.attrs['modality']
-         else:
-             dataset.modality = 'generic'
-
-         if 'source' in dset.attrs:
-             dataset.source = dset.attrs['source']
-         else:
-             dataset.source = 'generic'
-
-         dataset.axes = {}
-
-         for dim in range(np.array(dset).ndim):
-             try:
-                 label = dset.dims[dim].keys()[-1]
-             except ValueError:
-                 print('dimension {} not NSID type, using generic'.format(dim))
-             name = dset.dims[dim][label].name
-             dim_dict = {'quantity': 'generic', 'units': 'generic', 'dimension_type': 'generic'}
-             h5_dim_dict = dict(dset.parent[name].attrs)
-             if 'quantity' in h5_dim_dict:
-                 dim_dict['quantity'] = h5_dim_dict['quantity']
-             else:
-                 if 'NAME' in h5_dim_dict:
-                     param = h5_dim_dict['NAME'].decode("utf-8").split('[')
-                     if len(param) == 1:
-                         if param[0] == 'frame':
-                             dim_dict['quantity'] = 'stack'
-                             dim_dict['units'] = 'frame'
-                             dim_dict['dimension_type'] = sidpy.DimensionTypes.TEMPORAL
-                     elif len(param) == 2:
-                         dim_dict['quantity'] = param[0]
-                         dim_dict['units'] = param[1][0:-1]
-
-                         if dim_dict['units'] == 'nm':
-                             dim_dict['dimension_type'] = sidpy.DimensionTypes.SPATIAL
-                         elif dim_dict['units'] == 'eV':
-                             dim_dict['dimension_type'] = sidpy.DimensionTypes.SPECTRAL
-
-                 if 'units' in h5_dim_dict:
-                     dim_dict['units'] = h5_dim_dict['units']
-                 if 'dimension_type' in h5_dim_dict:
-                     dim_dict['dimension_type'] = h5_dim_dict['dimension_type']
-
-             dim_dict.update(dict(dset.parent[name].attrs))
-
-             dataset.set_dimension(dim, sidpy.Dimension(np.array(dset.parent[name][()]), name=dset.dims[dim].label,
-                                                        quantity=dim_dict['quantity'], units=dim_dict['units'],
-                                                        dimension_type=dim_dict['dimension_type']))
-
-         if 'metadata' in dset.parent:
-             dataset.metadata = sidpy.base.dict_utils.nest_dict(dict(dset.parent['metadata'].attrs))
-
-         dataset.metadata.update(dict(dset.attrs))
-
-         dataset.original_metadata = {}
-         if 'original_metadata' in dset.parent:
-             dataset.original_metadata = sidpy.base.dict_utils.nest_dict(dict(dset.parent['original_metadata'].attrs))
-
-         # hdf5 information
-         dataset.h5_dataset = dset
-
-         return dataset
-
-     def can_read(self):
-         pass
-
-     def read(self):
-         if not isinstance(self.h5_group, h5py.Group):
-             raise TypeError('This function needs to be initialised with a hdf5 group or dataset first')
-         list_of_main = get_all_main(self.h5_group, verbose=False)
-
-         # Go through each of the identified main datasets
-         list_of_datasets = []
-         for dset in list_of_main:
-             list_of_datasets.append(self.read_h5py_dataset(dset))
-
-         return list_of_datasets
-
-     def read_all_in_group(self, recursive=True):
-         pass
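
Likewise, the deleted NSIDReader turned every NSID main dataset under a given HDF5 group into a sidpy.Dataset. A minimal usage sketch based on the code above, assuming an NSID-formatted file written by the old pyTEMlib; the file and group names are hypothetical:

    import h5py
    from pyTEMlib.nsi_reader import NSIDReader   # module only present up to 0.2020.11.0

    # Per the comment in __init__ above, the HDF5 file must stay open
    # while the returned datasets are in use.
    h5_file = h5py.File('measurement.h5', mode='r+')   # hypothetical file
    reader = NSIDReader(h5_file['Measurement_000'])    # hypothetical group
    datasets = reader.read()                           # list of sidpy.Dataset

    for dataset in datasets:
        print(dataset.title, dataset.data_type)
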