pyTEMlib 0.2020.11.1__py3-none-any.whl → 0.2024.8.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyTEMlib might be problematic. See the package registry's advisory page for more details.

Files changed (60)
  1. pyTEMlib/__init__.py +11 -11
  2. pyTEMlib/animation.py +631 -0
  3. pyTEMlib/atom_tools.py +240 -245
  4. pyTEMlib/config_dir.py +57 -33
  5. pyTEMlib/core_loss_widget.py +658 -0
  6. pyTEMlib/crystal_tools.py +1255 -0
  7. pyTEMlib/diffraction_plot.py +756 -0
  8. pyTEMlib/dynamic_scattering.py +293 -0
  9. pyTEMlib/eds_tools.py +609 -0
  10. pyTEMlib/eels_dialog.py +749 -491
  11. pyTEMlib/{interactive_eels.py → eels_dialog_utilities.py} +1199 -1177
  12. pyTEMlib/eels_tools.py +2031 -1698
  13. pyTEMlib/file_tools.py +1276 -560
  14. pyTEMlib/file_tools_qt.py +193 -0
  15. pyTEMlib/graph_tools.py +1166 -450
  16. pyTEMlib/graph_viz.py +449 -0
  17. pyTEMlib/image_dialog.py +158 -0
  18. pyTEMlib/image_dlg.py +146 -232
  19. pyTEMlib/image_tools.py +1399 -1028
  20. pyTEMlib/info_widget.py +933 -0
  21. pyTEMlib/interactive_image.py +1 -226
  22. pyTEMlib/kinematic_scattering.py +1196 -0
  23. pyTEMlib/low_loss_widget.py +176 -0
  24. pyTEMlib/microscope.py +61 -81
  25. pyTEMlib/peak_dialog.py +1047 -410
  26. pyTEMlib/peak_dlg.py +286 -242
  27. pyTEMlib/probe_tools.py +653 -207
  28. pyTEMlib/sidpy_tools.py +153 -136
  29. pyTEMlib/simulation_tools.py +104 -87
  30. pyTEMlib/version.py +6 -3
  31. pyTEMlib/xrpa_x_sections.py +20972 -0
  32. {pyTEMlib-0.2020.11.1.dist-info → pyTEMlib-0.2024.8.4.dist-info}/LICENSE +21 -21
  33. pyTEMlib-0.2024.8.4.dist-info/METADATA +93 -0
  34. pyTEMlib-0.2024.8.4.dist-info/RECORD +37 -0
  35. {pyTEMlib-0.2020.11.1.dist-info → pyTEMlib-0.2024.8.4.dist-info}/WHEEL +6 -5
  36. {pyTEMlib-0.2020.11.1.dist-info → pyTEMlib-0.2024.8.4.dist-info}/entry_points.txt +0 -1
  37. pyTEMlib/KinsCat.py +0 -2758
  38. pyTEMlib/__version__.py +0 -2
  39. pyTEMlib/data/TEMlibrc +0 -68
  40. pyTEMlib/data/edges_db.csv +0 -189
  41. pyTEMlib/data/edges_db.pkl +0 -0
  42. pyTEMlib/data/fparam.txt +0 -103
  43. pyTEMlib/data/microscopes.csv +0 -7
  44. pyTEMlib/data/microscopes.xml +0 -167
  45. pyTEMlib/data/path.txt +0 -1
  46. pyTEMlib/defaults_parser.py +0 -90
  47. pyTEMlib/dm3_reader.py +0 -613
  48. pyTEMlib/edges_db.py +0 -76
  49. pyTEMlib/eels_dlg.py +0 -224
  50. pyTEMlib/hdf_utils.py +0 -483
  51. pyTEMlib/image_tools1.py +0 -2194
  52. pyTEMlib/info_dialog.py +0 -237
  53. pyTEMlib/info_dlg.py +0 -202
  54. pyTEMlib/nion_reader.py +0 -297
  55. pyTEMlib/nsi_reader.py +0 -170
  56. pyTEMlib/structure_tools.py +0 -316
  57. pyTEMlib/test.py +0 -2072
  58. pyTEMlib-0.2020.11.1.dist-info/METADATA +0 -20
  59. pyTEMlib-0.2020.11.1.dist-info/RECORD +0 -45
  60. {pyTEMlib-0.2020.11.1.dist-info → pyTEMlib-0.2024.8.4.dist-info}/top_level.txt +0 -0
pyTEMlib/nion_reader.py DELETED
@@ -1,297 +0,0 @@
1
- #!/usr/bin/env python
2
- # -*- coding: iso-8859-1 -*-
3
- """
4
- Python class for reading Nion Swift files into sidpy Dataset
5
-
6
- will move to Scope Reader in pycroscopy
7
- """
8
- ################################################################################
9
- # Python class for reading Nion Swift files into sidpy Dataset
10
- # and extracting all metadata
11
- #
12
- # Written by Gerd Duscher, UTK 2020
13
- #
14
- # Works for python 3
15
- #
16
- ################################################################################
17
- from __future__ import division, print_function, absolute_import, unicode_literals
18
-
19
- import json
20
- import struct
21
- import h5py
22
- from warnings import warn
23
- import sys
24
- import numpy as np
25
- import os
26
-
27
-
28
- import sidpy
29
-
30
- __all__ = ["NionReader", "version"]
31
-
32
- version = '0.1beta'
33
-
34
- debugLevel = 0 # 0=none, 1-3=basic, 4-5=simple, 6-10 verbose
35
-
36
- if sys.version_info.major == 3:
37
- unicode = str
38
-
39
- # ### utility functions ###
40
-
41
-
42
- def parse_zip(fp):
43
- """
44
- Parse the zip file headers at fp
45
- :param fp: the file pointer from which to parse the zip file
46
- :return: A tuple of local files, directory headers, and end of central directory
47
- The local files are dictionary where the keys are the local file offset and the
48
- values are each a tuple consisting of the name, data position, data length, and crc32.
49
- The directory headers are a dictionary where the keys are the names of the files
50
- and the values are a tuple consisting of the directory header position, and the
51
- associated local file position.
52
- The end of central directory is a tuple consisting of the location of the end of
53
- central directory header and the location of the first directory header.
54
- This method will seek to location 0 of fp and leave fp at end of file.
55
-
56
- This function is copied from nionswift/nion/swift/model/NDataHandler.py
57
-
58
- """
59
- local_files = {}
60
- dir_files = {}
61
- eocd = None
62
- fp.seek(0)
63
- while True:
64
- pos = fp.tell()
65
- signature = struct.unpack('I', fp.read(4))[0]
66
- if signature == 0x04034b50:
67
- fp.seek(pos + 14)
68
- crc32 = struct.unpack('I', fp.read(4))[0]
69
- fp.seek(pos + 18)
70
- data_len = struct.unpack('I', fp.read(4))[0]
71
- fp.seek(pos + 26)
72
- name_len = struct.unpack('H', fp.read(2))[0]
73
- extra_len = struct.unpack('H', fp.read(2))[0]
74
- name_bytes = fp.read(name_len)
75
- fp.seek(extra_len, os.SEEK_CUR)
76
- data_pos = fp.tell()
77
- fp.seek(data_len, os.SEEK_CUR)
78
- local_files[pos] = (name_bytes, data_pos, data_len, crc32)
79
- elif signature == 0x02014b50:
80
- fp.seek(pos + 28)
81
- name_len = struct.unpack('H', fp.read(2))[0]
82
- extra_len = struct.unpack('H', fp.read(2))[0]
83
- comment_len = struct.unpack('H', fp.read(2))[0]
84
- fp.seek(pos + 42)
85
- pos2 = struct.unpack('I', fp.read(4))[0]
86
- name_bytes = fp.read(name_len)
87
- fp.seek(pos + 46 + name_len + extra_len + comment_len)
88
- dir_files[name_bytes] = (pos, pos2)
89
- elif signature == 0x06054b50:
90
- fp.seek(pos + 16)
91
- pos2 = struct.unpack('I', fp.read(4))[0]
92
- eocd = (pos, pos2)
93
- break
94
- else:
95
- raise IOError()
96
- return local_files, dir_files, eocd
97
-
98
-
99
- class NionReader(sidpy.Reader):
100
-
101
- def __init__(self, file_path, verbose=False):
102
- """
103
- file_path: filepath to dm3 file.
104
- """
105
-
106
- super().__init__(file_path)
107
-
108
- # initialize variables ##
109
- self.verbose = verbose
110
- self.__filename = file_path
111
-
112
- path, file_name = os.path.split(self.__filename)
113
- self.basename, self.extension = os.path.splitext(file_name)
114
- self.data_cube = None
115
- self.original_metadata = {}
116
- self.dimensions = []
117
- if self.extension == '.ndata':
118
-
119
- # - open file for reading
120
- try:
121
- self.__f = open(self.__filename, "rb")
122
- except FileNotFoundError:
123
- raise FileNotFoundError('File not found')
124
- try:
125
- local_files, dir_files, eocd = parse_zip(self.__f)
126
- except IOError:
127
- raise IOError("File {} does not seem to be of Nion`s .ndata format".format(self.__filename))
128
- self.__f.close()
129
- elif self.extension == '.h5':
130
- try:
131
- fp = h5py.File(self.__filename, mode='a')
132
- if 'data' not in fp:
133
- raise IOError("File {} does not seem to be of Nion`s .h5 format".format(self.__filename))
134
- fp.close()
135
- except IOError:
136
- raise IOError("File {} does not seem to be of Nion`s .h5 format".format(self.__filename))
137
-
138
- def read(self):
139
- if self.extension == '.ndata':
140
- try:
141
- self.__f = open(self.__filename, "rb")
142
- except FileNotFoundError:
143
- raise FileNotFoundError('File not found')
144
- local_files, dir_files, eocd = parse_zip(self.__f)
145
-
146
- contains_data = b"data.npy" in dir_files
147
- contains_metadata = b"metadata.json" in dir_files
148
- file_count = contains_data + contains_metadata # use fact that True is 1, False is 0
149
-
150
- self.__f.seek(local_files[dir_files[b"data.npy"][1]][1])
151
-
152
- self.data_cube = np.load(self.__f)
153
-
154
- json_pos = local_files[dir_files[b"metadata.json"][1]][1]
155
- json_len = local_files[dir_files[b"metadata.json"][1]][2]
156
- self.__f.seek(json_pos)
157
- json_properties = self.__f.read(json_len)
158
-
159
- self.original_metadata = json.loads(json_properties.decode("utf-8"))
160
- self.__f.close()
161
- elif self.extension == '.h5':
162
- # TODO: use lazy load for large datasets
163
- self.__f = h5py.File(self.__filename, 'a')
164
- if 'data' in self.__f:
165
- json_properties = self.__f['data'].attrs.get("properties", "")
166
- self.data_cube = self.__f['data'][:]
167
- self.original_metadata = json.loads(json_properties)
168
-
169
- self.get_dimensions()
170
- ## Need to switch image dimensions in Nion format
171
- image_dims = []
172
- for dim, axis in enumerate(self.dimensions):
173
- print(dim, axis)
174
- if axis.dimension_type == sidpy.DimensionTypes.SPATIAL:
175
- image_dims.append(dim)
176
- print('image_dims', image_dims)
177
- print(self.data_cube.shape)
178
- if len(image_dims) == 2:
179
- self.data_cube = np.swapaxes(self.data_cube, image_dims[0], image_dims[1])
180
- temp = self.dimensions[image_dims[0]].copy()
181
- self.dimensions[image_dims[0]] = self.dimensions[image_dims[1]].copy()
182
- self.dimensions[image_dims[1]] = temp
183
-
184
- dataset = sidpy.Dataset.from_array(self.data_cube)
185
-
186
- for dim, axis in enumerate(self.dimensions):
187
- dataset.set_dimension(dim, axis)
188
-
189
- dataset.original_metadata = self.original_metadata
190
- if 'dimensional_calibrations' in dataset.original_metadata:
191
- for dim in dataset.original_metadata['dimensional_calibrations']:
192
- if dim['units'] == '':
193
- dim['units'] = 'pixels'
194
-
195
- dataset.quantity = 'intensity'
196
- dataset.units = 'counts'
197
- if 'description' in dataset.original_metadata:
198
- dataset.title = dataset.original_metadata['description']['title']
199
- else:
200
- if 'title' in dataset.original_metadata:
201
- dataset.title = dataset.original_metadata['title']
202
- else:
203
- path, file_name = os.path.split(self.__filename)
204
- basename, extension = os.path.splitext(file_name)
205
- dataset.title = basename
206
-
207
- if 'data_source' in dataset.original_metadata:
208
- dataset.source = dataset.original_metadata['data_source']
209
- else:
210
- dataset.source = 'NionReader'
211
-
212
- self.set_data_type(dataset)
213
- dataset.modality = 'STEM data'
214
- dataset.h5_dataset = None
215
-
216
- return dataset
217
-
218
- def set_data_type(self, dataset):
219
-
220
- spectral_dim = False
221
- for axis in dataset._axes.values():
222
- if axis.dimension_type == sidpy.DimensionTypes.SPECTRAL:
223
- spectral_dim = True
224
-
225
- if len(dataset.shape) > 3:
226
- raise NotImplementedError('Data_type not implemented yet')
227
- elif len(dataset.shape) == 3:
228
- if spectral_dim:
229
- dataset.data_type = 'spectrum_image'
230
- else:
231
- dataset.data_type = 'IMAGE_STACK'
232
- for dim, axis in dataset._axes.items():
233
- if axis.dimension_type != sidpy.DimensionTypes.SPATIAL:
234
- dataset.set_dimension(dim, sidpy.Dimension(axis.values,
235
- name='frame',
236
- units='frame',
237
- quantity='stack',
238
- dimension_type=sidpy.DimensionTypes.TEMPORAL))
239
- break
240
-
241
- elif len(dataset.shape) == 2:
242
- if spectral_dim:
243
- dataset.data_type = sidpy.DataTypes.SPECTRAL_IMAGE
244
- else:
245
- dataset.data_type = sidpy.DataTypes.IMAGE
246
- elif len(dataset.shape) == 1:
247
- if spectral_dim:
248
- dataset.data_type = sidpy.DataTypes.SPECTRUM
249
- else:
250
- dataset.data_type = sidpy.DataTypes.LINE_PLOT
251
-
252
- def get_dimensions(self):
253
- dic = self.original_metadata
254
-
255
- reciprocal_name = 'u'
256
- spatial_name = 'x'
257
-
258
- if 'dimensional_calibrations' in dic:
259
-
260
- for dim in range(len(dic['dimensional_calibrations'])):
261
- dimension_tags = dic['dimensional_calibrations'][dim]
262
- units = dimension_tags['units']
263
- values = (np.arange(self.data_cube.shape[int(dim)]) - dimension_tags['offset']) * dimension_tags['scale']
264
-
265
- if 'eV' == units:
266
- self.dimensions.append(sidpy.Dimension(values, name='energy_loss', units=units,
267
- quantity='energy-loss', dimension_type='spectral'))
268
- elif 'eV' in units:
269
- self.dimensions.append(sidpy.Dimension(values, name='energy', units=units,
270
- quantity='energy', dimension_type='spectral'))
271
- elif '1/' in units or units in ['mrad', 'rad']:
272
- self.dimensions.append(sidpy.Dimension(values, name=reciprocal_name, units=units,
273
- quantity='reciprocal distance',dimension_type='reciprocal'))
274
- reciprocal_name = chr(ord(reciprocal_name) + 1)
275
- elif 'nm' in units:
276
- self.dimensions.append(sidpy.Dimension(values, name=spatial_name, units=units,
277
- quantity='distance', dimension_type='spatial'))
278
- spatial_name = chr(ord(spatial_name) + 1)
279
- else:
280
- self.dimensions.append(sidpy.Dimension(values, name='generic', units='generic',
281
- quantity='generic', dimension_type='UNKNOWN'))
282
-
283
-
284
- def get_filename(self):
285
- return self.__filename
286
-
287
- filename = property(get_filename)
288
-
289
- def get_raw(self):
290
- return self.data
291
-
292
- data = property(get_raw)
293
-
294
- def get_tags(self):
295
- return self.original_metadata
296
-
297
- tags = property(get_tags)
pyTEMlib/nsi_reader.py DELETED
@@ -1,170 +0,0 @@
1
- # -*- coding: utf-8 -*-
2
- """
3
- Reader capable of reading one or all NSID datasets present in a given HDF5 file
4
-
5
- Created on Fri May 22 16:29:25 2020
6
-
7
- @author: Gerd Duscher, Suhas Somnath
8
- """
9
- from __future__ import division, print_function, absolute_import, unicode_literals
10
- from warnings import warn
11
- import sys
12
- import h5py
13
- import numpy as np
14
- """
15
- Python class for reading pyNSID files into sidpy Dataset
16
-
17
- will move to ScopeReader in pycroscopy
18
- """
19
-
20
- # pyNSID function
21
- import sidpy
22
-
23
- from pyTEMlib.hdf_utils import get_all_main
24
- if sys.version_info.major == 3:
25
- unicode = str
26
-
27
-
28
- class NSIDReader(sidpy.Reader):
29
-
30
- def __init__(self, h5_object):
31
- """
32
- h5_object - hdf5 dataset
33
- specific Main dataset that needs to be read into a sidpy Dataset.
34
- # If no path is specified, read all available NSID Main datasets
35
- """
36
-
37
- if not isinstance(h5_object.file, h5py.File):
38
- raise TypeError('we can only read h5py datasets')
39
-
40
- super(NSIDReader, self).__init__(file_path=h5_object.file.name)
41
-
42
- self.dset = None
43
- self.main_datasets = []
44
- if isinstance(h5_object, h5py.Dataset):
45
- self.dset = h5_object
46
- self.h5_group = self.dset.parent
47
-
48
- elif isinstance(h5_object, h5py.Group):
49
- self.h5_group = h5_object
50
- else:
51
- raise TypeError('we can only read h5py datasets')
52
-
53
- # Find all main datasets is done in read as the file may change between readings
54
- # DO NOT close HDF5 file. Dask array will fail if you do so.
55
- # TODO: sidpy.Dataset may need the ability to close a HDF5 file
56
- # Perhaps this would be done by reading all contents into memory..
57
-
58
- @staticmethod
59
- def read_h5py_dataset(dset):
60
-
61
- if not isinstance(dset, h5py.Dataset):
62
- raise TypeError('can only read single Dataset, use read_all_in_group or read_all function instead')
63
- # create vanilla dask array
64
- dataset = sidpy.Dataset.from_array(np.array(dset))
65
-
66
- if 'title' in dset.attrs:
67
- dataset.title = dset.attrs['title']
68
- else:
69
- dataset.title = dset.name
70
-
71
- if 'units' in dset.attrs:
72
- dataset.units = dset.attrs['units']
73
- else:
74
- dataset.units = 'generic'
75
-
76
- if 'quantity' in dset.attrs:
77
- dataset.quantity = dset.attrs['quantity']
78
- else:
79
- dataset.quantity = 'generic'
80
-
81
- if 'data_type' in dset.attrs:
82
- dataset.data_type = dset.attrs['data_type']
83
- else:
84
- dataset.data_type = 'generic'
85
-
86
- if 'modality' in dset.attrs:
87
- dataset.modality = dset.attrs['modality']
88
- else:
89
- dataset.modality = 'generic'
90
-
91
- if 'source' in dset.attrs:
92
- dataset.source = dset.attrs['source']
93
- else:
94
- dataset.source = 'generic'
95
-
96
- dataset.axes = {}
97
-
98
- for dim in range(np.array(dset).ndim):
99
- try:
100
- label = dset.dims[dim].keys()[-1]
101
-
102
- except ValueError:
103
- print('dimension {} not NSID type using generic'.format(dim))
104
- name = dset.dims[dim][label].name
105
- dim_dict = {'quantity': 'generic', 'units': 'generic', 'dimension_type': 'generic'}
106
- h5_dim_dict = dict(dset.parent[name].attrs)
107
- if 'quantity' in h5_dim_dict:
108
- dim_dict['quantity'] = h5_dim_dict['quantity']
109
- else:
110
- if 'NAME' in h5_dim_dict:
111
- param = h5_dim_dict['NAME'].decode("utf-8").split('[')
112
- # print(param)
113
- if len(param) == 1:
114
- if param[0] == 'frame':
115
- dim_dict['quantity'] = 'stack'
116
- dim_dict['units'] = 'frame'
117
- dim_dict['dimension_type'] = sidpy.DimensionTypes.TEMPORAL
118
- elif len(param) == 2:
119
- dim_dict['quantity'] = param[0]
120
- dim_dict['units'] = param[1][0:-1]
121
-
122
- if dim_dict['units'] == 'nm':
123
- dim_dict['dimension_type'] = sidpy.DimensionTypes.SPATIAL
124
- elif dim_dict['units'] == 'eV':
125
- dim_dict['dimension_type'] = sidpy.DimensionTypes.SPECTRAL
126
-
127
- if 'units' in h5_dim_dict:
128
- dim_dict['units'] = h5_dim_dict['units']
129
- if 'dimension_type' in h5_dim_dict:
130
- dim_dict['dimension_type'] = h5_dim_dict['dimension_type']
131
-
132
- dim_dict.update(dict(dset.parent[name].attrs))
133
-
134
- dataset.set_dimension(dim, sidpy.Dimension(np.array(dset.parent[name][()]), name=dset.dims[dim].label,
135
- quantity=dim_dict['quantity'], units=dim_dict['units'],
136
- dimension_type=dim_dict['dimension_type']))
137
-
138
- if 'metadata' in dset.parent:
139
- dataset.metadata = sidpy.base.dict_utils.nest_dict(dict(dset.parent['metadata'].attrs))
140
-
141
- dataset.metadata.update(dict(dset.attrs))
142
-
143
- dataset.original_metadata = {}
144
- if 'original_metadata' in dset.parent:
145
- dataset.original_metadata = sidpy.base.dict_utils.nest_dict(dict(dset.parent['original_metadata'].attrs))
146
-
147
- # hdf5 information
148
- dataset.h5_dataset = dset
149
-
150
- return dataset
151
-
152
- def can_read(self):
153
- pass
154
-
155
- def read(self):
156
- if not isinstance(self.h5_group, h5py.Group):
157
- raise TypeError('This function needs to be initialised with a hdf5 group or dataset first')
158
- list_of_main = get_all_main(self.h5_group, verbose=False)
159
-
160
- """
161
- Go through each of the identified
162
- """
163
- list_of_datasets = []
164
- for dset in list_of_main:
165
- list_of_datasets.append(self.read_h5py_dataset(dset))
166
-
167
- return list_of_datasets
168
-
169
- def read_all_in_group(self, recursive=True):
170
- pass