pyTEMlib 0.2020.11.0__py3-none-any.whl → 0.2024.8.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of pyTEMlib has been flagged as potentially problematic.

Files changed (59)
  1. pyTEMlib/__init__.py +11 -11
  2. pyTEMlib/animation.py +631 -0
  3. pyTEMlib/atom_tools.py +240 -222
  4. pyTEMlib/config_dir.py +57 -29
  5. pyTEMlib/core_loss_widget.py +658 -0
  6. pyTEMlib/crystal_tools.py +1255 -0
  7. pyTEMlib/diffraction_plot.py +756 -0
  8. pyTEMlib/dynamic_scattering.py +293 -0
  9. pyTEMlib/eds_tools.py +609 -0
  10. pyTEMlib/eels_dialog.py +749 -486
  11. pyTEMlib/{interactive_eels.py → eels_dialog_utilities.py} +1199 -1524
  12. pyTEMlib/eels_tools.py +2031 -1731
  13. pyTEMlib/file_tools.py +1276 -491
  14. pyTEMlib/file_tools_qt.py +193 -0
  15. pyTEMlib/graph_tools.py +1166 -450
  16. pyTEMlib/graph_viz.py +449 -0
  17. pyTEMlib/image_dialog.py +158 -0
  18. pyTEMlib/image_dlg.py +146 -0
  19. pyTEMlib/image_tools.py +1399 -956
  20. pyTEMlib/info_widget.py +933 -0
  21. pyTEMlib/interactive_image.py +1 -0
  22. pyTEMlib/kinematic_scattering.py +1196 -0
  23. pyTEMlib/low_loss_widget.py +176 -0
  24. pyTEMlib/microscope.py +61 -78
  25. pyTEMlib/peak_dialog.py +1047 -350
  26. pyTEMlib/peak_dlg.py +286 -248
  27. pyTEMlib/probe_tools.py +653 -202
  28. pyTEMlib/sidpy_tools.py +153 -129
  29. pyTEMlib/simulation_tools.py +104 -87
  30. pyTEMlib/version.py +6 -3
  31. pyTEMlib/xrpa_x_sections.py +20972 -0
  32. {pyTEMlib-0.2020.11.0.dist-info → pyTEMlib-0.2024.8.4.dist-info}/LICENSE +21 -21
  33. pyTEMlib-0.2024.8.4.dist-info/METADATA +93 -0
  34. pyTEMlib-0.2024.8.4.dist-info/RECORD +37 -0
  35. {pyTEMlib-0.2020.11.0.dist-info → pyTEMlib-0.2024.8.4.dist-info}/WHEEL +6 -5
  36. {pyTEMlib-0.2020.11.0.dist-info → pyTEMlib-0.2024.8.4.dist-info}/entry_points.txt +0 -1
  37. pyTEMlib/KinsCat.py +0 -2685
  38. pyTEMlib/__version__.py +0 -2
  39. pyTEMlib/data/TEMlibrc +0 -68
  40. pyTEMlib/data/edges_db.csv +0 -189
  41. pyTEMlib/data/edges_db.pkl +0 -0
  42. pyTEMlib/data/fparam.txt +0 -103
  43. pyTEMlib/data/microscopes.csv +0 -7
  44. pyTEMlib/data/microscopes.xml +0 -167
  45. pyTEMlib/data/path.txt +0 -1
  46. pyTEMlib/defaults_parser.py +0 -86
  47. pyTEMlib/dm3_reader.py +0 -609
  48. pyTEMlib/edges_db.py +0 -76
  49. pyTEMlib/eels_dlg.py +0 -240
  50. pyTEMlib/hdf_utils.py +0 -481
  51. pyTEMlib/image_tools1.py +0 -2194
  52. pyTEMlib/info_dialog.py +0 -227
  53. pyTEMlib/info_dlg.py +0 -205
  54. pyTEMlib/nion_reader.py +0 -293
  55. pyTEMlib/nsi_reader.py +0 -165
  56. pyTEMlib/structure_tools.py +0 -316
  57. pyTEMlib-0.2020.11.0.dist-info/METADATA +0 -20
  58. pyTEMlib-0.2020.11.0.dist-info/RECORD +0 -42
  59. {pyTEMlib-0.2020.11.0.dist-info → pyTEMlib-0.2024.8.4.dist-info}/top_level.txt +0 -0
pyTEMlib/file_tools.py CHANGED
@@ -1,491 +1,1276 @@
1
- ##################################
2
- #
3
- # 2018 01 31 Included Nion Swift files to be opened
4
- # major revision 2020 09 to include sidpy and pyNSID data formats
5
- #
6
- ##################################
7
-
8
- import numpy as np
9
- import h5py
10
- import os
11
-
12
- # Open/Save File dialog
13
- try:
14
- from PyQt5 import QtGui, QtWidgets, QtCore
15
- QT_available = True
16
- except ImportError:
17
- QT_available = False
18
-
19
- # =============================================================
20
- # Include sidpy and other pyTEMlib Libraries #
21
- # =============================================================
22
- from .config_dir import config_path
23
-
24
- from .nsi_reader import NSIDReader
25
- from .dm3_reader import DM3Reader
26
- from .nion_reader import NionReader
27
- import pyNSID
28
-
29
- import ipywidgets as widgets
30
- from IPython.display import display
31
-
32
- from .sidpy_tools import *
33
- # import sys
34
- # sys.path.insert(0, "../../sidpy/")
35
- import sidpy
36
-
37
-
38
- Dimension = sidpy.Dimension
39
- nest_dict = sidpy.base.dict_utils.nest_dict
40
-
41
- get_slope = sidpy.base.num_utils.get_slope
42
- __version__ = '10.30.2020'
43
-
44
- # TODO: new sidpy-version, uncomment and delete function below.
45
- # flatten_dict = sidpy.dict_utils.flatten_dict
46
-
47
-
48
- def flatten_dict(d, parent_key='', sep='-'):
49
- items = []
50
- for k, v in d.items():
51
- if sep in k:
52
- k = k.replace(sep, '_')
53
- new_key = parent_key + sep + k if parent_key else k
54
- if isinstance(v, dict):
55
- items.extend(flatten_dict(v, new_key, sep=sep).items())
56
- elif isinstance(v, list):
57
- for i in range(len(v)):
58
- if isinstance(v[i], dict):
59
- for kk in v[i]:
60
- items.append(('dim-'+kk+'-'+str(i), v[i][kk]))
61
- else:
62
- if type(v) != bytes:
63
- items.append((new_key, v))
64
- else:
65
- if type(v) != bytes:
66
- items.append((new_key, v))
67
- return dict(items)
68
-
69
-
70
- ####
71
- # General Open and Save Methods
72
- ####
73
-
74
-
75
- def get_qt_app():
76
- """
77
- will start QT Application if not running yet
78
-
79
- :returns: QApplication
80
-
81
- """
82
-
83
- # start qt event loop
84
- _instance = QtWidgets.QApplication.instance()
85
- if not _instance:
86
- # print('not_instance')
87
- _instance = QtWidgets.QApplication([])
88
-
89
- return _instance
90
-
91
-
92
- def get_last_path():
93
- try:
94
- fp = open(config_path + '\\path.txt', 'r')
95
- path = fp.read()
96
- fp.close()
97
- except IOError:
98
- path = ''
99
-
100
- if len(path) < 2:
101
- path = '.'
102
- return path
103
-
104
-
105
- def save_path(filename):
106
- if len(filename) > 1:
107
- fp = open(config_path + '\\path.txt', 'w')
108
- path, fname = os.path.split(filename)
109
- fp.write(path)
110
- fp.close()
111
- else:
112
- path = '.'
113
- return path
114
-
115
-
116
- def set_directory():
117
- path = get_last_path()
118
-
119
- try:
120
- get_qt_app()
121
- except BaseException:
122
- pass
123
-
124
- options = QtWidgets.QFileDialog.Options()
125
- options |= QtWidgets.QFileDialog.ShowDirsOnly
126
-
127
- fname = str(QtWidgets.QFileDialog.getExistingDirectory(None, "Select Directory", path, options=options))
128
-
129
- path = save_path(fname)
130
-
131
- return path
132
-
133
-
134
- def savefile_dialog(initial_file='*.hf5', file_types=None):
135
- """
136
- Opens a save dialog in QT and returns an "*.hf5" file.
137
- New now with initial file
138
- """
139
- # Check whether QT is available
140
- if not QT_available:
141
- print('No QT dialog')
142
- return None
143
- else:
144
- if file_types is None:
145
- file_types = "All files (*)"
146
- try:
147
- get_qt_app()
148
- except BaseException:
149
- pass
150
-
151
- # Determine last path used
152
- path = get_last_path()
153
-
154
- filename = sidpy.io.interface_utils.savefile_dialog(initial_file, file_types=file_types, file_path=path)
155
- save_path(filename)
156
-
157
- if len(filename) > 3:
158
- h5_file_name = get_h5_filename(filename)
159
- return h5_file_name
160
- else:
161
- return ''
162
-
163
-
164
- def openfile_dialog(file_types=None): # , multiple_files=False):
165
- """
166
- Opens a File dialog which is used in open_file() function
167
- This function uses tkinter or pyQt5.
168
- The app of the Gui has to be running for QT so Tkinter is a safer bet.
169
- In jupyter notebooks use %gui Qt early in the notebook.
170
-
171
-
172
- The file looks first for a path.txt file for the last directory you used.
173
-
174
- Parameters
175
- ----------
176
- file_types : string of the file type filter
177
-
178
-
179
- Returns
180
- -------
181
- filename : full filename with absolute path and extension as a string
182
-
183
- Examples
184
- --------
185
-
186
- >> import file_tools as ft
187
- >>
188
- >> filename = ft.openfile_dialog()
189
- >>
190
- >> print(filename)
191
-
192
-
193
- """
194
- # determine file types by extension
195
- if file_types is None:
196
- file_types = 'TEM files (*.dm3 *.qf3 *.ndata *.h5 *.hf5);;pyNSID files (*.hf5);;QF files ( *.qf3);;' \
197
- 'DM files (*.dm3);;Nion files (*.ndata *.h5);;All files (*)'
198
- elif file_types == 'pyNSID':
199
- file_types = 'pyNSID files (*.hf5);;TEM files (*.dm3 *.qf3 *.ndata *.h5 *.hf5);;QF files ( *.qf3);;' \
200
- 'DM files (*.dm3);;Nion files (*.ndata *.h5);;All files (*)'
201
-
202
- # file_types = [("TEM files",["*.dm*","*.hf*","*.ndata" ]),("pyUSID files","*.hf5"),("DM files","*.dm*"),
203
- # ("Nion files",["*.h5","*.ndata"]),("all files","*.*")]
204
- # Determine last path used
205
-
206
- path = get_last_path()
207
- _ = get_qt_app()
208
-
209
- filename = sidpy.io.interface_utils.openfile_dialog(file_types=file_types, file_path=path)
210
- #
211
- save_path(filename)
212
-
213
- return filename
214
-
215
-
216
- def open_file(filename=None, save_file=False, h5_group=None):
217
- """
218
- Opens a file if the extension is .hf5, .dm3 or .dm4
219
- If no filename is provided the qt open_file windows opens
220
-
221
- Everything will be stored in a NSID style hf5 file.
222
-
223
- Subroutines used:
224
- - NSIDReader
225
- - nsid.write_
226
- - get_main_tags
227
- - get_additional tags
228
-
229
- """
230
- get_qt_app()
231
- if filename is None:
232
- filename = openfile_dialog()
233
- if filename == '':
234
- return
235
- path, file_name = os.path.split(filename)
236
- basename, extension = os.path.splitext(file_name)
237
-
238
- if extension == '.hf5':
239
- h5_file = h5py.File(filename, mode='a')
240
-
241
- h5_group = get_start_channel(h5_file)
242
- print()
243
- if 'nDim_Data' in h5_group:
244
- h5_dataset = h5_group['nDim_Data']
245
-
246
- h5_dataset.attrs['title'] = basename
247
- reader = NSIDReader(h5_dataset)
248
- dataset = reader.read_h5py_dataset(h5_dataset)
249
- dataset.h5_file = h5_file
250
- elif 'Raw_Data' in h5_group:
251
- dataset = read_old_h5group(h5_group)
252
- dataset.h5_dataset = h5_group['Raw_Data']
253
- else:
254
- reader = NSIDReader(h5_file['Measurement_000/Channel_000'])
255
- dataset = reader.read()[-1]
256
- dataset.h5_file = h5_file
257
- return dataset
258
-
259
- elif extension in ['.dm3', '.dm4', '.ndata', '.h5']:
260
-
261
- # tags = open_file(filename)
262
- if extension in ['.dm3', '.dm4']:
263
- reader = DM3Reader(filename)
264
- elif extension in ['.ndata', '.h5']:
265
- reader = NionReader(filename)
266
- else:
267
- IOError('problem')
268
- path, file_name = os.path.split(filename)
269
- basename, _ = os.path.splitext(file_name)
270
- dset = reader.read()
271
- dset.title = basename.strip().replace('-', '_')
272
- dset.filename = basename.strip().replace('-', '_')
273
- dset.original_metadata = flatten_dict(dset.original_metadata)
274
-
275
- h5_filename = get_h5_filename(filename)
276
- h5_file = h5py.File(h5_filename, mode='a')
277
-
278
- if 'Measurement_000' in h5_file:
279
- print('could not write dataset to file, try saving it with ft.save()')
280
- else:
281
- if not isinstance(h5_group, h5py.Group):
282
- h5_group = h5_file.create_group('Measurement_000/Channel_000')
283
- dset.axes = dset._axes
284
- dset.attrs = {}
285
- h5_dataset = pyNSID.hdf_io.write_nsid_dataset(dset, h5_group)
286
- dset.original_metadata = nest_dict(dset.original_metadata)
287
-
288
- dset.h5_dataset = h5_dataset
289
- return dset
290
- else:
291
- print('file type not handled yet.')
292
- return
293
-
294
-
295
- def get_h5_filename(fname):
296
- path, filename = os.path.split(fname)
297
- basename, extension = os.path.splitext(filename)
298
- h5_file_name_original = os.path.join(path, basename + '.hf5')
299
- h5_file_name = h5_file_name_original
300
-
301
- if os.path.exists(os.path.abspath(h5_file_name_original)):
302
- count = 1
303
- h5_file_name = h5_file_name_original[:-4] + '-' + str(count) + '.hf5'
304
- while os.path.exists(os.path.abspath(h5_file_name)):
305
- count += 1
306
- h5_file_name = h5_file_name_original[:-4] + '-' + str(count) + '.hf5'
307
-
308
- if h5_file_name != h5_file_name_original:
309
- path, filename = os.path.split(h5_file_name)
310
- print('Cannot overwrite file. Using: ', filename)
311
- return str(h5_file_name)
312
-
313
-
314
- def get_start_channel(h5_file):
315
- return get_main_channel(h5_file)
316
-
317
-
318
- def get_main_channel(h5_file):
319
- current_channel = None
320
- if 'Measurement_000' in h5_file:
321
- if 'Measurement_000/Channel_000' in h5_file:
322
- current_channel = h5_file['Measurement_000/Channel_000']
323
- return current_channel
324
-
325
-
326
- def h5_tree(input):
327
- """
328
- Just a wrapper for the sidpy function print_tree,
329
- so that sidpy does not have to be loaded in notebook
330
- """
331
- if isinstance(input, sidpy.Dataset):
332
- if not isinstance(input.h5_dataset, h5py.Dataset):
333
- raise ValueError('sidpy dataset does not have an associated h5py dataset')
334
- h5_file = input.h5_dataset.file
335
- elif isinstance(input, h5py.Dataset):
336
- h5_file = input.file
337
- elif isinstance(input, (h5py.Group, h5py.File)):
338
- h5_file = input
339
- else:
340
- raise TypeError('should be a h5py.object or sidpy Dataset')
341
- sidpy.hdf_utils.print_tree(h5_file)
342
-
343
-
344
- def log_results(h5_group, dataset=None, attributes=None):
345
- if dataset is None:
346
- log_group = sidpy.hdf.prov_utils.create_indexed_group(h5_group, 'Log_')
347
- else:
348
- log_group = pyNSID.hdf_io.write_results(h5_group, dataset=dataset)
349
- if hasattr(dataset, 'meta_data'):
350
- metadata = sidpy.dict_utils.flatten_dict(dataset.meta_data)
351
- metadata_group = log_group.create_group('meta_data')
352
- for key, item in metadata.items():
353
- metadata_group.attrs[key] = item
354
- if 'analysis' in dataset.meta_data:
355
- log_group['analysis'] = dataset.meta_data['analysis']
356
-
357
- dataset.h5_dataset = log_group[dataset.title.replace('-', '_')]
358
- if attributes is not None:
359
- for key, item in attributes.items():
360
- if key not in log_group:
361
- log_group[key] = item
362
-
363
- return log_group
364
-
365
-
366
- ###############################################
367
- # Support old pyTEM file format
368
- ###############################################
369
-
370
- def read_old_h5group(current_channel):
371
- """
372
- make a sidpy dataset from pyUSID style hdf5 group
373
- input
374
- current_channel: h5_group
375
- return
376
- sidpy Dataset
377
- """
378
- dim_dir = []
379
- if 'nDim_Data' in current_channel:
380
- h5_dataset = current_channel['nDim_Data']
381
- reader = NSIDReader(h5_dataset)
382
- dataset = reader.read_h5py_dataset(h5_dataset)
383
- dataset.h5_file = current_channel.file
384
- return dataset
385
- elif 'Raw_Data' in current_channel:
386
- if 'image_stack' in current_channel:
387
- sid_dataset = sidpy.Dataset.from_array(np.swapaxes(current_channel['image_stack'][()], 2, 0))
388
- dim_dir = ['SPATIAL', 'SPATIAL', 'TEMPORAL']
389
- elif 'data' in current_channel:
390
- sid_dataset = sidpy.Dataset.from_array(current_channel['data'][()])
391
- dim_dir = ['SPATIAL', 'SPATIAL']
392
- else:
393
- size_x = int(current_channel['spatial_size_x'][()])
394
- size_y = int(current_channel['spatial_size_y'][()])
395
- if 'spectral_size_x' in current_channel:
396
- size_s = int(current_channel['spectral_size_x'][()])
397
- else:
398
- size_s = 0
399
- data = np.reshape(current_channel['Raw_Data'][()], (size_x, size_y, size_s))
400
- sid_dataset = sidpy.Dataset.from_array(data)
401
- if size_x > 1:
402
- dim_dir.append('SPATIAL')
403
- if size_y > 1:
404
- dim_dir.append('SPATIAL')
405
- if size_s > 1:
406
- dim_dir.append('SPECTRAL')
407
- sid_dataset.h5_dataset = current_channel['Raw_Data']
408
-
409
- elif 'data' in current_channel:
410
- sid_dataset = sidpy.Dataset.from_array(current_channel['data'][()])
411
- dim_dir = ['SPATIAL', 'SPATIAL']
412
- sid_dataset.h5_dataset = current_channel['data']
413
- else:
414
- return
415
-
416
- if 'SPATIAL' in dim_dir:
417
- if 'SPECTRAL' in dim_dir:
418
- sid_dataset.data_type = sidpy.DataTypes.SPECTRAL_IMAGE
419
- elif 'TEMPORAL' in dim_dir:
420
- sid_dataset.data_type = sidpy.DataTypes.IMAGE_STACK
421
- else:
422
- sid_dataset.data_type = sidpy.DataTypes.IMAGE
423
- else:
424
- sid_dataset.data_type = sidpy.DataTypes.SPECTRUM
425
-
426
- sid_dataset.quantity = 'intensity'
427
- sid_dataset.units = 'counts'
428
- if 'analysis' in current_channel:
429
- sid_dataset.source = current_channel['analysis'][()]
430
-
431
- set_dimensions(sid_dataset, current_channel)
432
-
433
- return sid_dataset
434
-
435
-
436
- def set_dimensions(dset, current_channel):
437
- """
438
- Attaches correct dimension from old pyTEMlib style.
439
- Input:
440
- dset: sidpy Dataset
441
- current_channel: hdf5 group
442
- """
443
- dim = 0
444
- if dset.data_type == sidpy.DataTypes.IMAGE_STACK:
445
- dset.set_dimension(dim, sidpy.Dimension(np.arange(dset.shape[dim]), name='frame',
446
- units='frame', quantity='stack',
447
- dimension_type='TEMPORAL'))
448
- dim += 1
449
- if 'IMAGE' in dset.data_type:
450
-
451
- if 'spatial_scale_x' in current_channel:
452
- scale_x = current_channel['spatial_scale_x'][()]
453
- else:
454
- scale_x = 1
455
- if 'spatial_units' in current_channel:
456
- units_x = current_channel['spatial_units'][()]
457
- if len(units_x) < 2:
458
- units_x = 'pixel'
459
- else:
460
- units_x = 'generic'
461
- if 'spatial_scale_y' in current_channel:
462
- scale_y = current_channel['spatial_scale_y'][()]
463
- else:
464
- scale_y = 0
465
- dset.set_dimension(dim, sidpy.Dimension('x', np.arange(dset.shape[dim])*scale_x,
466
- units=units_x, quantity='Length',
467
- dimension_type='SPATIAL'))
468
- dim += 1
469
- dset.set_dimension(dim, sidpy.Dimension('y', np.arange(dset.shape[dim])*scale_y,
470
- units=units_x, quantity='Length',
471
- dimension_type='SPATIAL'))
472
- dim += 1
473
- if dset.data_type in [sidpy.DataTypes.SPECTRUM, sidpy.DataTypes.SPECTRAL_IMAGE]:
474
- if 'spectral_scale_x' in current_channel:
475
- scale_s = current_channel['spectral_scale_x'][()]
476
- else:
477
- scale_s = 1.0
478
- if 'spectral_units_x' in current_channel:
479
- units_s = current_channel['spectral_units_x']
480
- else:
481
- units_s = 'eV'
482
-
483
- if 'spectral_offset_x' in current_channel:
484
- offset = current_channel['spectral_offset_x']
485
- else:
486
- offset = 0.0
487
- dset.set_dimension(dim, sidpy.Dimension(np.arange(dset.shape[dim]) * scale_s + offset,
488
- name='energy',
489
- units=units_s,
490
- quantity='energy_loss',
491
- dimension_type='SPECTRAL'))
1
+ """file_tools: All tools to load and save data
2
+
3
+ ##################################
4
+
5
+ 2018 01 31 Included Nion Swift files to be opened
6
+ major revision 2020 09 to include sidpy and pyNSID data formats
7
+ 2022 change to ase format for structures: this changed the default unit of length to Angstrom!!!
8
+
9
+ ##################################
10
+ """
11
+
12
+ import numpy as np
13
+ import h5py
14
+ import os
15
+ import pickle
16
+
17
+ # For structure files of various flavor for instance POSCAR and other theory packages
18
+ import ase.io
19
+
20
+ # =============================================
21
+ # Include pycroscopy libraries #
22
+ # =============================================
23
+ import SciFiReaders
24
+ import pyNSID
25
+ import sidpy
26
+ import ipywidgets as widgets
27
+ from IPython.display import display
28
+
29
+ # =============================================
30
+ # Include pyTEMlib libraries #
31
+ # =============================================
32
+ import pyTEMlib.crystal_tools
33
+ from pyTEMlib.config_dir import config_path
34
+ from pyTEMlib.sidpy_tools import *
35
+
36
+
37
+
38
+ Qt_available = True
39
+ try:
40
+ from PyQt5 import QtCore, QtWidgets, QtGui
41
+ except ModuleNotFoundError:
42
+ print('Qt dialogs are not available')
43
+ Qt_available = False
44
+
45
+ Dimension = sidpy.Dimension
46
+
47
+ get_slope = sidpy.base.num_utils.get_slope
48
+ __version__ = '2022.3.3'
49
+
50
+ from traitlets import Unicode, Bool, validate, TraitError
51
+ import ipywidgets
52
+
53
+
54
+ @ipywidgets.register
55
+ class FileWidget(ipywidgets.DOMWidget):
56
+ """Widget to select directories or widgets from a list
57
+
58
+ Works in Google Colab.
59
+ The widget displays a Nion file under the title it carries in Nion Swift software,
60
+ because the raw file name is otherwise incomprehensible
61
+
62
+ Attributes
63
+ ----------
64
+ dir_name: str
65
+ name of starting directory
66
+ extension: list of str
67
+ extensions of files to be listed in widget
68
+
69
+ Methods
70
+ -------
71
+ get_directory
72
+ set_options
73
+ get_file_name
74
+
75
+ Example
76
+ -------
77
+ >>from google.colab import drive
78
+ >>drive.mount("/content/drive")
79
+ >>file_list = pyTEMlib.file_tools.FileWidget()
80
+ next code cell:
81
+ >>dataset = pyTEMlib.file_tools.open_file(file_list.file_name)
82
+
83
+ """
84
+
85
+ def __init__(self, dir_name=None, extension=['*'], sum_frames=False):
86
+ self.save_path = False
87
+ self.dir_dictionary = {}
88
+ self.dir_list = ['.', '..']
89
+ self.display_list = ['.', '..']
90
+ self.sum_frames = sum_frames
91
+
92
+ self.dir_name = '.'
93
+ if dir_name is None:
94
+ self.dir_name = get_last_path()
95
+ self.save_path = True
96
+ elif os.path.isdir(dir_name):
97
+ self.dir_name = dir_name
98
+
99
+ self.get_directory(self.dir_name)
100
+ self.dir_list = ['.']
101
+ self.extensions = extension
102
+ self.file_name = ''
103
+ self.datasets = {}
104
+ self.dataset = None
105
+
106
+ self.select_files = widgets.Select(
107
+ options=self.dir_list,
108
+ value=self.dir_list[0],
109
+ description='Select file:',
110
+ disabled=False,
111
+ rows=10,
112
+ layout=widgets.Layout(width='70%')
113
+ )
114
+
115
+ select_button = widgets.Button(description='Select Main',
116
+ layout=widgets.Layout(width='auto', grid_area='header'),
117
+ style=widgets.ButtonStyle(button_color='lightblue'))
118
+
119
+ add_button = widgets.Button(description='Add',
120
+ layout=widgets.Layout(width='auto', grid_area='header'),
121
+ style=widgets.ButtonStyle(button_color='lightblue'))
122
+
123
+ self.path_choice = widgets.Dropdown(options=['None'],
124
+ value='None',
125
+ description='directory:',
126
+ disabled=False,
127
+ button_style='',
128
+ layout=widgets.Layout(width='90%'))
129
+ self.dataset_list = ['None']
130
+ self.loaded_datasets = widgets.Dropdown(options=self.dataset_list,
131
+ value=self.dataset_list[0],
132
+ description='loaded datasets:',
133
+ disabled=False,
134
+ button_style='')
135
+
136
+ self.set_options()
137
+ ui = widgets.VBox([self.path_choice, self.select_files, widgets.HBox([select_button, add_button,
138
+ self.loaded_datasets])])
139
+ display(ui)
140
+
141
+ self.select_files.observe(self.get_file_name, names='value')
142
+ self.path_choice.observe(self.set_dir, names='value')
143
+
144
+ select_button.on_click(self.select_main)
145
+ add_button.on_click(self.add_dataset)
146
+ self.loaded_datasets.observe(self.select_dataset)
147
+
148
+ def select_main(self, value=0):
149
+ self.datasets = {}
150
+ #self.loaded_datasets.value = self.dataset_list[0]
151
+ self.dataset_list = []
152
+ self.datasets = open_file(self.file_name, sum_frames=self.sum_frames)
153
+ self.dataset_list = []
154
+ for key in self.datasets.keys():
155
+ self.dataset_list.append(f'{key}: {self.datasets[key].title}')
156
+ self.loaded_datasets.options = self.dataset_list
157
+ self.loaded_datasets.value = self.dataset_list[0]
158
+
159
+ self.dataset = self.datasets[list(self.datasets.keys())[0]]
160
+
161
+ self.selected_dataset = self.dataset
162
+
163
+ def add_dataset(self, value=0):
164
+ key = add_dataset_from_file(self.datasets, self.file_name, 'Channel')
165
+ self.dataset_list.append(f'{key}: {self.datasets[key].title}')
166
+ self.loaded_datasets.options = self.dataset_list
167
+ self.loaded_datasets.value = self.dataset_list[-1]
168
+
169
+ def get_directory(self, directory=None):
170
+ self.dir_name = directory
171
+ self.dir_dictionary = {}
172
+ self.dir_list = []
173
+ self.dir_list = ['.', '..'] + os.listdir(directory)
174
+
175
+ def set_dir(self, value=0):
176
+ self.dir_name = self.path_choice.value
177
+ self.select_files.index = 0
178
+ self.set_options()
179
+
180
+ def select_dataset(self, value=0):
181
+
182
+ key = self.loaded_datasets.value.split(':')[0]
183
+ if key != 'None':
184
+ self.selected_dataset = self.datasets[key]
185
+ self.selected_key = key
186
+
187
+ def set_options(self):
188
+ self.dir_name = os.path.abspath(os.path.join(self.dir_name, self.dir_list[self.select_files.index]))
189
+ dir_list = os.listdir(self.dir_name)
190
+ file_dict = update_directory_list(self.dir_name)
191
+
192
+ sort = np.argsort(file_dict['directory_list'])
193
+ self.dir_list = ['.', '..']
194
+ self.display_list = ['.', '..']
195
+ for j in sort:
196
+ self.display_list.append(f" * {file_dict['directory_list'][j]}")
197
+ self.dir_list.append(file_dict['directory_list'][j])
198
+
199
+ sort = np.argsort(file_dict['display_file_list'])
200
+
201
+ for i, j in enumerate(sort):
202
+ if '--' in dir_list[j]:
203
+ self.display_list.append(f" {i:3} {file_dict['display_file_list'][j]}")
204
+ else:
205
+ self.display_list.append(f" {i:3} {file_dict['display_file_list'][j]}")
206
+ self.dir_list.append(file_dict['file_list'][j])
207
+
208
+ self.dir_label = os.path.split(self.dir_name)[-1] + ':'
209
+ self.select_files.options = self.display_list
210
+
211
+ path = self.dir_name
212
+ old_path = ' '
213
+ path_list = []
214
+ while path != old_path:
215
+ path_list.append(path)
216
+ old_path = path
217
+ path = os.path.split(path)[0]
218
+ self.path_choice.options = path_list
219
+ self.path_choice.value = path_list[0]
220
+
221
+ def get_file_name(self, b):
222
+
223
+ if os.path.isdir(os.path.join(self.dir_name, self.dir_list[self.select_files.index])):
224
+ self.set_options()
225
+
226
+ elif os.path.isfile(os.path.join(self.dir_name, self.dir_list[self.select_files.index])):
227
+ self.file_name = os.path.join(self.dir_name, self.dir_list[self.select_files.index])
228
+
229
+
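The intended interactive flow for FileWidget, sketched from the class docstring and the handlers above (the extension filter and the variable names are illustrative, not part of the API):

    import pyTEMlib.file_tools as ft

    # first notebook cell: browse to the file of interest
    file_widget = ft.FileWidget(extension=['dm3'])

    # next cell, after clicking a file and pressing 'Select Main':
    main_dataset = file_widget.selected_dataset   # the sidpy.Dataset picked in the widget
    all_channels = file_widget.datasets           # dict of every dataset loaded so far

'Add' appends further files to the same dictionary via add_dataset_from_file(), so related acquisitions can be analyzed together.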
230
+ class ChooseDataset(object):
231
+ """Widget to select dataset object """
232
+
233
+ def __init__(self, input_object, show_dialog=True):
234
+ self.datasets = None
235
+ if isinstance(input_object, sidpy.Dataset):
236
+ if isinstance(input_object.h5_dataset, h5py.Dataset):
237
+ self.current_channel = input_object.h5_dataset.parent
238
+ elif isinstance(input_object, h5py.Group):
239
+ self.current_channel = input_object
240
+ elif isinstance(input_object, h5py.Dataset):
241
+ self.current_channel = input_object.parent
242
+ elif isinstance(input_object, dict):
243
+ self.datasets = input_object
244
+ else:
245
+ raise ValueError('Need hdf5 group or sidpy Dataset to determine image choices')
246
+ self.dataset_names = []
247
+ self.dataset_list = []
248
+ self.dataset_type = None
249
+ self.dataset = None
250
+ if not isinstance(self.datasets, dict):
251
+ self.reader = SciFiReaders.NSIDReader(self.current_channel.file.filename)
252
+ else:
253
+ self.reader = None
254
+ self.get_dataset_list()
255
+ self.select_image = widgets.Dropdown(options=self.dataset_list,
256
+ value=self.dataset_list[0],
257
+ description='select dataset:',
258
+ disabled=False,
259
+ button_style='')
260
+ if show_dialog:
261
+ display(self.select_image)
262
+
263
+ self.select_image.observe(self.set_dataset, names='value')
264
+ self.set_dataset(0)
265
+ self.select_image.index = (len(self.dataset_names) - 1)
266
+
267
+ def get_dataset_list(self):
268
+ """ Get by Log number sorted list of datasets"""
269
+ if not isinstance(self.datasets, dict):
270
+ dataset_list = self.reader.read()
271
+ self.datasets = {}
272
+ for dataset in dataset_list:
273
+ self.datasets[dataset.title] = dataset
274
+ order = []
275
+ keys = []
276
+ for title, dset in self.datasets.items():
277
+ if isinstance(dset, sidpy.Dataset):
278
+ if self.dataset_type is None or dset.data_type == self.dataset_type:
279
+ if 'Log' in title:
280
+ order.append(2)
281
+ else:
282
+ order.append(0)
283
+ keys.append(title)
284
+ for index in np.argsort(order):
285
+ self.dataset_names.append(keys[index])
286
+ self.dataset_list.append(keys[index] + ': ' + self.datasets[keys[index]].title)
287
+
288
+ def set_dataset(self, b):
289
+ index = self.select_image.index
290
+ self.key = self.dataset_names[index]
291
+ self.dataset = self.datasets[self.key]
292
+ self.dataset.title = self.dataset.title.split('/')[-1]
293
+
294
+
295
+
296
+ def add_to_dict(file_dict, name):
297
+ full_name = os.path.join(file_dict['directory'], name)
298
+ basename, extension = os.path.splitext(name)
299
+ size = os.path.getsize(full_name) * 2 ** -20
300
+ display_name = name
301
+ if len(extension) == 0:
302
+ display_file_list = f' {name} - {size:.1f} MB'
303
+ elif extension == '.hf5':  # extension includes the dot; extension[0] == 'hf5' could never match
304
+ if extension in ['.hf5']:
305
+ display_file_list = f" {name} - {size:.1f} MB"
306
+ elif extension in ['.h5', '.ndata']:
307
+ try:
308
+ reader = SciFiReaders.NionReader(full_name)
309
+ dataset_nion = reader.read()
310
+ display_name = dataset_nion.title
311
+ display_file_list = f" {display_name}{extension} - {size:.1f} MB"
312
+ except Exception:  # not a readable Nion file; fall back to the plain listing
313
+ display_file_list = f" {name} - {size:.1f} MB"
314
+ else:
315
+ display_file_list = f' {name} - {size:.1f} MB'
316
+ file_dict[name] = {'display_string': display_file_list, 'basename': basename, 'extension': extension,
317
+ 'size': size, 'display_name': display_name}
318
+
319
+
320
+ def update_directory_list(directory_name):
321
+ dir_list = os.listdir(directory_name)
322
+
323
+ if '.pyTEMlib.files.pkl' in dir_list:
324
+ with open(os.path.join(directory_name, '.pyTEMlib.files.pkl'), 'rb') as f:
325
+ file_dict = pickle.load(f)
326
+ if directory_name != file_dict['directory']:
327
+ print('directory has moved since it was last read')
328
+ file_dict['directory'] = directory_name
329
+ dir_list.remove('.pyTEMlib.files.pkl')
330
+ else:
331
+ file_dict = {'directory': directory_name}
332
+
333
+ # add new files
334
+ file_dict['file_list'] = []
335
+ file_dict['display_file_list'] = []
336
+ file_dict['directory_list'] = []
337
+
338
+ for name in dir_list:
339
+ if os.path.isfile(os.path.join(file_dict['directory'], name)):
340
+ if name not in file_dict:
341
+ add_to_dict(file_dict, name)
342
+ file_dict['file_list'].append(name)
343
+ file_dict['display_file_list'].append(file_dict[name]['display_string'])
344
+ else:
345
+ file_dict['directory_list'].append(name)
346
+ remove_item = []
347
+
348
+ # delete items of deleted files
349
+ save_pickle = False
350
+
351
+ for name in file_dict.keys():
352
+ if name not in dir_list and name not in ['directory', 'file_list', 'directory_list', 'display_file_list']:
353
+ remove_item.append(name)
354
+ else:
355
+ if 'extension' in file_dict[name]:
356
+ save_pickle = True
357
+ for item in remove_item:
358
+ file_dict.pop(item)
359
+
360
+ if save_pickle:
361
+ with open(os.path.join(file_dict['directory'], '.pyTEMlib.files.pkl'), 'wb') as f:
362
+ pickle.dump(file_dict, f)
363
+ return file_dict
364
+
365
+
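update_directory_list() both scans a directory and maintains the hidden .pyTEMlib.files.pkl cache, so browsing does not have to re-open every Nion file just to recover its display title. A sketch of the returned dictionary (the directory is illustrative):

    import pyTEMlib.file_tools as ft

    file_dict = ft.update_directory_list('.')
    file_dict['directory_list']      # sub-directory names
    file_dict['file_list']           # plain file names
    file_dict['display_file_list']   # annotated entries such as 'name - 1.2 MB'
    # per-file records, e.g. file_dict['data.dm3']['size'], hold the cached details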
366
+ ####
367
+ # General Open and Save Methods
368
+ ####
369
+
370
+ def get_last_path():
371
+ """Returns the path of the file last opened"""
372
+ try:
373
+ fp = open(os.path.join(config_path, 'path.txt'), 'r')
374
+ path = fp.read()
375
+ fp.close()
376
+ except IOError:
377
+ path = ''
378
+
379
+ if len(path) < 2:
380
+ path = '.'
381
+ else:
382
+ if not os.path.exists(path):
383
+ path = '.'
384
+ return path
385
+
386
+
387
+ def save_path(filename):
388
+ """Save path of last opened file"""
389
+
390
+ if len(filename) > 1:
391
+ fp = open(os.path.join(config_path, 'path.txt'), 'w')
392
+ path, fname = os.path.split(filename)
393
+ fp.write(path)
394
+ fp.close()
395
+ else:
396
+ path = '.'
397
+ return path
398
+
399
+
400
+ if Qt_available:
401
+ def get_qt_app():
402
+ """
403
+ will start QT Application if not running yet
404
+
405
+ :returns: QApplication
406
+
407
+ """
408
+
409
+ # start qt event loop
410
+ _instance = QtWidgets.QApplication.instance()
411
+ if not _instance:
412
+ # print('not_instance')
413
+ _instance = QtWidgets.QApplication([])
414
+
415
+ return _instance
416
+
417
+
418
+ def open_file_dialog_qt(file_types=None): # , multiple_files=False):
419
+ """Opens a File dialog which is used in open_file() function
420
+
421
+ This function uses pyQt5.
422
+ The app of the Gui has to be running for QT. Tkinter does not run on Macs at this point in time.
423
+ In jupyter notebooks use %gui Qt early in the notebook.
424
+
425
+ The file looks first for a path.txt file for the last directory you used.
426
+
427
+ Parameters
428
+ ----------
429
+ file_types : string
430
+ file type filter in the form of '*.hf5'
431
+
432
+
433
+ Returns
434
+ -------
435
+ filename : string
436
+ full filename with absolute path and extension as a string
437
+
438
+ Example
439
+ -------
440
+ >> import file_tools as ft
441
+ >> filename = ft.open_file_dialog_qt()
442
+ >> print(filename)
443
+
444
+ """
445
+ """will start QT Application if not running yet and returns QApplication """
446
+
447
+ # determine file types by extension
448
+ if file_types is None:
449
+ file_types = 'TEM files (*.dm3 *.dm4 *.emd *.ndata *.h5 *.hf5);;pyNSID files (*.hf5);;QF files ( *.qf3);;' \
450
+ 'DM files (*.dm3 *.dm4);;Nion files (*.ndata *.h5);;All files (*)'
451
+ elif file_types == 'pyNSID':
452
+ file_types = 'pyNSID files (*.hf5);;TEM files (*.dm3 *.dm4 *.qf3 *.ndata *.h5 *.hf5);;QF files ( *.qf3);;' \
453
+ 'DM files (*.dm3 *.dm4);;Nion files (*.ndata *.h5);;All files (*)'
454
+
455
+ # file_types = [("TEM files",["*.dm*","*.hf*","*.ndata" ]),("pyNSID files","*.hf5"),("DM files","*.dm*"),
456
+ # ("Nion files",["*.h5","*.ndata"]),("all files","*.*")]
457
+
458
+ # Determine last path used
459
+ path = get_last_path()
460
+
461
+ if Qt_available:
462
+ _ = get_qt_app()
463
+ filename = sidpy.io.interface_utils.openfile_dialog_QT(file_types=file_types, file_path=path)
464
+ save_path(filename)
465
+ return filename
466
+
467
+
468
+ def save_file_dialog_qt(file_types=None): # , multiple_files=False):
469
+ """Opens a File dialog which is used in open_file() function
470
+
471
+ This function uses pyQt5.
472
+ The app of the Gui has to be running for QT. Tkinter does not run on Macs at this point in time.
473
+ In jupyter notebooks use %gui Qt early in the notebook.
474
+
475
+ The file looks first for a path.txt file for the last directory you used.
476
+
477
+ Parameters
478
+ ----------
479
+ file_types : string
480
+ file type filter in the form of '*.hf5'
481
+
482
+
483
+ Returns
484
+ -------
485
+ filename : string
486
+ full filename with absolute path and extension as a string
487
+
488
+ Example
489
+ -------
490
+ >> import file_tools as ft
491
+ >> filename = ft.save_file_dialog_qt()
492
+ >> print(filename)
493
+
494
+ """
495
+ """will start QT Application if not running yet and returns QApplication """
496
+
497
+ # determine file types by extension
498
+ if file_types is None:
499
+ file_types = 'pyNSID files (*.hf5);;TEM files (*.dm3 *.dm4 *.qf3 *.ndata *.h5 *.hf5);;QF files ( *.qf3);;' \
500
+ 'DM files (*.dm3 *.dm4);;Nion files (*.ndata *.h5);;All files (*)'
501
+ elif file_types == 'TEM':
502
+ file_types = 'TEM files (*.dm3 *.dm4 *.emd *.ndata *.h5 *.hf5);;pyNSID files (*.hf5);;QF files ( *.qf3);;' \
503
+ 'DM files (*.dm3 *.dm4);;Nion files (*.ndata *.h5);;All files (*)'
504
+
505
+ # Determine last path used
506
+ path = get_last_path()
507
+
508
+ if Qt_available:
509
+ _ = get_qt_app()
510
+ filename = sidpy.io.interface_utils.savefile_dialog(file_types=file_types, file_path=path)
511
+ save_path(filename)
512
+ return filename
513
+
514
+
515
+ def save_dataset(dataset, filename=None, h5_group=None):
516
+ """ Saves a dataset to a file in pyNSID format
517
+ Parameters
518
+ ----------
519
+ dataset: sidpy.Dataset
520
+ the data
521
+ filename: str
522
+ name of the file to write; if None, a QT file dialog will open
523
+ h5_group: h5py.Group
524
+ not used yet
525
+ """
526
+ if filename is None:
527
+ filename = save_file_dialog_qt()
528
+ h5_filename = get_h5_filename(filename)
529
+ h5_file = h5py.File(h5_filename, mode='a')
530
+ path, file_name = os.path.split(filename)
531
+ basename, _ = os.path.splitext(file_name)
532
+
533
+ if isinstance(dataset, dict):
534
+ h5_group = save_dataset_dictionary(h5_file, dataset)
535
+ return h5_group
536
+
537
+ elif isinstance(dataset, sidpy.Dataset):
538
+ h5_dataset = save_single_dataset(h5_file, dataset, h5_group=h5_group)
539
+ return h5_dataset.parent
540
+ else:
541
+ raise TypeError('Only sidpy.datasets or dictionaries can be saved with pyTEMlib')
542
+
543
+
544
+ def save_single_dataset(h5_file, dataset, h5_group=None):
545
+ if h5_group is None:
546
+ h5_measurement_group = sidpy.hdf.prov_utils.create_indexed_group(h5_file, 'Measurement_')
547
+ h5_group = sidpy.hdf.prov_utils.create_indexed_group(h5_measurement_group, 'Channel_')
548
+
549
+ elif isinstance(h5_group, str):
550
+ if h5_group not in h5_file:
551
+ h5_group = h5_file.create_group(h5_group)
552
+ else:
553
+ if h5_group[-1] == '/':
554
+ h5_group = h5_group[:-1]
555
+
556
+ channel = h5_group.split('/')[-1]
557
+ h5_measurement_group = h5_group[:-len(channel)]
558
+ h5_group = sidpy.hdf.prov_utils.create_indexed_group(h5_group, 'Channel_')
559
+ else:
560
+ raise ValueError('h5_group needs to be string or None')
561
+
562
+ h5_dataset = pyNSID.hdf_io.write_nsid_dataset(dataset, h5_group)
563
+ dataset.h5_dataset = h5_dataset
564
+ h5_dataset.file.flush()
565
+ return h5_dataset
566
+
567
+
568
+ def save_dataset_dictionary(h5_file, datasets):
569
+ h5_measurement_group = sidpy.hdf.prov_utils.create_indexed_group(h5_file, 'Measurement_')
570
+ for key, dataset in datasets.items():
571
+ if key[-1] == '/':
572
+ key = key[:-1]
573
+ if isinstance(dataset, sidpy.Dataset):
574
+ h5_group = h5_measurement_group.create_group(key)
575
+ h5_dataset = pyNSID.hdf_io.write_nsid_dataset(dataset, h5_group)
576
+ dataset.h5_dataset = h5_dataset
577
+ h5_dataset.file.flush()
578
+ elif isinstance(dataset, dict):
579
+ sidpy.hdf.hdf_utils.write_dict_to_h5_group(h5_measurement_group, dataset, key)
580
+ else:
581
+ print('could not save item ', key, 'of dataset dictionary')
582
+ return h5_measurement_group
583
+
584
+
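save_dataset() accepts either a single sidpy.Dataset or a dictionary of datasets, mirroring what open_file() returns below. A minimal sketch (array content, channel key, and file name are illustrative):

    import numpy as np
    import sidpy
    import pyTEMlib.file_tools as ft

    image = sidpy.Dataset.from_array(np.zeros((16, 16)))
    image.data_type = 'image'
    datasets = {'Channel_000': image}
    h5_measurement_group = ft.save_dataset(datasets, filename='result.hf5')
    h5_measurement_group.file.close()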
585
+ def h5_group_to_dict(group, group_dict=None):
586
+ if group_dict is None:  # avoid a shared mutable default argument
587
+ group_dict = {}
588
+ if not isinstance(group, h5py.Group):
589
+ raise TypeError('we need a h5py group to read from')
590
+
591
+ group_dict[group.name.split('/')[-1]] = dict(group.attrs)
592
+ for key in group.keys():
593
+ h5_group_to_dict(group[key], group_dict[group.name.split('/')[-1]])
594
+ return group_dict
595
+
596
+
597
+ def open_file(filename=None, h5_group=None, write_hdf_file=False, sum_frames=False): # save_file=False,
598
+ """Opens a file if the extension is .hf5, .ndata, .dm3 or .dm4
599
+
600
+ If no filename is provided the QT open_file windows opens (if QT_available==True)
601
+ Everything will be stored in a NSID style hf5 file.
602
+ Subroutines used:
603
+ - NSIDReader
604
+ - nsid.write_
605
+ - get_main_tags
606
+ - get_additional tags
607
+
608
+ Parameters
609
+ ----------
610
+ filename: str
611
+ name of file to be opened, if filename is None, a QT file dialog will try to open
612
+ h5_group: h5py.Group
613
+ not used yet #TODO: provide hook for usage of external chosen group
614
+ write_hdf_file: bool
615
+ set to false so that sidpy dataset will not be written to hf5-file automatically
616
+
617
+ Returns
618
+ -------
619
+ sidpy.Dataset
620
+ sidpy dataset with location of hdf5 dataset as attribute
621
+
622
+ """
623
+ if filename is None:
624
+ selected_file = open_file_dialog_qt()
625
+ filename = selected_file
626
+
627
+ else:
628
+ if not isinstance(filename, str):
629
+ raise TypeError('filename must be a non-empty string or None (None opens a QT file dialog)')
630
+ elif filename == '':
631
+ raise ValueError('filename must be a non-empty string or None (None opens a QT file dialog)')
632
+
633
+ path, file_name = os.path.split(filename)
634
+ basename, extension = os.path.splitext(file_name)
635
+
636
+ if extension == '.hf5':
637
+ reader = SciFiReaders.NSIDReader(filename)
638
+ datasets = reader.read()
639
+ if len(datasets) < 1:
640
+ print('no hdf5 dataset found in file')
641
+ return {}
642
+ else:
643
+ dataset_dict = {}
644
+ for index, dataset in enumerate(datasets):
645
+ title = dataset.title.split('/')[2]
646
+ dataset.title = dataset.title.split('/')[-1]
647
+ dataset_dict[title] = dataset
648
+ if index == 0:
649
+ file = datasets[0].h5_dataset.file
650
+ master_group = datasets[0].h5_dataset.parent.parent.parent
651
+ for key in master_group.keys():
652
+ if key not in dataset_dict:
653
+ dataset_dict[key] = h5_group_to_dict(master_group[key])
654
+ if not write_hdf_file:
655
+ file.close()
656
+ return dataset_dict
657
+ elif extension in ['.dm3', '.dm4', '.ndata', '.ndata1', '.h5', '.emd', '.emi', '.edaxh5']:
658
+ # tags = open_file(filename)
659
+ if extension in ['.dm3', '.dm4']:
660
+ reader = SciFiReaders.DMReader(filename)
661
+ elif extension in ['.emi']:
662
+ try:
663
+ import hyperspy.api as hs
664
+ s = hs.load(filename)
665
+ dataset_dict = {}
666
+ spectrum_number = 0
667
+ if not isinstance(s, list):
668
+ s = [s]
669
+ for index, datum in enumerate(s):
670
+ dset = SciFiReaders.convert_hyperspy(datum)
671
+ if datum.data.ndim == 1:
672
+ dset.title = dset.title + f'_{spectrum_number}_Spectrum'
673
+ spectrum_number += 1
674
+ elif datum.data.ndim == 3:
675
+ dset.title = dset.title + '_SI'
676
+ dset = dset.T
677
+ dset.title = dset.title[11:]
678
+ dataset_dict[f'Channel_{index:03d}'] = dset
679
+ return dataset_dict
680
+ except ImportError:
681
+ print('This file type needs hyperspy to be installed to be able to be read')
682
+ return
683
+ elif extension == '.emd':
684
+ reader = SciFiReaders.EMDReader(filename, sum_frames=sum_frames)
685
+
686
+ elif 'edax' in extension.lower():
687
+ if 'h5' in extension:
688
+ reader = SciFiReaders.EDAXReader(filename)
689
+
690
+ elif extension in ['.ndata', '.h5']:
691
+ reader = SciFiReaders.NionReader(filename)
692
+
693
+ else:
694
+ raise NotImplementedError('extension not supported')
695
+
696
+ path, file_name = os.path.split(filename)
697
+ basename, _ = os.path.splitext(file_name)
698
+ if extension != '.emi':
699
+ dset = reader.read()
700
+
701
+ if extension in ['.dm3', '.dm4']:
702
+ title = (basename.strip().replace('-', '_')).split('/')[-1]
703
+ if not isinstance(dset, dict):
704
+ print('Please use new SciFiReaders Package for full functionality')
705
+ if isinstance(dset, sidpy.Dataset):
706
+ dset = [dset]
707
+
708
+ if isinstance(dset, dict):
709
+ dataset_dict = dset
710
+
711
+ elif isinstance(dset, list):
712
+ if len(dset) < 1:
713
+ print('no dataset found in file')
714
+ return {}
715
+ else:
716
+ if 'PageSetup' in dset[0].original_metadata:
717
+ del dset[0].original_metadata['PageSetup']
718
+ dset[0].original_metadata['original_title'] = title
719
+ dataset_dict = {}
720
+ for index, dataset in enumerate(dset):
721
+ if extension == '.emi':
722
+ if 'experiment' in dataset.metadata:
723
+ if 'detector' in dataset.metadata['experiment']:
724
+ dataset.title = dataset.metadata['experiment']['detector']
725
+ dataset.filename = basename.strip()
726
+ # read_essential_metadata(dataset)
727
+ dataset.metadata['filename'] = filename
728
+ dataset_dict[f'Channel_{index:03}'] = dataset
729
+ else:
730
+ dset.filename = basename.strip().replace('-', '_')
731
+ read_essential_metadata(dset)
732
+ dset.metadata['filename'] = filename
733
+ dataset_dict = {'Channel_000': dset}
734
+
735
+ # Temporary fix for DualEELS spectra in DM files
736
+ # TODO: fix upstream in SciFiReaders
737
+ for dset in dataset_dict.values():
738
+ if 'single_exposure_time' in dset.metadata['experiment']:
739
+ dset.metadata['experiment']['exposure_time'] = dset.metadata['experiment']['number_of_frames'] * \
740
+ dset.metadata['experiment']['single_exposure_time']
741
+ if write_hdf_file:
742
+ h5_master_group = save_dataset(dataset_dict, filename=filename)
743
+
744
+ save_path(filename)
745
+ return dataset_dict
746
+ else:
747
+ print('file type not handled yet.')
748
+ return
749
+
750
+
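The central entry point now returns a dictionary of sidpy.Dataset objects keyed by channel, rather than a single dataset as in 0.2020.11.0. A minimal sketch (the file name and the choice of the first channel are illustrative):

    import pyTEMlib.file_tools as ft

    datasets = ft.open_file('spectrum.dm3')        # e.g. {'Channel_000': <sidpy.Dataset>}
    main_dataset = datasets[list(datasets)[0]]
    print(main_dataset.data_type)
    print(main_dataset.metadata['experiment'])     # filled in by read_essential_metadata()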
751
+ ################################################################
752
+ # Read Functions
753
+ #################################################################
754
+
755
+ def read_essential_metadata(dataset):
756
+ """Updates dataset.metadata['experiment'] with essential information read from original metadata
757
+
758
+ This depends on whether it is originally a nion or a dm3 file
759
+ """
760
+ if not isinstance(dataset, sidpy.Dataset):
761
+ raise TypeError("we need a sidpy.Dataset")
762
+ experiment_dictionary = {}
763
+ if 'metadata' in dataset.original_metadata:
764
+ if 'hardware_source' in dataset.original_metadata['metadata']:
765
+ experiment_dictionary = read_nion_image_info(dataset.original_metadata)
766
+ if 'DM' in dataset.original_metadata:
767
+ experiment_dictionary = read_dm3_info(dataset.original_metadata)
768
+ if 'experiment' not in dataset.metadata:
769
+ dataset.metadata['experiment'] = {}
770
+
771
+ dataset.metadata['experiment'].update(experiment_dictionary)
772
+
773
+
774
+ def read_dm3_info(original_metadata):
775
+ """Read essential parameter from original_metadata originating from a dm3 file"""
776
+ if not isinstance(original_metadata, dict):
777
+ raise TypeError('We need a dictionary to read')
778
+
779
+ if 'DM' not in original_metadata:
780
+ return {}
781
+ if 'ImageTags' not in original_metadata:
782
+ return {}
783
+ exp_dictionary = original_metadata['ImageTags']
784
+ experiment = {}
785
+ if 'EELS' in exp_dictionary:
786
+ if 'Acquisition' in exp_dictionary['EELS']:
787
+ for key, item in exp_dictionary['EELS']['Acquisition'].items():
788
+ if 'Exposure' in key:
789
+ _, units = key.split('(')
790
+ if units[:-1] == 's':
791
+ experiment['single_exposure_time'] = item
792
+ if 'Integration' in key:
793
+ _, units = key.split('(')
794
+ if units[:-1] == 's':
795
+ experiment['exposure_time'] = item
796
+ if 'frames' in key:
797
+ experiment['number_of_frames'] = item
798
+
799
+ if 'Experimental Conditions' in exp_dictionary['EELS']:
800
+ for key, item in exp_dictionary['EELS']['Experimental Conditions'].items():
801
+ if 'Convergence' in key:
802
+ experiment['convergence_angle'] = item
803
+ if 'Collection' in key:
804
+ # print(item)
805
+ # for val in item.values():
806
+ experiment['collection_angle'] = item
807
+ if 'number_of_frames' not in experiment:
808
+ experiment['number_of_frames'] = 1
809
+ if 'exposure_time' not in experiment:
810
+ if 'single_exposure_time' in experiment:
811
+ experiment['exposure_time'] = experiment['number_of_frames'] * experiment['single_exposure_time']
812
+
813
+ else:
814
+ if 'Acquisition' in exp_dictionary:
815
+ if 'Parameters' in exp_dictionary['Acquisition']:
816
+ if 'High Level' in exp_dictionary['Acquisition']['Parameters']:
817
+ if 'Exposure (s)' in exp_dictionary['Acquisition']['Parameters']['High Level']:
818
+ experiment['exposure_time'] = exp_dictionary['Acquisition']['Parameters']['High Level'][
819
+ 'Exposure (s)']
820
+
821
+ if 'Microscope Info' in exp_dictionary:
822
+ if 'Microscope' in exp_dictionary['Microscope Info']:
823
+ experiment['microscope'] = exp_dictionary['Microscope Info']['Microscope']
824
+ if 'Voltage' in exp_dictionary['Microscope Info']:
825
+ experiment['acceleration_voltage'] = exp_dictionary['Microscope Info']['Voltage']
826
+
827
+ return experiment
828
+
829
+
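read_dm3_info() leans on the DM convention that tag names carry their unit in parentheses, e.g. 'Exposure (s)'. A hand-made illustration (all tag names and values below are invented for the example):

    import pyTEMlib.file_tools as ft

    original_metadata = {'DM': {}, 'ImageTags': {'EELS': {'Acquisition': {
        'Exposure (s)': 0.1, 'Integration time (s)': 1.0, 'Number of frames': 10}}}}
    ft.read_dm3_info(original_metadata)
    # -> {'single_exposure_time': 0.1, 'exposure_time': 1.0, 'number_of_frames': 10}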
830
+ def read_nion_image_info(original_metadata):
831
+ """Read essential parameter from original_metadata originating from a dm3 file"""
832
+ if not isinstance(original_metadata, dict):
833
+ raise TypeError('We need a dictionary to read')
834
+ if 'metadata' not in original_metadata:
835
+ return {}
836
+ if 'hardware_source' not in original_metadata['metadata']:
837
+ return {}
838
+ if 'ImageScanned' not in original_metadata['metadata']['hardware_source']:
839
+ return {}
840
+
841
+ exp_dictionary = original_metadata['metadata']['hardware_source']['ImageScanned']
842
+ experiment = exp_dictionary
843
+ if 'autostem' in exp_dictionary:
844
+ pass
845
+ return experiment  # without an explicit return this function handed None to its callers
846
+
847
+
848
+ def get_h5_filename(fname):
849
+ """Determines file name of hdf5 file for newly converted data file"""
850
+
851
+ path, filename = os.path.split(fname)
852
+ basename, extension = os.path.splitext(filename)
853
+ h5_file_name_original = os.path.join(path, basename + '.hf5')
854
+ h5_file_name = h5_file_name_original
855
+
856
+ if os.path.exists(os.path.abspath(h5_file_name_original)):
857
+ count = 1
858
+ h5_file_name = h5_file_name_original[:-4] + '-' + str(count) + '.hf5'
859
+ while os.path.exists(os.path.abspath(h5_file_name)):
860
+ count += 1
861
+ h5_file_name = h5_file_name_original[:-4] + '-' + str(count) + '.hf5'
862
+
863
+ if h5_file_name != h5_file_name_original:
864
+ path, filename = os.path.split(h5_file_name)
865
+ print('Cannot overwrite file. Using: ', filename)
866
+ return str(h5_file_name)
867
+
868
+
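get_h5_filename() never overwrites an existing conversion; it appends a running index instead:

    import pyTEMlib.file_tools as ft

    ft.get_h5_filename('data.dm3')
    # -> 'data.hf5' in an empty directory,
    # -> 'data-1.hf5' if 'data.hf5' already exists, then 'data-2.hf5', ...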
869
+ def get_start_channel(h5_file):
870
+ """ Legacy for get start channel"""
871
+
872
+ DeprecationWarning('Deprecated: use get_main_channel instead')
873
+ return get_main_channel(h5_file)
874
+
875
+
876
+ def get_main_channel(h5_file):
877
+ """Returns name of first channel group in hdf5-file"""
878
+
879
+ current_channel = None
880
+ if 'Measurement_000' in h5_file:
881
+ if 'Measurement_000/Channel_000' in h5_file:
882
+ current_channel = h5_file['Measurement_000/Channel_000']
883
+ return current_channel
884
+
885
+
886
+ def h5_tree(input_object):
887
+ """Just a wrapper for the sidpy function print_tree,
888
+
889
+ so that sidpy does not have to be loaded in notebook
890
+
891
+ """
892
+
893
+ if isinstance(input_object, sidpy.Dataset):
894
+ if not isinstance(input_object.h5_dataset, h5py.Dataset):
895
+ raise ValueError('sidpy dataset does not have an associated h5py dataset')
896
+ h5_file = input_object.h5_dataset.file
897
+ elif isinstance(input_object, h5py.Dataset):
898
+ h5_file = input_object.file
899
+ elif isinstance(input_object, (h5py.Group, h5py.File)):
900
+ h5_file = input_object
901
+ else:
902
+ raise TypeError('should be a h5py.object or sidpy Dataset')
903
+ sidpy.hdf_utils.print_tree(h5_file)
904
+
905
+
906
+ def log_results(h5_group, dataset=None, attributes=None):
907
+ """Log Results in hdf5-file
908
+
909
+ Saves either a sidpy.Dataset or dictionary in a hdf5-file.
910
+ The group for the result will consist of 'Log_' and a running index.
911
+ That group will be placed in h5_group.
912
+
913
+ Parameters
914
+ ----------
915
+ h5_group: h5py.Group or sidpy.Dataset
916
+ group in which the result group will be stored
917
+ dataset: sidpy.Dataset or None
918
+ sidpy dataset to be stored
919
+ attributes: dict
920
+ dictionary containing results that are not based on a sidpy.Dataset
921
+
922
+ Returns
923
+ -------
924
+ log_group: h5py.Group
925
+ group in hdf5 file with results.
926
+
927
+ """
928
+ if isinstance(h5_group, sidpy.Dataset):
929
+ h5_group = h5_group.h5_dataset
930
+ if not isinstance(h5_group, h5py.Dataset):
931
+ raise TypeError('the h5_dataset attribute of the sidpy.Dataset is not a valid h5py.Dataset')
932
+ h5_group = h5_group.parent.parent
933
+
934
+ if not isinstance(h5_group, h5py.Group):
935
+ raise TypeError('Need a valid h5py.Group for logging results')
936
+
937
+ if dataset is None:
938
+ log_group = sidpy.hdf.prov_utils.create_indexed_group(h5_group, 'Log_')
939
+ else:
940
+ log_group = pyNSID.hdf_io.write_results(h5_group, dataset=dataset)
941
+ if hasattr(dataset, 'meta_data'):
942
+ if 'analysis' in dataset.meta_data:
943
+ log_group['analysis'] = dataset.meta_data['analysis']
944
+ if hasattr(dataset, 'structures'):
945
+ for structure in dataset.structures.values():
946
+ h5_add_crystal_structure(log_group, structure)
947
+
948
+ dataset.h5_dataset = log_group[dataset.title.replace('-', '_')][dataset.title.replace('-', '_')]
949
+ if attributes is not None:
950
+ for key, item in attributes.items():
951
+ if not isinstance(item, dict):
952
+ log_group[key] = attributes[key]
953
+ else:
954
+ log_group.create_group(key)
955
+ sidpy.hdf.hdf_utils.write_simple_attrs(log_group[key], attributes[key])
956
+ return log_group
957
+
958
+
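A sketch of the logging pattern: a result derived from a dataset goes into an indexed Log_ group next to the original data (the 'processed' dataset is illustrative; main_dataset is assumed to have an attached h5_dataset, e.g. after saving with write_hdf_file=True):

    import numpy as np
    import sidpy
    import pyTEMlib.file_tools as ft

    result = sidpy.Dataset.from_array(np.array(main_dataset))
    result.title = 'processed'
    log_group = ft.log_results(main_dataset, dataset=result)
    print(log_group.name)    # e.g. '/Measurement_000/Log_000'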
959
+ def add_dataset_from_file(datasets, filename=None, key_name='Log', single_dataset=True):
960
+ """Add dataset to datasets dictionary
961
+
962
+ Parameters
963
+ ----------
964
+ datasets: dict
965
+ dictionary of sidpy datasets to which the dataset read from file is added
966
+ filename: str, default: None,
967
+ name of file to open; if None, a dialog will appear
968
+ key_name: str, default: 'Log'
969
+ name for key in dictionary with running number being added
970
+
971
+ Returns
972
+ -------
973
+ key_name: str
974
+ actual last used name of dictionary key
975
+ """
976
+
977
+ datasets2 = open_file(filename=filename)
978
+ first_dataset = datasets2[list(datasets2)[0]]
979
+ if isinstance(first_dataset, sidpy.Dataset):
980
+
981
+ index = 0
982
+ for key in datasets.keys():
983
+ if key_name in key:
984
+ if int(key[-3:]) >= index:
985
+ index = int(key[-3:])+1
986
+ if single_dataset:
987
+ datasets[key_name+f'_{index:03}'] = first_dataset
988
+ else:
989
+ for dataset in datasets2.values():
990
+ datasets[key_name+f'_{index:03}'] = dataset
991
+ index += 1
992
+ index -= 1
993
+ else:
994
+ return None
995
+
996
+ return f'{key_name}_{index:03}'
997
+
998
+
999
+ # ##
1000
+ # Crystal Structure Read and Write
1001
+ # ##
1002
+ def read_poscar(file_name=None):
1003
+ """
1004
+ Open a POSCAR file from VASP
1005
+ If no file name is provided an open file dialog to select a POSCAR file appears
1006
+
1007
+ Parameters
1008
+ ----------
1009
+ file_name: str
1010
+ if None is provided an open file dialog will appear
1011
+
1012
+ Return
1013
+ ------
1014
+ crystal: ase.Atoms
1015
+ crystal structure in ase format
1016
+ """
1017
+
1018
+ if file_name is None:
1019
+ file_name = open_file_dialog_qt('POSCAR (POSCAR*.txt);;All files (*)')
1020
+
1021
+ # use ase package to read file
1022
+ base = os.path.basename(file_name)
1023
+ base_name = os.path.splitext(base)[0]
1024
+ crystal = ase.io.read(file_name, format='vasp', parallel=False)
1025
+
1026
+ # make dictionary and plot structure (not essential for further notebook)
1027
+ crystal.info = {'title': base_name}
1028
+ return crystal
1029
+
1030
+
1031
+ def read_cif(file_name=None, verbose=False): # open file dialog to select cif file
1032
+ """
1033
+ Open a cif file
1034
+ If no file name is provided an open file dialog to select a cif file appears
1035
+
1036
+ Parameters
1037
+ ----------
1038
+ file_name: str
1039
+ if None is provided an open file dialog will appear
1040
+ verbose: bool
1041
+
1042
+ Return
1043
+ ------
1044
+ crystal: ase.Atoms
1045
+ crystal structure in ase format
1046
+ """
1047
+
1048
+ if file_name is None:
1049
+ file_name = open_file_dialog_qt('cif (*.cif);;All files (*)')
1050
+ # use ase package to read file
1051
+
1052
+ base = os.path.basename(file_name)
1053
+ base_name = os.path.splitext(base)[0]
1054
+ crystal = ase.io.read(file_name, format='cif', store_tags=True, parallel=False)
1055
+
1056
+ # make dictionary and plot structure (not essential for further notebook)
1057
+ if crystal.info is None:
1058
+ crystal.info = {'title': base_name}
1059
+ crystal.info.update({'title': base_name})
1060
+ if verbose:
1061
+ print('Opened cif file for ', crystal.get_chemical_formula())
1062
+
1063
+ return crystal
1064
+
1065
+
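Both structure readers return an ase.Atoms object; note the unit change flagged in the module docstring: since the 2022 move to ase, lengths are in Angstrom. A sketch that skips the file dialog (the file name is illustrative):

    import pyTEMlib.file_tools as ft

    crystal = ft.read_cif('SrTiO3.cif', verbose=True)
    print(crystal.get_chemical_formula())
    print(crystal.cell)      # unit cell, in Angstrom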
1066
+ def h5_add_crystal_structure(h5_file, input_structure, name=None):
1067
+ """Write crystal structure to NSID file"""
1068
+
1069
+ if isinstance(input_structure, ase.Atoms):
1070
+
1071
+ crystal_tags = pyTEMlib.crystal_tools.get_dictionary(input_structure)
1072
+ if crystal_tags['metadata'] == {}:
1073
+ crystal_tags['metadata'] = {'title': input_structure.get_chemical_formula()}
1074
+ elif isinstance(input_structure, dict):
1075
+ crystal_tags = input_structure
1076
+ else:
1077
+ raise TypeError('Need a dictionary or an ase.Atoms object with ase installed')
1078
+
1079
+ structure_group = sidpy.hdf.prov_utils.create_indexed_group(h5_file, 'Structure_')
1080
+
1081
+ for key, item in crystal_tags.items():
1082
+ if not isinstance(item, dict):
1083
+ structure_group[key] = item
1084
+
1085
+ if 'base' in crystal_tags:
1086
+ structure_group['relative_positions'] = crystal_tags['base']
1087
+ if 'title' in crystal_tags:
1088
+ structure_group['title'] = str(crystal_tags['title'])
1089
+ structure_group['_' + crystal_tags['title']] = str(crystal_tags['title'])
1090
+
1091
+ # ToDo: Save all of info dictionary
1092
+ if 'metadata' in crystal_tags:  # crystal_tags here; input_structure may be an ase.Atoms object
1093
+ structure_group.create_group('metadata')
1094
+ sidpy.hdf.hdf_utils.write_simple_attrs(structure_group['metadata'], crystal_tags['metadata'])
1095
+
1096
+ h5_file.file.flush()
1097
+ return structure_group
1098
+
1099
+
1100
+ def h5_add_to_structure(structure_group, crystal_tags):
1101
+ """add dictionary as structure group"""
1102
+
1103
+ for key in crystal_tags:
1104
+ if key in structure_group.keys():
1105
+ print(key, ' not written; use new name')
1106
+ else:
1107
+ structure_group[key] = crystal_tags[key]
1108
+
1109
+
1110
+ def h5_get_crystal_structure(structure_group):
1111
+ """Read crystal structure from NSID file
1112
+ Any additional information will be read as dictionary into the info attribute of the ase.Atoms object
1113
+
1114
+ Parameters
1115
+ ----------
1116
+ structure_group: h5py.Group
1117
+ location in hdf5 file to where the structure information is stored
1118
+
1119
+ Returns
1120
+ -------
1121
+ atoms: ase.Atoms object
1122
+ crystal structure in ase format
1123
+
1124
+ """
1125
+
1126
+ crystal_tags = {'unit_cell': structure_group['unit_cell'][()],
1127
+ 'base': structure_group['relative_positions'][()],
1128
+ 'title': structure_group['title'][()]}
1129
+ if '2D' in structure_group:
1130
+ crystal_tags['2D'] = structure_group['2D'][()]
1131
+ elements = structure_group['elements'][()]
1132
+ crystal_tags['elements'] = []
1133
+ for e in elements:
1134
+ crystal_tags['elements'].append(e.astype(str, copy=False))
1135
+
1136
+ atoms = pyTEMlib.crystal_tools.atoms_from_dictionary(crystal_tags)
1137
+ if 'metadata' in structure_group:
1138
+ atoms.info = sidpy.hdf.hdf_utils.h5_group_to_dict(structure_group)
1139
+
1140
+ if 'zone_axis' in structure_group:
1141
+ atoms.info.setdefault('experiment', {})['zone_axis'] = structure_group['zone_axis'][()]
1142
+ # ToDo: Read all of info dictionary
1143
+ return atoms
1144
+
1145
+
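Round-tripping a structure through the hf5 file then looks like this sketch (h5_file is assumed to be an open, writable h5py.File; the silicon-like cell is illustrative):

    import ase
    import pyTEMlib.file_tools as ft

    atoms = ase.Atoms('Si2', cell=[3.84, 3.84, 3.84], pbc=True,
                      scaled_positions=[[0, 0, 0], [0.25, 0.25, 0.25]])
    structure_group = ft.h5_add_crystal_structure(h5_file, atoms)
    atoms_back = ft.h5_get_crystal_structure(structure_group)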
1146
+ ###############################################
1147
+ # Support old pyTEM file format
1148
+ ###############################################
1149
+
1150
+ def read_old_h5group(current_channel):
1151
+ """Make a sidpy.Dataset from pyUSID style hdf5 group
1152
+
1153
+ Parameters
1154
+ ----------
1155
+ current_channel: h5_group
1156
+
1157
+ Returns
1158
+ -------
1159
+ sidpy.Dataset
1160
+ """
1161
+
1162
+ dim_dir = []
1163
+ if 'nDim_Data' in current_channel:
1164
+ h5_dataset = current_channel['nDim_Data']
1165
+ reader = pyNSID.NSIDReader(h5_dataset.file.filename)
1166
+ dataset = reader.read(h5_dataset)
1167
+ dataset.h5_file = current_channel.file
1168
+ return dataset
1169
+ elif 'Raw_Data' in current_channel:
1170
+ if 'image_stack' in current_channel:
1171
+ sid_dataset = sidpy.Dataset.from_array(np.swapaxes(current_channel['image_stack'][()], 2, 0))
1172
+ dim_dir = ['SPATIAL', 'SPATIAL', 'TEMPORAL']
1173
+ elif 'data' in current_channel:
1174
+ sid_dataset = sidpy.Dataset.from_array(current_channel['data'][()])
1175
+ dim_dir = ['SPATIAL', 'SPATIAL']
1176
+ else:
1177
+ size_x = int(current_channel['spatial_size_x'][()])
1178
+ size_y = int(current_channel['spatial_size_y'][()])
1179
+ if 'spectral_size_x' in current_channel:
1180
+ size_s = int(current_channel['spectral_size_x'][()])
1181
+ else:
1182
+ size_s = 0
1183
+ data = np.reshape(current_channel['Raw_Data'][()], (size_x, size_y, size_s))
1184
+ sid_dataset = sidpy.Dataset.from_array(data)
1185
+ if size_x > 1:
1186
+ dim_dir.append('SPATIAL')
1187
+ if size_y > 1:
1188
+ dim_dir.append('SPATIAL')
1189
+ if size_s > 1:
1190
+ dim_dir.append('SPECTRAL')
1191
+ sid_dataset.h5_dataset = current_channel['Raw_Data']
1192
+
1193
+ elif 'data' in current_channel:
1194
+ sid_dataset = sidpy.Dataset.from_array(current_channel['data'][()])
1195
+ dim_dir = ['SPATIAL', 'SPATIAL']
1196
+ sid_dataset.h5_dataset = current_channel['data']
1197
+ else:
1198
+ return
1199
+
1200
+ if 'SPATIAL' in dim_dir:
1201
+ if 'SPECTRAL' in dim_dir:
1202
+ sid_dataset.data_type = sidpy.DataType.SPECTRAL_IMAGE
1203
+ elif 'TEMPORAL' in dim_dir:
1204
+ sid_dataset.data_type = sidpy.DataType.IMAGE_STACK
1205
+ else:
1206
+ sid_dataset.data_type = sidpy.DataType.IMAGE
1207
+ else:
1208
+ sid_dataset.data_type = sidpy.DataType.SPECTRUM
1209
+
1210
+ sid_dataset.quantity = 'intensity'
1211
+ sid_dataset.units = 'counts'
1212
+ if 'analysis' in current_channel:
1213
+ sid_dataset.source = current_channel['analysis'][()]
1214
+
1215
+ set_dimensions(sid_dataset, current_channel)
1216
+
1217
+ return sid_dataset
1218
+
1219
+
1220
+ def set_dimensions(dset, current_channel):
1221
+ """Attaches correct dimension from old pyTEMlib style.
1222
+
1223
+ Parameters
1224
+ ----------
1225
+ dset: sidpy.Dataset
1226
+ current_channel: hdf5.Group
1227
+ """
1228
+ dim = 0
1229
+ if dset.data_type == sidpy.DataType.IMAGE_STACK:
1230
+ dset.set_dimension(dim, sidpy.Dimension(np.arange(dset.shape[dim]), name='frame',
1231
+ units='frame', quantity='stack',
1232
+ dimension_type='TEMPORAL'))
1233
+ dim += 1
1234
+ if 'IMAGE' in dset.data_type:
1235
+
1236
+ if 'spatial_scale_x' in current_channel:
1237
+ scale_x = current_channel['spatial_scale_x'][()]
1238
+ else:
1239
+ scale_x = 1
1240
+ if 'spatial_units' in current_channel:
1241
+ units_x = current_channel['spatial_units'][()]
1242
+ if len(units_x) < 2:
1243
+ units_x = 'pixel'
1244
+ else:
1245
+ units_x = 'generic'
1246
+ if 'spatial_scale_y' in current_channel:
1247
+ scale_y = current_channel['spatial_scale_y'][()]
1248
+ else:
1249
+ scale_y = 0
1250
+ dset.set_dimension(dim, sidpy.Dimension('x', np.arange(dset.shape[dim])*scale_x,
1251
+ units=units_x, quantity='Length',
1252
+ dimension_type='SPATIAL'))
1253
+ dim += 1
1254
+ dset.set_dimension(dim, sidpy.Dimension('y', np.arange(dset.shape[dim])*scale_y,
1255
+ units=units_x, quantity='Length',
1256
+ dimension_type='SPATIAL'))
1257
+ dim += 1
1258
+ if dset.data_type in [sidpy.DataType.SPECTRUM, sidpy.DataType.SPECTRAL_IMAGE]:
1259
+ if 'spectral_scale_x' in current_channel:
1260
+ scale_s = current_channel['spectral_scale_x'][()]
1261
+ else:
1262
+ scale_s = 1.0
1263
+ if 'spectral_units_x' in current_channel:
1264
+ units_s = current_channel['spectral_units_x']
1265
+ else:
1266
+ units_s = 'eV'
1267
+
1268
+ if 'spectral_offset_x' in current_channel:
1269
+ offset = current_channel['spectral_offset_x']
1270
+ else:
1271
+ offset = 0.0
1272
+ dset.set_dimension(dim, sidpy.Dimension(np.arange(dset.shape[dim]) * scale_s + offset,
1273
+ name='energy',
1274
+ units=units_s,
1275
+ quantity='energy_loss',
1276
+ dimension_type='SPECTRAL'))
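The same sidpy.Dimension pattern applies when building a dataset by hand, e.g. a single EELS spectrum (the dispersion and offset values are illustrative):

    import numpy as np
    import sidpy

    spectrum = sidpy.Dataset.from_array(np.zeros(1024))
    spectrum.data_type = 'spectrum'
    spectrum.quantity = 'intensity'
    spectrum.units = 'counts'
    spectrum.set_dimension(0, sidpy.Dimension(np.arange(1024) * 0.05 + 100.0,
                                              name='energy', units='eV',
                                              quantity='energy_loss',
                                              dimension_type='SPECTRAL'))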