pymodaq 4.1.5__py3-none-any.whl → 4.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pymodaq might be problematic. Click here for more details.

Files changed (79) hide show
  1. pymodaq/__init__.py +41 -4
  2. pymodaq/control_modules/daq_move.py +32 -73
  3. pymodaq/control_modules/daq_viewer.py +73 -98
  4. pymodaq/control_modules/daq_viewer_ui.py +2 -1
  5. pymodaq/control_modules/move_utility_classes.py +17 -7
  6. pymodaq/control_modules/utils.py +153 -5
  7. pymodaq/control_modules/viewer_utility_classes.py +31 -20
  8. pymodaq/dashboard.py +23 -5
  9. pymodaq/examples/tcp_client.py +97 -0
  10. pymodaq/extensions/__init__.py +4 -0
  11. pymodaq/extensions/bayesian/__init__.py +2 -0
  12. pymodaq/extensions/bayesian/bayesian_optimisation.py +673 -0
  13. pymodaq/extensions/bayesian/utils.py +403 -0
  14. pymodaq/extensions/daq_scan.py +4 -4
  15. pymodaq/extensions/daq_scan_ui.py +2 -1
  16. pymodaq/extensions/pid/pid_controller.py +12 -7
  17. pymodaq/extensions/pid/utils.py +9 -26
  18. pymodaq/extensions/utils.py +3 -0
  19. pymodaq/post_treatment/load_and_plot.py +42 -19
  20. pymodaq/resources/VERSION +1 -1
  21. pymodaq/resources/config_template.toml +9 -24
  22. pymodaq/resources/setup_plugin.py +1 -1
  23. pymodaq/utils/config.py +103 -5
  24. pymodaq/utils/daq_utils.py +35 -134
  25. pymodaq/utils/data.py +614 -95
  26. pymodaq/utils/enums.py +17 -1
  27. pymodaq/utils/factory.py +2 -2
  28. pymodaq/utils/gui_utils/custom_app.py +5 -2
  29. pymodaq/utils/gui_utils/dock.py +33 -4
  30. pymodaq/utils/gui_utils/utils.py +14 -1
  31. pymodaq/utils/h5modules/backends.py +9 -1
  32. pymodaq/utils/h5modules/data_saving.py +254 -57
  33. pymodaq/utils/h5modules/saving.py +1 -0
  34. pymodaq/utils/leco/daq_move_LECODirector.py +172 -0
  35. pymodaq/utils/leco/daq_xDviewer_LECODirector.py +170 -0
  36. pymodaq/utils/leco/desktop.ini +2 -0
  37. pymodaq/utils/leco/director_utils.py +58 -0
  38. pymodaq/utils/leco/leco_director.py +88 -0
  39. pymodaq/utils/leco/pymodaq_listener.py +279 -0
  40. pymodaq/utils/leco/utils.py +41 -0
  41. pymodaq/utils/managers/action_manager.py +20 -6
  42. pymodaq/utils/managers/parameter_manager.py +6 -4
  43. pymodaq/utils/managers/roi_manager.py +63 -54
  44. pymodaq/utils/math_utils.py +1 -1
  45. pymodaq/utils/plotting/data_viewers/__init__.py +3 -1
  46. pymodaq/utils/plotting/data_viewers/base.py +286 -0
  47. pymodaq/utils/plotting/data_viewers/viewer.py +29 -202
  48. pymodaq/utils/plotting/data_viewers/viewer0D.py +94 -47
  49. pymodaq/utils/plotting/data_viewers/viewer1D.py +341 -174
  50. pymodaq/utils/plotting/data_viewers/viewer1Dbasic.py +1 -1
  51. pymodaq/utils/plotting/data_viewers/viewer2D.py +271 -181
  52. pymodaq/utils/plotting/data_viewers/viewerND.py +26 -22
  53. pymodaq/utils/plotting/items/crosshair.py +3 -3
  54. pymodaq/utils/plotting/items/image.py +2 -1
  55. pymodaq/utils/plotting/plotter/plotter.py +94 -0
  56. pymodaq/utils/plotting/plotter/plotters/__init__.py +0 -0
  57. pymodaq/utils/plotting/plotter/plotters/matplotlib_plotters.py +134 -0
  58. pymodaq/utils/plotting/plotter/plotters/qt_plotters.py +78 -0
  59. pymodaq/utils/plotting/utils/axes_viewer.py +1 -1
  60. pymodaq/utils/plotting/utils/filter.py +194 -147
  61. pymodaq/utils/plotting/utils/lineout.py +13 -11
  62. pymodaq/utils/plotting/utils/plot_utils.py +89 -12
  63. pymodaq/utils/scanner/__init__.py +0 -3
  64. pymodaq/utils/scanner/scan_config.py +1 -9
  65. pymodaq/utils/scanner/scan_factory.py +10 -36
  66. pymodaq/utils/scanner/scanner.py +3 -2
  67. pymodaq/utils/scanner/scanners/_1d_scanners.py +7 -5
  68. pymodaq/utils/scanner/scanners/_2d_scanners.py +36 -49
  69. pymodaq/utils/scanner/scanners/sequential.py +10 -4
  70. pymodaq/utils/scanner/scanners/tabular.py +10 -5
  71. pymodaq/utils/slicing.py +1 -1
  72. pymodaq/utils/tcp_ip/serializer.py +38 -5
  73. pymodaq/utils/tcp_ip/tcp_server_client.py +25 -17
  74. {pymodaq-4.1.5.dist-info → pymodaq-4.2.0.dist-info}/METADATA +4 -2
  75. {pymodaq-4.1.5.dist-info → pymodaq-4.2.0.dist-info}/RECORD +78 -63
  76. pymodaq/resources/config_scan_template.toml +0 -42
  77. {pymodaq-4.1.5.dist-info → pymodaq-4.2.0.dist-info}/WHEEL +0 -0
  78. {pymodaq-4.1.5.dist-info → pymodaq-4.2.0.dist-info}/entry_points.txt +0 -0
  79. {pymodaq-4.1.5.dist-info → pymodaq-4.2.0.dist-info}/licenses/LICENSE +0 -0
@@ -6,12 +6,14 @@ Created the 21/11/2022
6
6
  """
7
7
  from time import time
8
8
  from typing import Union, List, Tuple, Iterable
9
+ from pathlib import Path
9
10
 
10
11
  import numpy as np
11
12
 
12
13
  from pymodaq.utils.abstract import ABCMeta, abstract_attribute
13
14
  from pymodaq.utils.enums import enum_checker
14
- from pymodaq.utils.data import Axis, DataDim, DataWithAxes, DataToExport, DataDistribution, DataDimError
15
+ from pymodaq.utils.data import (Axis, DataDim, DataWithAxes, DataToExport, DataDistribution,
16
+ DataDimError, squeeze)
15
17
  from .saving import DataType, H5Saver
16
18
  from .backends import GROUP, CARRAY, Node, EARRAY, NodeError
17
19
  from pymodaq.utils.daq_utils import capitalize
@@ -49,7 +51,16 @@ class DataManagement(metaclass=ABCMeta):
49
51
  """
50
52
  return f'{capitalize(cls.data_type.value)}{ind:02d}'
51
53
 
52
- def _get_next_node_name(self, where) -> str:
54
+ def __enter__(self):
55
+ return self
56
+
57
+ def __exit__(self, exc_type, exc_val, exc_tb):
58
+ self.close_file()
59
+
60
+ def close_file(self):
61
+ self._h5saver.close_file()
62
+
63
+ def _get_next_node_name(self, where: Union[str, Node]) -> str:
53
64
  """Get the formatted next node name given the ones already saved
54
65
 
55
66
  Parameters
@@ -63,7 +74,7 @@ class DataManagement(metaclass=ABCMeta):
63
74
  """
64
75
  return self._format_node_name(self._get_next_data_type_index_in_group(where))
65
76
 
66
- def get_last_node_name(self, where) -> Union[str, None]:
77
+ def get_last_node_name(self, where: Union[str, Node]) -> Union[str, None]:
67
78
  """Get the last node name among the ones already saved
68
79
 
69
80
  Parameters
@@ -81,9 +92,17 @@ class DataManagement(metaclass=ABCMeta):
81
92
  else:
82
93
  return self._format_node_name(index)
83
94
 
84
- def get_node_from_index(self, where, index):
95
+ def get_node_from_index(self, where: Union[str, Node], index: int) -> Node:
85
96
  return self._h5saver.get_node(where, self._format_node_name(index))
86
97
 
98
+ def get_index_from_node_name(self, where: Union[str, Node]):
99
+ node = self._h5saver.get_node(where)
100
+ try:
101
+ index = int(node.name.split(self.data_type.value)[1])
102
+ except IndexError:
103
+ return None
104
+ return index
105
+
87
106
  def _get_next_data_type_index_in_group(self, where: Union[Node, str]) -> int:
88
107
  """Check how much node with a given data_type are already present within the GROUP where
89
108
  Parameters
@@ -102,7 +121,7 @@ class DataManagement(metaclass=ABCMeta):
102
121
  ind += 1
103
122
  return ind
104
123
 
105
- def _is_node_of_data_type(self, where):
124
+ def _is_node_of_data_type(self, where: Union[str, Node]) -> bool:
106
125
  """Check if a given node is of the data_type of the real class implementation
107
126
 
108
127
  eg 'axis' for the AxisSaverLoader
@@ -141,7 +160,7 @@ class DataManagement(metaclass=ABCMeta):
141
160
  else:
142
161
  return [node]
143
162
 
144
- def _get_nodes_from_data_type(self, where):
163
+ def _get_nodes_from_data_type(self, where: Union[str, Node]) -> List[Node]:
145
164
  """Get the node list hanging from a parent and having the same data type as self
146
165
 
147
166
  Parameters
@@ -221,7 +240,7 @@ class AxisSaverLoader(DataManagement):
221
240
  if not self._is_node_of_data_type(axis_node):
222
241
  raise AxisError(f'Could not create an Axis object from this node: {axis_node}')
223
242
  return Axis(label=axis_node.attrs['label'], units=axis_node.attrs['units'],
224
- data=np.atleast_1d(np.squeeze(axis_node.read())), index=axis_node.attrs['index'],
243
+ data=squeeze(axis_node.read()), index=axis_node.attrs['index'],
225
244
  spread_order=axis_node.attrs['spread_order'])
226
245
 
227
246
  def get_axes(self, where: Union[Node, str]) -> List[Axis]:
@@ -236,7 +255,13 @@ class AxisSaverLoader(DataManagement):
236
255
  -------
237
256
  List[Axis]: the list of all Axis object
238
257
  """
239
- return [self.load_axis(node) for node in self._get_nodes_from_data_type(where)]
258
+ axes = []
259
+ for node in self._get_nodes_from_data_type(where):
260
+ axis = self.load_axis(node)
261
+ # if axis.size > 1:
262
+ # axes.append(axis)
263
+ axes.append(axis)
264
+ return axes
240
265
 
241
266
 
242
267
  class DataSaverLoader(DataManagement):
@@ -244,7 +269,7 @@ class DataSaverLoader(DataManagement):
244
269
 
245
270
  Parameters
246
271
  ----------
247
- h5saver: H5Saver
272
+ h5saver: H5Saver or Path or str
248
273
 
249
274
  Attributes
250
275
  ----------
@@ -253,10 +278,18 @@ class DataSaverLoader(DataManagement):
253
278
  """
254
279
  data_type = DataType['data']
255
280
 
256
- def __init__(self, h5saver: H5Saver):
281
+ def __init__(self, h5saver: Union[H5Saver, Path]):
257
282
  self.data_type = enum_checker(DataType, self.data_type)
283
+
284
+ if isinstance(h5saver, Path) or isinstance(h5saver, str):
285
+ h5saver_tmp = H5Saver()
286
+ h5saver_tmp.init_file(addhoc_file_path=Path(h5saver))
287
+ h5saver = h5saver_tmp
288
+
258
289
  self._h5saver = h5saver
259
290
  self._axis_saver = AxisSaverLoader(h5saver)
291
+ if not isinstance(self, ErrorSaverLoader):
292
+ self._error_saver = ErrorSaverLoader(h5saver)
260
293
 
261
294
  def isopen(self) -> bool:
262
295
  """ Get the opened status of the underlying hdf5 file"""
@@ -278,15 +311,20 @@ class DataSaverLoader(DataManagement):
278
311
  source=data.source.name, distribution=data.distribution.name,
279
312
  origin=data.origin,
280
313
  nav_indexes=tuple(data.nav_indexes)
281
- if data.nav_indexes is not None else None)
314
+ if data.nav_indexes is not None else None,)
282
315
  for name in data.extra_attributes:
283
316
  metadata[name] = getattr(data, name)
284
- self._h5saver.add_array(where, self._get_next_node_name(where), self.data_type, title=data.name,
285
- array_to_save=data[ind_data], data_dimension=data.dim.name, metadata=metadata)
317
+ self._h5saver.add_array(where, self._get_next_node_name(where), self.data_type,
318
+ title=data.name, array_to_save=data[ind_data],
319
+ data_dimension=data.dim.name, metadata=metadata)
320
+
286
321
  if save_axes:
287
322
  for axis in data.axes:
288
323
  self._axis_saver.add_axis(where, axis)
289
324
 
325
+ if data.errors is not None:
326
+ self._error_saver.add_data(where, data.errors_as_dwa(), save_axes=False)
327
+
290
328
  def get_axes(self, where: Union[Node, str]) -> List[Axis]:
291
329
  """
292
330
 
@@ -339,10 +377,22 @@ class DataSaverLoader(DataManagement):
339
377
  getter = self._get_nodes
340
378
 
341
379
  if with_bkg:
342
- return [np.atleast_1d(np.squeeze(array.read()-bkg.read()))
380
+ return [squeeze(array.read()-bkg.read(),
381
+ squeeze_indexes=self._get_signal_indexes_to_squeeze(array))
343
382
  for array, bkg in zip(getter(where), bkg_nodes)]
344
383
  else:
345
- return [np.atleast_1d(np.squeeze(array.read())) for array in getter(where)]
384
+ return [squeeze(array.read(),
385
+ squeeze_indexes=self._get_signal_indexes_to_squeeze(array))
386
+ for array in getter(where)]
387
+
388
+ def _get_signal_indexes_to_squeeze(self, array: Union[CARRAY, EARRAY]):
389
+ """ Get the tuple of indexes in the array shape that are not navigation and should be
390
+ squeezed"""
391
+ sig_indexes = []
392
+ for ind in range(len(array.attrs['shape'])):
393
+ if ind not in array.attrs['nav_indexes'] and array.attrs['shape'][ind] == 1:
394
+ sig_indexes.append(ind)
395
+ return tuple(sig_indexes)
346
396
 
347
397
  def load_data(self, where, with_bkg=False, load_all=False) -> DataWithAxes:
348
398
  """Return a DataWithAxes object from the Data and Axis Nodes hanging from (or among) a
@@ -371,19 +421,31 @@ class DataSaverLoader(DataManagement):
371
421
  parent_node = data_node.parent_node
372
422
  data_nodes = self._get_nodes_from_data_type(parent_node)
373
423
  data_node = data_nodes[0]
424
+ error_node = data_node
374
425
  else:
375
426
  parent_node = data_node.parent_node
376
427
  if not isinstance(data_node, CARRAY):
377
428
  return
378
429
  data_nodes = [data_node]
430
+ try:
431
+ error_node_index = self.get_index_from_node_name(data_node)
432
+ error_node = self._error_saver.get_node_from_index(parent_node, error_node_index)
433
+ except NodeError as e:
434
+ error_node = None
379
435
 
380
436
  if 'axis' in self.data_type.name:
381
- ndarrays = [np.atleast_1d(np.squeeze(data_node.read())) for data_node in data_nodes]
437
+ ndarrays = [squeeze(data_node.read()) for data_node in data_nodes]
382
438
  axes = [Axis(label=data_node.attrs['label'], units=data_node.attrs['units'],
383
439
  data=np.linspace(0, ndarrays[0].size-1, ndarrays[0].size-1))]
384
440
  else:
385
441
  ndarrays = self.get_data_arrays(data_node, with_bkg=with_bkg, load_all=load_all)
386
442
  axes = self.get_axes(parent_node)
443
+ if error_node is not None:
444
+ error_arrays = self._error_saver.get_data_arrays(error_node, load_all=load_all)
445
+ if len(error_arrays) == 0:
446
+ error_arrays = None
447
+ else:
448
+ error_arrays = None
387
449
 
388
450
  extra_attributes = data_node.attrs.to_dict()
389
451
  for name in ['TITLE', 'CLASS', 'VERSION', 'backend', 'source', 'data_dimension',
@@ -402,6 +464,7 @@ class DataSaverLoader(DataManagement):
402
464
  nav_indexes=data_node.attrs['nav_indexes'] if 'nav_indexes' in
403
465
  data_node.attrs else (),
404
466
  axes=axes,
467
+ errors=error_arrays,
405
468
  path=data_node.path,
406
469
  **extra_attributes)
407
470
  data.timestamp = data_node.attrs['timestamp']
@@ -418,7 +481,7 @@ class BkgSaver(DataSaverLoader):
418
481
  Attributes
419
482
  ----------
420
483
  data_type: DataType
421
- The enum for this type of data, here 'data'
484
+ The enum for this type of data, here 'bkg'
422
485
  """
423
486
  data_type = DataType['bkg']
424
487
 
@@ -426,10 +489,30 @@ class BkgSaver(DataSaverLoader):
426
489
  super().__init__(h5saver)
427
490
 
428
491
 
492
+ class ErrorSaverLoader(DataSaverLoader):
493
+ """Specialized Object to save and load DataWithAxes error bars to and from a h5file
494
+
495
+ Parameters
496
+ ----------
497
+ h5saver: H5Saver
498
+
499
+ Attributes
500
+ ----------
501
+ data_type: DataType
502
+ The enum for this type of data, here 'error'
503
+ """
504
+ data_type = DataType['error']
505
+
506
+ def __init__(self, h5saver: H5Saver):
507
+ super().__init__(h5saver)
508
+
509
+
429
510
  class DataEnlargeableSaver(DataSaverLoader):
430
- """Specialized Object to save and load enlargeable DataWithAxes saved object to and from a h5file
511
+ """ Specialized Object to save and load enlargeable DataWithAxes saved object to and from a
512
+ h5file
431
513
 
432
- Particular case of DataND will a single *nav_indexes* parameter will be appended as chunks of signal data
514
+ Particular case of DataND with a single *nav_indexes* parameter will be appended as chunks
515
+ of signal data
433
516
 
434
517
  Parameters
435
518
  ----------
@@ -447,10 +530,17 @@ class DataEnlargeableSaver(DataSaverLoader):
447
530
  """
448
531
  data_type = DataType['data_enlargeable']
449
532
 
450
- def __init__(self, h5saver: H5Saver):
533
+ def __init__(self, h5saver: Union[H5Saver, Path],
534
+ enl_axis_names: Iterable[str] = ('nav axis',),
535
+ enl_axis_units: Iterable[str] = ('',)):
451
536
  super().__init__(h5saver)
452
537
 
453
- def _create_data_arrays(self, where: Union[Node, str], data: DataWithAxes, save_axes=True):
538
+ self._n_enl_axes = len(enl_axis_names)
539
+ self._enl_axis_names = enl_axis_names
540
+ self._enl_axis_units = enl_axis_units
541
+
542
+ def _create_data_arrays(self, where: Union[Node, str], data: DataWithAxes, save_axes=True,
543
+ add_enl_axes=True):
454
544
  """ Create enlargeable array to store data
455
545
 
456
546
  Parameters
@@ -459,59 +549,87 @@ class DataEnlargeableSaver(DataSaverLoader):
459
549
  the path of a given node or the node itself
460
550
  data: DataWithAxes
461
551
  save_axes: bool
552
+ if True, will save signal axes as data nodes
553
+ add_enl_axes: bool
554
+ if True, will save enlargeable axes as data nodes (depending on the self._enl_axis_names
555
+ field)
462
556
 
463
557
  Notes
464
558
  -----
465
- Because data will be saved at a given index in the enlargeable array, related axes will have their index
466
- increased by one unity
559
+ Because data will be saved at a given index in the enlargeable array, related signal axes
560
+ will have their index increased by 1
467
561
  """
468
562
 
469
563
  if self.get_last_node_name(where) is None:
470
564
  for ind_data in range(len(data)):
471
565
  nav_indexes = list(data.nav_indexes)
472
- nav_indexes = [0] + list(np.array(nav_indexes, dtype=int) + 1)
566
+ nav_indexes = ([0] +
567
+ list(np.array(nav_indexes, dtype=int) + 1))
473
568
 
474
- self._h5saver.add_array(where, self._get_next_node_name(where), self.data_type, title=data.name,
569
+ self._h5saver.add_array(where, self._get_next_node_name(where), self.data_type,
570
+ title=data.name,
475
571
  array_to_save=data[ind_data],
476
572
  data_shape=data[ind_data].shape,
477
573
  array_type=data[ind_data].dtype,
478
574
  enlargeable=True,
479
575
  data_dimension=data.dim.name,
480
- metadata=dict(timestamp=data.timestamp, label=data.labels[ind_data],
481
- source=data.source.name, distribution=data.distribution.name,
576
+ metadata=dict(timestamp=data.timestamp,
577
+ label=data.labels[ind_data],
578
+ source=data.source.name,
579
+ distribution='spread',
482
580
  origin=data.origin,
483
581
  nav_indexes=tuple(nav_indexes)))
582
+ if add_enl_axes:
583
+ for ind_enl_axis in range(self._n_enl_axes):
584
+ self._axis_saver.add_axis(where,
585
+ Axis(self._enl_axis_names[ind_enl_axis],
586
+ self._enl_axis_units[ind_enl_axis],
587
+ data=np.array([0., 1.]),
588
+ index=0, spread_order=ind_enl_axis),
589
+ enlargeable=True)
484
590
  if save_axes:
485
591
  for axis in data.axes:
592
+ axis = axis.copy()
486
593
  axis.index += 1 # because of enlargeable data will have an extra shape
487
594
  self._axis_saver.add_axis(where, axis)
488
595
 
489
- def add_data(self, where: Union[Node, str], data: DataWithAxes):
596
+ def add_data(self, where: Union[Node, str], data: DataWithAxes,
597
+ axis_values: Iterable[float] = None):
490
598
  """ Append data to an enlargeable array node
491
599
 
492
- Data of dim (0, 1 or 2) will be just appended to the enlargeable array. DataND with one navigation axis of
493
- length (Lnav) will be considered as a collection of Lnav signal data of dim (0, 1 or 2) and will therefore be
494
- appended as Lnav signal data
600
+ Data of dim (0, 1 or 2) will be just appended to the enlargeable array.
601
+
602
+ Uniform DataND with one navigation axis of length (Lnav) will be considered as a collection
603
+ of Lnav signal data of dim (0, 1 or 2) and will therefore be appended as Lnav signal data
495
604
 
496
605
  Parameters
497
606
  ----------
498
607
  where: Union[Node, str]
499
608
  the path of a given node or the node itself
500
609
  data: DataWithAxes
610
+ axis_values: optional, list of floats
611
+ the new spread axis values added to the data
612
+ if None the axes are not added to the h5 file
613
+
614
+
501
615
  """
616
+ add_enl_axes = axis_values is not None
617
+
502
618
  if self.get_last_node_name(where) is None:
503
619
  if len(data.nav_indexes) == 0:
504
620
  data_init = data
505
- elif len(data.nav_indexes) == 1:
506
- data_init = data.inav[0]
507
- data_init.source = data.source # because slicing returns a calculated one
508
621
  else:
509
- raise DataDimError('It is not possible to append DataND with more than 1 navigation axis')
510
- self._create_data_arrays(where, data_init, save_axes=True)
622
+ raise DataDimError('It is not possible to append DataND')
623
+ self._create_data_arrays(where, data_init, save_axes=True, add_enl_axes=add_enl_axes)
511
624
 
512
625
  for ind_data in range(len(data)):
513
626
  array: EARRAY = self.get_node_from_index(where, ind_data)
514
627
  array.append(data[ind_data])
628
+ if add_enl_axes and axis_values is not None:
629
+ for ind_axis in range(self._n_enl_axes):
630
+ axis_array: EARRAY = self._axis_saver.get_node_from_index(where, ind_axis)
631
+ axis_array.append(np.array([axis_values[ind_axis]]))
632
+ axis_array.attrs['size'] += 1
515
633
 
516
634
 
517
635
  class DataExtendedSaver(DataSaverLoader):
@@ -610,7 +728,12 @@ class DataToExportSaver:
610
728
  h5saver: H5Saver
611
729
 
612
730
  """
613
- def __init__(self, h5saver: H5Saver):
731
+ def __init__(self, h5saver: Union[H5Saver, Path, str]):
732
+ if isinstance(h5saver, Path) or isinstance(h5saver, str):
733
+ h5saver_tmp = H5Saver()
734
+ h5saver_tmp.init_file(addhoc_file_path=Path(h5saver))
735
+ h5saver = h5saver_tmp
736
+
614
737
  self._h5saver = h5saver
615
738
  self._data_saver = DataSaverLoader(h5saver)
616
739
  self._bkg_saver = BkgSaver(h5saver)
@@ -621,6 +744,15 @@ class DataToExportSaver:
621
744
  def close(self):
622
745
  self._h5saver.close()
623
746
 
747
+ def close_file(self):
748
+ self._h5saver.close_file()
749
+
750
+ def __enter__(self):
751
+ return self
752
+
753
+ def __exit__(self, exc_type, exc_val, exc_tb):
754
+ self.close_file()
755
+
624
756
  def isopen(self) -> bool:
625
757
  """ Get the opened status of the underlying hdf5 file"""
626
758
  return self._h5saver.isopen()
@@ -632,7 +764,7 @@ class DataToExportSaver:
632
764
  return f'CH{ind:02d}'
633
765
 
634
766
  def add_data(self, where: Union[Node, str], data: DataToExport, settings_as_xml='',
635
- metadata=None):
767
+ metadata=None, **kwargs):
636
768
  """
637
769
 
638
770
  Parameters
@@ -656,7 +788,7 @@ class DataToExportSaver:
656
788
  dwa_group = self._h5saver.get_set_group(dim_group, self.channel_formatter(ind),
657
789
  dwa.name)
658
790
  # dwa_group = self._h5saver.add_ch_group(dim_group, dwa.name)
659
- self._data_saver.add_data(dwa_group, dwa)
791
+ self._data_saver.add_data(dwa_group, dwa, **kwargs)
660
792
 
661
793
  def add_bkg(self, where: Union[Node, str], data: DataToExport):
662
794
  dims = data.get_dim_presents()
@@ -670,6 +802,18 @@ class DataToExportSaver:
670
802
  if dwa_group is not None:
671
803
  self._bkg_saver.add_data(dwa_group, dwa, save_axes=False)
672
804
 
805
+ def add_error(self, where: Union[Node, str], data: DataToExport):
806
+ dims = data.get_dim_presents()
807
+ for dim in dims:
808
+ dim_group = self._h5saver.get_set_group(where, dim)
809
+ for ind, dwa in enumerate(data.get_data_from_dim(dim)):
810
+ # dwa: DataWithAxes filtered by dim
811
+ dwa_group = self._h5saver.get_set_group(dim_group,
812
+ self.channel_formatter(ind), dwa.name)
813
+ # dwa_group = self._get_node_from_title(dim_group, dwa.name)
814
+ if dwa_group is not None:
815
+ self._bkg_saver.add_data(dwa_group, dwa, save_axes=False)
816
+
673
817
 
674
818
  class DataToExportEnlargeableSaver(DataToExportSaver):
675
819
  """Generic object to save DataToExport objects in an enlargeable h5 array
@@ -679,21 +823,41 @@ class DataToExportEnlargeableSaver(DataToExportSaver):
679
823
  Parameters
680
824
  ----------
681
825
  h5saver: H5Saver
682
- axis_name: str
826
+ enl_axis_names: Iterable[str]
827
+ The names of the enlargeable axis, default ['nav_axis']
828
+ enl_axis_units: Iterable[str]
829
+ The units of the enlargeable axes, default ['']
830
+ axis_name: str, deprecated use enl_axis_names
683
831
  the name of the enlarged axis array
684
- axis_units: str
832
+ axis_units: str, deprecated use enl_axis_units
685
833
  the units of the enlarged axis array
686
834
  """
687
- def __init__(self, h5saver: H5Saver, axis_name: str = 'nav axis', axis_units: str = ''):
835
+ def __init__(self, h5saver: H5Saver,
836
+ enl_axis_names: Iterable[str] = None,
837
+ enl_axis_units: Iterable[str] = None,
838
+ axis_name: str = 'nav axis', axis_units: str = ''):
688
839
 
689
840
  super().__init__(h5saver)
841
+ if enl_axis_names is None: # for backcompatibility
842
+ enl_axis_names = (axis_name,)
843
+ if enl_axis_units is None: # for backcompatibility
844
+ enl_axis_units = (axis_units,)
845
+
846
+ if len(enl_axis_names) != len(enl_axis_units):
847
+ raise ValueError('Both enl_axis_names and enl_axis_units should have the same length')
848
+
849
+ self._enl_axis_names = enl_axis_names
850
+ self._enl_axis_units = enl_axis_units
851
+ self._n_enl = len(enl_axis_names)
852
+
690
853
  self._data_saver = DataEnlargeableSaver(h5saver)
691
854
  self._nav_axis_saver = AxisSaverLoader(h5saver)
692
- self._axis_name = axis_name
693
- self._axis_units = axis_units
694
855
 
695
856
  def add_data(self, where: Union[Node, str], data: DataToExport,
696
- axis_value: Union[float, np.ndarray], settings_as_xml='', metadata=None):
857
+ axis_values: List[Union[float, np.ndarray]] = None,
858
+ axis_value: Union[float, np.ndarray] = None,
859
+ settings_as_xml='', metadata=None,
860
+ ):
697
861
  """
698
862
 
699
863
  Parameters
@@ -702,24 +866,37 @@ class DataToExportEnlargeableSaver(DataToExportSaver):
702
866
  the path of a given node or the node itself
703
867
  data: DataToExport
704
868
  The data to be saved into an enlargeable array
705
- axis_value: float or np.ndarray
869
+ axis_values: iterable float or np.ndarray
870
+ The next value (or values) of the enlarged axis
871
+ axis_value: float or np.ndarray #deprecated in 4.2.0, use axis_values
706
872
  The next value (or values) of the enlarged axis
707
873
  settings_as_xml: str
708
874
  The settings parameter as an XML string
709
875
  metadata: dict
710
876
  all extra metadata to be saved in the group node where data will be saved
711
877
  """
878
+
879
+ if axis_values is None and axis_value is not None:
880
+ axis_values = [axis_value]
881
+
712
882
  super().add_data(where, data, settings_as_xml, metadata)
883
+ # a parent navigation group (same for all data nodes)
884
+
713
885
  where = self._get_node(where)
714
886
  nav_group = self._h5saver.get_set_group(where, SPECIAL_GROUP_NAMES['nav_axes'])
715
887
  if self._nav_axis_saver.get_last_node_name(nav_group) is None:
716
- axis = Axis(label=self._axis_name, units=self._axis_units, data=np.array([0., 1.]), index=0)
717
- axis_array = self._nav_axis_saver.add_axis(nav_group, axis, enlargeable=True)
718
- axis_array.attrs['size'] = 0
888
+ for ind in range(self._n_enl):
889
+ axis = Axis(label=self._enl_axis_names[ind],
890
+ units=self._enl_axis_units[ind], data=np.array([0., 1.]),
891
+ index=0, spread_order=ind)
892
+ axis_array = self._nav_axis_saver.add_axis(nav_group, axis, enlargeable=True)
893
+ axis_array.attrs['size'] = 0
719
894
 
720
- axis_array = self._nav_axis_saver.get_node_from_index(nav_group, 0)
721
- axis_array.append(np.atleast_1d(np.squeeze(np.array([axis_value]))), expand=False)
722
- axis_array.attrs['size'] += 1
895
+ for ind in range(self._n_enl):
896
+ axis_array: EARRAY = self._nav_axis_saver.get_node_from_index(nav_group, ind)
897
+ axis_array.append(squeeze(np.array([axis_values[ind]])),
898
+ expand=False)
899
+ axis_array.attrs['size'] += 1
723
900
 
724
901
 
725
902
  class DataToExportTimedSaver(DataToExportEnlargeableSaver):
@@ -733,11 +910,11 @@ class DataToExportTimedSaver(DataToExportEnlargeableSaver):
733
910
  This object is made for continuous saving mode of DAQViewer and logging to h5file for DAQLogger
734
911
  """
735
912
  def __init__(self, h5saver: H5Saver):
736
- super().__init__(h5saver, 'time', 's')
913
+ super().__init__(h5saver, enl_axis_names=('time',), enl_axis_units=('s',))
737
914
 
738
915
  def add_data(self, where: Union[Node, str], data: DataToExport, settings_as_xml='',
739
- metadata=None):
740
- super().add_data(where, data, axis_value=data.timestamp, settings_as_xml=settings_as_xml,
916
+ metadata=None, **kwargs):
917
+ super().add_data(where, data, axis_values=[data.timestamp], settings_as_xml=settings_as_xml,
741
918
  metadata=metadata)
742
919
 
743
920
 
@@ -812,16 +989,34 @@ class DataLoader:
812
989
  h5saver: H5Saver
813
990
  """
814
991
 
815
- def __init__(self, h5saver: H5Saver):
992
+ def __init__(self, h5saver: Union[H5Saver, Path]):
816
993
  self._axis_loader: AxisSaverLoader = None
817
994
  self._data_loader: DataSaverLoader = None
818
995
 
996
+ if isinstance(h5saver, Path) or isinstance(h5saver, str):
997
+ h5saver_tmp = H5Saver()
998
+ h5saver_tmp.init_file(addhoc_file_path=Path(h5saver))
999
+ h5saver = h5saver_tmp
1000
+
819
1001
  self.h5saver = h5saver
820
1002
 
821
1003
  @property
822
1004
  def h5saver(self):
823
1005
  return self._h5saver
824
1006
 
1007
+ def __enter__(self):
1008
+ return self
1009
+
1010
+ def __exit__(self, exc_type, exc_val, exc_tb):
1011
+ self.close_file()
1012
+
1013
+ def close_file(self):
1014
+ self._h5saver.close_file()
1015
+
1016
+ def walk_nodes(self, where: Union[str, Node] = '/'):
1017
+ """Return a Node generator iterating over the h5file content"""
1018
+ return self.h5saver.walk_nodes(where)
1019
+
825
1020
  @h5saver.setter
826
1021
  def h5saver(self, h5saver: H5Saver):
827
1022
  self._h5saver = h5saver
@@ -881,7 +1076,9 @@ class DataLoader:
881
1076
  nav_group = self.get_nav_group(where)
882
1077
  if nav_group is not None:
883
1078
  nav_axes = self._axis_loader.get_axes(nav_group)
884
- data.axes.extend(nav_axes)
1079
+ axes = data.axes[:]
1080
+ axes.extend(nav_axes)
1081
+ data.axes = axes
885
1082
  data.get_dim_from_data_axes()
886
1083
  data.create_missing_axes()
887
1084
  return data
@@ -57,6 +57,7 @@ class DataType(BaseEnum):
57
57
  strings = 'Strings'
58
58
  bkg = 'Bkg'
59
59
  data_enlargeable = 'EnlData'
60
+ error = 'ErrorBar'
60
61
 
61
62
 
62
63
  class H5SaverLowLevel(H5Backend):