pymodaq 5.0.5__py3-none-any.whl → 5.1.0a0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pymodaq has been flagged as potentially problematic.

Files changed (53)
  1. pymodaq/control_modules/daq_move.py +77 -64
  2. pymodaq/control_modules/daq_move_ui.py +16 -15
  3. pymodaq/control_modules/daq_viewer.py +95 -87
  4. pymodaq/control_modules/daq_viewer_ui.py +22 -23
  5. pymodaq/control_modules/mocks.py +2 -2
  6. pymodaq/control_modules/move_utility_classes.py +28 -19
  7. pymodaq/control_modules/thread_commands.py +138 -0
  8. pymodaq/control_modules/utils.py +88 -20
  9. pymodaq/control_modules/viewer_utility_classes.py +8 -17
  10. pymodaq/dashboard.py +90 -27
  11. pymodaq/examples/qt_less_standalone_module.py +48 -11
  12. pymodaq/extensions/__init__.py +7 -3
  13. pymodaq/extensions/adaptive/__init__.py +2 -0
  14. pymodaq/extensions/adaptive/adaptive_optimization.py +159 -0
  15. pymodaq/extensions/adaptive/loss_function/_1d_loss_functions.py +73 -0
  16. pymodaq/extensions/adaptive/loss_function/_2d_loss_functions.py +86 -0
  17. pymodaq/extensions/adaptive/loss_function/__init__.py +3 -0
  18. pymodaq/extensions/adaptive/loss_function/loss_factory.py +106 -0
  19. pymodaq/extensions/adaptive/utils.py +97 -0
  20. pymodaq/extensions/bayesian/__init__.py +1 -1
  21. pymodaq/extensions/bayesian/acquisition/__init__.py +2 -0
  22. pymodaq/extensions/bayesian/acquisition/acquisition_function_factory.py +71 -0
  23. pymodaq/extensions/bayesian/acquisition/base_acquisition_function.py +86 -0
  24. pymodaq/extensions/bayesian/bayesian_optimization.py +121 -0
  25. pymodaq/extensions/bayesian/utils.py +27 -286
  26. pymodaq/extensions/daq_logger/daq_logger.py +7 -12
  27. pymodaq/extensions/daq_logger/h5logging.py +1 -1
  28. pymodaq/extensions/daq_scan.py +18 -47
  29. pymodaq/extensions/h5browser.py +3 -34
  30. pymodaq/extensions/optimizers_base/__init__.py +0 -0
  31. pymodaq/extensions/{bayesian/bayesian_optimisation.py → optimizers_base/optimizer.py} +441 -334
  32. pymodaq/extensions/optimizers_base/thread_commands.py +20 -0
  33. pymodaq/extensions/optimizers_base/utils.py +378 -0
  34. pymodaq/extensions/pid/pid_controller.py +6 -10
  35. pymodaq/extensions/utils.py +12 -0
  36. pymodaq/utils/data.py +1 -0
  37. pymodaq/utils/gui_utils/loader_utils.py +2 -0
  38. pymodaq/utils/h5modules/module_saving.py +134 -22
  39. pymodaq/utils/leco/daq_move_LECODirector.py +73 -73
  40. pymodaq/utils/leco/daq_xDviewer_LECODirector.py +36 -84
  41. pymodaq/utils/leco/director_utils.py +25 -10
  42. pymodaq/utils/leco/leco_director.py +65 -26
  43. pymodaq/utils/leco/pymodaq_listener.py +118 -68
  44. pymodaq/utils/leco/utils.py +24 -24
  45. pymodaq/utils/managers/modules_manager.py +37 -8
  46. pymodaq/utils/scanner/scanners/_1d_scanners.py +0 -38
  47. pymodaq/utils/scanner/scanners/_2d_scanners.py +0 -58
  48. {pymodaq-5.0.5.dist-info → pymodaq-5.1.0a0.dist-info}/METADATA +4 -3
  49. {pymodaq-5.0.5.dist-info → pymodaq-5.1.0a0.dist-info}/RECORD +52 -38
  50. {pymodaq-5.0.5.dist-info → pymodaq-5.1.0a0.dist-info}/entry_points.txt +0 -2
  51. pymodaq/utils/leco/desktop.ini +0 -2
  52. {pymodaq-5.0.5.dist-info → pymodaq-5.1.0a0.dist-info}/WHEEL +0 -0
  53. {pymodaq-5.0.5.dist-info → pymodaq-5.1.0a0.dist-info}/licenses/LICENSE +0 -0
pymodaq/extensions/bayesian/utils.py

@@ -4,112 +4,75 @@ Created the 31/08/2023
 
 @author: Sebastien Weber
 """
-from abc import ABC, abstractproperty, abstractmethod
+
 from typing import List, TYPE_CHECKING, Union, Dict, Tuple, Iterable
-from pathlib import Path
-import importlib
-import pkgutil
-import inspect
 import numpy as np
 from collections import namedtuple
 
 from bayes_opt import BayesianOptimization
-from bayes_opt import UtilityFunction
 
-from pymodaq_utils.utils import find_dict_in_list_from_key_val, get_entrypoints
+
 from pymodaq_utils.logger import set_logger, get_module_name
-from pymodaq_utils.enums import BaseEnum
-from pymodaq_utils.config import BaseConfig
 
-from pymodaq_gui.parameter import Parameter
-from pymodaq_gui.plotting.data_viewers.viewer import ViewersEnum
 
 from pymodaq_data.data import (DataToExport, DataCalculated,
                                DataRaw, Axis)
 
-from pymodaq.utils.data import DataActuator, DataToActuators
-from pymodaq.utils.managers.modules_manager import ModulesManager
-
 
-if TYPE_CHECKING:
-    from pymodaq.extensions.bayesian.bayesian_optimisation import BayesianOptimisation
-
-logger = set_logger(get_module_name(__file__))
+from pymodaq.extensions.bayesian.acquisition import GenericAcquisitionFunctionFactory
 
+from pymodaq.extensions.optimizers_base.utils import (
+    GenericAlgorithm, OptimizerModelDefault, StopType, StoppingParameters,
+    OptimizerConfig)
 
-class StopType(BaseEnum):
-    Predict = 0
-
-
-class UtilityKind(BaseEnum):
-    ucb = 'Upper Confidence Bound'
-    ei = 'Expected Improvement'
-    poi = 'Probability of Improvement'
 
+logger = set_logger(get_module_name(__file__))
 
-UtilityParameters = namedtuple('UtilityParameters',
-                               ['kind', 'kappa', 'xi', 'kappa_decay', 'kappa_decay_delay'])
 
+class BayesianConfig(OptimizerConfig):
+    config_name = f"bayesian_settings"
 
-StoppingParameters = namedtuple('StoppingParameters',
-                                ['niter', 'stop_type', 'tolerance', 'npoints'])
 
 
-class BayesianAlgorithm:
+class BayesianAlgorithm(GenericAlgorithm):
 
     def __init__(self, ini_random: int, bounds: dict, **kwargs):
-
+        super().__init__(ini_random)
         self._algo = BayesianOptimization(f=None,
                                           pbounds=bounds,
                                           **kwargs
                                           )
-        self._next_point: np.ndarray = None
-        self._suggested_coordinates: List[np.ndarray] = []
-        self.ini_random_points = ini_random
-        self.kappa = 2.5
 
-        self._utility = UtilityFunction(kind="ucb", kappa=self.kappa, xi=0.0)
+    def set_prediction_function(self, kind: str = '', **kwargs):
+        self._prediction = GenericAcquisitionFunctionFactory.create(kind, **kwargs)
+
+    def update_prediction_function(self):
+        """ Update the parameters of the acquisition function (kappa decay for instance)"""
+        self._prediction.decay_exploration()
 
-    def set_utility_function(self, kind: str, **kwargs):
-        if kind in UtilityKind.names():
-            self._utility = UtilityFunction(kind, **kwargs)
 
-    def update_utility_function(self):
-        """ Update the parameters of the Utility function (kappa decay for instance)"""
-        self._utility.update_params()
-        self.kappa = self._utility.kappa
+    @property
+    def tradeoff(self):
+        return self._prediction.tradeoff
 
     @property
     def bounds(self) -> List[np.ndarray]:
-        return [bound for bound in self._algo._space.bounds]
+        return [bound for bound in self._algo.space.bounds]
 
     @bounds.setter
     def bounds(self, bounds: Union[Dict[str, Tuple[float, float]], Iterable[np.ndarray]]):
         if isinstance(bounds, dict):
             self._algo.set_bounds(bounds)
         else:
-            self._algo.set_bounds(self._algo._space.array_to_params(np.array(bounds)))
-
-    def get_random_point(self) -> np.ndarray:
-        """ Get a random point coordinates in the defined bounds"""
-        point = []
-        for bound in self.bounds:
-            point.append((np.max(bound) - np.min(bound)) * np.random.random_sample() +
-                         np.min(bound))
-        return np.array(point)
-
-    def ask(self) -> np.ndarray:
-        if self.ini_random_points > 0:
-            self.ini_random_points -= 1
-            self._next_point = self.get_random_point()
-        else:
-            self._next_point = self._algo.space.params_to_array(self._algo.suggest(self._utility))
-        self._suggested_coordinates.append(self._next_point)
-        return self._next_point
+            self._algo.set_bounds(self._algo.space.array_to_params(np.array(bounds)))
+
+    def prediction_ask(self) -> np.ndarray:
+        """ Ask the prediction function or algo to provide the next point to probe"""
+        return self._prediction.suggest(self._algo._gp, self._algo.space)
 
     def tell(self, function_value: float):
         self._algo.register(params=self._next_point, target=function_value)
-
+
     @property
     def best_fitness(self) -> float:
         return self._algo.max['target']

@@ -181,226 +144,4 @@ class BayesianAlgorithm:
         return dwa_measured, dwa_prediction
 
 
-class BayesianModelGeneric(ABC):
-
-    optimisation_algorithm: BayesianAlgorithm = BayesianAlgorithm
-
-    actuators_name: List[str] = []
-    detectors_name: List[str] = []
-
-    observables_dim: List[ViewersEnum] = []
-
-    params = []  # to be subclassed
-
-    def __init__(self, optimisation_controller: 'BayesianOptimisation'):
-        self.optimisation_controller = optimisation_controller  # instance of the pid_controller using this model
-        self.modules_manager: ModulesManager = optimisation_controller.modules_manager
-
-        self.settings = self.optimisation_controller.settings.child('models', 'model_params')  # set of parameters
-        self.check_modules(self.modules_manager)
-
-    def check_modules(self, modules_manager):
-        for act in self.actuators_name:
-            if act not in modules_manager.actuators_name:
-                logger.warning(f'The actuator {act} defined in the model is'
-                               f' not present in the Dashboard')
-                return False
-        for det in self.detectors_name:
-            if det not in modules_manager.detectors_name:
-                logger.warning(f'The detector {det} defined in the model is'
-                               f' not present in the Dashboard')
-
-    def update_detector_names(self):
-        names = self.optimisation_controller.settings.child(
-            'main_settings', 'detector_modules').value()['selected']
-        self.data_names = []
-        for name in names:
-            name = name.split('//')
-            self.data_names.append(name)
-
-    def update_settings(self, param: Parameter):
-        """
-        Get a parameter instance whose value has been modified by a user on the UI
-        To be overwritten in child class
-        """
-        ...
-
-    def update_plots(self):
-        """ Called when updating the live plots """
-        pass
-
-    def ini_model_base(self):
-        self.modules_manager.selected_actuators_name = self.actuators_name
-        self.modules_manager.selected_detectors_name = self.detectors_name
-
-        self.ini_model()
-
-    def ini_model(self):
-        """ To be subclassed
-
-        Initialize whatever is needed by your custom model
-        """
-        raise NotImplementedError
-
-    def runner_initialized(self):
-        """ To be subclassed
-
-        Initialize whatever is needed by your custom model after the optimization runner is
-        initialized
-        """
-        pass
 
-    def convert_input(self, measurements: DataToExport) -> float:
-        """
-        Convert the measurements in the units to be fed to the Optimisation Controller
-        Parameters
-        ----------
-        measurements: DataToExport
-            data object exported from the detectors from which the model extract a float value
-            (fitness) to be fed to the algorithm
-
-        Returns
-        -------
-        float
-
-        """
-        raise NotImplementedError
-
-    def convert_output(self, outputs: List[np.ndarray], best_individual=None) -> DataToActuators:
-        """ Convert the output of the Optimisation Controller in units to be fed into the actuators
-        Parameters
-        ----------
-        outputs: list of numpy ndarray
-            output value from the controller from which the model extract a value of the same units as the actuators
-        best_individual: np.ndarray
-            the coordinates of the best individual so far
-        Returns
-        -------
-        DataToActuatorOpti: derived from DataToExport. Contains value to be fed to the actuators with a a mode
-            attribute, either 'rel' for relative or 'abs' for absolute.
-
-        """
-        raise NotImplementedError
-
-
-class BayesianModelDefault(BayesianModelGeneric):
-
-    actuators_name: List[str] = []  # to be populated dynamically at instantiation
-    detectors_name: List[str] = []  # to be populated dynamically at instantiation
-
-    params = [{'title': 'Optimizing signal', 'name': 'optimizing_signal', 'type': 'group',
-               'children': [
-                   {'title': 'Get data', 'name': 'data_probe', 'type': 'action'},
-                   {'title': 'Optimize 0Ds:', 'name': 'optimize_0d', 'type': 'itemselect',
-                    'checkbox': True},
-               ]},]
-
-    def __init__(self, optimisation_controller: 'BayesianOptimisation'):
-        self.actuators_name = optimisation_controller.modules_manager.actuators_name
-        self.detectors_name = optimisation_controller.modules_manager.detectors_name
-        super().__init__(optimisation_controller)
-
-        self.settings.child('optimizing_signal', 'data_probe').sigActivated.connect(
-            self.optimize_from)
-
-    def ini_model(self):
-        pass
-
-    def optimize_from(self):
-        self.modules_manager.get_det_data_list()
-        data0D = self.modules_manager.settings['data_dimensions', 'det_data_list0D']
-        data0D['selected'] = data0D['all_items']
-        self.settings.child('optimizing_signal', 'optimize_0d').setValue(data0D)
-
-    def update_settings(self, param: Parameter):
-        pass
-
-    def convert_input(self, measurements: DataToExport) -> float:
-        """ Convert the measurements in the units to be fed to the Optimisation Controller
-
-        Parameters
-        ----------
-        measurements: DataToExport
-            data object exported from the detectors from which the model extract a float value
-            (fitness) to be fed to the algorithm
-
-        Returns
-        -------
-        float
-
-        """
-        data_name: str = self.settings['optimizing_signal', 'optimize_0d']['selected'][0]
-        origin, name = data_name.split('/')
-        return float(measurements.get_data_from_name_origin(name, origin).data[0][0])
-
-    def convert_output(self, outputs: List[np.ndarray], best_individual=None) -> DataToActuators:
-        """ Convert the output of the Optimisation Controller in units to be fed into the actuators
-        Parameters
-        ----------
-        outputs: list of numpy ndarray
-            output value from the controller from which the model extract a value of the same units as the actuators
-        best_individual: np.ndarray
-            the coordinates of the best individual so far
-
-        Returns
-        -------
-        DataToActuators: derived from DataToExport. Contains value to be fed to the actuators
-            with a mode attribute, either 'rel' for relative or 'abs' for absolute.
-
-        """
-        return DataToActuators('outputs', mode='abs',
-                               data=[DataActuator(self.modules_manager.actuators_name[ind],
-                                                  data=float(outputs[ind])) for ind in
-                                     range(len(outputs))])
-
-
-def get_bayesian_models(model_name=None):
-    """
-    Get PID Models as a list to instantiate Control Actuators per degree of liberty in the model
-
-    Returns
-    -------
-    list: list of disct containting the name and python module of the found models
-    """
-    models_import = []
-    discovered_models = get_entrypoints(group='pymodaq.models')
-    if len(discovered_models) > 0:
-        for pkg in discovered_models:
-            try:
-                module = importlib.import_module(pkg.value)
-                module_name = pkg.value
-
-                for mod in pkgutil.iter_modules([
-                        str(Path(module.__file__).parent.joinpath('models'))]):
-                    try:
-                        model_module = importlib.import_module(f'{module_name}.models.{mod.name}',
-                                                               module)
-                        classes = inspect.getmembers(model_module, inspect.isclass)
-                        for name, klass in classes:
-                            if issubclass(klass, BayesianModelGeneric):
-                                if find_dict_in_list_from_key_val(models_import, 'name', mod.name)\
-                                        is None:
-                                    models_import.append({'name': klass.__name__,
-                                                          'module': model_module,
-                                                          'class': klass})
-
-                    except Exception as e:
-                        logger.warning(str(e))
-
-            except Exception as e:
-                logger.warning(f'Impossible to import the {pkg.value} bayesian model: {str(e)}')
-    if find_dict_in_list_from_key_val(models_import, 'name', 'BayesianModelDefault') \
-            is None:
-        models_import.append({'name': 'BayesianModelDefault',
-                              'module': inspect.getmodule(BayesianModelDefault),
-                              'class': BayesianModelDefault})
-    if model_name is None:
-        return models_import
-    else:
-        return find_dict_in_list_from_key_val(models_import, 'name', model_name)
-
-
-class BayesianConfig(BaseConfig):
-    """Main class to deal with configuration values for this plugin"""
-    config_template_path = None
-    config_name = f"bayesian_settings"
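The hunks above replace the bayes_opt UtilityFunction plumbing with acquisition functions created by GenericAcquisitionFunctionFactory and push the random-sampling and ask bookkeeping down into the new GenericAlgorithm base class from pymodaq.extensions.optimizers_base.utils. A minimal sketch of how the resulting ask/tell loop could be driven is given below; the availability of ask() on the base class and the 'ucb' kind string are assumptions made for illustration, not facts established by this diff.

# Illustrative sketch only (not pymodaq's own runner code).
# Assumptions: GenericAlgorithm exposes ask() (the method removed here from
# BayesianAlgorithm) and 'ucb' is a kind registered with GenericAcquisitionFunctionFactory.
import numpy as np
from pymodaq.extensions.bayesian.utils import BayesianAlgorithm


def fitness(point: np.ndarray) -> float:
    # Toy objective standing in for a real detector measurement
    return -float(np.sum((point - 0.5) ** 2))


algo = BayesianAlgorithm(ini_random=5, bounds={'x': (0., 1.), 'y': (0., 1.)})
algo.set_prediction_function('ucb')       # acquisition function built by the factory
for _ in range(20):
    point = algo.ask()                    # random at first, then acquisition-driven
    algo.tell(fitness(point))             # register the measured fitness
    algo.update_prediction_function()     # e.g. decay the exploration trade-off
print(algo.best_fitness)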
pymodaq/extensions/daq_logger/daq_logger.py

@@ -75,8 +75,6 @@ class DAQ_Logger(CustomExt):
         super().__init__(dockarea, dashboard)
 
         self.wait_time = 1000
-
-        self.logger_thread = None
         self.logger: Union[H5Logger, DataBaseLogger] = None
         self.setup_ui()
 

@@ -293,25 +291,22 @@
         res = self.set_logging()
 
         # mandatory to deal with multithreads
-        if self.logger_thread is not None:
+        if self.runner_thread is not None:
             self.command_DAQ_signal.disconnect()
-            if self.logger_thread.isRunning():
-                self.logger_thread.exit()
-                while not self.logger_thread.isFinished():
-                    QThread.msleep(100)
-                self.logger_thread = None
+            self.exit_runner_thread()
+            self.runner_thread = None
 
-        self.logger_thread = QThread()
+        self.runner_thread = QThread()
 
         log_acquisition = DAQ_Logging(self.settings, self.logger, self.modules_manager)
 
-        log_acquisition.moveToThread(self.logger_thread)
+        log_acquisition.moveToThread(self.runner_thread)
 
         self.command_DAQ_signal[list].connect(log_acquisition.queue_command)
         log_acquisition.status_sig[list].connect(self.thread_status)
 
-        self.logger_thread.log_acquisition = log_acquisition
-        self.logger_thread.start()
+        self.runner_thread.log_acquisition = log_acquisition
+        self.runner_thread.start()
 
         self._actions['start'].setEnabled(False)
         QtWidgets.QApplication.processEvents()
pymodaq/extensions/daq_logger/h5logging.py

@@ -45,7 +45,7 @@ class H5Logger(AbstractLogger):
 
         self.module_and_data_saver = module_saving.LoggerSaver(self)
         for det in self.modules_manager.detectors_all:
-            det.module_and_data_saver = module_saving.DetectorEnlargeableSaver(det)
+            det.module_and_data_saver = module_saving.DetectorTimeSaver(det)
         self.module_and_data_saver.h5saver = self.h5saver  # will update its h5saver and all submodules's h5saver
 
     def close(self):
pymodaq/extensions/daq_scan.py

@@ -146,7 +146,7 @@ class DAQScan(QObject, ParameterManager):
         self.curvilinear_values = []
         self.plot_colors = utils.plot_colors
 
-        self.scan_thread: QThread = None
+        self.runner_thread: QThread = None
         self._h5saver: H5Saver = None
         self._module_and_data_saver: module_saving.ScanSaver = None
 

@@ -880,6 +880,13 @@ class DAQScan(QObject, ParameterManager):
             self._metada_dataset_set = True
         return res
 
+    def exit_runner_thread(self, duration : int = 5000):
+        self.runner_thread.quit()
+        terminated = self.runner_thread.wait(duration)
+        if not terminated:
+            self.runner_thread.terminate()
+            self.runner_thread.wait()
+
     def start_scan(self):
         """
         Start an acquisition calling the set_scan function.
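The exit_runner_thread helper added here replaces the old terminate-and-poll teardown used by DAQScan and DAQ_Logger with Qt's quit/wait pattern, falling back to terminate() only if the worker thread does not stop within the timeout. The standalone sketch below reproduces that shutdown pattern with plain qtpy; the Worker class and the surrounding harness are hypothetical, and only the quit/wait/terminate sequence mirrors the diff.

import sys
from qtpy.QtCore import QCoreApplication, QObject, QThread, Signal


class Worker(QObject):
    finished = Signal()

    def run(self):
        QThread.msleep(200)  # stand-in for an acquisition task
        self.finished.emit()


def exit_runner_thread(thread: QThread, duration: int = 5000):
    thread.quit()                   # ask the thread's event loop to stop
    if not thread.wait(duration):   # give it `duration` ms to finish cleanly
        thread.terminate()          # last resort: force-stop the thread
        thread.wait()


if __name__ == '__main__':
    app = QCoreApplication(sys.argv)
    runner_thread = QThread()
    worker = Worker()
    worker.moveToThread(runner_thread)
    runner_thread.started.connect(worker.run)
    worker.finished.connect(app.quit)
    runner_thread.start()
    app.exec_()
    exit_runner_thread(runner_thread)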
@@ -919,27 +926,24 @@
         self.module_and_data_saver.h5saver = self.h5saver  # force the update as the h5saver ill also be set on each detectors
 
         # mandatory to deal with multithreads
-        if self.scan_thread is not None:
+        if self.runner_thread is not None:
             self.command_daq_signal.disconnect()
-            if self.scan_thread.isRunning():
-                self.scan_thread.terminate()
-                while not self.scan_thread.isFinished():
-                    QThread.msleep(100)
-                self.scan_thread = None
+            self.exit_runner_thread()
+            self.runner_thread = None
 
-        self.scan_thread = QThread()
+        self.runner_thread = QThread()
 
         scan_acquisition = DAQScanAcquisition(self.settings, self.scanner, self.modules_manager,
                                               )
 
         if config['scan']['scan_in_thread']:
-            scan_acquisition.moveToThread(self.scan_thread)
+            scan_acquisition.moveToThread(self.runner_thread)
         self.command_daq_signal[utils.ThreadCommand].connect(scan_acquisition.queue_command)
         scan_acquisition.scan_data_tmp[ScanDataTemp].connect(self.save_temp_live_data)
         scan_acquisition.status_sig[utils.ThreadCommand].connect(self.thread_status)
 
-        self.scan_thread.scan_acquisition = scan_acquisition
-        self.scan_thread.start()
+        self.runner_thread.scan_acquisition = scan_acquisition
+        self.runner_thread.start()
 
         self.ui.set_action_enabled('ini_positions', False)
         self.ui.set_action_enabled('start', False)

@@ -992,7 +996,8 @@
         self.ui.set_permanent_status('Stoping acquisition')
         self.command_daq_signal.emit(utils.ThreadCommand("stop_acquisition"))
         scan_node = self.module_and_data_saver.get_last_node()
-        scan_node.attrs['scan_done'] = True
+        if scan_node is not None:
+            scan_node.attrs['scan_done'] = True
 
         if not self.dashboard.overshoot:
             self.set_ini_positions()  # do not set ini position again in case overshoot fired

@@ -1095,7 +1100,6 @@
 
     def start_acquisition(self):
         try:
-            #todo hoaw to apply newlayout to adaptive mode? => cannot has to be a new extension
 
             self.modules_manager.connect_actuators()
             self.modules_manager.connect_detectors()

@@ -1120,20 +1124,6 @@
                     positions = self.scanner.positions_at(self.ind_scan)  # get positions
                 else:
                     pass
-                    #todo update for v4
-                    # positions = learner.ask(1)[0][-1]  # next point to probe
-                    # if self.scanner.scan_type == 'Tabular':  # translate normalized curvilinear position to real coordinates
-                    #     self.curvilinear = positions
-                    #     length = 0.
-                    #     for v in self.scanner.vectors:
-                    #         length += v.norm()
-                    #         if length >= self.curvilinear:
-                    #             vec = v
-                    #             frac_curvilinear = (self.curvilinear - (length - v.norm())) / v.norm()
-                    #             break
-                    #
-                    # position = (vec.vectorize() * frac_curvilinear).translate_to(vec.p1()).p2()
-                    # positions = [position.x(), position.y()]
 
                 self.status_sig.emit(
                     utils.ThreadCommand("Update_scan_index",

@@ -1148,21 +1138,7 @@
                 QThread.msleep(self.scan_settings['time_flow', 'wait_time_between'])
 
                 #grab datas and wait for grab completion
-                self.det_done(self.modules_manager.grab_datas(positions=positions), positions)
-
-                if self.isadaptive:
-                    #todo update for v4
-                    # det_channel = self.modules_manager.get_selected_probed_data()
-                    # det, channel = det_channel[0].split('/')
-                    # if self.scanner.scan_type == 'Tabular':
-                    #     self.curvilinear_array.append(np.array([self.curvilinear]))
-                    #     new_positions = self.curvilinear
-                    # elif self.scanner.scan_type == 'Scan1D':
-                    #     new_positions = positions[0]
-                    # else:
-                    #     new_positions = positions[:]
-                    # learner.tell(new_positions, self.modules_manager.det_done_datas[det]['data0D'][channel]['data'])
-                    pass
+                self.det_done(self.modules_manager.grab_data(positions=positions), positions)
 
                 # daq_scan wait time
                 QThread.msleep(self.scan_settings.child('time_flow', 'wait_time').value())

@@ -1201,11 +1177,6 @@
                 utils.ThreadCommand("add_data",
                                     dict(indexes=indexes, distribution=self.scanner.distribution)))
 
-            #todo related to adaptive (solution lies along the Enlargeable data saver)
-            if self.isadaptive:
-                for ind_ax, nav_axis in enumerate(self.navigation_axes):
-                    nav_axis.append(np.array(positions[ind_ax]))
-
 
             self.det_done_flag = True
             full_names: list = self.scan_settings['plot_options', 'plot_0d']['selected'][:]
pymodaq/extensions/h5browser.py

@@ -1,39 +1,8 @@
-import argparse
-from pathlib import Path
-import sys
-import os
-from qtpy import QtWidgets
-os.environ["HDF5_USE_FILE_LOCKING"] = "FALSE"
-from pymodaq_gui.h5modules.browsing import H5Browser
-from pymodaq.utils.config import Config
+from pymodaq_gui.h5modules.h5browser import main, H5Browser  #backcompat
+from pymodaq_utils.utils import deprecation_msg
 
 
-config = Config()
-
-
-def main(h5file_path: Path = None):
-    from pymodaq_gui.utils.utils import mkQApp
-    import sys
-    app = mkQApp('H5Browser')
-
-    h5file_path_tmp = None
-    parser = argparse.ArgumentParser(description="Opens HDF5 files and navigate their contents")
-    parser.add_argument("-i", "--input", help="specify path to the file to be opened")
-    args = parser.parse_args()
-
-    if args.input:
-        h5file_path_tmp = Path(args.input).resolve()  # Transform to absolute Path in case it is relative
-
-        if not h5file_path_tmp.exists():
-            print(f'Error: {args.input} does not exist. Opening h5browser without input file.')
-            h5file_path_tmp = h5file_path
-
-    win = QtWidgets.QMainWindow()
-    prog = H5Browser(win, h5file_path=h5file_path_tmp)
-    win.show()
-    QtWidgets.QApplication.processEvents()
-
-    app.exec()
+deprecation_msg('H5Browser should now be loaded from the pymodaq_gui.h5modules.h5browser module')
 
 
 if __name__ == '__main__':
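With this rewrite, pymodaq/extensions/h5browser.py becomes a thin backward-compatibility shim: the implementation now lives in pymodaq_gui, and importing through the old path simply re-exports it while emitting a deprecation message. A short usage note (illustrative):

# Old import path: still works in 5.1.0a0 but triggers the deprecation message at import time
from pymodaq.extensions.h5browser import H5Browser, main

# Preferred import path going forward
from pymodaq_gui.h5modules.h5browser import H5Browser, main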