pymodaq 5.0.18__py3-none-any.whl → 5.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pymodaq might be problematic. Click here for more details.

Files changed (92) hide show
  1. pymodaq/__init__.py +23 -11
  2. pymodaq/control_modules/__init__.py +1 -0
  3. pymodaq/control_modules/daq_move.py +451 -246
  4. pymodaq/control_modules/daq_move_ui/__init__.py +0 -0
  5. pymodaq/control_modules/daq_move_ui/factory.py +48 -0
  6. pymodaq/control_modules/{daq_move_ui.py → daq_move_ui/ui_base.py} +168 -210
  7. pymodaq/control_modules/daq_move_ui/uis/__init__.py +0 -0
  8. pymodaq/control_modules/daq_move_ui/uis/binary.py +139 -0
  9. pymodaq/control_modules/daq_move_ui/uis/original.py +120 -0
  10. pymodaq/control_modules/daq_move_ui/uis/relative.py +124 -0
  11. pymodaq/control_modules/daq_move_ui/uis/simple.py +126 -0
  12. pymodaq/control_modules/daq_viewer.py +113 -101
  13. pymodaq/control_modules/daq_viewer_ui.py +41 -31
  14. pymodaq/control_modules/mocks.py +2 -2
  15. pymodaq/control_modules/move_utility_classes.py +113 -41
  16. pymodaq/control_modules/thread_commands.py +137 -0
  17. pymodaq/control_modules/ui_utils.py +72 -0
  18. pymodaq/control_modules/utils.py +107 -63
  19. pymodaq/control_modules/viewer_utility_classes.py +13 -17
  20. pymodaq/dashboard.py +1294 -625
  21. pymodaq/examples/qt_less_standalone_module.py +48 -11
  22. pymodaq/extensions/__init__.py +8 -3
  23. pymodaq/extensions/adaptive/__init__.py +2 -0
  24. pymodaq/extensions/adaptive/adaptive_optimization.py +179 -0
  25. pymodaq/extensions/adaptive/loss_function/_1d_loss_functions.py +73 -0
  26. pymodaq/extensions/adaptive/loss_function/_2d_loss_functions.py +73 -0
  27. pymodaq/extensions/adaptive/loss_function/__init__.py +3 -0
  28. pymodaq/extensions/adaptive/loss_function/loss_factory.py +110 -0
  29. pymodaq/extensions/adaptive/utils.py +123 -0
  30. pymodaq/extensions/bayesian/__init__.py +1 -1
  31. pymodaq/extensions/bayesian/acquisition/__init__.py +2 -0
  32. pymodaq/extensions/bayesian/acquisition/acquisition_function_factory.py +80 -0
  33. pymodaq/extensions/bayesian/acquisition/base_acquisition_function.py +105 -0
  34. pymodaq/extensions/bayesian/bayesian_optimization.py +143 -0
  35. pymodaq/extensions/bayesian/utils.py +71 -297
  36. pymodaq/extensions/daq_logger/daq_logger.py +7 -12
  37. pymodaq/extensions/daq_logger/h5logging.py +1 -1
  38. pymodaq/extensions/daq_scan.py +30 -55
  39. pymodaq/extensions/data_mixer/__init__.py +0 -0
  40. pymodaq/extensions/data_mixer/daq_0Dviewer_DataMixer.py +97 -0
  41. pymodaq/extensions/data_mixer/data_mixer.py +262 -0
  42. pymodaq/extensions/data_mixer/model.py +108 -0
  43. pymodaq/extensions/data_mixer/models/__init__.py +0 -0
  44. pymodaq/extensions/data_mixer/models/equation_model.py +91 -0
  45. pymodaq/extensions/data_mixer/models/gaussian_fit_model.py +65 -0
  46. pymodaq/extensions/data_mixer/parser.py +53 -0
  47. pymodaq/extensions/data_mixer/utils.py +23 -0
  48. pymodaq/extensions/h5browser.py +3 -34
  49. pymodaq/extensions/optimizers_base/__init__.py +0 -0
  50. pymodaq/extensions/optimizers_base/optimizer.py +1016 -0
  51. pymodaq/extensions/optimizers_base/thread_commands.py +22 -0
  52. pymodaq/extensions/optimizers_base/utils.py +427 -0
  53. pymodaq/extensions/pid/actuator_controller.py +3 -2
  54. pymodaq/extensions/pid/daq_move_PID.py +107 -30
  55. pymodaq/extensions/pid/pid_controller.py +613 -287
  56. pymodaq/extensions/pid/utils.py +8 -5
  57. pymodaq/extensions/utils.py +17 -2
  58. pymodaq/resources/config_template.toml +57 -0
  59. pymodaq/resources/preset_default.xml +1 -1
  60. pymodaq/utils/config.py +10 -4
  61. pymodaq/utils/daq_utils.py +14 -0
  62. pymodaq/utils/data.py +1 -0
  63. pymodaq/utils/gui_utils/loader_utils.py +25 -15
  64. pymodaq/utils/h5modules/module_saving.py +134 -22
  65. pymodaq/utils/leco/daq_move_LECODirector.py +123 -84
  66. pymodaq/utils/leco/daq_xDviewer_LECODirector.py +84 -97
  67. pymodaq/utils/leco/director_utils.py +32 -16
  68. pymodaq/utils/leco/leco_director.py +104 -27
  69. pymodaq/utils/leco/pymodaq_listener.py +186 -97
  70. pymodaq/utils/leco/rpc_method_definitions.py +43 -0
  71. pymodaq/utils/leco/utils.py +25 -25
  72. pymodaq/utils/managers/batchscan_manager.py +12 -11
  73. pymodaq/utils/managers/modules_manager.py +74 -33
  74. pymodaq/utils/managers/overshoot_manager.py +11 -10
  75. pymodaq/utils/managers/preset_manager.py +100 -64
  76. pymodaq/utils/managers/preset_manager_utils.py +163 -107
  77. pymodaq/utils/managers/remote_manager.py +21 -16
  78. pymodaq/utils/scanner/scan_factory.py +12 -3
  79. pymodaq/utils/scanner/scan_selector.py +1 -3
  80. pymodaq/utils/scanner/scanner.py +35 -6
  81. pymodaq/utils/scanner/scanners/_1d_scanners.py +15 -46
  82. pymodaq/utils/scanner/scanners/_2d_scanners.py +21 -68
  83. pymodaq/utils/scanner/scanners/sequential.py +50 -31
  84. pymodaq/utils/scanner/scanners/tabular.py +45 -28
  85. {pymodaq-5.0.18.dist-info → pymodaq-5.1.0.dist-info}/METADATA +7 -6
  86. pymodaq-5.1.0.dist-info/RECORD +154 -0
  87. {pymodaq-5.0.18.dist-info → pymodaq-5.1.0.dist-info}/entry_points.txt +0 -2
  88. pymodaq/extensions/bayesian/bayesian_optimisation.py +0 -690
  89. pymodaq/utils/leco/desktop.ini +0 -2
  90. pymodaq-5.0.18.dist-info/RECORD +0 -121
  91. {pymodaq-5.0.18.dist-info → pymodaq-5.1.0.dist-info}/WHEEL +0 -0
  92. {pymodaq-5.0.18.dist-info → pymodaq-5.1.0.dist-info}/licenses/LICENSE +0 -0
@@ -13,11 +13,14 @@ For remote control, you need to start a Coordinator, as described for remote con
13
13
 
14
14
  import logging
15
15
  from time import sleep
16
- from typing import List, Union
16
+ from typing import cast, List, Optional
17
17
 
18
18
  from pyleco.utils.listener import Listener
19
19
 
20
20
 
21
+ from pymodaq_data.data import DataWithAxes
22
+ from pymodaq_utils.serialize.factory import SerializableFactory
23
+
21
24
  class QtLessModule:
22
25
  """Some module doing things without Qt.
23
26
 
@@ -33,6 +36,9 @@ class QtLessModule:
33
36
  self._fake_position = 0
34
37
  self.start_listen()
35
38
  self._stored = []
39
+ # register DataWithAxes for deserialization
40
+ cls = DataWithAxes
41
+ SerializableFactory().register_from_type(cls, cls.serialize, cls.deserialize)
36
42
 
37
43
  def start_listen(self) -> None:
38
44
  """Start to listen on incoming commands."""
@@ -45,8 +51,9 @@ class QtLessModule:
45
51
  register_rpc_method = self.communicator.register_rpc_method
46
52
  register_rpc_method(self.set_info)
47
53
  register_rpc_method(self.send_data)
48
- register_rpc_method(self.move_abs)
49
- register_rpc_method(self.move_rel)
54
+ # binary methods can accept additionally binary payload, like serialized pymodaq objects.
55
+ self.listener.register_binary_rpc_method(self.move_abs, accept_binary_input=True)
56
+ self.listener.register_binary_rpc_method(self.move_rel, accept_binary_input=True)
50
57
  register_rpc_method(self.move_home)
51
58
  register_rpc_method(self.get_actuator_value)
52
59
  register_rpc_method(self.stop_motion)
@@ -56,7 +63,17 @@ class QtLessModule:
56
63
  """Stop to listen on incoming commands."""
57
64
  self.listener.stop_listen()
58
65
 
59
- # smethods for being remote controlled
66
+ @staticmethod
67
+ def extract_pymodaq_object(
68
+ value: Optional[float], additional_payload: Optional[List[bytes]]
69
+ ):
70
+ if value is None and additional_payload:
71
+ res = cast(DataWithAxes, SerializableFactory().get_apply_deserializer(additional_payload[0]))
72
+ else:
73
+ res = value
74
+ return res
75
+
76
+ # methods for being remote controlled
60
77
  # these methods are executed and cannot talk to the controlling module directly.
61
78
  # if you need to send a response (for example with a value) you have to store the information and
62
79
  # send it after these methods have been executed.
@@ -73,13 +90,33 @@ class QtLessModule:
73
90
  print("send_data")
74
91
 
75
92
  # actuator commands
76
- def move_abs(self, position: Union[float, str]) -> None:
77
- print("move_abs", position)
78
- self._fake_position = float(position)
79
-
80
- def move_rel(self, position: Union[float, str]) -> None:
81
- print("move_rel", position)
82
- self._fake_position += float(position)
93
+ def move_abs(
94
+ self,
95
+ position: Optional[float],
96
+ additional_payload: Optional[List[bytes]] = None,
97
+ ) -> None:
98
+ """Move to an absolute position.
99
+
100
+ :param position: Deprecated, should be None and content transferred binary.
101
+ :param additional_payload: binary frames containing the position as PyMoDAQ `DataActuator`.
102
+ """
103
+ pos = self.extract_pymodaq_object(position, additional_payload)
104
+ print("move_abs", pos)
105
+ self._fake_position = float(pos)
106
+
107
+ def move_rel(
108
+ self,
109
+ position: Optional[float],
110
+ additional_payload: Optional[List[bytes]] = None,
111
+ ) -> None:
112
+ """Move by a relative position.
113
+
114
+ :param position: Deprecated, should be None and content transferred binary.
115
+ :param additional_payload: binary frames containing the position as PyMoDAQ `DataActuator`.
116
+ """
117
+ pos = self.extract_pymodaq_object(position, additional_payload)
118
+ print("move_rel", pos)
119
+ self._fake_position += float(pos)
83
120
 
84
121
  def move_home(self) -> None:
85
122
  self._fake_position = 0
@@ -11,10 +11,15 @@ from .console import QtConsole
11
11
  from .daq_scan import DAQScan
12
12
  from .daq_logger.daq_logger import DAQ_Logger
13
13
  from .pid.pid_controller import DAQ_PID
14
- from .h5browser import H5Browser
14
+ from .h5browser import H5Browser #backcompat but should be loaded from pymodaq_gui!
15
+
16
+ from .bayesian.bayesian_optimization import BayesianOptimization
17
+ from .bayesian.utils import OptimizerModelDefault
18
+
19
+ from .adaptive.adaptive_optimization import AdaptiveOptimisation
20
+
21
+ from .data_mixer.data_mixer import DataMixer
15
22
 
16
- from .bayesian.bayesian_optimisation import BayesianOptimisation
17
- from .bayesian.utils import BayesianModelDefault, BayesianModelGeneric
18
23
 
19
24
 
20
25
 
@@ -0,0 +1,2 @@
1
+ from . import adaptive_optimization
2
+ from . import utils
@@ -0,0 +1,179 @@
1
+ from pymodaq_gui.messenger import messagebox
2
+ from pymodaq_utils import utils
3
+ from pymodaq_utils import config as config_mod
4
+ from pymodaq_utils.logger import set_logger, get_module_name
5
+ from pymodaq_utils.utils import ThreadCommand
6
+
7
+ from pymodaq.extensions.optimizers_base.optimizer import (
8
+ GenericOptimization, OptimizationRunner, optimizer_params, OptimizerAction, StopType)
9
+ from pymodaq.extensions.optimizers_base.utils import OptimizerModelDefault, find_key_in_nested_dict
10
+ from pymodaq.extensions.optimizers_base.thread_commands import OptimizerToRunner
11
+
12
+ from pymodaq.extensions.adaptive.loss_function import LossFunctionFactory,LossDim
13
+ from pymodaq.extensions.adaptive.utils import AdaptiveAlgorithm, AdaptiveConfig
14
+
15
+
16
+ logger = set_logger(get_module_name(__file__))
17
+ config = config_mod.Config()
18
+
19
+
20
+ EXTENSION_NAME = 'AdaptiveScan'
21
+ CLASS_NAME = 'AdaptiveOptimization'
22
+
23
+ STARTING_LOSS_DIM = LossDim.LOSS_1D
24
+
25
+ PREDICTION_NAMES = list(LossFunctionFactory.keys(STARTING_LOSS_DIM))
26
+ PREDICTION_PARAMS = (
27
+ [{'title': 'LossDim', 'name': 'lossdim', 'type': 'str',
28
+ 'value': LossDim.LOSS_1D, 'readonly': True},
29
+ {'title': 'Kind', 'name': 'kind', 'type': 'list',
30
+ 'value': PREDICTION_NAMES[0],
31
+ 'limits': PREDICTION_NAMES}] +
32
+ [{'title': 'Options', 'name': 'options', 'type': 'group',
33
+ 'children': LossFunctionFactory.get(STARTING_LOSS_DIM, PREDICTION_NAMES[0]).params}]
34
+ )
35
+
36
+
37
+ class AdaptiveOptimizationRunner(OptimizationRunner):
38
+
39
+
40
+ def __init__(self, *args, **kwargs):
41
+ super().__init__(*args, **kwargs)
42
+
43
+ def queue_command(self, command: ThreadCommand):
44
+ """
45
+ """
46
+ if command.command == OptimizerToRunner.PREDICTION:
47
+ kind = command.attribute.pop('kind')
48
+ lossdim = command.attribute.pop('lossdim')
49
+ self.optimization_algorithm.set_prediction_function(lossdim, kind, **command.attribute)
50
+ else:
51
+ super().queue_command(command)
52
+
53
+
54
+ class AdaptiveOptimisation(GenericOptimization):
55
+ """ PyMoDAQ extension of the DashBoard to perform the optimization of a target signal
56
 + taken from the detectors as a function of one or more parameters controlled by the actuators.
57
+ """
58
+
59
+ runner = AdaptiveOptimizationRunner
60
+ params = optimizer_params(PREDICTION_PARAMS)
61
+ config_saver = AdaptiveConfig
62
+
63
+ DISPLAY_BEST = False
64
+
65
+ def __init__(self, *args, **kwargs):
66
+ super().__init__(*args, **kwargs)
67
+ self.get_action(OptimizerAction.SAVE).trigger()
68
+ self.settings.child('main_settings', 'ini_random').hide()
69
+ self.settings.child('main_settings', 'stopping', 'tolerance').hide()
70
+ self.settings.child('main_settings', 'stopping', 'npoints').hide()
71
+ self.settings.child('main_settings', 'stopping', 'stop_type').setLimits(
72
+ [StopType.NONE.value, StopType.ITER.value])
73
+
74
+ def ini_custom_attributes(self):
75
+ """ Here you can reimplement specific attributes"""
76
+ self._base_name: str = 'Adaptive'
77
+
78
+ def validate_config(self) -> bool:
79
+ utility = find_key_in_nested_dict(self.optimizer_config.to_dict(), 'prediction')
80
+ if utility:
81
+ try:
82
+ utility_params = { k : v for k, v in utility.items() \
83
+ if k not in ("kind", "tradeoff_actual", 'lossdim') }
84
+ LossFunctionFactory.create(utility['lossdim'],
85
+ utility['kind'], **utility_params)
86
+ except (ValueError, KeyError):
87
+ return False
88
+
89
+ return True
90
+
91
+ def value_changed(self, param):
92
+ """ to be subclassed for actions to perform when one of the param's value in self.settings is changed
93
+
94
+ For instance:
95
+ if param.name() == 'do_something':
96
+ if param.value():
97
+ print('Do something')
98
+ self.settings.child('main_settings', 'something_done').setValue(False)
99
+
100
+ Parameters
101
+ ----------
102
+ param: (Parameter) the parameter whose value just changed
103
+ """
104
+ super().value_changed(param)
105
+ if param.name() == 'lossdim':
106
+ try:
107
+ self.settings.child('main_settings', 'prediction', 'kind').setLimits(
108
+ LossFunctionFactory.keys(param.value())
109
+ )
110
+ except Exception as e:
111
+ logger.debug('Warning: Error while trying to infer the kind of loss, may be because limits just changed')
112
+ elif param.name() == 'kind':
113
+ utility_settings = self.settings.child('main_settings', 'prediction')
114
+ utility_settings.child('options').clearChildren()
115
+ try:
116
+ params = LossFunctionFactory.get(utility_settings['lossdim'],
117
+ param.value()).params
118
+ utility_settings.child('options').addChildren(params)
119
+ except (KeyError, ValueError) as e:
120
+ logger.debug('Warning: Error while trying to populate options for loss, may be because limits for'
121
+ ' kind setting just changed')
122
+
123
+ def update_prediction_function(self):
124
+ utility_settings = self.settings.child('main_settings', 'prediction')
125
+ try:
126
+ uparams = {child.name() : child.value() for child in utility_settings.child('options').children()}
127
+ uparams['kind'] = utility_settings['kind']
128
+ uparams['lossdim'] = utility_settings['lossdim']
129
+
130
+ self.command_runner.emit(
131
+ utils.ThreadCommand(OptimizerToRunner.PREDICTION, uparams))
132
+ except (KeyError, ValueError, AttributeError) as e:
133
+ pass
134
+ print(e)
135
+
136
+ def update_after_actuators_changed(self, actuators: list[str]):
137
+ """ Actions to do after the actuators have been updated
138
+ """
139
+ try:#see if there is some registered loss function for the defined type
140
+ self.settings.child('main_settings', 'prediction',
141
+ 'lossdim').setValue(LossDim.get_enum_from_dim_as_int(len(actuators)))
142
+ self.update_prediction_function()
143
+
144
+ LossFunctionFactory.create(self.settings['main_settings', 'prediction',
145
+ 'lossdim'],
146
+ self.settings['main_settings', 'prediction',
147
+ 'kind'])
148
+ self.get_action(OptimizerAction.INI_RUNNER).setEnabled(True)
149
+
150
+ except ValueError as e:
151
+ self.get_action(OptimizerAction.INI_RUNNER).setEnabled(False)
152
+ messagebox(title='Warning',
153
+ text=f'You cannot select [{actuators}] as no corresponding Loss function exists')
154
+
155
+ def set_algorithm(self):
156
+ self.algorithm = AdaptiveAlgorithm(
157
+ ini_random=1,
158
+ bounds=self.format_bounds(),
159
+ actuators=self.modules_manager.selected_actuators_name,
160
+ loss_type=LossDim(self.settings['main_settings', 'prediction', 'lossdim']),
161
+ kind=self.settings['main_settings', 'prediction', 'kind'])
162
+
163
+
164
+ def main():
165
+ from pymodaq_gui.utils.utils import mkQApp
166
+ from pymodaq.utils.gui_utils.loader_utils import load_dashboard_with_preset
167
+
168
+ app = mkQApp('Adaptive Optimiser')
169
+ preset_file_name = config('presets', f'default_preset_for_scan')
170
+
171
+ dashboard, extension, win = load_dashboard_with_preset(preset_file_name, 'AdaptiveScan')
172
+
173
+ app.exec()
174
+
175
+ return dashboard, extension, win
176
+
177
+ if __name__ == '__main__':
178
+ main()
179
+
@@ -0,0 +1,73 @@
1
+ from typing import TYPE_CHECKING, Callable
2
+
3
+ from .loss_factory import LossFunctionBase, LossFunctionFactory, LossDim
4
+
5
+ from adaptive.learner.learner1D import (
6
+ curvature_loss_function,
7
+ default_loss,
8
+ uniform_loss,
9
+ resolution_loss_function,
10
+ abs_min_log_loss,
11
+ uses_nth_neighbors,
12
+
13
+ )
14
+
15
+
16
+ def default_loss_function(*args, **kwargs): #should be wrapped to handle eventual initializing argument, see params attributes below
17
+ return default_loss
18
+
19
+
20
+ def uniform_loss_function(**kwargs): #should be wrapped to handle eventual initializing argument, see params attributes below
21
+ return uniform_loss
22
+
23
+
24
+ def abs_min_log_loss_function(**kwargs): #should be wrapped to handle eventual initializing argument, see params attributes below
25
+ return abs_min_log_loss
26
+
27
+
28
+
29
+ @LossFunctionFactory.register()
30
+ class DefaultLoss(LossFunctionBase):
31
+ _loss = staticmethod(default_loss_function)
32
+ dim = LossDim.LOSS_1D
33
+ usual_name = 'Default'
34
+ params = []
35
+
36
+
37
+ @LossFunctionFactory.register()
38
+ class UniformLoss(LossFunctionBase):
39
+ _loss = staticmethod(uniform_loss_function)
40
+ dim = LossDim.LOSS_1D
41
+ usual_name = 'Uniform'
42
+ params = []
43
+
44
+
45
+ @LossFunctionFactory.register()
46
+ class CurvatureLoss(LossFunctionBase):
47
+ _loss = staticmethod(curvature_loss_function)
48
+ dim = LossDim.LOSS_1D
49
+ usual_name = 'Curvature'
50
+ params = [
51
+ {'title': 'Area', 'name': 'area_factor', 'type': 'float', 'value': 1.},
52
+ {'title': 'Euclid', 'name': 'euclid_factor', 'type': 'float', 'value': 0.02},
53
+ {'title': 'Horizontal', 'name': 'horizontal_factor', 'type': 'float', 'value': 0.02}
54
+ ]
55
+
56
+
57
+ @LossFunctionFactory.register()
58
+ class ResolutionLoss(LossFunctionBase):
59
+ _loss = staticmethod(resolution_loss_function)
60
+ dim = LossDim.LOSS_1D
61
+ usual_name = 'Resolution'
62
+ params = [
63
+ {'title': 'Min:', 'name': 'min_length', 'type': 'float', 'value': 0., 'min': 0., 'max': 1.},
64
+ {'title': 'Max:', 'name': 'max_length', 'type': 'float', 'value': 1., 'min': 0., 'max': 1.},
65
+ ]
66
+
67
+
68
+ @LossFunctionFactory.register()
69
+ class AbsMinLogLoss(LossFunctionBase):
70
+ _loss = staticmethod(abs_min_log_loss_function)
71
+ dim = LossDim.LOSS_1D
72
+ usual_name = 'AbsMinLog'
73
+ params = []
@@ -0,0 +1,73 @@
1
+ from typing import TYPE_CHECKING, Callable
2
+
3
+ from .loss_factory import LossFunctionBase, LossFunctionFactory, LossDim
4
+
5
+ from adaptive.learner.learner2D import (
6
+ default_loss,
7
+ uniform_loss,
8
+ resolution_loss_function,
9
+ minimize_triangle_surface_loss,
10
+ thresholded_loss_function,
11
+ )
12
+
13
+
14
+ def default_loss_function(*args, **kwargs): #should be wrapped to handle eventual initializing argument, see params attributes below
15
+ return default_loss
16
+
17
+
18
+ def uniform_loss_function(**kwargs): #should be wrapped to handle eventual initializing argument, see params attributes below
19
+ return uniform_loss
20
+
21
+
22
+ def minimize_triangle_surface_loss_function(**kwargs): #should be wrapped to handle eventual initializing argument, see params attributes below
23
+ return minimize_triangle_surface_loss
24
+
25
+
26
+ @LossFunctionFactory.register()
27
+ class DefaultLoss(LossFunctionBase):
28
+ _loss = staticmethod(default_loss_function)
29
+ dim = LossDim.LOSS_2D
30
+ usual_name = 'Default'
31
+ params = []
32
+
33
+
34
+ @LossFunctionFactory.register()
35
+ class UniformLoss(LossFunctionBase):
36
+ _loss = staticmethod(uniform_loss_function)
37
+ dim = LossDim.LOSS_2D
38
+ usual_name = 'Uniform'
39
+ params = []
40
+
41
+
42
+
43
+ @LossFunctionFactory.register()
44
+ class ResolutionLoss(LossFunctionBase):
45
+ _loss = staticmethod(resolution_loss_function)
46
+ dim = LossDim.LOSS_2D
47
+ usual_name = 'Resolution'
48
+ params = [
49
+ {'title': 'Min:', 'name': 'min_distance', 'type': 'float', 'value': 0., 'min': 0., 'max': 1.},
50
+ {'title': 'Max:', 'name': 'max_distance', 'type': 'float', 'value': 1., 'min': 0., 'max': 1.},
51
+ ]
52
+
53
+
54
+ @LossFunctionFactory.register()
55
+ class MinTriangleLoss(LossFunctionBase):
56
+ _loss = staticmethod(minimize_triangle_surface_loss_function)
57
+ dim = LossDim.LOSS_2D
58
+ usual_name = 'MinTriangle'
59
+ params = []
60
+
61
+
62
+
63
+ @LossFunctionFactory.register()
64
+ class ThresholdLoss(LossFunctionBase):
65
+ _loss = staticmethod(thresholded_loss_function)
66
+ dim = LossDim.LOSS_2D
67
+ usual_name = 'Threshold'
68
+ params = [
69
+ {'title': 'Lower:', 'name': 'lower_threshold', 'type': 'float', 'value': None,},
70
+ {'title': 'Upper:', 'name': 'upper_threshold', 'type': 'float', 'value': None,},
71
+ {'title': 'Priority factor:', 'name': 'priority_factor', 'type': 'float', 'value': 0.1,},
72
+ ]
73
+
@@ -0,0 +1,3 @@
1
+ from .loss_factory import LossFunctionFactory, LossDim
2
+ from . import _1d_loss_functions # to register inner class
3
+ from . import _2d_loss_functions # to register inner class
@@ -0,0 +1,110 @@
1
+ from abc import ABCMeta, abstractmethod
2
+ from collections import OrderedDict
3
+ from typing import Callable, Type, Union, Sequence
4
+ from pymodaq_utils.enums import StrEnum
5
+
6
+
7
+ from adaptive.learner import Learner1D, Learner2D, LearnerND, BaseLearner
8
+
9
+
10
+ from pymodaq_utils.logger import set_logger, get_module_name
11
+
12
+ logger = set_logger(get_module_name(__file__))
13
+
14
+
15
+ class LossDim(StrEnum):
16
+
17
+ LOSS_1D = 'Loss1D'
18
+ LOSS_2D = 'Loss2D'
19
+ LOSS_ND = 'LossND'
20
+
21
+ @staticmethod
22
+ def get_enum_from_dim_as_int(dim: int):
23
+ if dim == 1:
24
+ return LossDim.LOSS_1D
25
+ elif dim == 2:
26
+ return LossDim.LOSS_2D
27
+ elif dim > 2:
28
+ return LossDim.LOSS_ND
29
+ else:
30
+ raise ValueError(f'No Loss with dim={dim} is known')
31
+
32
+ def get_learner_from_enum(self, bounds: OrderedDict[str, tuple[float, float]],
33
+ loss_function: 'LossFunctionBase') -> Union[Learner1D, Learner2D, LearnerND]:
34
+ """ Return an instance of a Learner given the enum value
35
+
36
+ Parameters
37
+ ----------
38
+ bounds: type depends on the learner, could be a tuple of real numbers (Learner1D) or a tuple of tuples of real
39
+ numbers
40
 + loss_function: one of the LossFunction classes as given by the LossFunctionFactory
41
+
42
+ See Also:
43
+ ---------
44
+ :class:`LossFunctionFactory`
45
+ """
46
+ if self == self.LOSS_1D:
47
+ bounds = bounds.popitem(last=False)[1]
48
+ return Learner1D(None, bounds, loss_per_interval=loss_function)
49
+ elif self == self.LOSS_2D:
50
+ return Learner2D(None, tuple(bounds.values()), loss_per_triangle=loss_function)
51
+ elif self == self.LOSS_ND:
52
+ return LearnerND(None, tuple(bounds.values()), loss_per_simplex=loss_function)
53
+ else:
54
+ raise ValueError(f'No learner for this enum: {self}')
55
+
56
+
57
+ class LossFunctionBase(metaclass=ABCMeta):
58
+ _loss : Callable
59
+ dim: LossDim
60
+ usual_name: str
61
+ params: list[dict] = []
62
+
63
+ def __call__(self, *args, **kwargs):
64
+ return self._loss(**kwargs)
65
+
66
+
67
+ class LossFunctionFactory:
68
+ _builders = {}
69
+
70
+ @classmethod
71
+ def register(cls) -> Callable:
72
+ """ To be used as a decorator
73
+
74
+ Register in the class registry a new LossFunction class using its 2 identifiers: LossDim and usual_name
75
+ """
76
+
77
+ def inner_wrapper(wrapped_class: LossFunctionBase) -> Callable:
78
+ key = wrapped_class.usual_name
79
+ dim = wrapped_class.dim
80
+ if dim not in cls._builders:
81
+ cls._builders[dim] = {}
82
+ if key not in cls._builders[dim]:
83
+ cls._builders[dim][key] = wrapped_class
84
+ else:
85
+ logger.warning(f'The {key} builder is already registered. Replacing it')
86
+ return wrapped_class
87
+
88
+ return inner_wrapper
89
+
90
+ @classmethod
91
+ def get(cls, dim: LossDim, key : str) -> Type[LossFunctionBase]:
92
+ loss = cls._builders.get(dim, {key: None}).get(key)
93
+ if loss is None:
94
+ raise ValueError(f'Unknown Loss function with dim={dim} and key={key}')
95
+ return loss
96
+
97
+ @classmethod
98
+ def create(cls, dim: LossDim, key: str, **kwargs) -> LossFunctionBase:
99
+ return cls.get(dim, key)()(**kwargs)
100
+
101
+ @classmethod
102
+ def dims(cls) -> list[LossDim]:
103
+ return list(cls._builders.keys())
104
+
105
+ @classmethod
106
+ def keys(cls, dim: LossDim) -> list[str]:
107
+ try:
108
+ return list(cls._builders.get(dim).keys())
109
+ except (AttributeError, ValueError, KeyError) as e:
110
+ return []
@@ -0,0 +1,123 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Created the 31/08/2023
4
+
5
+ @author: Sebastien Weber
6
+ """
7
+
8
+ from typing import List, TYPE_CHECKING, Union, Dict, Tuple, Iterable
9
+
10
+ import numpy as np
11
+ from collections import OrderedDict
12
+ from collections.abc import Iterable as IterableClass
13
+
14
+
15
+ from pymodaq_utils.logger import set_logger, get_module_name
16
+
17
+ from pymodaq_data.data import (DataCalculated, DataRaw, Axis)
18
+
19
+
20
+ from pymodaq.extensions.optimizers_base.utils import (
21
+ GenericAlgorithm, OptimizerModelDefault, StopType, StoppingParameters,
22
+ OptimizerConfig)
23
+ from pymodaq.extensions.adaptive.loss_function.loss_factory import LossDim, LossFunctionBase, LossFunctionFactory
24
+
25
+ logger = set_logger(get_module_name(__file__))
26
+
27
+
28
+ class AdaptiveConfig(OptimizerConfig):
29
+ config_name = f"adaptive_settings"
30
+
31
+
32
+ class AdaptiveAlgorithm(GenericAlgorithm):
33
+
34
+ def __init__(self, ini_random: int, bounds: OrderedDict[str, tuple[float, float]],
35
+ actuators: list[str],
36
+ loss_type: LossDim, kind: str, **kwargs):
37
+ super().__init__(ini_random, bounds, actuators)
38
+ self._algo = loss_type.get_learner_from_enum(
39
+ bounds=bounds,
40
+ loss_function=LossFunctionFactory.create(loss_type, kind, **kwargs))
41
+ self._best = 1
42
+
43
+ def get_random_point(self) -> dict[str, float]:
44
+ """ Get a random point coordinates in the defined bounds
45
+
46
+ Normally not needed for Adaptive
47
+ """
48
+ point = dict()
49
+ bounds = self.bounds
50
+ for ind in range(len(bounds)):
51
+ point[self.actuators[ind]] = ((np.max(bounds[ind]) - np.min(bounds[ind])) * np.random.random_sample() +
52
+ np.min(bounds[ind]))
53
+ return point
54
+
55
+ def set_prediction_function(self, loss_type=LossDim.LOSS_1D, kind='', **kwargs):
56
+ self._prediction = LossFunctionFactory.create(loss_type, kind, **kwargs)
57
+
58
+ def update_prediction_function(self):
59
+ pass
60
+
61
+ @property
62
+ def tradeoff(self) -> float:
63
+ return 0.
64
+
65
+ @property
66
+ def bounds(self) -> Dict[str, Tuple[float, float]]:
67
+ return dict(zip(self.actuators, self._algo.bounds))
68
+
69
+ @bounds.setter
70
+ def bounds(self, bounds: Dict[str, Tuple[float, float]]):
71
+ if isinstance(bounds, dict):
72
+ bounds = [bounds[act] for act in self.actuators]
73
+ self._algo.set_bounds(bounds)
74
+ else:
75
+ raise TypeError('Bounds should be defined as a dictionary')
76
+
77
+ def prediction_ask(self) -> dict[str, float]:
78
+ """ Ask the prediction function or algo to provide the next point to probe"""
79
+ return dict(zip(self.actuators, np.atleast_1d(self._algo.ask(1)[0][0])))
80
+
81
+ def tell(self, function_value: float):
82
+
83
+ next_point = tuple([self._next_point[act] for act in self.actuators])
84
+ if len(next_point) == 1:
85
+ next_point = next_point[0] #Learner don't have the same tell method signature
86
+ self._algo.tell(next_point, function_value)
87
+
88
+ @property
89
+ def best_fitness(self) -> float:
90
+ """ For adaptive optimization this is only used as a stopping critter"""
91
+ if 1/self._algo.loss() > self._best:
92
+ self._best = 1/self._algo.loss()
93
+ return self._best
94
+
95
+ @property
96
+ def best_individual(self) -> Union[dict[str, float], None]:
97
+ """ Return the individual coordinates with best fitness"""
98
+
99
+ if len(self._algo.data) > 0:
100
+ individual_array = np.atleast_1d(list(self._algo.data.keys())[list(self._algo.data.values()).index(max(self._algo.data.values()))])
101
+ else:
102
+ individual_array = np.atleast_1d(self._algo.bounds[0])
103
+ return dict(zip(self.actuators, individual_array))
104
+
105
+ def best_individuals(self, n_best):
106
+ if len(self._algo.data) > n_best:
107
+ individual_array = np.atleast_1d(list(self._algo.data.keys())[list(self._algo.data.values()).index(max(self._algo.data.values()))])
108
+ else:
109
+ individual_array = np.atleast_1d(self._algo.bounds[0])
110
+ return dict(zip(self.actuators, individual_array))
111
+
112
+
113
+ def stopping(self, ind_iter: int, stopping_parameters: StoppingParameters):
114
+ if stopping_parameters.stop_type != StopType.NONE:
115
+ if ind_iter >= stopping_parameters.niter: # For instance StopType.ITER
116
+ return True
117
+ return False
118
+
119
+
120
+
121
+
122
+
123
+