pymodaq 5.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (154)
  1. pymodaq/__init__.py +98 -0
  2. pymodaq/control_modules/__init__.py +1 -0
  3. pymodaq/control_modules/daq_move.py +1238 -0
  4. pymodaq/control_modules/daq_move_ui/__init__.py +0 -0
  5. pymodaq/control_modules/daq_move_ui/factory.py +48 -0
  6. pymodaq/control_modules/daq_move_ui/ui_base.py +359 -0
  7. pymodaq/control_modules/daq_move_ui/uis/__init__.py +0 -0
  8. pymodaq/control_modules/daq_move_ui/uis/binary.py +139 -0
  9. pymodaq/control_modules/daq_move_ui/uis/original.py +120 -0
  10. pymodaq/control_modules/daq_move_ui/uis/relative.py +124 -0
  11. pymodaq/control_modules/daq_move_ui/uis/simple.py +126 -0
  12. pymodaq/control_modules/daq_viewer.py +1517 -0
  13. pymodaq/control_modules/daq_viewer_ui.py +407 -0
  14. pymodaq/control_modules/mocks.py +57 -0
  15. pymodaq/control_modules/move_utility_classes.py +1141 -0
  16. pymodaq/control_modules/thread_commands.py +137 -0
  17. pymodaq/control_modules/ui_utils.py +72 -0
  18. pymodaq/control_modules/utils.py +591 -0
  19. pymodaq/control_modules/viewer_utility_classes.py +670 -0
  20. pymodaq/daq_utils/__init__.py +0 -0
  21. pymodaq/daq_utils/daq_utils.py +6 -0
  22. pymodaq/dashboard.py +2396 -0
  23. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_Client.aliases +3 -0
  24. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_Client.lvlps +3 -0
  25. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_Client.lvproj +32 -0
  26. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_Client.vi +0 -0
  27. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_Server_1Dgaussian.vi +0 -0
  28. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_Server_2Dgaussian.vi +0 -0
  29. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_read_cmd.vi +0 -0
  30. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_read_float.vi +0 -0
  31. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_read_int.vi +0 -0
  32. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_send_data.vi +0 -0
  33. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_send_int.vi +0 -0
  34. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_send_scalar.vi +0 -0
  35. pymodaq/examples/Labview_TCP_Client/DAQ_TCP_send_string.vi +0 -0
  36. pymodaq/examples/Labview_TCP_Client/client_state.ctl +0 -0
  37. pymodaq/examples/Labview_TCP_Client/cmd_types.ctl +0 -0
  38. pymodaq/examples/__init__.py +0 -0
  39. pymodaq/examples/function_plotter.py +160 -0
  40. pymodaq/examples/nonlinearscanner.py +126 -0
  41. pymodaq/examples/qt_less_standalone_module.py +165 -0
  42. pymodaq/examples/tcp_client.py +97 -0
  43. pymodaq/extensions/__init__.py +25 -0
  44. pymodaq/extensions/adaptive/__init__.py +2 -0
  45. pymodaq/extensions/adaptive/adaptive_optimization.py +179 -0
  46. pymodaq/extensions/adaptive/loss_function/_1d_loss_functions.py +73 -0
  47. pymodaq/extensions/adaptive/loss_function/_2d_loss_functions.py +73 -0
  48. pymodaq/extensions/adaptive/loss_function/__init__.py +3 -0
  49. pymodaq/extensions/adaptive/loss_function/loss_factory.py +110 -0
  50. pymodaq/extensions/adaptive/utils.py +123 -0
  51. pymodaq/extensions/bayesian/__init__.py +2 -0
  52. pymodaq/extensions/bayesian/acquisition/__init__.py +2 -0
  53. pymodaq/extensions/bayesian/acquisition/acquisition_function_factory.py +80 -0
  54. pymodaq/extensions/bayesian/acquisition/base_acquisition_function.py +105 -0
  55. pymodaq/extensions/bayesian/bayesian_optimization.py +143 -0
  56. pymodaq/extensions/bayesian/utils.py +180 -0
  57. pymodaq/extensions/console.py +73 -0
  58. pymodaq/extensions/daq_logger/__init__.py +1 -0
  59. pymodaq/extensions/daq_logger/abstract.py +52 -0
  60. pymodaq/extensions/daq_logger/daq_logger.py +519 -0
  61. pymodaq/extensions/daq_logger/db/__init__.py +0 -0
  62. pymodaq/extensions/daq_logger/db/db_logger.py +300 -0
  63. pymodaq/extensions/daq_logger/db/db_logger_models.py +100 -0
  64. pymodaq/extensions/daq_logger/h5logging.py +84 -0
  65. pymodaq/extensions/daq_scan.py +1218 -0
  66. pymodaq/extensions/daq_scan_ui.py +241 -0
  67. pymodaq/extensions/data_mixer/__init__.py +0 -0
  68. pymodaq/extensions/data_mixer/daq_0Dviewer_DataMixer.py +97 -0
  69. pymodaq/extensions/data_mixer/data_mixer.py +262 -0
  70. pymodaq/extensions/data_mixer/model.py +108 -0
  71. pymodaq/extensions/data_mixer/models/__init__.py +0 -0
  72. pymodaq/extensions/data_mixer/models/equation_model.py +91 -0
  73. pymodaq/extensions/data_mixer/models/gaussian_fit_model.py +65 -0
  74. pymodaq/extensions/data_mixer/parser.py +53 -0
  75. pymodaq/extensions/data_mixer/utils.py +23 -0
  76. pymodaq/extensions/h5browser.py +9 -0
  77. pymodaq/extensions/optimizers_base/__init__.py +0 -0
  78. pymodaq/extensions/optimizers_base/optimizer.py +1016 -0
  79. pymodaq/extensions/optimizers_base/thread_commands.py +22 -0
  80. pymodaq/extensions/optimizers_base/utils.py +427 -0
  81. pymodaq/extensions/pid/__init__.py +16 -0
  82. pymodaq/extensions/pid/actuator_controller.py +14 -0
  83. pymodaq/extensions/pid/daq_move_PID.py +154 -0
  84. pymodaq/extensions/pid/pid_controller.py +1016 -0
  85. pymodaq/extensions/pid/utils.py +189 -0
  86. pymodaq/extensions/utils.py +111 -0
  87. pymodaq/icon.ico +0 -0
  88. pymodaq/post_treatment/__init__.py +6 -0
  89. pymodaq/post_treatment/load_and_plot.py +352 -0
  90. pymodaq/resources/__init__.py +0 -0
  91. pymodaq/resources/config_template.toml +57 -0
  92. pymodaq/resources/preset_default.xml +1 -0
  93. pymodaq/resources/setup_plugin.py +73 -0
  94. pymodaq/splash.png +0 -0
  95. pymodaq/utils/__init__.py +0 -0
  96. pymodaq/utils/array_manipulation.py +6 -0
  97. pymodaq/utils/calibration_camera.py +180 -0
  98. pymodaq/utils/chrono_timer.py +203 -0
  99. pymodaq/utils/config.py +53 -0
  100. pymodaq/utils/conftests.py +5 -0
  101. pymodaq/utils/daq_utils.py +158 -0
  102. pymodaq/utils/data.py +128 -0
  103. pymodaq/utils/enums.py +6 -0
  104. pymodaq/utils/exceptions.py +38 -0
  105. pymodaq/utils/gui_utils/__init__.py +10 -0
  106. pymodaq/utils/gui_utils/loader_utils.py +75 -0
  107. pymodaq/utils/gui_utils/utils.py +18 -0
  108. pymodaq/utils/gui_utils/widgets/lcd.py +8 -0
  109. pymodaq/utils/h5modules/__init__.py +2 -0
  110. pymodaq/utils/h5modules/module_saving.py +526 -0
  111. pymodaq/utils/leco/__init__.py +25 -0
  112. pymodaq/utils/leco/daq_move_LECODirector.py +217 -0
  113. pymodaq/utils/leco/daq_xDviewer_LECODirector.py +163 -0
  114. pymodaq/utils/leco/director_utils.py +74 -0
  115. pymodaq/utils/leco/leco_director.py +166 -0
  116. pymodaq/utils/leco/pymodaq_listener.py +364 -0
  117. pymodaq/utils/leco/rpc_method_definitions.py +43 -0
  118. pymodaq/utils/leco/utils.py +74 -0
  119. pymodaq/utils/logger.py +6 -0
  120. pymodaq/utils/managers/__init__.py +0 -0
  121. pymodaq/utils/managers/batchscan_manager.py +346 -0
  122. pymodaq/utils/managers/modules_manager.py +589 -0
  123. pymodaq/utils/managers/overshoot_manager.py +242 -0
  124. pymodaq/utils/managers/preset_manager.py +229 -0
  125. pymodaq/utils/managers/preset_manager_utils.py +262 -0
  126. pymodaq/utils/managers/remote_manager.py +484 -0
  127. pymodaq/utils/math_utils.py +6 -0
  128. pymodaq/utils/messenger.py +6 -0
  129. pymodaq/utils/parameter/__init__.py +10 -0
  130. pymodaq/utils/parameter/utils.py +6 -0
  131. pymodaq/utils/scanner/__init__.py +5 -0
  132. pymodaq/utils/scanner/scan_config.py +16 -0
  133. pymodaq/utils/scanner/scan_factory.py +259 -0
  134. pymodaq/utils/scanner/scan_selector.py +477 -0
  135. pymodaq/utils/scanner/scanner.py +324 -0
  136. pymodaq/utils/scanner/scanners/_1d_scanners.py +174 -0
  137. pymodaq/utils/scanner/scanners/_2d_scanners.py +299 -0
  138. pymodaq/utils/scanner/scanners/__init__.py +1 -0
  139. pymodaq/utils/scanner/scanners/sequential.py +224 -0
  140. pymodaq/utils/scanner/scanners/tabular.py +319 -0
  141. pymodaq/utils/scanner/utils.py +110 -0
  142. pymodaq/utils/svg/__init__.py +6 -0
  143. pymodaq/utils/svg/svg_renderer.py +20 -0
  144. pymodaq/utils/svg/svg_view.py +35 -0
  145. pymodaq/utils/svg/svg_viewer2D.py +50 -0
  146. pymodaq/utils/tcp_ip/__init__.py +6 -0
  147. pymodaq/utils/tcp_ip/mysocket.py +12 -0
  148. pymodaq/utils/tcp_ip/serializer.py +13 -0
  149. pymodaq/utils/tcp_ip/tcp_server_client.py +772 -0
  150. pymodaq-5.1.6.dist-info/METADATA +238 -0
  151. pymodaq-5.1.6.dist-info/RECORD +154 -0
  152. pymodaq-5.1.6.dist-info/WHEEL +4 -0
  153. pymodaq-5.1.6.dist-info/entry_points.txt +7 -0
  154. pymodaq-5.1.6.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,179 @@
1
+ from pymodaq_gui.messenger import messagebox
2
+ from pymodaq_utils import utils
3
+ from pymodaq_utils import config as config_mod
4
+ from pymodaq_utils.logger import set_logger, get_module_name
5
+ from pymodaq_utils.utils import ThreadCommand
6
+
7
+ from pymodaq.extensions.optimizers_base.optimizer import (
8
+ GenericOptimization, OptimizationRunner, optimizer_params, OptimizerAction, StopType)
9
+ from pymodaq.extensions.optimizers_base.utils import OptimizerModelDefault, find_key_in_nested_dict
10
+ from pymodaq.extensions.optimizers_base.thread_commands import OptimizerToRunner
11
+
12
+ from pymodaq.extensions.adaptive.loss_function import LossFunctionFactory,LossDim
13
+ from pymodaq.extensions.adaptive.utils import AdaptiveAlgorithm, AdaptiveConfig
14
+
15
+
16
logger = set_logger(get_module_name(__file__))
config = config_mod.Config()


# Identifiers used by the PyMoDAQ extension loader.
EXTENSION_NAME = 'AdaptiveScan'
# NOTE(review): the class defined below is spelled 'AdaptiveOptimisation' while this
# constant reads 'AdaptiveOptimization' — confirm which spelling the loader resolves.
CLASS_NAME = 'AdaptiveOptimization'

# Dimensionality used to build the initial settings tree (updated once actuators are known).
STARTING_LOSS_DIM = LossDim.LOSS_1D

# Loss-function names registered for the starting dimensionality.
PREDICTION_NAMES = list(LossFunctionFactory.keys(STARTING_LOSS_DIM))
# Parameter-tree description of the 'prediction' settings group:
# read-only dimensionality, selectable loss kind, and the kind-specific options.
PREDICTION_PARAMS = (
    [{'title': 'LossDim', 'name': 'lossdim', 'type': 'str',
      'value': LossDim.LOSS_1D, 'readonly': True},
     {'title': 'Kind', 'name': 'kind', 'type': 'list',
      'value': PREDICTION_NAMES[0],
      'limits': PREDICTION_NAMES}] +
    [{'title': 'Options', 'name': 'options', 'type': 'group',
      'children': LossFunctionFactory.get(STARTING_LOSS_DIM, PREDICTION_NAMES[0]).params}]
)
35
+
36
+
37
class AdaptiveOptimizationRunner(OptimizationRunner):
    """ Optimization runner handling adaptive-specific thread commands.

    Extends the generic runner so that PREDICTION commands update the loss
    function of the underlying adaptive algorithm; every other command is
    delegated to the base class.

    Note: the redundant ``__init__`` that only called ``super().__init__`` has
    been removed — the inherited constructor is identical.
    """

    def queue_command(self, command: ThreadCommand):
        """ Process a command sent from the optimizer's UI thread.

        Parameters
        ----------
        command: ThreadCommand
            When ``command.command`` is OptimizerToRunner.PREDICTION, its
            ``attribute`` dict must contain 'kind' and 'lossdim' entries (both
            consumed here); the remaining entries are forwarded as loss-function
            options.
        """
        if command.command == OptimizerToRunner.PREDICTION:
            # pops intentionally mutate command.attribute: what remains after
            # removing the identifiers is the set of loss-function options
            kind = command.attribute.pop('kind')
            lossdim = command.attribute.pop('lossdim')
            self.optimization_algorithm.set_prediction_function(lossdim, kind, **command.attribute)
        else:
            super().queue_command(command)
52
+
53
+
54
class AdaptiveOptimisation(GenericOptimization):
    """ PyMoDAQ extension of the DashBoard to perform the optimization of a target signal
    taken from the detectors as a function of one or more parameters controlled by the
    actuators.

    NOTE(review): the class name is spelled 'AdaptiveOptimisation' while the module-level
    CLASS_NAME constant reads 'AdaptiveOptimization'; the name is kept unchanged here to
    preserve the public interface — confirm which spelling the extension loader expects.
    """

    runner = AdaptiveOptimizationRunner            # thread runner handling PREDICTION commands
    params = optimizer_params(PREDICTION_PARAMS)   # generic optimizer settings + prediction group
    config_saver = AdaptiveConfig

    DISPLAY_BEST = False  # adaptive sampling has no meaningful 'best individual' display

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.get_action(OptimizerAction.SAVE).trigger()
        # Hide settings irrelevant to adaptive sampling and restrict stopping criteria
        # to the ones the adaptive algorithm actually honours (none / iteration count).
        self.settings.child('main_settings', 'ini_random').hide()
        self.settings.child('main_settings', 'stopping', 'tolerance').hide()
        self.settings.child('main_settings', 'stopping', 'npoints').hide()
        self.settings.child('main_settings', 'stopping', 'stop_type').setLimits(
            [StopType.NONE.value, StopType.ITER.value])

    def ini_custom_attributes(self):
        """ Here you can reimplement specific attributes"""
        self._base_name: str = 'Adaptive'

    def validate_config(self) -> bool:
        """ Check that the stored 'prediction' config maps to a constructible loss function.

        Returns
        -------
        bool: False when the stored (lossdim, kind, options) combination cannot be
            created by the LossFunctionFactory, True otherwise (including when no
            'prediction' entry exists at all).
        """
        utility = find_key_in_nested_dict(self.optimizer_config.to_dict(), 'prediction')
        if utility:
            try:
                # identifiers are consumed explicitly; everything else is an option
                utility_params = {k: v for k, v in utility.items()
                                  if k not in ("kind", "tradeoff_actual", 'lossdim')}
                LossFunctionFactory.create(utility['lossdim'],
                                           utility['kind'], **utility_params)
            except (ValueError, KeyError):
                return False

        return True

    def value_changed(self, param):
        """ React to a settings change: keep the 'kind' limits and its options group
        in sync with the selected loss dimensionality.

        Parameters
        ----------
        param: (Parameter) the parameter whose value just changed
        """
        super().value_changed(param)
        if param.name() == 'lossdim':
            try:
                self.settings.child('main_settings', 'prediction', 'kind').setLimits(
                    LossFunctionFactory.keys(param.value())
                )
            except Exception:
                logger.debug('Warning: Error while trying to infer the kind of loss, may be because limits just changed')
        elif param.name() == 'kind':
            utility_settings = self.settings.child('main_settings', 'prediction')
            # rebuild the options group for the newly selected loss kind
            utility_settings.child('options').clearChildren()
            try:
                params = LossFunctionFactory.get(utility_settings['lossdim'],
                                                 param.value()).params
                utility_settings.child('options').addChildren(params)
            except (KeyError, ValueError):
                logger.debug('Warning: Error while trying to populate options for loss, may be because limits for'
                             ' kind setting just changed')

    def update_prediction_function(self):
        """ Push the currently selected loss function (kind, dimensionality and options)
        to the runner thread as a PREDICTION command."""
        utility_settings = self.settings.child('main_settings', 'prediction')
        try:
            uparams = {child.name(): child.value()
                       for child in utility_settings.child('options').children()}
            uparams['kind'] = utility_settings['kind']
            uparams['lossdim'] = utility_settings['lossdim']

            self.command_runner.emit(
                utils.ThreadCommand(OptimizerToRunner.PREDICTION, uparams))
        except (KeyError, ValueError, AttributeError) as e:
            # fix: the original had `pass` followed by a stray print(e); log instead
            logger.debug(f'Could not update the prediction function: {e}')

    def update_after_actuators_changed(self, actuators: list[str]):
        """ Actions to do after the actuators have been updated

        Infers the loss dimensionality from the number of actuators and enables the
        runner only when a matching loss function is registered.
        """
        try:  # see if there is some registered loss function for the defined type
            self.settings.child('main_settings', 'prediction',
                                'lossdim').setValue(LossDim.get_enum_from_dim_as_int(len(actuators)))
            self.update_prediction_function()

            # will raise ValueError if no loss exists for this (lossdim, kind) pair
            LossFunctionFactory.create(self.settings['main_settings', 'prediction',
                                                     'lossdim'],
                                       self.settings['main_settings', 'prediction',
                                                     'kind'])
            self.get_action(OptimizerAction.INI_RUNNER).setEnabled(True)

        except ValueError:
            self.get_action(OptimizerAction.INI_RUNNER).setEnabled(False)
            messagebox(title='Warning',
                       text=f'You cannot select [{actuators}] as no corresponding Loss function exists')

    def set_algorithm(self):
        """ Instantiate the adaptive algorithm from the current settings and bounds."""
        self.algorithm = AdaptiveAlgorithm(
            ini_random=1,
            bounds=self.format_bounds(),
            actuators=self.modules_manager.selected_actuators_name,
            loss_type=LossDim(self.settings['main_settings', 'prediction', 'lossdim']),
            kind=self.settings['main_settings', 'prediction', 'kind'])
162
+
163
+
164
def main():
    """ Standalone entry point: load the dashboard with the default scan preset and
    start the AdaptiveScan extension.

    Returns
    -------
    tuple: (dashboard, extension, win) as produced by load_dashboard_with_preset,
        returned after the Qt event loop exits.
    """
    from pymodaq_gui.utils.utils import mkQApp
    from pymodaq.utils.gui_utils.loader_utils import load_dashboard_with_preset

    app = mkQApp('Adaptive Optimiser')
    # fix: was an f-string with no placeholder
    preset_file_name = config('presets', 'default_preset_for_scan')

    dashboard, extension, win = load_dashboard_with_preset(preset_file_name, 'AdaptiveScan')

    app.exec()

    return dashboard, extension, win


if __name__ == '__main__':
    main()
179
+
@@ -0,0 +1,73 @@
1
+ from typing import TYPE_CHECKING, Callable
2
+
3
+ from .loss_factory import LossFunctionBase, LossFunctionFactory, LossDim
4
+
5
+ from adaptive.learner.learner1D import (
6
+ curvature_loss_function,
7
+ default_loss,
8
+ uniform_loss,
9
+ resolution_loss_function,
10
+ abs_min_log_loss,
11
+ uses_nth_neighbors,
12
+
13
+ )
14
+
15
+
16
def default_loss_function(*args, **kwargs):
    """Factory returning adaptive's 1D ``default_loss`` callable.

    Wrapped so every registered loss exposes a uniform factory signature able
    to swallow initialisation arguments (see the ``params`` attributes below).
    """
    return default_loss
18
+
19
+
20
def uniform_loss_function(**kwargs):
    """Factory returning adaptive's 1D ``uniform_loss`` callable.

    Wrapped so every registered loss exposes a uniform factory signature able
    to swallow initialisation arguments (see the ``params`` attributes below).
    """
    return uniform_loss
22
+
23
+
24
def abs_min_log_loss_function(**kwargs):
    """Factory returning adaptive's 1D ``abs_min_log_loss`` callable.

    Wrapped so every registered loss exposes a uniform factory signature able
    to swallow initialisation arguments (see the ``params`` attributes below).
    """
    return abs_min_log_loss
26
+
27
+
28
+
29
@LossFunctionFactory.register()
class DefaultLoss(LossFunctionBase):
    """Adaptive's default 1D loss registered under the name 'Default'."""
    _loss = staticmethod(default_loss_function)
    dim = LossDim.LOSS_1D
    usual_name = 'Default'
    params = []  # no configurable option
35
+
36
+
37
@LossFunctionFactory.register()
class UniformLoss(LossFunctionBase):
    """Adaptive's 1D uniform loss (equal-spacing sampling), registered as 'Uniform'."""
    _loss = staticmethod(uniform_loss_function)
    dim = LossDim.LOSS_1D
    usual_name = 'Uniform'
    params = []  # no configurable option
43
+
44
+
45
@LossFunctionFactory.register()
class CurvatureLoss(LossFunctionBase):
    """1D loss built from adaptive's curvature_loss_function, registered as 'Curvature'."""
    _loss = staticmethod(curvature_loss_function)
    dim = LossDim.LOSS_1D
    usual_name = 'Curvature'
    # weight factors forwarded to curvature_loss_function at construction time
    params = [
        {'title': 'Area', 'name': 'area_factor', 'type': 'float', 'value': 1.},
        {'title': 'Euclid', 'name': 'euclid_factor', 'type': 'float', 'value': 0.02},
        {'title': 'Horizontal', 'name': 'horizontal_factor', 'type': 'float', 'value': 0.02}
    ]
55
+
56
+
57
@LossFunctionFactory.register()
class ResolutionLoss(LossFunctionBase):
    """1D loss built from adaptive's resolution_loss_function, registered as 'Resolution'."""
    _loss = staticmethod(resolution_loss_function)
    dim = LossDim.LOSS_1D
    usual_name = 'Resolution'
    # interval-length limits forwarded to resolution_loss_function (fractions of the domain)
    params = [
        {'title': 'Min:', 'name': 'min_length', 'type': 'float', 'value': 0., 'min': 0., 'max': 1.},
        {'title': 'Max:', 'name': 'max_length', 'type': 'float', 'value': 1., 'min': 0., 'max': 1.},
    ]
66
+
67
+
68
@LossFunctionFactory.register()
class AbsMinLogLoss(LossFunctionBase):
    """1D loss built from adaptive's abs_min_log_loss, registered as 'AbsMinLog'."""
    _loss = staticmethod(abs_min_log_loss_function)
    dim = LossDim.LOSS_1D
    usual_name = 'AbsMinLog'
    params = []  # no configurable option
@@ -0,0 +1,73 @@
1
+ from typing import TYPE_CHECKING, Callable
2
+
3
+ from .loss_factory import LossFunctionBase, LossFunctionFactory, LossDim
4
+
5
+ from adaptive.learner.learner2D import (
6
+ default_loss,
7
+ uniform_loss,
8
+ resolution_loss_function,
9
+ minimize_triangle_surface_loss,
10
+ thresholded_loss_function,
11
+ )
12
+
13
+
14
def default_loss_function(*args, **kwargs):
    """Factory returning adaptive's 2D ``default_loss`` callable.

    Wrapped so every registered loss exposes a uniform factory signature able
    to swallow initialisation arguments (see the ``params`` attributes below).
    """
    return default_loss
16
+
17
+
18
def uniform_loss_function(**kwargs):
    """Factory returning adaptive's 2D ``uniform_loss`` callable.

    Wrapped so every registered loss exposes a uniform factory signature able
    to swallow initialisation arguments (see the ``params`` attributes below).
    """
    return uniform_loss
20
+
21
+
22
def minimize_triangle_surface_loss_function(**kwargs):
    """Factory returning adaptive's 2D ``minimize_triangle_surface_loss`` callable.

    Wrapped so every registered loss exposes a uniform factory signature able
    to swallow initialisation arguments (see the ``params`` attributes below).
    """
    return minimize_triangle_surface_loss
24
+
25
+
26
@LossFunctionFactory.register()
class DefaultLoss(LossFunctionBase):
    """Adaptive's default 2D loss registered under the name 'Default'."""
    _loss = staticmethod(default_loss_function)
    dim = LossDim.LOSS_2D
    usual_name = 'Default'
    params = []  # no configurable option
32
+
33
+
34
@LossFunctionFactory.register()
class UniformLoss(LossFunctionBase):
    """Adaptive's 2D uniform loss, registered as 'Uniform'."""
    _loss = staticmethod(uniform_loss_function)
    dim = LossDim.LOSS_2D
    usual_name = 'Uniform'
    params = []  # no configurable option
40
+
41
+
42
+
43
@LossFunctionFactory.register()
class ResolutionLoss(LossFunctionBase):
    """2D loss built from adaptive's resolution_loss_function, registered as 'Resolution'."""
    _loss = staticmethod(resolution_loss_function)
    dim = LossDim.LOSS_2D
    usual_name = 'Resolution'
    # triangle-distance limits forwarded to resolution_loss_function
    params = [
        {'title': 'Min:', 'name': 'min_distance', 'type': 'float', 'value': 0., 'min': 0., 'max': 1.},
        {'title': 'Max:', 'name': 'max_distance', 'type': 'float', 'value': 1., 'min': 0., 'max': 1.},
    ]
52
+
53
+
54
@LossFunctionFactory.register()
class MinTriangleLoss(LossFunctionBase):
    """2D loss from adaptive's minimize_triangle_surface_loss, registered as 'MinTriangle'."""
    _loss = staticmethod(minimize_triangle_surface_loss_function)
    dim = LossDim.LOSS_2D
    usual_name = 'MinTriangle'
    params = []  # no configurable option
60
+
61
+
62
+
63
@LossFunctionFactory.register()
class ThresholdLoss(LossFunctionBase):
    """2D loss built from adaptive's thresholded_loss_function, registered as 'Threshold'."""
    _loss = staticmethod(thresholded_loss_function)
    dim = LossDim.LOSS_2D
    usual_name = 'Threshold'
    # NOTE(review): 'value': None on a float parameter may not round-trip through the
    # settings tree — confirm the parameter widget accepts a None default
    params = [
        {'title': 'Lower:', 'name': 'lower_threshold', 'type': 'float', 'value': None,},
        {'title': 'Upper:', 'name': 'upper_threshold', 'type': 'float', 'value': None,},
        {'title': 'Priority factor:', 'name': 'priority_factor', 'type': 'float', 'value': 0.1,},
    ]
73
+
@@ -0,0 +1,3 @@
1
+ from .loss_factory import LossFunctionFactory, LossDim
2
+ from . import _1d_loss_functions # to register inner class
3
+ from . import _2d_loss_functions # to register inner class
@@ -0,0 +1,110 @@
1
+ from abc import ABCMeta, abstractmethod
2
+ from collections import OrderedDict
3
+ from typing import Callable, Type, Union, Sequence
4
+ from pymodaq_utils.enums import StrEnum
5
+
6
+
7
+ from adaptive.learner import Learner1D, Learner2D, LearnerND, BaseLearner
8
+
9
+
10
+ from pymodaq_utils.logger import set_logger, get_module_name
11
+
12
+ logger = set_logger(get_module_name(__file__))
13
+
14
+
15
class LossDim(StrEnum):
    """ Dimensionality of an adaptive loss function / learner."""

    LOSS_1D = 'Loss1D'
    LOSS_2D = 'Loss2D'
    LOSS_ND = 'LossND'

    @staticmethod
    def get_enum_from_dim_as_int(dim: int) -> 'LossDim':
        """ Map a number of actuators to the matching LossDim member.

        Raises
        ------
        ValueError: if dim < 1
        """
        if dim == 1:
            return LossDim.LOSS_1D
        elif dim == 2:
            return LossDim.LOSS_2D
        elif dim > 2:
            return LossDim.LOSS_ND
        else:
            raise ValueError(f'No Loss with dim={dim} is known')

    def get_learner_from_enum(self, bounds: OrderedDict[str, tuple[float, float]],
                              loss_function: 'LossFunctionBase') -> Union[Learner1D, Learner2D, LearnerND]:
        """ Return an instance of a Learner given the enum value

        Parameters
        ----------
        bounds: OrderedDict mapping actuator names to (min, max) tuples; a 1D learner
            uses only the first entry
        loss_function: one of the LossFunction instances as given by the LossFunctionFactory

        See Also:
        ---------
        :class:`LossFunctionFactory`
        """
        if self == self.LOSS_1D:
            # fix: read the first bounds entry without mutating the caller's dict
            # (popitem(last=False) removed the entry from `bounds` as a side effect)
            bounds = next(iter(bounds.values()))
            return Learner1D(None, bounds, loss_per_interval=loss_function)
        elif self == self.LOSS_2D:
            return Learner2D(None, tuple(bounds.values()), loss_per_triangle=loss_function)
        elif self == self.LOSS_ND:
            return LearnerND(None, tuple(bounds.values()), loss_per_simplex=loss_function)
        else:
            raise ValueError(f'No learner for this enum: {self}')
55
+
56
+
57
class LossFunctionBase(metaclass=ABCMeta):
    """ Base class wrapping an adaptive loss-function factory.

    Subclasses set the class attributes below and are registered in the
    LossFunctionFactory; calling an instance builds the actual loss callable
    handed over to the adaptive learner.
    """
    _loss : Callable         # factory returning the adaptive loss callable
    dim: LossDim             # dimensionality this loss applies to
    usual_name: str          # human-readable identifier used as factory key
    params: list[dict] = []  # Parameter-tree description of the loss options

    def __call__(self, *args, **kwargs):
        # NOTE(review): positional args are accepted but silently dropped — only
        # keyword options reach the factory; confirm this is intentional
        return self._loss(**kwargs)
65
+
66
+
67
class LossFunctionFactory:
    """ Registry of LossFunctionBase subclasses keyed by (LossDim, usual_name)."""

    _builders = {}

    @classmethod
    def register(cls) -> Callable:
        """ To be used as a decorator

        Register in the class registry a new LossFunction class using its 2 identifiers:
        LossDim and usual_name
        """

        def inner_wrapper(wrapped_class: LossFunctionBase) -> Callable:
            key = wrapped_class.usual_name
            dim = wrapped_class.dim
            builders = cls._builders.setdefault(dim, {})
            if key not in builders:
                builders[key] = wrapped_class
            else:
                # fix: message used to claim 'Replacing it' although the first
                # registration is kept; message now matches the behavior
                logger.warning(f'The {key} builder is already registered. Keeping the first one')
            return wrapped_class

        return inner_wrapper

    @classmethod
    def get(cls, dim: LossDim, key : str) -> Type[LossFunctionBase]:
        """ Return the registered class for (dim, key).

        Raises
        ------
        ValueError: if no loss function is registered under this pair
        """
        loss = cls._builders.get(dim, {}).get(key)
        if loss is None:
            raise ValueError(f'Unknown Loss function with dim={dim} and key={key}')
        return loss

    @classmethod
    def create(cls, dim: LossDim, key: str, **kwargs) -> LossFunctionBase:
        """ Instantiate the registered class and build the adaptive loss callable.

        The instance's __call__ forwards kwargs to the underlying factory, so the
        returned object is the callable handed to the learner.
        """
        return cls.get(dim, key)()(**kwargs)

    @classmethod
    def dims(cls) -> list[LossDim]:
        """ Dimensionalities having at least one registered loss function."""
        return list(cls._builders.keys())

    @classmethod
    def keys(cls, dim: LossDim) -> list[str]:
        """ Names of the loss functions registered for dim (empty list if none)."""
        return list(cls._builders.get(dim, {}).keys())
@@ -0,0 +1,123 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Created the 31/08/2023
4
+
5
+ @author: Sebastien Weber
6
+ """
7
+
8
+ from typing import List, TYPE_CHECKING, Union, Dict, Tuple, Iterable
9
+
10
+ import numpy as np
11
+ from collections import OrderedDict
12
+ from collections.abc import Iterable as IterableClass
13
+
14
+
15
+ from pymodaq_utils.logger import set_logger, get_module_name
16
+
17
+ from pymodaq_data.data import (DataCalculated, DataRaw, Axis)
18
+
19
+
20
+ from pymodaq.extensions.optimizers_base.utils import (
21
+ GenericAlgorithm, OptimizerModelDefault, StopType, StoppingParameters,
22
+ OptimizerConfig)
23
+ from pymodaq.extensions.adaptive.loss_function.loss_factory import LossDim, LossFunctionBase, LossFunctionFactory
24
+
25
+ logger = set_logger(get_module_name(__file__))
26
+
27
+
28
class AdaptiveConfig(OptimizerConfig):
    """ Optimizer configuration file dedicated to the adaptive extension."""
    config_name = "adaptive_settings"  # fix: was an f-string with no placeholder
30
+
31
+
32
class AdaptiveAlgorithm(GenericAlgorithm):
    """ GenericAlgorithm implementation wrapping the learners of the `adaptive` package."""

    def __init__(self, ini_random: int, bounds: OrderedDict[str, tuple[float, float]],
                 actuators: list[str],
                 loss_type: LossDim, kind: str, **kwargs):
        super().__init__(ini_random, bounds, actuators)
        # the learner type (1D/2D/ND) is selected from the loss dimensionality
        self._algo = loss_type.get_learner_from_enum(
            bounds=bounds,
            loss_function=LossFunctionFactory.create(loss_type, kind, **kwargs))
        self._best = 1  # running maximum of 1/loss, used as a stopping fitness

    def get_random_point(self) -> dict[str, float]:
        """ Get a random point coordinates in the defined bounds

        Normally not needed for Adaptive
        """
        # fix: self.bounds is a dict keyed by actuator name; the original code
        # indexed it with integer positions, which raised KeyError
        point = dict()
        for act, bound in self.bounds.items():
            low, high = np.min(bound), np.max(bound)
            point[act] = (high - low) * np.random.random_sample() + low
        return point

    def set_prediction_function(self, loss_type=LossDim.LOSS_1D, kind='', **kwargs):
        """ Build and store a new loss callable from the factory."""
        self._prediction = LossFunctionFactory.create(loss_type, kind, **kwargs)

    def update_prediction_function(self):
        """ Nothing to refresh for adaptive learners."""
        pass

    @property
    def tradeoff(self) -> float:
        """ Exploration/exploitation tradeoff: not applicable to adaptive sampling."""
        return 0.

    @property
    def bounds(self) -> Dict[str, Tuple[float, float]]:
        """ Actuator bounds as a dict {actuator_name: learner bound}."""
        return dict(zip(self.actuators, self._algo.bounds))

    @bounds.setter
    def bounds(self, bounds: Dict[str, Tuple[float, float]]):
        if isinstance(bounds, dict):
            # reorder to match the actuator ordering expected by the learner
            bounds = [bounds[act] for act in self.actuators]
            # NOTE(review): confirm the wrapped learner actually exposes set_bounds
            self._algo.set_bounds(bounds)
        else:
            raise TypeError('Bounds should be defined as a dictionary')

    def prediction_ask(self) -> dict[str, float]:
        """ Ask the prediction function or algo to provide the next point to probe"""
        return dict(zip(self.actuators, np.atleast_1d(self._algo.ask(1)[0][0])))

    def tell(self, function_value: float):
        """ Feed the measured value at the last asked point back to the learner."""
        next_point = tuple([self._next_point[act] for act in self.actuators])
        if len(next_point) == 1:
            next_point = next_point[0]  # 1D learners expect a scalar, not a 1-tuple
        self._algo.tell(next_point, function_value)

    @property
    def best_fitness(self) -> float:
        """ For adaptive optimization this is only used as a stopping criterion"""
        # hoist the loss() call: it was evaluated twice in the original
        loss = self._algo.loss()
        # NOTE(review): assumes loss() > 0; a zero loss would raise ZeroDivisionError
        if 1 / loss > self._best:
            self._best = 1 / loss
        return self._best

    @property
    def best_individual(self) -> Union[dict[str, float], None]:
        """ Return the individual coordinates with best fitness"""
        if len(self._algo.data) > 0:
            data = self._algo.data
            values = list(data.values())
            # coordinates of the point with the highest recorded value
            individual_array = np.atleast_1d(list(data.keys())[values.index(max(values))])
        else:
            individual_array = np.atleast_1d(self._algo.bounds[0])
        return dict(zip(self.actuators, individual_array))

    def best_individuals(self, n_best):
        """ NOTE(review): returns only the single best point once more than n_best
        samples exist (same payload as best_individual) — confirm intended."""
        if len(self._algo.data) > n_best:
            data = self._algo.data
            values = list(data.values())
            individual_array = np.atleast_1d(list(data.keys())[values.index(max(values))])
        else:
            individual_array = np.atleast_1d(self._algo.bounds[0])
        return dict(zip(self.actuators, individual_array))

    def stopping(self, ind_iter: int, stopping_parameters: StoppingParameters):
        """ Stop only on iteration count (StopType.ITER); StopType.NONE never stops."""
        if stopping_parameters.stop_type != StopType.NONE:
            if ind_iter >= stopping_parameters.niter:  # For instance StopType.ITER
                return True
        return False
118
+
119
+
120
+
121
+
122
+
123
+
@@ -0,0 +1,2 @@
1
+ from . import bayesian_optimization
2
+ from . import utils
@@ -0,0 +1,2 @@
1
+ from .acquisition_function_factory import GenericAcquisitionFunctionFactory, GenericAcquisitionFunctionBase
2
+ from .base_acquisition_function import GenericUpperConfidenceBound, GenericExpectedImprovement, GenericProbabilityOfImprovement
@@ -0,0 +1,80 @@
1
+ from abc import ABCMeta, abstractmethod
2
+ from typing import Callable
3
+
4
+ from numpy.random import RandomState
5
+ from bayes_opt.acquisition import AcquisitionFunction
6
+
7
+ from pymodaq_gui.managers.parameter_manager import ParameterManager
8
+ from pymodaq_utils.logger import set_logger, get_module_name
9
+
10
+ logger = set_logger(get_module_name(__file__))
11
+
12
+
13
class GenericAcquisitionFunctionBase(metaclass=ABCMeta):
    """ Thin wrapper giving bayes_opt acquisition functions a uniform interface.

    Subclasses set the class attributes below and register themselves in the
    GenericAcquisitionFunctionFactory; calls are delegated to the wrapped
    bayes_opt AcquisitionFunction instance.
    """
    _function: AcquisitionFunction  # the wrapped bayes_opt acquisition function
    usual_name: str                 # human-readable name
    short_name: str                 # key used in the factory registry
    # NOTE(review): annotation-only, this does NOT enforce an abstract property —
    # subclasses are not forced to define params; confirm intent
    params : property(abstractmethod)

    def base_acq(self, mean, std):
        # delegate to the wrapped function
        return self._function.base_acq(mean, std)

    def decay_exploration(self):
        # delegate to the wrapped function
        self._function.decay_exploration()

    @property
    def tradeoff(self):
        # not every acquisition function has a tradeoff parameter; subclasses
        # that do must override both accessors
        raise NotImplementedError

    @tradeoff.setter
    def tradeoff(self, tradeoff):
        raise NotImplementedError

    def suggest(self, gaussian_process, target_space, n_random = 1000, n_l_bfgs_b = 10, fit_gp = True):
        # delegate to the wrapped function: next point maximizing the acquisition
        return self._function.suggest(gaussian_process, target_space, n_random, n_l_bfgs_b, fit_gp)
35
+
36
+
37
class GenericAcquisitionFunctionFactory:
    """ Registry of acquisition-function wrappers keyed by their short_name."""

    _builders = {}

    @classmethod
    def register(cls) -> Callable:
        """ To be used as a decorator

        Register in the class registry a new acquisition function class using its
        short_name identifier (fix: docstring used to mention scanner classes).
        """

        def inner_wrapper(wrapped_class: GenericAcquisitionFunctionBase) -> Callable:
            key = wrapped_class.short_name

            if key not in cls._builders:
                cls._builders[key] = wrapped_class
            else:
                # fix: message used to claim 'Replacing it' although the first
                # registration is kept; message now matches the behavior
                logger.warning(f'The {key} builder is already registered. Keeping the first one')
            return wrapped_class

        return inner_wrapper

    @classmethod
    def get(cls, key : str) -> GenericAcquisitionFunctionBase:
        """ Return the registered class for key.

        Raises
        ------
        ValueError: if no acquisition function is registered under this key
        """
        builder = cls._builders.get(key)
        if not builder:
            raise ValueError(key)
        return builder

    @classmethod
    def create(cls, key, **kwargs) -> GenericAcquisitionFunctionBase:
        """ Instantiate the registered class for key.

        fix: goes through get() so an unknown key raises ValueError instead of
        TypeError ('NoneType' object is not callable).
        """
        return cls.get(key)(**kwargs)

    @classmethod
    def keys(cls) -> list[str]:
        """ All registered short names."""
        return list(cls._builders.keys())

    @classmethod
    def short_names(cls) -> list[str]:
        """ Alias of keys(): the registry is keyed by short_name."""
        return list(cls.keys())

    @classmethod
    def usual_names(cls) -> list[str]:
        """ Human-readable names of all registered acquisition functions."""
        return [cls.get(key).usual_name for key in cls._builders]