pyfemtet 0.9.5__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyfemtet might be problematic. Click here for more details.

Files changed (272)
  1. pyfemtet/__init__.py +6 -1
  2. pyfemtet/_i18n/1. make_pot_and_update_po.bat +8 -0
  3. pyfemtet/_i18n/2. build_mo.bat +5 -0
  4. pyfemtet/_i18n/__init__.py +4 -0
  5. pyfemtet/_i18n/babel.cfg +2 -0
  6. pyfemtet/_i18n/i18n.py +37 -0
  7. pyfemtet/_i18n/locales/ja/LC_MESSAGES/messages.mo +0 -0
  8. pyfemtet/_i18n/locales/ja/LC_MESSAGES/messages.po +1020 -0
  9. pyfemtet/_i18n/locales/messages.pot +987 -0
  10. pyfemtet/{_message → _i18n}/messages.py +128 -41
  11. pyfemtet/_util/closing.py +19 -0
  12. pyfemtet/_util/dask_util.py +89 -7
  13. pyfemtet/_util/df_util.py +46 -0
  14. pyfemtet/_util/excel_macro_util.py +8 -3
  15. pyfemtet/_util/excel_parse_util.py +43 -23
  16. pyfemtet/_util/femtet_access_inspection.py +120 -0
  17. pyfemtet/{_femtet_config_util/autosave.py → _util/femtet_autosave.py} +7 -0
  18. pyfemtet/_util/femtet_exit.py +105 -0
  19. pyfemtet/_util/femtet_version.py +20 -0
  20. pyfemtet/_util/helper.py +103 -0
  21. pyfemtet/_util/process_util.py +107 -0
  22. pyfemtet/_util/str_enum.py +44 -0
  23. pyfemtet/core.py +15 -47
  24. pyfemtet/dispatch_extensions/__init__.py +8 -11
  25. pyfemtet/dispatch_extensions/_impl.py +42 -198
  26. pyfemtet/logger/__init__.py +8 -1
  27. pyfemtet/logger/_impl.py +5 -6
  28. pyfemtet/opt/__init__.py +3 -17
  29. pyfemtet/opt/exceptions.py +45 -0
  30. pyfemtet/opt/femopt.py +621 -0
  31. pyfemtet/opt/history/__init__.py +11 -0
  32. pyfemtet/opt/history/_history.py +1416 -0
  33. pyfemtet/opt/history/_hypervolume.py +169 -0
  34. pyfemtet/opt/history/_optimality.py +79 -0
  35. pyfemtet/opt/interface/__init__.py +17 -24
  36. pyfemtet/opt/interface/_base_interface.py +222 -0
  37. pyfemtet/opt/interface/_excel_interface/__init__.py +3 -0
  38. pyfemtet/opt/interface/_excel_interface/debug-excel-interface.xlsm +0 -0
  39. pyfemtet/opt/interface/_excel_interface/excel_interface.py +997 -0
  40. pyfemtet/opt/interface/_femtet_interface/__init__.py +3 -0
  41. pyfemtet/opt/interface/{_femtet_parametric.py → _femtet_interface/_femtet_parametric.py} +20 -12
  42. pyfemtet/opt/interface/{_femtet.py → _femtet_interface/femtet_interface.py} +508 -353
  43. pyfemtet/opt/interface/_femtet_with_nx_interface/__init__.py +5 -0
  44. pyfemtet/opt/interface/_femtet_with_nx_interface/femtet_with_nx_interface.py +230 -0
  45. pyfemtet/opt/interface/_femtet_with_nx_interface/model1.prt +0 -0
  46. pyfemtet/opt/interface/_femtet_with_nx_interface/model1.x_t +98 -0
  47. pyfemtet/opt/interface/{_femtet_with_nx → _femtet_with_nx_interface}/update_model.py +1 -3
  48. pyfemtet/opt/interface/_femtet_with_solidworks/__init__.py +5 -0
  49. pyfemtet/opt/interface/_femtet_with_solidworks/femtet_with_solidworks_interface.py +142 -0
  50. pyfemtet/opt/interface/_solidworks_interface/__init__.py +5 -0
  51. pyfemtet/opt/interface/_solidworks_interface/solidworks_interface.py +227 -0
  52. pyfemtet/opt/interface/_surrogate_model_interface/__init__.py +8 -0
  53. pyfemtet/opt/interface/_surrogate_model_interface/base_surrogate_interface.py +150 -0
  54. pyfemtet/opt/interface/_surrogate_model_interface/botorch_interface.py +298 -0
  55. pyfemtet/opt/interface/_surrogate_model_interface/debug-pof-botorch.reccsv +18 -0
  56. pyfemtet/opt/interface/_with_excel_settings/__init__.py +61 -0
  57. pyfemtet/opt/interface/_with_excel_settings/with_excel_settings.py +134 -0
  58. pyfemtet/opt/meta_script/YAML_Generator.xlsm +0 -0
  59. pyfemtet/opt/meta_script/__main__.py +58 -36
  60. pyfemtet/opt/optimizer/__init__.py +7 -9
  61. pyfemtet/opt/optimizer/_base_optimizer.py +911 -0
  62. pyfemtet/opt/optimizer/optuna_optimizer/__init__.py +9 -0
  63. pyfemtet/opt/optimizer/optuna_optimizer/_optuna_attribute.py +63 -0
  64. pyfemtet/opt/optimizer/optuna_optimizer/_optuna_optimizer.py +796 -0
  65. pyfemtet/opt/optimizer/optuna_optimizer/_pof_botorch/__init__.py +7 -0
  66. pyfemtet/opt/optimizer/optuna_optimizer/_pof_botorch/debug-pof-botorch.reccsv +18 -0
  67. pyfemtet/opt/optimizer/optuna_optimizer/_pof_botorch/enable_nonlinear_constraint.py +244 -0
  68. pyfemtet/opt/optimizer/optuna_optimizer/_pof_botorch/pof_botorch_sampler.py +1249 -0
  69. pyfemtet/opt/optimizer/optuna_optimizer/wat_ex14_parametric_jp.femprj +0 -0
  70. pyfemtet/opt/optimizer/scipy_optimizer/__init__.py +1 -0
  71. pyfemtet/opt/optimizer/scipy_optimizer/_scipy_optimizer.py +383 -0
  72. pyfemtet/opt/prediction/__init__.py +7 -0
  73. pyfemtet/opt/prediction/_botorch_utils.py +133 -0
  74. pyfemtet/opt/prediction/_gpytorch_modules_extension.py +142 -0
  75. pyfemtet/opt/prediction/_helper.py +155 -0
  76. pyfemtet/opt/prediction/_model.py +118 -0
  77. pyfemtet/opt/problem/problem.py +304 -0
  78. pyfemtet/opt/problem/variable_manager/__init__.py +20 -0
  79. pyfemtet/opt/problem/variable_manager/_string_as_expression.py +115 -0
  80. pyfemtet/opt/problem/variable_manager/_variable_manager.py +295 -0
  81. pyfemtet/opt/visualization/history_viewer/__main__.py +5 -0
  82. pyfemtet/opt/visualization/{_base.py → history_viewer/_base_application.py} +18 -13
  83. pyfemtet/opt/visualization/history_viewer/_common_pages.py +150 -0
  84. pyfemtet/opt/visualization/{_complex_components → history_viewer/_complex_components}/alert_region.py +10 -5
  85. pyfemtet/opt/visualization/{_complex_components → history_viewer/_complex_components}/control_femtet.py +16 -13
  86. pyfemtet/opt/visualization/{_complex_components → history_viewer/_complex_components}/main_graph.py +117 -47
  87. pyfemtet/opt/visualization/{_complex_components → history_viewer/_complex_components}/pm_graph.py +159 -138
  88. pyfemtet/opt/visualization/history_viewer/_process_monitor/_application.py +173 -0
  89. pyfemtet/opt/visualization/history_viewer/_process_monitor/_pages.py +291 -0
  90. pyfemtet/opt/visualization/{_wrapped_components → history_viewer/_wrapped_components}/dbc.py +1 -1
  91. pyfemtet/opt/visualization/{_wrapped_components → history_viewer/_wrapped_components}/dcc.py +1 -1
  92. pyfemtet/opt/visualization/{_wrapped_components → history_viewer/_wrapped_components}/html.py +1 -1
  93. pyfemtet/opt/visualization/history_viewer/result_viewer/__main__.py +5 -0
  94. pyfemtet/opt/visualization/{result_viewer/application.py → history_viewer/result_viewer/_application.py} +6 -6
  95. pyfemtet/opt/visualization/{result_viewer/pages.py → history_viewer/result_viewer/_pages.py} +106 -82
  96. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08.csv +18 -0
  97. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08.db +0 -0
  98. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8.jpg +0 -0
  99. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8.log +45 -0
  100. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8.pdt +0 -0
  101. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_1.jpg +0 -0
  102. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_1.pdt +0 -0
  103. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_10.jpg +0 -0
  104. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_10.pdt +0 -0
  105. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_11.jpg +0 -0
  106. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_11.pdt +0 -0
  107. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_12.jpg +0 -0
  108. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_12.pdt +0 -0
  109. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_13.jpg +0 -0
  110. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_13.pdt +0 -0
  111. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_14.jpg +0 -0
  112. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_14.pdt +0 -0
  113. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_15.jpg +0 -0
  114. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_15.pdt +0 -0
  115. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_16.jpg +0 -0
  116. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_16.pdt +0 -0
  117. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_17.jpg +0 -0
  118. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_17.pdt +0 -0
  119. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_18.jpg +0 -0
  120. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_18.pdt +0 -0
  121. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_19.jpg +0 -0
  122. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_19.pdt +0 -0
  123. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_2.jpg +0 -0
  124. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_2.pdt +0 -0
  125. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_20.jpg +0 -0
  126. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_20.pdt +0 -0
  127. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_3.jpg +0 -0
  128. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_3.pdt +0 -0
  129. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.bgr +0 -0
  130. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.bnd +0 -0
  131. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.btr +0 -0
  132. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.jpg +0 -0
  133. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.mtl +0 -0
  134. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.pdt +0 -0
  135. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.prm +0 -0
  136. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_5.jpg +0 -0
  137. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_5.pdt +0 -0
  138. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_6.jpg +0 -0
  139. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_6.pdt +0 -0
  140. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_7.jpg +0 -0
  141. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_7.pdt +0 -0
  142. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_8.jpg +0 -0
  143. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_8.pdt +0 -0
  144. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_9.jpg +0 -0
  145. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_9.pdt +0 -0
  146. pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.femprj +0 -0
  147. pyfemtet/opt/visualization/plotter/main_figure_creator.py +536 -0
  148. pyfemtet/opt/visualization/plotter/pm_graph_creator.py +359 -0
  149. pyfemtet/opt/worker_status.py +120 -0
  150. {pyfemtet-0.9.5.dist-info → pyfemtet-1.0.0.dist-info}/METADATA +23 -24
  151. pyfemtet-1.0.0.dist-info/RECORD +172 -0
  152. pyfemtet-1.0.0.dist-info/entry_points.txt +3 -0
  153. pyfemtet/_femtet_config_util/exit.py +0 -59
  154. pyfemtet/_message/1. make_pot.bat +0 -11
  155. pyfemtet/_message/2. make_mo.bat +0 -6
  156. pyfemtet/_message/__init__.py +0 -5
  157. pyfemtet/_message/babel.cfg +0 -2
  158. pyfemtet/_message/locales/ja/LC_MESSAGES/messages.mo +0 -0
  159. pyfemtet/_message/locales/ja/LC_MESSAGES/messages.po +0 -570
  160. pyfemtet/_message/locales/messages.pot +0 -551
  161. pyfemtet/_warning.py +0 -87
  162. pyfemtet/brep/_impl.py +0 -18
  163. pyfemtet/opt/_femopt.py +0 -1007
  164. pyfemtet/opt/_femopt_core.py +0 -1169
  165. pyfemtet/opt/_test_utils/control_femtet.py +0 -39
  166. pyfemtet/opt/_test_utils/hyper_sphere.py +0 -24
  167. pyfemtet/opt/_test_utils/record_history.py +0 -130
  168. pyfemtet/opt/advanced_samples/excel_ui/(ref) original_project.femprj +0 -0
  169. pyfemtet/opt/advanced_samples/excel_ui/femtet-macro.xlsm +0 -0
  170. pyfemtet/opt/advanced_samples/excel_ui/pyfemtet-core.py +0 -291
  171. pyfemtet/opt/advanced_samples/excel_ui/test-pyfemtet-core.cmd +0 -22
  172. pyfemtet/opt/advanced_samples/restart/gal_ex13_parametric.femprj +0 -0
  173. pyfemtet/opt/advanced_samples/restart/gal_ex13_parametric_restart.py +0 -99
  174. pyfemtet/opt/advanced_samples/restart/gal_ex13_parametric_restart_jp.py +0 -102
  175. pyfemtet/opt/advanced_samples/surrogate_model/gal_ex13_create_training_data.py +0 -60
  176. pyfemtet/opt/advanced_samples/surrogate_model/gal_ex13_create_training_data_jp.py +0 -57
  177. pyfemtet/opt/advanced_samples/surrogate_model/gal_ex13_optimize_with_surrogate.py +0 -100
  178. pyfemtet/opt/advanced_samples/surrogate_model/gal_ex13_optimize_with_surrogate_jp.py +0 -90
  179. pyfemtet/opt/advanced_samples/surrogate_model/gal_ex13_parametric.femprj +0 -0
  180. pyfemtet/opt/interface/_base.py +0 -101
  181. pyfemtet/opt/interface/_excel_interface.py +0 -984
  182. pyfemtet/opt/interface/_femtet_excel.py +0 -141
  183. pyfemtet/opt/interface/_femtet_with_nx/__init__.py +0 -3
  184. pyfemtet/opt/interface/_femtet_with_nx/_interface.py +0 -178
  185. pyfemtet/opt/interface/_femtet_with_sldworks.py +0 -298
  186. pyfemtet/opt/interface/_surrogate/__init__.py +0 -5
  187. pyfemtet/opt/interface/_surrogate/_base.py +0 -129
  188. pyfemtet/opt/interface/_surrogate/_chaospy.py +0 -71
  189. pyfemtet/opt/interface/_surrogate/_singletaskgp.py +0 -71
  190. pyfemtet/opt/interface/_surrogate_excel.py +0 -102
  191. pyfemtet/opt/optimizer/_base.py +0 -376
  192. pyfemtet/opt/optimizer/_optuna/_botorch_patch/enable_nonlinear_constraint.py +0 -220
  193. pyfemtet/opt/optimizer/_optuna/_optuna.py +0 -434
  194. pyfemtet/opt/optimizer/_optuna/_pof_botorch.py +0 -1914
  195. pyfemtet/opt/optimizer/_scipy.py +0 -159
  196. pyfemtet/opt/optimizer/_scipy_scalar.py +0 -127
  197. pyfemtet/opt/optimizer/parameter.py +0 -113
  198. pyfemtet/opt/prediction/_base.py +0 -61
  199. pyfemtet/opt/prediction/single_task_gp.py +0 -119
  200. pyfemtet/opt/samples/femprj_sample/ParametricIF.femprj +0 -0
  201. pyfemtet/opt/samples/femprj_sample/ParametricIF.py +0 -29
  202. pyfemtet/opt/samples/femprj_sample/ParametricIF_test_result.reccsv +0 -13
  203. pyfemtet/opt/samples/femprj_sample/cad_ex01_NX.femprj +0 -0
  204. pyfemtet/opt/samples/femprj_sample/cad_ex01_NX.prt +0 -0
  205. pyfemtet/opt/samples/femprj_sample/cad_ex01_NX.py +0 -135
  206. pyfemtet/opt/samples/femprj_sample/cad_ex01_NX_test_result.reccsv +0 -23
  207. pyfemtet/opt/samples/femprj_sample/cad_ex01_SW.SLDPRT +0 -0
  208. pyfemtet/opt/samples/femprj_sample/cad_ex01_SW.femprj +0 -0
  209. pyfemtet/opt/samples/femprj_sample/cad_ex01_SW.py +0 -131
  210. pyfemtet/opt/samples/femprj_sample/cad_ex01_SW_test_result.reccsv +0 -23
  211. pyfemtet/opt/samples/femprj_sample/constrained_pipe.femprj +0 -0
  212. pyfemtet/opt/samples/femprj_sample/constrained_pipe.py +0 -96
  213. pyfemtet/opt/samples/femprj_sample/constrained_pipe_test_result.reccsv +0 -13
  214. pyfemtet/opt/samples/femprj_sample/gal_ex58_parametric.femprj +0 -0
  215. pyfemtet/opt/samples/femprj_sample/gal_ex58_parametric.py +0 -74
  216. pyfemtet/opt/samples/femprj_sample/gal_ex58_parametric_test_result.reccsv +0 -13
  217. pyfemtet/opt/samples/femprj_sample/gau_ex08_parametric.femprj +0 -0
  218. pyfemtet/opt/samples/femprj_sample/gau_ex08_parametric.py +0 -58
  219. pyfemtet/opt/samples/femprj_sample/gau_ex08_parametric_test_result.reccsv +0 -23
  220. pyfemtet/opt/samples/femprj_sample/gau_ex12_parametric.femprj +0 -0
  221. pyfemtet/opt/samples/femprj_sample/gau_ex12_parametric.py +0 -52
  222. pyfemtet/opt/samples/femprj_sample/her_ex40_parametric.femprj +0 -0
  223. pyfemtet/opt/samples/femprj_sample/her_ex40_parametric.py +0 -138
  224. pyfemtet/opt/samples/femprj_sample/her_ex40_parametric_test_result.reccsv +0 -18
  225. pyfemtet/opt/samples/femprj_sample/paswat_ex1_parametric.femprj +0 -0
  226. pyfemtet/opt/samples/femprj_sample/paswat_ex1_parametric.py +0 -60
  227. pyfemtet/opt/samples/femprj_sample/paswat_ex1_parametric_parallel.py +0 -61
  228. pyfemtet/opt/samples/femprj_sample/paswat_ex1_parametric_test_result.reccsv +0 -18
  229. pyfemtet/opt/samples/femprj_sample/wat_ex14_parametric.femprj +0 -0
  230. pyfemtet/opt/samples/femprj_sample/wat_ex14_parametric.py +0 -58
  231. pyfemtet/opt/samples/femprj_sample/wat_ex14_parametric_parallel.py +0 -58
  232. pyfemtet/opt/samples/femprj_sample/wat_ex14_parametric_test_result.reccsv +0 -18
  233. pyfemtet/opt/samples/femprj_sample_jp/ParametricIF_jp.femprj +0 -0
  234. pyfemtet/opt/samples/femprj_sample_jp/ParametricIF_jp.py +0 -29
  235. pyfemtet/opt/samples/femprj_sample_jp/cad_ex01_NX_jp.femprj +0 -0
  236. pyfemtet/opt/samples/femprj_sample_jp/cad_ex01_NX_jp.py +0 -129
  237. pyfemtet/opt/samples/femprj_sample_jp/cad_ex01_SW_jp.femprj +0 -0
  238. pyfemtet/opt/samples/femprj_sample_jp/cad_ex01_SW_jp.py +0 -125
  239. pyfemtet/opt/samples/femprj_sample_jp/constrained_pipe_jp.py +0 -93
  240. pyfemtet/opt/samples/femprj_sample_jp/gal_ex58_parametric_jp.femprj +0 -0
  241. pyfemtet/opt/samples/femprj_sample_jp/gal_ex58_parametric_jp.py +0 -70
  242. pyfemtet/opt/samples/femprj_sample_jp/gau_ex08_parametric_jp.femprj +0 -0
  243. pyfemtet/opt/samples/femprj_sample_jp/gau_ex08_parametric_jp.py +0 -57
  244. pyfemtet/opt/samples/femprj_sample_jp/gau_ex12_parametric_jp.py +0 -52
  245. pyfemtet/opt/samples/femprj_sample_jp/her_ex40_parametric_jp.femprj +0 -0
  246. pyfemtet/opt/samples/femprj_sample_jp/her_ex40_parametric_jp.py +0 -138
  247. pyfemtet/opt/samples/femprj_sample_jp/paswat_ex1_parametric_jp.femprj +0 -0
  248. pyfemtet/opt/samples/femprj_sample_jp/paswat_ex1_parametric_jp.py +0 -58
  249. pyfemtet/opt/samples/femprj_sample_jp/paswat_ex1_parametric_parallel_jp.py +0 -59
  250. pyfemtet/opt/samples/femprj_sample_jp/wat_ex14_parametric_jp.py +0 -56
  251. pyfemtet/opt/samples/femprj_sample_jp/wat_ex14_parametric_parallel_jp.py +0 -56
  252. pyfemtet/opt/visualization/_complex_components/main_figure_creator.py +0 -332
  253. pyfemtet/opt/visualization/_complex_components/pm_graph_creator.py +0 -201
  254. pyfemtet/opt/visualization/_process_monitor/application.py +0 -226
  255. pyfemtet/opt/visualization/_process_monitor/pages.py +0 -406
  256. pyfemtet/opt/visualization/_wrapped_components/__init__.py +0 -0
  257. pyfemtet/opt/visualization/result_viewer/__init__.py +0 -0
  258. pyfemtet-0.9.5.dist-info/RECORD +0 -158
  259. pyfemtet-0.9.5.dist-info/entry_points.txt +0 -3
  260. /pyfemtet/{_femtet_config_util → opt/problem}/__init__.py +0 -0
  261. /pyfemtet/{brep → opt/visualization/history_viewer}/__init__.py +0 -0
  262. /pyfemtet/opt/{_test_utils → visualization/history_viewer/_complex_components}/__init__.py +0 -0
  263. /pyfemtet/opt/{optimizer/_optuna → visualization/history_viewer/_process_monitor}/__init__.py +0 -0
  264. /pyfemtet/opt/{optimizer/_optuna/_botorch_patch → visualization/history_viewer/_wrapped_components}/__init__.py +0 -0
  265. /pyfemtet/opt/visualization/{_wrapped_components → history_viewer/_wrapped_components}/str_enum.py +0 -0
  266. /pyfemtet/opt/visualization/{result_viewer → history_viewer/result_viewer}/.gitignore +0 -0
  267. /pyfemtet/opt/visualization/{_complex_components → history_viewer/result_viewer}/__init__.py +0 -0
  268. /pyfemtet/opt/visualization/{_process_monitor → plotter}/__init__.py +0 -0
  269. /pyfemtet/opt/{samples/femprj_sample_jp/wat_ex14_parametric_jp.femprj → wat_ex14_parametric_jp.femprj} +0 -0
  270. {pyfemtet-0.9.5.dist-info → pyfemtet-1.0.0.dist-info}/LICENSE +0 -0
  271. {pyfemtet-0.9.5.dist-info → pyfemtet-1.0.0.dist-info}/LICENSE_THIRD_PARTY.txt +0 -0
  272. {pyfemtet-0.9.5.dist-info → pyfemtet-1.0.0.dist-info}/WHEEL +0 -0
@@ -0,0 +1 @@
1
+ from ._scipy_optimizer import ScipyOptimizer
@@ -0,0 +1,383 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Callable
4
+
5
+ from contextlib import suppress
6
+
7
+ import numpy as np
8
+ from scipy.optimize import minimize, OptimizeResult
9
+ from scipy.optimize import NonlinearConstraint
10
+
11
+ from pyfemtet._i18n import Msg, _
12
+ from pyfemtet._util.closing import closing
13
+ from pyfemtet.opt.problem.variable_manager import *
14
+ from pyfemtet.opt.problem.problem import *
15
+ from pyfemtet.opt.exceptions import *
16
+ from pyfemtet.logger import get_module_logger
17
+
18
+ from pyfemtet.opt.optimizer._base_optimizer import *
19
+
20
+
21
+ __all__ = [
22
+ 'ScipyOptimizer',
23
+ ]
24
+
25
+
26
+ logger = get_module_logger('opt.optimizer', False)
27
+
28
+
29
+ class _ScipyCallback:
30
+
31
+ def __init__(self, opt: ScipyOptimizer):
32
+ self.opt = opt
33
+
34
+ def __call__(self, xk: np.ndarray = None, intermediate_result: OptimizeResult = None):
35
+ pass
36
+
37
+
38
class ScipyOptimizer(AbstractOptimizer):
    """
    Optimizer class that utilizes SciPy optimization methods.

    This class serves as a wrapper around SciPy's optimization routines,
    allowing customization of the optimization method, tolerance, and options.
    It also provides mechanisms for handling constraints with enhancement and scaling.

    Attributes:
        method (str): The optimization method to use (e.g., 'BFGS', 'Nelder-Mead').
        tol (float or None): Tolerance for termination.
        options (dict): Additional options to pass to the SciPy optimizer.
        constraint_enhancement (float): Small value added to enhance constraint handling.
        constraint_scaling (float): Scaling factor applied to constraints.

    Args:
        method (str): The optimization method to use (e.g., 'BFGS', 'Nelder-Mead').
            ``None`` lets scipy choose a method automatically.
        tol (float or None): Tolerance for termination.

    """

    # scipy-based optimization supports neither a wall-clock timeout nor a
    # trial budget; these class-level Nones back the read-only properties below.
    _timeout: None = None
    _n_trials: None = None

    def __init__(self, method: str | None = None, tol: float | None = None):
        super().__init__()

        self.method = method
        self.tol = tol
        # Forwarded verbatim as `options=` to scipy.optimize.minimize.
        self.options = {}
        # Margin added to constraint functions so scipy keeps a small
        # distance from the constraint boundary.
        self.constraint_enhancement = 0.001
        # Multiplier applied to constraint violations before scipy sees them.
        self.constraint_scaling = 1.
70
+
71
+ @property
72
+ def timeout(self):
73
+ return self._timeout
74
+
75
+ @timeout.setter
76
+ def timeout(self, value):
77
+ if value is not None:
78
+ raise NotImplementedError(_(
79
+ en_message='`ScipyOptimizer` cannot use timeout.',
80
+ jp_message='`ScipyOptimizer` では timeout は指定できません。'
81
+ ))
82
+
83
+ @property
84
+ def n_trials(self):
85
+ return self._n_trials
86
+
87
+ @n_trials.setter
88
+ def n_trials(self, value):
89
+ if value is not None:
90
+ raise NotImplementedError(_(
91
+ en_message='`ScipyOptimizer` cannot use n_trials.',
92
+ jp_message='`ScipyOptimizer` では n_trials は指定できません。'
93
+ ))
94
+
95
+ def _get_x0(self) -> np.ndarray:
96
+
97
+ # params を取得
98
+ params: dict[str, Parameter] = self.variable_manager.get_variables(
99
+ filter='parameter', format='raw'
100
+ )
101
+
102
+ for param in params.values():
103
+ if isinstance(param, CategoricalVariable):
104
+ raise NotImplementedError(_(
105
+ en_message='Scipy can optimize only numerical parameters.',
106
+ jp_message='Scipy では数値パラメータのみ最適化できます。'
107
+ ))
108
+
109
+ # params のうち fix == True のものを除く
110
+ x0 = np.array([p.value for p in params.values() if not p.properties.get('fix', False)])
111
+
112
+ return x0
113
+
114
+ def _warn_bounds_for_nelder_mead(self) -> None:
115
+ # https://github.com/scipy/scipy/issues/19991
116
+
117
+ if self.method.lower() != 'nelder-mead':
118
+ return
119
+
120
+ bounds = self._get_scipy_bounds()
121
+ if bounds is None:
122
+ return
123
+
124
+ x0 = self._get_x0()
125
+ if (np.allclose(x0, bounds[:, 0])
126
+ or np.allclose(x0, bounds[:, 1])):
127
+ logger.warning(Msg.WARN_SCIPY_NELDER_MEAD_BOUND)
128
+
129
+ def _setup_before_parallel(self):
130
+
131
+ if not self._done_setup_before_parallel:
132
+
133
+ super()._setup_before_parallel() # flag inside
134
+
135
+ self._warn_bounds_for_nelder_mead()
136
+
137
+ def _get_scipy_bounds(self) -> np.ndarray | None:
138
+
139
+ has_any_bound = False
140
+
141
+ params: dict[str, Parameter] = self.variable_manager.get_variables(filter='parameter')
142
+ bounds = []
143
+ for param in params.values():
144
+ assert isinstance(param, NumericParameter)
145
+ bounds.append([
146
+ param.lower_bound or -np.inf,
147
+ param.upper_bound or np.inf,
148
+ ])
149
+ has_any_bound += (
150
+ (param.lower_bound is not None)
151
+ or (param.upper_bound is not None))
152
+
153
+ if has_any_bound:
154
+ bounds = np.array(bounds)
155
+ else:
156
+ bounds = None
157
+
158
+ return bounds
159
+
160
    def _update_vm_by_xk(self, xk):
        """Write scipy's trial vector ``xk`` back into the variable manager.

        Consumes ``xk`` element by element in the variable manager's
        parameter order, skipping fixed parameters, then re-evaluates
        dependent expressions. Interruption is checked both before and
        after so a user abort surfaces promptly.

        Raises:
            NotImplementedError: If a categorical parameter is encountered
                (unsupported by scipy) or an unknown parameter type appears.
        """

        vm = self.variable_manager

        # check interruption
        self._check_and_raise_interruption()

        # parameter suggestion: pop values off xk in parameter order
        params = vm.get_variables(filter='parameter')
        xk_list = list(xk)
        for name, prm in params.items():

            if prm.properties.get('fix', False):  # default is False
                continue

            if isinstance(prm, NumericParameter):
                prm.value = xk_list.pop(0)

            elif isinstance(prm, CategoricalParameter):
                raise NotImplementedError(Msg.ERR_SCIPY_NOT_IMPLEMENT_CATEGORICAL)

            else:
                raise NotImplementedError
        # every element of xk must have been consumed exactly once
        assert len(xk_list) == 0

        # evaluate expressions
        vm.eval_expressions()

        # check interruption
        self._check_and_raise_interruption()
190
+
191
+ def _scipy_constraint_fun(self, xk, cns: Constraint):
192
+
193
+ self._update_vm_by_xk(xk)
194
+
195
+ # update fem (very slow!)
196
+ if cns.using_fem:
197
+ logger.warning(Msg.WARN_USING_FEM_IN_NLC)
198
+ pass_to_fem = self.variable_manager.get_variables(filter='pass_to_fem')
199
+ self.fem.update_parameter(pass_to_fem)
200
+
201
+ return cns.eval(self.fem)
202
+
203
+ def _get_scipy_constraints(self) -> (
204
+ None
205
+ | list[NonlinearConstraint | dict]
206
+ ):
207
+ if len(self.constraints) == 0:
208
+ return None
209
+
210
+ if self.method is None:
211
+ method = 'SLSQP'
212
+ else:
213
+ method = self.method
214
+ assert method.lower() in ('cobyla', 'cobyqa', 'slsqp', 'trust-constr')
215
+
216
+ out = []
217
+ for cns in self.constraints.values():
218
+
219
+ # use Constraint object
220
+ if method.lower() in ('trust-constr', 'cobyqa'):
221
+
222
+ if cns.hard:
223
+ raise NotImplementedError(
224
+ Msg.F_ERR_SCIPY_METHOD_NOT_IMPLEMENT_HARD_CONSTRAINT(
225
+ method
226
+ )
227
+ )
228
+
229
+ # constraint_scaling を使うためには violation を計算しなければならない
230
+ # TODO: 上下両端が決められている場合は二回計算することになるのでそれを解消する
231
+ if cns.lower_bound is not None:
232
+ scipy_cns = NonlinearConstraint(
233
+ fun=(
234
+ lambda xk_, cns_=cns:
235
+ (
236
+ cns.lower_bound
237
+ - self._scipy_constraint_fun(xk_, cns_)
238
+ ) * self.constraint_scaling
239
+ + self.constraint_enhancement
240
+ ),
241
+ lb=-np.inf,
242
+ ub=0,
243
+ keep_feasible=cns.hard,
244
+ finite_diff_rel_step=self.options.get('finite_diff_rel_step', None),
245
+ )
246
+ out.append(scipy_cns)
247
+ if cns.upper_bound is not None:
248
+ scipy_cns = NonlinearConstraint(
249
+ fun=(
250
+ lambda xk_, cns_=cns:
251
+ (
252
+ self._scipy_constraint_fun(xk_, cns_)
253
+ - cns.upper_bound
254
+ ) * self.constraint_scaling
255
+ + self.constraint_enhancement
256
+ ),
257
+ lb=-np.inf,
258
+ ub=0,
259
+ keep_feasible=cns.hard,
260
+ finite_diff_rel_step=self.options.get('finite_diff_rel_step', None),
261
+ )
262
+ out.append(scipy_cns)
263
+
264
+ # scipy_cns = NonlinearConstraint(
265
+ # fun=lambda xk_, cns_=cns: self._scipy_constraint_fun(xk_, cns_),
266
+ # lb=(cns.lower_bound or -np.inf) + self.constraint_enhancement,
267
+ # ub=(cns.upper_bound or np.inf) - self.constraint_enhancement,
268
+ # keep_feasible=cns.hard,
269
+ # finite_diff_rel_step=self.options.get('finite_diff_rel_step', None),
270
+ # )
271
+ # out.append(scipy_cns)
272
+
273
+ # use dict object
274
+ else:
275
+
276
+ if method.lower() == 'slsqp' and not cns.hard:
277
+ logger.warning(Msg.WARN_SCIPY_SLSQP_CANNOT_PROCESS_SOFT_CONSTRAINT)
278
+
279
+ if method.lower() == 'cobyla' and cns.hard:
280
+ logger.error(
281
+ Msg.F_ERR_SCIPY_METHOD_NOT_IMPLEMENT_HARD_CONSTRAINT(
282
+ method))
283
+ raise NotImplementedError(
284
+ Msg.F_ERR_SCIPY_METHOD_NOT_IMPLEMENT_HARD_CONSTRAINT(
285
+ method))
286
+
287
+ if cns.lower_bound is not None:
288
+
289
+ scipy_cns = dict(
290
+ type='ineq',
291
+ fun=(lambda xk_, cns_=cns:
292
+ (
293
+ self._scipy_constraint_fun(xk_, cns_)
294
+ - cns_.lower_bound
295
+ ) * self.constraint_scaling
296
+ - self.constraint_enhancement),
297
+ )
298
+ out.append(scipy_cns)
299
+
300
+ if cns.upper_bound is not None:
301
+ scipy_cns = dict(
302
+ type='ineq',
303
+ fun=(lambda xk_, cns_=cns:
304
+ (
305
+ cns_.upper_bound
306
+ - self._scipy_constraint_fun(xk_, cns_)
307
+ ) * self.constraint_scaling
308
+ - self.constraint_enhancement),
309
+ )
310
+ out.append(scipy_cns)
311
+
312
+ return out
313
+
314
+ def _get_scipy_callback(self) -> (
315
+ Callable[[OptimizeResult, ...], ...]
316
+ | Callable[[np.ndarray, ...], ...]
317
+ ):
318
+ return _ScipyCallback(self)
319
+
320
    class _SolveSet(AbstractOptimizer._SolveSet):
        """Solve-loop hooks specialized for scipy.

        scipy's ``minimize`` cannot recover from a failed or skipped
        evaluation mid-run, so every handler escalates to
        ``NotImplementedError`` (chained to the original exception)
        instead of continuing the optimization.
        """

        def _hard_constraint_handling(self, e: HardConstraintViolation):
            # scipy cannot retry a point that violates a hard constraint.
            raise NotImplementedError(
                Msg.ERR_SCIPY_HARD_CONSTRAINT_VIOLATION
            ) from e

        def _hidden_constraint_handling(self, e: _HiddenConstraintViolation):
            # Solver failures (hidden constraints) cannot be handled either.
            raise NotImplementedError(
                Msg.ERR_SCIPY_HIDDEN_CONSTRAINT
            ) from e

        def _skip_handling(self, e: SkipSolve):
            # Skipping a trial is not representable in scipy's loop.
            raise NotImplementedError(
                Msg.ERR_SCIPY_NOT_IMPLEMENT_SKIP
            ) from e
336
+
337
    def _objective(self, xk: np.ndarray) -> float:
        """Objective wrapper handed to ``scipy.optimize.minimize``.

        Maps scipy's trial vector ``xk`` onto the variable manager, runs
        the solve set against the FEM model, and returns the internal
        objective value.

        NOTE(review): only the first value of the returned objective dict
        is used — this assumes a single-objective problem; confirm callers
        never reach here with multiple objectives.
        """

        with self._logging():

            vm = self.variable_manager

            # parameter suggestion
            self._update_vm_by_xk(xk)

            # construct TrialInput
            x = vm.get_variables(filter='parameter')
            x_pass_to_fem: dict[str, SupportedVariableTypes] = vm.get_variables(
                filter='pass_to_fem', format='dict')

            # process main fidelity model
            solve_set = self._get_solve_set()
            f_return = solve_set.solve(x, x_pass_to_fem)
            assert f_return is not None
            dict_y_internal = f_return[1]
            # single objective: take the first internal objective value
            y_internal: float = tuple(dict_y_internal.values())[0]  # type: ignore

            return y_internal
359
+
360
    def run(self):
        """Entry point: execute the scipy optimization loop.

        Finalizes the problem definition, builds the initial guess, then
        calls ``scipy.optimize.minimize``. ``InterruptOptimization`` raised
        from inside the objective is suppressed so a user interruption ends
        the run cleanly, and ``closing(self.fem)`` guarantees the FEM
        connection is released even on error.
        """

        # ===== finalize =====
        self._finalize()

        # ===== construct x0 =====
        x0 = self._get_x0()

        # ===== run =====
        with closing(self.fem):

            with self._setting_status(), suppress(InterruptOptimization):

                minimize(
                    self._objective,
                    x0,
                    args=(),
                    method=self.method,
                    bounds=self._get_scipy_bounds(),
                    constraints=self._get_scipy_constraints(),
                    tol=self.tol,
                    callback=self._get_scipy_callback(),
                    options=self.options,
                )
@@ -0,0 +1,7 @@
1
# Public surface of this subpackage: re-export the model classes
# implemented in the private ``_model`` module.
from ._model import *

__all__ = [
    'PyFemtetModel',
    'AbstractModel',
    'SingleTaskGPModel',
]
@@ -0,0 +1,133 @@
1
+ # import
2
+ from __future__ import annotations
3
+
4
+ from packaging import version
5
+
6
+ import torch
7
+
8
+ from gpytorch.mlls import ExactMarginalLogLikelihood
9
+ from gpytorch.kernels import MaternKernel, ScaleKernel # , RBFKernel
10
+ from gpytorch.priors.torch_priors import GammaPrior # , LogNormalPrior
11
+ # from gpytorch.constraints.constraints import GreaterThan
12
+
13
+ from botorch.models import SingleTaskGP
14
+ from botorch.models.transforms import Standardize, Normalize
15
+
16
+ # import fit_gpytorch_mll
17
+ import botorch.version
18
+ if version.parse(botorch.version.version) < version.parse("0.8.0"):
19
+ # noinspection PyUnresolvedReferences
20
+ from botorch.fit import fit_gpytorch_model as fit_gpytorch_mll
21
+
22
+ else:
23
+ from botorch.fit import fit_gpytorch_mll
24
+
25
+
26
+ __all__ = [
27
+ 'get_standardizer_and_no_noise_train_yvar',
28
+ 'setup_yvar_and_standardizer',
29
+ 'setup_gp',
30
+ 'get_matern_kernel_with_gamma_prior_as_covar_module',
31
+ ]
32
+
33
+
34
def get_standardizer_and_no_noise_train_yvar(Y: torch.Tensor):
    """Create an outcome ``Standardize`` transform and a near-zero noise tensor.

    The noise variance is gpytorch's minimum fixed noise for ``Y``'s dtype,
    mapped back to the *original* (un-standardized) outcome scale so it can
    be used as ``train_Yvar`` for a model that standardizes its outcomes.

    Returns:
        ``(YVar, standardizer)`` — note the order: the variance tensor first.
    """
    import gpytorch

    standardizer = Standardize(m=Y.shape[-1])

    # Fit the transform on Y first; untransform() requires a prior forward().
    standardizer.forward(Y)

    noise_floor = gpytorch.settings.min_fixed_noise.value(Y.dtype)
    noise = noise_floor * torch.ones_like(Y)
    _, YVar = standardizer.untransform(Y, noise)

    return YVar, standardizer
43
+
44
+
45
def setup_yvar_and_standardizer(
        Y_: torch.Tensor,
        observation_noise_: str | float | None,
) -> tuple[torch.Tensor | None, Standardize]:
    """Resolve the observation-noise setting into ``(train_Yvar, Standardize)``.

    Args:
        Y_: Training outcomes; the last dimension sets the number of outputs.
        observation_noise_:
            - ``None``: let the model infer the noise (``train_Yvar`` is None).
            - ``'no'`` (case-insensitive): fix the noise to gpytorch's minimum.
            - a number: fixed homoskedastic noise variance for every outcome.

    Returns:
        ``(train_yvar, standardizer)`` where ``train_yvar`` is ``None`` when
        the noise level should be inferred.

    Raises:
        NotImplementedError: for an unrecognized string setting.
    """
    standardizer_ = None
    train_yvar_ = None

    if isinstance(observation_noise_, str):
        if observation_noise_.lower() == 'no':
            train_yvar_, standardizer_ = get_standardizer_and_no_noise_train_yvar(Y_)
        else:
            # Unknown keyword: fail loudly (and say what was passed) instead
            # of silently falling back to inferred noise.
            raise NotImplementedError(
                f'Unsupported observation_noise setting: {observation_noise_!r}'
            )
    # Accept int as well as float noise levels; bool is excluded because
    # True/False is not a meaningful noise variance.
    elif isinstance(observation_noise_, (int, float)) and not isinstance(observation_noise_, bool):
        train_yvar_ = torch.full_like(Y_, float(observation_noise_))

    # The 'no'-noise branch already built a fitted standardizer; otherwise
    # create a fresh one sized to the number of outcomes.
    standardizer_ = standardizer_ or Standardize(m=Y_.shape[-1])

    return train_yvar_, standardizer_
63
+
64
+
65
def _get_matern_kernel_with_gamma_prior(
        ard_num_dims: int, batch_shape=None
) -> ScaleKernel:
    r"""Constructs a Scale-Matern kernel for use as a default covariance module.

    Unlike botorch's stock helper (Gamma(3.0, 6.0) lengthscale and
    Gamma(2.0, 0.15) outputscale), this variant uses a Gamma(1, 9.0) prior
    for the lengthscale and a Gamma(1.0, 0.15) prior for the output scale.
    """

    # Requirement from PoFBoTorch: the predictive std should be larger at
    # points with no observations (hence the modified priors above).

    return ScaleKernel(
        base_kernel=MaternKernel(
            nu=2.5,
            ard_num_dims=ard_num_dims,
            batch_shape=batch_shape,
            # botorch default was: lengthscale_prior=GammaPrior(3.0, 6.0),
            lengthscale_prior=GammaPrior(1, 9.0),
        ),
        batch_shape=batch_shape,
        # botorch default was: outputscale_prior=GammaPrior(2.0, 0.15),
        outputscale_prior=GammaPrior(1.0, 0.15),
    )
87
+
88
+
89
def get_matern_kernel_with_gamma_prior_as_covar_module(
        X: torch.Tensor,
        Y: torch.Tensor,
        nu: float = 2.5,
        lengthscale_prior: GammaPrior | None = None,
        outputscale_prior: GammaPrior | None = None,
) -> ScaleKernel:
    """Build a Scale-Matern covariance module sized for the given data.

    Args:
        X: Training inputs; the size of the last dimension sets the number
            of ARD lengthscales.
        Y: Training outcomes; used only to derive the augmented batch shape
            via ``SingleTaskGP.get_batch_dimensions``.
        nu: Smoothness parameter of the Matern kernel.
        lengthscale_prior: Optional prior; defaults to Gamma(3.0, 6.0)
            (the botorch default).
        outputscale_prior: Optional prior; defaults to Gamma(2.0, 0.15)
            (the botorch default).

    Returns:
        A ``ScaleKernel`` wrapping an ARD ``MaternKernel``.
    """
    # Only the augmented batch shape is needed; the input batch shape
    # returned first by get_batch_dimensions() is discarded.
    _, batch_shape = SingleTaskGP.get_batch_dimensions(X, Y)
    ard_num_dims = X.shape[-1]

    return ScaleKernel(
        base_kernel=MaternKernel(
            nu=nu,
            ard_num_dims=ard_num_dims,
            batch_shape=batch_shape,
            lengthscale_prior=lengthscale_prior or GammaPrior(3.0, 6.0),
        ),
        batch_shape=batch_shape,
        outputscale_prior=outputscale_prior or GammaPrior(2.0, 0.15),
    )
111
+
112
+
113
def setup_gp(X, Y, bounds, observation_noise, lh_class=None, covar_module=None):
    """Construct and fit a ``SingleTaskGP`` on the given training data.

    Inputs are normalized to ``bounds`` and outcomes are standardized;
    ``observation_noise`` is interpreted by ``setup_yvar_and_standardizer``.
    ``lh_class`` defaults to ``ExactMarginalLogLikelihood``; a custom
    ``covar_module`` may be supplied. Returns the fitted model.
    """
    if lh_class is None:
        lh_class = ExactMarginalLogLikelihood

    # Resolve the noise setting and the outcome/input transforms.
    train_yvar, outcome_transform = setup_yvar_and_standardizer(Y, observation_noise)
    input_transform = Normalize(d=X.shape[-1], bounds=bounds)

    gp = SingleTaskGP(
        X,
        Y,
        train_Yvar=train_yvar,
        input_transform=input_transform,
        outcome_transform=outcome_transform,
        covar_module=covar_module,
    )

    # Fit the hyperparameters by maximizing the marginal log likelihood.
    mll = lh_class(gp.likelihood, gp)
    fit_gpytorch_mll(mll)

    return gp
@@ -0,0 +1,142 @@
1
+ #!/usr/bin/env python3
2
+
3
+ # This file is derived from the one of botorch.
4
+
5
+ # Copyright (c) Meta Platforms, Inc. and affiliates.
6
+ #
7
+ # This source code is licensed under the MIT license found in the
8
+ # LICENSE file in the root directory of this source tree.
9
+
10
+ r"""
11
+ Pre-packaged kernels for bayesian optimization, including a Scale/Matern
12
+ kernel that is well-suited to low-dimensional high-noise problems, and
13
+ a dimension-agnostic RBF kernel without outputscale.
14
+
15
+ References:
16
+
17
+ .. [Hvarfner2024vanilla]
18
+ C. Hvarfner, E. O. Hellsten, L. Nardi,
19
+ Vanilla Bayesian Optimization Performs Great in High Dimensions.
20
+ In International Conference on Machine Learning, 2024.
21
+ """
22
+
23
+ from math import log, sqrt
24
+ from typing import Optional, Sequence, Union
25
+
26
+ import torch
27
+ from gpytorch.constraints.constraints import GreaterThan
28
+ from gpytorch.kernels import MaternKernel, RBFKernel, ScaleKernel
29
+ from gpytorch.likelihoods.gaussian_likelihood import GaussianLikelihood
30
+ from gpytorch.priors.torch_priors import GammaPrior, LogNormalPrior
31
+
32
+ MIN_INFERRED_NOISE_LEVEL = 1e-4
33
+ SQRT2 = sqrt(2)
34
+ SQRT3 = sqrt(3)
35
+
36
+
37
+ # def get_matern_kernel_with_gamma_prior(
38
+ # ard_num_dims: int, batch_shape: Optional[torch.Size] = None
39
+ # ) -> ScaleKernel:
40
+ # r"""Constructs the Scale-Matern kernel that is used by default by
41
+ # several models. This uses a Gamma(3.0, 6.0) prior for the lengthscale
42
+ # and a Gamma(2.0, 0.15) prior for the output scale.
43
+ # """
44
+ # return ScaleKernel(
45
+ # base_kernel=MaternKernel(
46
+ # nu=2.5,
47
+ # ard_num_dims=ard_num_dims,
48
+ # batch_shape=batch_shape,
49
+ # lengthscale_prior=GammaPrior(3.0, 6.0),
50
+ # ),
51
+ # batch_shape=batch_shape,
52
+ # outputscale_prior=GammaPrior(2.0, 0.15),
53
+ # )
54
+ #
55
+ #
56
+ # def get_gaussian_likelihood_with_gamma_prior(
57
+ # batch_shape: Optional[torch.Size] = None,
58
+ # ) -> GaussianLikelihood:
59
+ # r"""Constructs the GaussianLikelihood that is used by default by
60
+ # several models. This uses a Gamma(1.1, 0.05) prior and constrains the
61
+ # noise level to be greater than MIN_INFERRED_NOISE_LEVEL (=1e-4).
62
+ # """
63
+ # batch_shape = torch.Size() if batch_shape is None else batch_shape
64
+ # noise_prior = GammaPrior(1.1, 0.05)
65
+ # noise_prior_mode = (noise_prior.concentration - 1) / noise_prior.rate
66
+ # return GaussianLikelihood(
67
+ # noise_prior=noise_prior,
68
+ # batch_shape=batch_shape,
69
+ # noise_constraint=GreaterThan(
70
+ # MIN_INFERRED_NOISE_LEVEL,
71
+ # transform=None,
72
+ # initial_value=noise_prior_mode,
73
+ # ),
74
+ # )
75
+ #
76
+ #
77
+ # def get_gaussian_likelihood_with_lognormal_prior(
78
+ # batch_shape: Optional[torch.Size] = None,
79
+ # ) -> GaussianLikelihood:
80
+ # """Return Gaussian likelihood with a LogNormal(-4.0, 1.0) prior.
81
+ # This prior is based on [Hvarfner2024vanilla]_.
82
+ #
83
+ # Args:
84
+ # batch_shape: Batch shape for the likelihood.
85
+ #
86
+ # Returns:
87
+ # GaussianLikelihood with LogNormal(-4.0, 1.0) prior and constrains the
88
+ # noise level to be greater than MIN_INFERRED_NOISE_LEVEL (=1e-4).
89
+ # """
90
+ # batch_shape = torch.Size() if batch_shape is None else batch_shape
91
+ # noise_prior = LogNormalPrior(loc=-4.0, scale=1.0)
92
+ # return GaussianLikelihood(
93
+ # noise_prior=noise_prior,
94
+ # batch_shape=batch_shape,
95
+ # noise_constraint=GreaterThan(
96
+ # MIN_INFERRED_NOISE_LEVEL,
97
+ # transform=None,
98
+ # initial_value=noise_prior.mode,
99
+ # ),
100
+ # )
101
+
102
+
103
def get_covar_module_with_dim_scaled_prior_extension(
    ard_num_dims: int,
    batch_shape: Optional[torch.Size] = None,
    use_rbf_kernel: bool = True,
    active_dims: Optional[Sequence[int]] = None,
    loc_coef: float = 1.,
    scale_coef: float = 1.,
) -> Union[MaternKernel, RBFKernel]:
    """Returns an RBF or Matern kernel with dimension-scaled priors
    from [Hvarfner2024vanilla]_.

    Args:
        ard_num_dims: Number of feature dimensions for ARD.
        batch_shape: Batch shape for the covariance module.
        use_rbf_kernel: Whether to use an RBF kernel. If False, uses a
            Matern kernel.
        active_dims: The set of input dimensions to compute the covariances
            on. By default, the covariance is computed using the full input
            tensor. Set this if you'd like to ignore certain dimensions.
        loc_coef (float): The coefficient of loc of LogNormalPrior.
        scale_coef (float): The coefficient of scale of LogNormalPrior.

    Returns:
        A Kernel constructed according to the given arguments. The prior is
        constrained to have lengthscales larger than 0.025 for numerical
        stability.
    """
    if use_rbf_kernel:
        kernel_class = RBFKernel
    else:
        kernel_class = MaternKernel

    # Dimension-scaled LogNormal prior, optionally rescaled via the coefs.
    prior_loc = (SQRT2 + log(ard_num_dims) * 0.5) * loc_coef
    prior = LogNormalPrior(loc=prior_loc, scale=SQRT3 * scale_coef)

    # Floor the lengthscale at 2.5e-2 for numerical stability.
    constraint = GreaterThan(2.5e-2, transform=None, initial_value=prior.mode)

    return kernel_class(
        ard_num_dims=ard_num_dims,
        batch_shape=batch_shape,
        lengthscale_prior=prior,
        lengthscale_constraint=constraint,
        # pyre-ignore[6] GPyTorch type is unnecessarily restrictive.
        active_dims=active_dims,
    )