pyfemtet 0.9.5__py3-none-any.whl → 1.0.0b0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyfemtet might be problematic.
- pyfemtet/__init__.py +6 -1
- pyfemtet/_i18n/1. make_pot_and_update_po.bat +8 -0
- pyfemtet/_i18n/2. build_mo.bat +5 -0
- pyfemtet/_i18n/__init__.py +4 -0
- pyfemtet/_i18n/babel.cfg +2 -0
- pyfemtet/_i18n/i18n.py +37 -0
- pyfemtet/_i18n/locales/ja/LC_MESSAGES/messages.mo +0 -0
- pyfemtet/_i18n/locales/ja/LC_MESSAGES/messages.po +1020 -0
- pyfemtet/_i18n/locales/messages.pot +987 -0
- pyfemtet/{_message → _i18n}/messages.py +128 -41
- pyfemtet/_util/closing.py +19 -0
- pyfemtet/_util/dask_util.py +89 -7
- pyfemtet/_util/df_util.py +29 -0
- pyfemtet/_util/excel_macro_util.py +8 -3
- pyfemtet/_util/excel_parse_util.py +43 -23
- pyfemtet/_util/femtet_access_inspection.py +120 -0
- pyfemtet/{_femtet_config_util/autosave.py → _util/femtet_autosave.py} +7 -0
- pyfemtet/_util/femtet_exit.py +105 -0
- pyfemtet/_util/femtet_version.py +20 -0
- pyfemtet/_util/helper.py +94 -0
- pyfemtet/_util/process_util.py +107 -0
- pyfemtet/_util/str_enum.py +44 -0
- pyfemtet/core.py +15 -47
- pyfemtet/dispatch_extensions/__init__.py +8 -11
- pyfemtet/dispatch_extensions/_impl.py +42 -198
- pyfemtet/logger/__init__.py +8 -1
- pyfemtet/logger/_impl.py +5 -6
- pyfemtet/opt/__init__.py +3 -17
- pyfemtet/opt/exceptions.py +45 -0
- pyfemtet/opt/femopt.py +608 -0
- pyfemtet/opt/history/__init__.py +11 -0
- pyfemtet/opt/history/_history.py +1404 -0
- pyfemtet/opt/history/_hypervolume.py +169 -0
- pyfemtet/opt/history/_optimality.py +79 -0
- pyfemtet/opt/interface/__init__.py +17 -24
- pyfemtet/opt/interface/_base_interface.py +222 -0
- pyfemtet/opt/interface/_excel_interface/__init__.py +3 -0
- pyfemtet/opt/interface/_excel_interface/debug-excel-interface.xlsm +0 -0
- pyfemtet/opt/interface/_excel_interface/excel_interface.py +999 -0
- pyfemtet/opt/interface/_femtet_interface/__init__.py +3 -0
- pyfemtet/opt/interface/{_femtet_parametric.py → _femtet_interface/_femtet_parametric.py} +20 -12
- pyfemtet/opt/interface/{_femtet.py → _femtet_interface/femtet_interface.py} +505 -349
- pyfemtet/opt/interface/_femtet_with_nx_interface/__init__.py +5 -0
- pyfemtet/opt/interface/_femtet_with_nx_interface/femtet_with_nx_interface.py +230 -0
- pyfemtet/opt/interface/_femtet_with_nx_interface/model1.prt +0 -0
- pyfemtet/opt/interface/_femtet_with_nx_interface/model1.x_t +98 -0
- pyfemtet/opt/interface/{_femtet_with_nx → _femtet_with_nx_interface}/update_model.py +1 -3
- pyfemtet/opt/interface/_femtet_with_solidworks/__init__.py +5 -0
- pyfemtet/opt/interface/_femtet_with_solidworks/femtet_with_solidworks_interface.py +122 -0
- pyfemtet/opt/interface/_solidworks_interface/__init__.py +5 -0
- pyfemtet/opt/interface/_solidworks_interface/solidworks_interface.py +206 -0
- pyfemtet/opt/interface/_surrogate_model_interface/__init__.py +8 -0
- pyfemtet/opt/interface/_surrogate_model_interface/base_surrogate_interface.py +150 -0
- pyfemtet/opt/interface/_surrogate_model_interface/botorch_interface.py +298 -0
- pyfemtet/opt/interface/_surrogate_model_interface/debug-pof-botorch.reccsv +18 -0
- pyfemtet/opt/interface/_with_excel_settings/__init__.py +61 -0
- pyfemtet/opt/interface/_with_excel_settings/with_excel_settings.py +134 -0
- pyfemtet/opt/meta_script/YAML_Generator.xlsm +0 -0
- pyfemtet/opt/meta_script/__main__.py +58 -36
- pyfemtet/opt/optimizer/__init__.py +7 -9
- pyfemtet/opt/optimizer/_base_optimizer.py +885 -0
- pyfemtet/opt/optimizer/optuna_optimizer/__init__.py +9 -0
- pyfemtet/opt/optimizer/optuna_optimizer/_optuna_attribute.py +73 -0
- pyfemtet/opt/optimizer/optuna_optimizer/_optuna_optimizer.py +678 -0
- pyfemtet/opt/optimizer/optuna_optimizer/_pof_botorch/__init__.py +7 -0
- pyfemtet/opt/optimizer/optuna_optimizer/_pof_botorch/debug-pof-botorch.reccsv +18 -0
- pyfemtet/opt/optimizer/optuna_optimizer/_pof_botorch/enable_nonlinear_constraint.py +244 -0
- pyfemtet/opt/optimizer/optuna_optimizer/_pof_botorch/pof_botorch_sampler.py +1249 -0
- pyfemtet/opt/optimizer/optuna_optimizer/wat_ex14_parametric_jp.femprj +0 -0
- pyfemtet/opt/optimizer/scipy_optimizer/__init__.py +1 -0
- pyfemtet/opt/optimizer/scipy_optimizer/_scipy_optimizer.py +364 -0
- pyfemtet/opt/prediction/__init__.py +7 -0
- pyfemtet/opt/prediction/_botorch_utils.py +133 -0
- pyfemtet/opt/prediction/_gpytorch_modules_extension.py +142 -0
- pyfemtet/opt/prediction/_helper.py +155 -0
- pyfemtet/opt/prediction/_model.py +118 -0
- pyfemtet/opt/problem/problem.py +304 -0
- pyfemtet/opt/problem/variable_manager/__init__.py +20 -0
- pyfemtet/opt/problem/variable_manager/_string_as_expression.py +115 -0
- pyfemtet/opt/problem/variable_manager/_variable_manager.py +295 -0
- pyfemtet/opt/visualization/history_viewer/__main__.py +5 -0
- pyfemtet/opt/visualization/{_base.py → history_viewer/_base_application.py} +18 -13
- pyfemtet/opt/visualization/history_viewer/_common_pages.py +150 -0
- pyfemtet/opt/visualization/{_complex_components → history_viewer/_complex_components}/alert_region.py +10 -5
- pyfemtet/opt/visualization/{_complex_components → history_viewer/_complex_components}/control_femtet.py +16 -13
- pyfemtet/opt/visualization/{_complex_components → history_viewer/_complex_components}/main_graph.py +117 -47
- pyfemtet/opt/visualization/{_complex_components → history_viewer/_complex_components}/pm_graph.py +159 -138
- pyfemtet/opt/visualization/history_viewer/_process_monitor/_application.py +173 -0
- pyfemtet/opt/visualization/history_viewer/_process_monitor/_pages.py +291 -0
- pyfemtet/opt/visualization/{_wrapped_components → history_viewer/_wrapped_components}/dbc.py +1 -1
- pyfemtet/opt/visualization/{_wrapped_components → history_viewer/_wrapped_components}/dcc.py +1 -1
- pyfemtet/opt/visualization/{_wrapped_components → history_viewer/_wrapped_components}/html.py +1 -1
- pyfemtet/opt/visualization/history_viewer/result_viewer/__main__.py +5 -0
- pyfemtet/opt/visualization/{result_viewer/application.py → history_viewer/result_viewer/_application.py} +6 -6
- pyfemtet/opt/visualization/{result_viewer/pages.py → history_viewer/result_viewer/_pages.py} +106 -82
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08.csv +18 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08.db +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8.log +45 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_1.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_1.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_10.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_10.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_11.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_11.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_12.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_12.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_13.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_13.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_14.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_14.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_15.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_15.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_16.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_16.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_17.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_17.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_18.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_18.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_19.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_19.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_2.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_2.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_20.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_20.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_3.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_3.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.bgr +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.bnd +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.btr +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.mtl +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_4.prm +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_5.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_5.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_6.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_6.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_7.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_7.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_8.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_8.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_9.jpg +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.Results/ex8_trial_9.pdt +0 -0
- pyfemtet/opt/visualization/history_viewer/result_viewer/tutorial_files/tutorial_gau_ex08_parametric.femprj +0 -0
- pyfemtet/opt/visualization/plotter/main_figure_creator.py +536 -0
- pyfemtet/opt/visualization/plotter/pm_graph_creator.py +359 -0
- pyfemtet/opt/worker_status.py +120 -0
- {pyfemtet-0.9.5.dist-info → pyfemtet-1.0.0b0.dist-info}/METADATA +23 -24
- pyfemtet-1.0.0b0.dist-info/RECORD +172 -0
- pyfemtet-1.0.0b0.dist-info/entry_points.txt +3 -0
- pyfemtet/_femtet_config_util/exit.py +0 -59
- pyfemtet/_message/1. make_pot.bat +0 -11
- pyfemtet/_message/2. make_mo.bat +0 -6
- pyfemtet/_message/__init__.py +0 -5
- pyfemtet/_message/babel.cfg +0 -2
- pyfemtet/_message/locales/ja/LC_MESSAGES/messages.mo +0 -0
- pyfemtet/_message/locales/ja/LC_MESSAGES/messages.po +0 -570
- pyfemtet/_message/locales/messages.pot +0 -551
- pyfemtet/_warning.py +0 -87
- pyfemtet/brep/_impl.py +0 -18
- pyfemtet/opt/_femopt.py +0 -1007
- pyfemtet/opt/_femopt_core.py +0 -1169
- pyfemtet/opt/_test_utils/control_femtet.py +0 -39
- pyfemtet/opt/_test_utils/hyper_sphere.py +0 -24
- pyfemtet/opt/_test_utils/record_history.py +0 -130
- pyfemtet/opt/advanced_samples/excel_ui/(ref) original_project.femprj +0 -0
- pyfemtet/opt/advanced_samples/excel_ui/femtet-macro.xlsm +0 -0
- pyfemtet/opt/advanced_samples/excel_ui/pyfemtet-core.py +0 -291
- pyfemtet/opt/advanced_samples/excel_ui/test-pyfemtet-core.cmd +0 -22
- pyfemtet/opt/advanced_samples/restart/gal_ex13_parametric.femprj +0 -0
- pyfemtet/opt/advanced_samples/restart/gal_ex13_parametric_restart.py +0 -99
- pyfemtet/opt/advanced_samples/restart/gal_ex13_parametric_restart_jp.py +0 -102
- pyfemtet/opt/advanced_samples/surrogate_model/gal_ex13_create_training_data.py +0 -60
- pyfemtet/opt/advanced_samples/surrogate_model/gal_ex13_create_training_data_jp.py +0 -57
- pyfemtet/opt/advanced_samples/surrogate_model/gal_ex13_optimize_with_surrogate.py +0 -100
- pyfemtet/opt/advanced_samples/surrogate_model/gal_ex13_optimize_with_surrogate_jp.py +0 -90
- pyfemtet/opt/advanced_samples/surrogate_model/gal_ex13_parametric.femprj +0 -0
- pyfemtet/opt/interface/_base.py +0 -101
- pyfemtet/opt/interface/_excel_interface.py +0 -984
- pyfemtet/opt/interface/_femtet_excel.py +0 -141
- pyfemtet/opt/interface/_femtet_with_nx/__init__.py +0 -3
- pyfemtet/opt/interface/_femtet_with_nx/_interface.py +0 -178
- pyfemtet/opt/interface/_femtet_with_sldworks.py +0 -298
- pyfemtet/opt/interface/_surrogate/__init__.py +0 -5
- pyfemtet/opt/interface/_surrogate/_base.py +0 -129
- pyfemtet/opt/interface/_surrogate/_chaospy.py +0 -71
- pyfemtet/opt/interface/_surrogate/_singletaskgp.py +0 -71
- pyfemtet/opt/interface/_surrogate_excel.py +0 -102
- pyfemtet/opt/optimizer/_base.py +0 -376
- pyfemtet/opt/optimizer/_optuna/_botorch_patch/enable_nonlinear_constraint.py +0 -220
- pyfemtet/opt/optimizer/_optuna/_optuna.py +0 -434
- pyfemtet/opt/optimizer/_optuna/_pof_botorch.py +0 -1914
- pyfemtet/opt/optimizer/_scipy.py +0 -159
- pyfemtet/opt/optimizer/_scipy_scalar.py +0 -127
- pyfemtet/opt/optimizer/parameter.py +0 -113
- pyfemtet/opt/prediction/_base.py +0 -61
- pyfemtet/opt/prediction/single_task_gp.py +0 -119
- pyfemtet/opt/samples/femprj_sample/ParametricIF.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample/ParametricIF.py +0 -29
- pyfemtet/opt/samples/femprj_sample/ParametricIF_test_result.reccsv +0 -13
- pyfemtet/opt/samples/femprj_sample/cad_ex01_NX.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample/cad_ex01_NX.prt +0 -0
- pyfemtet/opt/samples/femprj_sample/cad_ex01_NX.py +0 -135
- pyfemtet/opt/samples/femprj_sample/cad_ex01_NX_test_result.reccsv +0 -23
- pyfemtet/opt/samples/femprj_sample/cad_ex01_SW.SLDPRT +0 -0
- pyfemtet/opt/samples/femprj_sample/cad_ex01_SW.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample/cad_ex01_SW.py +0 -131
- pyfemtet/opt/samples/femprj_sample/cad_ex01_SW_test_result.reccsv +0 -23
- pyfemtet/opt/samples/femprj_sample/constrained_pipe.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample/constrained_pipe.py +0 -96
- pyfemtet/opt/samples/femprj_sample/constrained_pipe_test_result.reccsv +0 -13
- pyfemtet/opt/samples/femprj_sample/gal_ex58_parametric.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample/gal_ex58_parametric.py +0 -74
- pyfemtet/opt/samples/femprj_sample/gal_ex58_parametric_test_result.reccsv +0 -13
- pyfemtet/opt/samples/femprj_sample/gau_ex08_parametric.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample/gau_ex08_parametric.py +0 -58
- pyfemtet/opt/samples/femprj_sample/gau_ex08_parametric_test_result.reccsv +0 -23
- pyfemtet/opt/samples/femprj_sample/gau_ex12_parametric.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample/gau_ex12_parametric.py +0 -52
- pyfemtet/opt/samples/femprj_sample/her_ex40_parametric.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample/her_ex40_parametric.py +0 -138
- pyfemtet/opt/samples/femprj_sample/her_ex40_parametric_test_result.reccsv +0 -18
- pyfemtet/opt/samples/femprj_sample/paswat_ex1_parametric.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample/paswat_ex1_parametric.py +0 -60
- pyfemtet/opt/samples/femprj_sample/paswat_ex1_parametric_parallel.py +0 -61
- pyfemtet/opt/samples/femprj_sample/paswat_ex1_parametric_test_result.reccsv +0 -18
- pyfemtet/opt/samples/femprj_sample/wat_ex14_parametric.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample/wat_ex14_parametric.py +0 -58
- pyfemtet/opt/samples/femprj_sample/wat_ex14_parametric_parallel.py +0 -58
- pyfemtet/opt/samples/femprj_sample/wat_ex14_parametric_test_result.reccsv +0 -18
- pyfemtet/opt/samples/femprj_sample_jp/ParametricIF_jp.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample_jp/ParametricIF_jp.py +0 -29
- pyfemtet/opt/samples/femprj_sample_jp/cad_ex01_NX_jp.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample_jp/cad_ex01_NX_jp.py +0 -129
- pyfemtet/opt/samples/femprj_sample_jp/cad_ex01_SW_jp.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample_jp/cad_ex01_SW_jp.py +0 -125
- pyfemtet/opt/samples/femprj_sample_jp/constrained_pipe_jp.py +0 -93
- pyfemtet/opt/samples/femprj_sample_jp/gal_ex58_parametric_jp.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample_jp/gal_ex58_parametric_jp.py +0 -70
- pyfemtet/opt/samples/femprj_sample_jp/gau_ex08_parametric_jp.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample_jp/gau_ex08_parametric_jp.py +0 -57
- pyfemtet/opt/samples/femprj_sample_jp/gau_ex12_parametric_jp.py +0 -52
- pyfemtet/opt/samples/femprj_sample_jp/her_ex40_parametric_jp.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample_jp/her_ex40_parametric_jp.py +0 -138
- pyfemtet/opt/samples/femprj_sample_jp/paswat_ex1_parametric_jp.femprj +0 -0
- pyfemtet/opt/samples/femprj_sample_jp/paswat_ex1_parametric_jp.py +0 -58
- pyfemtet/opt/samples/femprj_sample_jp/paswat_ex1_parametric_parallel_jp.py +0 -59
- pyfemtet/opt/samples/femprj_sample_jp/wat_ex14_parametric_jp.py +0 -56
- pyfemtet/opt/samples/femprj_sample_jp/wat_ex14_parametric_parallel_jp.py +0 -56
- pyfemtet/opt/visualization/_complex_components/main_figure_creator.py +0 -332
- pyfemtet/opt/visualization/_complex_components/pm_graph_creator.py +0 -201
- pyfemtet/opt/visualization/_process_monitor/application.py +0 -226
- pyfemtet/opt/visualization/_process_monitor/pages.py +0 -406
- pyfemtet/opt/visualization/_wrapped_components/__init__.py +0 -0
- pyfemtet/opt/visualization/result_viewer/__init__.py +0 -0
- pyfemtet-0.9.5.dist-info/RECORD +0 -158
- pyfemtet-0.9.5.dist-info/entry_points.txt +0 -3
- /pyfemtet/{_femtet_config_util → opt/problem}/__init__.py +0 -0
- /pyfemtet/{brep → opt/visualization/history_viewer}/__init__.py +0 -0
- /pyfemtet/opt/{_test_utils → visualization/history_viewer/_complex_components}/__init__.py +0 -0
- /pyfemtet/opt/{optimizer/_optuna → visualization/history_viewer/_process_monitor}/__init__.py +0 -0
- /pyfemtet/opt/{optimizer/_optuna/_botorch_patch → visualization/history_viewer/_wrapped_components}/__init__.py +0 -0
- /pyfemtet/opt/visualization/{_wrapped_components → history_viewer/_wrapped_components}/str_enum.py +0 -0
- /pyfemtet/opt/visualization/{result_viewer → history_viewer/result_viewer}/.gitignore +0 -0
- /pyfemtet/opt/visualization/{_complex_components → history_viewer/result_viewer}/__init__.py +0 -0
- /pyfemtet/opt/visualization/{_process_monitor → plotter}/__init__.py +0 -0
- /pyfemtet/opt/{samples/femprj_sample_jp/wat_ex14_parametric_jp.femprj → wat_ex14_parametric_jp.femprj} +0 -0
- {pyfemtet-0.9.5.dist-info → pyfemtet-1.0.0b0.dist-info}/LICENSE +0 -0
- {pyfemtet-0.9.5.dist-info → pyfemtet-1.0.0b0.dist-info}/LICENSE_THIRD_PARTY.txt +0 -0
- {pyfemtet-0.9.5.dist-info → pyfemtet-1.0.0b0.dist-info}/WHEEL +0 -0
Binary file (no text diff shown)

pyfemtet/opt/optimizer/scipy_optimizer/__init__.py (new file)

@@ -0,0 +1 @@
from ._scipy_optimizer import ScipyOptimizer
pyfemtet/opt/optimizer/scipy_optimizer/_scipy_optimizer.py (new file)

@@ -0,0 +1,364 @@
from __future__ import annotations

from typing import Callable

from contextlib import suppress

import numpy as np
from scipy.optimize import minimize, OptimizeResult
from scipy.optimize import NonlinearConstraint

from pyfemtet._i18n import Msg, _
from pyfemtet._util.closing import closing
from pyfemtet.opt.problem.variable_manager import *
from pyfemtet.opt.problem.problem import *
from pyfemtet.opt.exceptions import *
from pyfemtet.logger import get_module_logger

from pyfemtet.opt.optimizer._base_optimizer import *


__all__ = [
    'ScipyOptimizer',
]


logger = get_module_logger('opt.optimizer', False)


class _ScipyCallback:

    def __init__(self, opt: ScipyOptimizer):
        self.opt = opt

    def __call__(self, xk: np.ndarray = None, intermediate_result: OptimizeResult = None):
        pass


class ScipyOptimizer(AbstractOptimizer):

    _timeout: None = None
    _n_trials: None = None

    def __init__(self, method: str = None, tol=None):
        super().__init__()

        self.method = method
        self.tol = tol
        self.options = {}
        self.constraint_enhancement = 0.001
        self.constraint_scaling = 1.

    @property
    def timeout(self):
        return self._timeout

    @timeout.setter
    def timeout(self, value):
        if value is not None:
            raise NotImplementedError(_(
                en_message='`ScipyOptimizer` cannot use timeout.',
                jp_message='`ScipyOptimizer` では timeout は指定できません。'
            ))

    @property
    def n_trials(self):
        return self._n_trials

    @n_trials.setter
    def n_trials(self, value):
        if value is not None:
            raise NotImplementedError(_(
                en_message='`ScipyOptimizer` cannot use n_trials.',
                jp_message='`ScipyOptimizer` では n_trials は指定できません。'
            ))

    def _get_x0(self) -> np.ndarray:

        # get the parameters
        params: dict[str, Parameter] = self.variable_manager.get_variables(
            filter='parameter', format='raw'
        )

        for param in params.values():
            if isinstance(param, CategoricalVariable):
                raise NotImplementedError(_(
                    en_message='Scipy can optimize only numerical parameters.',
                    jp_message='Scipy では数値パラメータのみ最適化できます。'
                ))

        # exclude parameters whose fix flag is True
        x0 = np.array([p.value for p in params.values() if not p.properties.get('fix', False)])

        return x0

    def _warn_bounds_for_nelder_mead(self) -> None:
        # https://github.com/scipy/scipy/issues/19991

        if self.method.lower() != 'nelder-mead':
            return

        bounds = self._get_scipy_bounds()
        if bounds is None:
            return

        x0 = self._get_x0()
        if (np.allclose(x0, bounds[:, 0])
                or np.allclose(x0, bounds[:, 1])):
            logger.warning(Msg.WARN_SCIPY_NELDER_MEAD_BOUND)

    def _setup_before_parallel(self):

        if not self._done_setup_before_parallel:

            super()._setup_before_parallel()  # flag inside

            self._warn_bounds_for_nelder_mead()

    def _get_scipy_bounds(self) -> np.ndarray | None:

        has_any_bound = False

        params: dict[str, Parameter] = self.variable_manager.get_variables(filter='parameter')
        bounds = []
        for param in params.values():
            assert isinstance(param, NumericParameter)
            bounds.append([
                param.lower_bound or -np.inf,
                param.upper_bound or np.inf,
            ])
            has_any_bound += (
                (param.lower_bound is not None)
                or (param.upper_bound is not None))

        if has_any_bound:
            bounds = np.array(bounds)
        else:
            bounds = None

        return bounds

    def _update_vm_by_xk(self, xk):

        vm = self.variable_manager

        # check interruption
        self._check_and_raise_interruption()

        # parameter suggestion
        params = vm.get_variables(filter='parameter')
        xk_list = list(xk)
        for name, prm in params.items():

            if prm.properties.get('fix', False):  # default is False
                continue

            if isinstance(prm, NumericParameter):
                prm.value = xk_list.pop(0)

            elif isinstance(prm, CategoricalParameter):
                raise NotImplementedError(Msg.ERR_SCIPY_NOT_IMPLEMENT_CATEGORICAL)

            else:
                raise NotImplementedError
        assert len(xk_list) == 0

        # evaluate expressions
        vm.eval_expressions()

        # check interruption
        self._check_and_raise_interruption()

    def _scipy_constraint_fun(self, xk, cns: Constraint):

        self._update_vm_by_xk(xk)

        # update fem (very slow!)
        if cns.using_fem:
            logger.warning(Msg.WARN_USING_FEM_IN_NLC)
            pass_to_fem = self.variable_manager.get_variables(filter='pass_to_fem')
            self.fem.update_parameter(pass_to_fem)

        return cns.eval(self.fem)

    def _get_scipy_constraints(self) -> (
            None
            | list[NonlinearConstraint | dict]
    ):
        if len(self.constraints) == 0:
            return None

        if self.method is None:
            method = 'SLSQP'
        else:
            method = self.method
        assert method.lower() in ('cobyla', 'cobyqa', 'slsqp', 'trust-constr')

        out = []
        for cns in self.constraints.values():

            # use Constraint object
            if method.lower() in ('trust-constr', 'cobyqa'):

                if cns.hard:
                    raise NotImplementedError(
                        Msg.F_ERR_SCIPY_METHOD_NOT_IMPLEMENT_HARD_CONSTRAINT(
                            method
                        )
                    )

                # the violation must be computed explicitly so that constraint_scaling can be applied
                # TODO: if both lower and upper bounds are given, the function is evaluated twice; avoid this
                if cns.lower_bound is not None:
                    scipy_cns = NonlinearConstraint(
                        fun=(
                            lambda xk_, cns_=cns:
                            (
                                cns.lower_bound
                                - self._scipy_constraint_fun(xk_, cns_)
                            ) * self.constraint_scaling
                            + self.constraint_enhancement
                        ),
                        lb=-np.inf,
                        ub=0,
                        keep_feasible=cns.hard,
                        finite_diff_rel_step=self.options.get('finite_diff_rel_step', None),
                    )
                    out.append(scipy_cns)
                if cns.upper_bound is not None:
                    scipy_cns = NonlinearConstraint(
                        fun=(
                            lambda xk_, cns_=cns:
                            (
                                self._scipy_constraint_fun(xk_, cns_)
                                - cns.upper_bound
                            ) * self.constraint_scaling
                            + self.constraint_enhancement
                        ),
                        lb=-np.inf,
                        ub=0,
                        keep_feasible=cns.hard,
                        finite_diff_rel_step=self.options.get('finite_diff_rel_step', None),
                    )
                    out.append(scipy_cns)

                # scipy_cns = NonlinearConstraint(
                #     fun=lambda xk_, cns_=cns: self._scipy_constraint_fun(xk_, cns_),
                #     lb=(cns.lower_bound or -np.inf) + self.constraint_enhancement,
                #     ub=(cns.upper_bound or np.inf) - self.constraint_enhancement,
                #     keep_feasible=cns.hard,
                #     finite_diff_rel_step=self.options.get('finite_diff_rel_step', None),
                # )
                # out.append(scipy_cns)

            # use dict object
            else:

                if method.lower() == 'slsqp' and not cns.hard:
                    logger.warning(Msg.WARN_SCIPY_SLSQP_CANNOT_PROCESS_SOFT_CONSTRAINT)

                if method.lower() == 'cobyla' and cns.hard:
                    logger.error(
                        Msg.F_ERR_SCIPY_METHOD_NOT_IMPLEMENT_HARD_CONSTRAINT(
                            method))
                    raise NotImplementedError(
                        Msg.F_ERR_SCIPY_METHOD_NOT_IMPLEMENT_HARD_CONSTRAINT(
                            method))

                if cns.lower_bound is not None:

                    scipy_cns = dict(
                        type='ineq',
                        fun=(lambda xk_, cns_=cns:
                             (
                                 self._scipy_constraint_fun(xk_, cns_)
                                 - cns_.lower_bound
                             ) * self.constraint_scaling
                             - self.constraint_enhancement),
                    )
                    out.append(scipy_cns)

                if cns.upper_bound is not None:
                    scipy_cns = dict(
                        type='ineq',
                        fun=(lambda xk_, cns_=cns:
                             (
                                 cns_.upper_bound
                                 - self._scipy_constraint_fun(xk_, cns_)
                             ) * self.constraint_scaling
                             - self.constraint_enhancement),
                    )
                    out.append(scipy_cns)

        return out

    def _get_scipy_callback(self) -> (
            Callable[[OptimizeResult, ...], ...]
            | Callable[[np.ndarray, ...], ...]
    ):
        return _ScipyCallback(self)

    class _SolveSet(AbstractOptimizer._SolveSet):

        def _hard_constraint_handling(self, e: HardConstraintViolation):
            raise NotImplementedError(
                Msg.ERR_SCIPY_HARD_CONSTRAINT_VIOLATION
            ) from e

        def _hidden_constraint_handling(self, e: _HiddenConstraintViolation):
            raise NotImplementedError(
                Msg.ERR_SCIPY_HIDDEN_CONSTRAINT
            ) from e

        def _skip_handling(self, e: SkipSolve):
            raise NotImplementedError(
                Msg.ERR_SCIPY_NOT_IMPLEMENT_SKIP
            ) from e

    def _objective(self, xk: np.ndarray) -> float:

        with self._logging():

            vm = self.variable_manager

            # parameter suggestion
            self._update_vm_by_xk(xk)

            # construct TrialInput
            x = vm.get_variables(filter='parameter')
            x_pass_to_fem: dict[str, SupportedVariableTypes] = vm.get_variables(
                filter='pass_to_fem', format='dict')

            # process main fidelity model
            solve_set = self._get_solve_set()
            f_return = solve_set.solve(x, x_pass_to_fem)
            assert f_return is not None
            dict_y_internal = f_return[1]
            y_internal: float = tuple(dict_y_internal.values())[0]  # type: ignore

            return y_internal

    def run(self):

        # ===== finalize =====
        self._finalize()

        # ===== construct x0 =====
        x0 = self._get_x0()

        # ===== run =====
        with closing(self.fem):

            with self._setting_status(), suppress(InterruptOptimization):

                minimize(
                    self._objective,
                    x0,
                    args=(),
                    method=self.method,
                    bounds=self._get_scipy_bounds(),
                    constraints=self._get_scipy_constraints(),
                    tol=self.tol,
                    callback=self._get_scipy_callback(),
                    options=self.options,
                )
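Note on the constraint mapping above: SciPy's two constraint formats use opposite sign conventions. A NonlinearConstraint (trust-constr, COBYQA) is satisfied when lb <= fun(x) <= ub, so the code expresses each bound as a violation that must stay <= 0, while dict constraints of type 'ineq' (SLSQP, COBYLA) require fun(x) >= 0. The standalone sketch below (toy objective, toy bound values, and a `margin` standing in for `constraint_enhancement`; not part of pyfemtet) illustrates the same mapping of lower <= g(x) <= upper into both forms.

    import numpy as np
    from scipy.optimize import minimize, NonlinearConstraint

    def g(x):
        # toy constraint function: distance from the origin
        return float(np.linalg.norm(x))

    lower, upper = 1.0, 2.0
    margin = 0.001  # plays the role of constraint_enhancement above

    # trust-constr / COBYQA style: violation expressed as fun(x) <= 0
    nlc = [
        NonlinearConstraint(lambda x: (lower - g(x)) + margin, -np.inf, 0.0),
        NonlinearConstraint(lambda x: (g(x) - upper) + margin, -np.inf, 0.0),
    ]

    # SLSQP / COBYLA style: feasibility expressed as fun(x) >= 0
    ineq = [
        dict(type='ineq', fun=lambda x: (g(x) - lower) - margin),
        dict(type='ineq', fun=lambda x: (upper - g(x)) - margin),
    ]

    x0 = np.array([1.5, 0.0])
    print(minimize(lambda x: x @ x, x0, method='trust-constr', constraints=nlc).x)
    print(minimize(lambda x: x @ x, x0, method='SLSQP', constraints=ineq).x)

Both calls should converge to a point just outside the lower bound (norm ≈ 1 + margin), which is exactly the tightening effect `constraint_enhancement` has in the optimizer above.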
pyfemtet/opt/prediction/_botorch_utils.py (new file)

@@ -0,0 +1,133 @@
# import
from __future__ import annotations

from packaging import version

import torch

from gpytorch.mlls import ExactMarginalLogLikelihood
from gpytorch.kernels import MaternKernel, ScaleKernel  # , RBFKernel
from gpytorch.priors.torch_priors import GammaPrior  # , LogNormalPrior
# from gpytorch.constraints.constraints import GreaterThan

from botorch.models import SingleTaskGP
from botorch.models.transforms import Standardize, Normalize

# import fit_gpytorch_mll
import botorch.version
if version.parse(botorch.version.version) < version.parse("0.8.0"):
    # noinspection PyUnresolvedReferences
    from botorch.fit import fit_gpytorch_model as fit_gpytorch_mll

else:
    from botorch.fit import fit_gpytorch_mll


__all__ = [
    'get_standardizer_and_no_noise_train_yvar',
    'setup_yvar_and_standardizer',
    'setup_gp',
    'get_matern_kernel_with_gamma_prior_as_covar_module',
]


def get_standardizer_and_no_noise_train_yvar(Y: torch.Tensor):
    import gpytorch

    standardizer = Standardize(m=Y.shape[-1])
    min_noise = gpytorch.settings.min_fixed_noise.value(Y.dtype)
    standardizer.forward(Y)  # require to un-transform
    _, YVar = standardizer.untransform(Y, min_noise * torch.ones_like(Y))

    return YVar, standardizer


def setup_yvar_and_standardizer(
        Y_: torch.Tensor,
        observation_noise_: str | float | None,
) -> tuple[torch.Tensor | None, Standardize]:

    standardizer_ = None
    train_yvar_ = None
    if isinstance(observation_noise_, str):
        if observation_noise_.lower() == 'no':
            train_yvar_, standardizer_ = get_standardizer_and_no_noise_train_yvar(Y_)
        else:
            raise NotImplementedError
    elif isinstance(observation_noise_, float):
        train_yvar_ = torch.full_like(Y_, observation_noise_)

    standardizer_ = standardizer_ or Standardize(m=Y_.shape[-1])

    return train_yvar_, standardizer_


def _get_matern_kernel_with_gamma_prior(
        ard_num_dims: int, batch_shape=None
) -> ScaleKernel:
    r"""Constructs the Scale-Matern kernel that is used by default by
    several models. This uses a Gamma(3.0, 6.0) prior for the lengthscale
    and a Gamma(2.0, 0.15) prior for the output scale.
    """

    # Requirement from PoFBoTorch: the std should be large at points with no observations

    return ScaleKernel(
        base_kernel=MaternKernel(
            nu=2.5,
            ard_num_dims=ard_num_dims,
            batch_shape=batch_shape,
            # lengthscale_prior=GammaPrior(3.0, 6.0),
            lengthscale_prior=GammaPrior(1, 9.0),
        ),
        batch_shape=batch_shape,
        # outputscale_prior=GammaPrior(2.0, 0.15),
        outputscale_prior=GammaPrior(1.0, 0.15),
    )


def get_matern_kernel_with_gamma_prior_as_covar_module(
        X: torch.Tensor,
        Y: torch.Tensor,
        nu: float = 2.5,
        lengthscale_prior: GammaPrior = None,
        outputscale_prior: GammaPrior = None,
):

    _input_batch_shape, _aug_batch_shape = SingleTaskGP.get_batch_dimensions(X, Y)
    ard_num_dims = X.shape[-1]
    batch_shape = _aug_batch_shape

    return ScaleKernel(
        base_kernel=MaternKernel(
            nu=nu,
            ard_num_dims=ard_num_dims,
            batch_shape=batch_shape,
            lengthscale_prior=lengthscale_prior or GammaPrior(3.0, 6.0),
        ),
        batch_shape=batch_shape,
        outputscale_prior=outputscale_prior or GammaPrior(2.0, 0.15),
    )


def setup_gp(X, Y, bounds, observation_noise, lh_class=None, covar_module=None):

    lh_class = lh_class or ExactMarginalLogLikelihood

    train_yvar_, standardizer_ = setup_yvar_and_standardizer(
        Y, observation_noise
    )

    model_ = SingleTaskGP(
        X,
        Y,
        train_Yvar=train_yvar_,
        input_transform=Normalize(d=X.shape[-1], bounds=bounds),
        outcome_transform=standardizer_,
        covar_module=covar_module,
    )

    mll_ = lh_class(model_.likelihood, model_)
    fit_gpytorch_mll(mll_)

    return model_
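For orientation, here is a minimal sketch of how `setup_gp` above could be exercised on toy data. The data, the bounds, and the private import path (inferred from the file listing above) are illustrative assumptions, not part of the diff.

    import torch

    # private module path inferred from the file listing above
    from pyfemtet.opt.prediction._botorch_utils import setup_gp

    # toy training data in [0, 1]^2; double precision is the usual botorch convention
    train_X = torch.rand(20, 2, dtype=torch.double)
    train_Y = (train_X ** 2).sum(dim=-1, keepdim=True)
    bounds = torch.tensor([[0.0, 0.0], [1.0, 1.0]], dtype=torch.double)

    # observation_noise='no' takes the fixed-minimum-noise branch of setup_yvar_and_standardizer;
    # a float would instead be used as a homoskedastic noise level via torch.full_like
    model = setup_gp(train_X, train_Y, bounds, observation_noise='no')

    # the fitted SingleTaskGP can then be queried as usual
    posterior = model.posterior(torch.rand(5, 2, dtype=torch.double))
    print(posterior.mean.shape, posterior.variance.shape)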
pyfemtet/opt/prediction/_gpytorch_modules_extension.py (new file)

@@ -0,0 +1,142 @@
#!/usr/bin/env python3

# This file is derived from the one of botorch.

# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

r"""
Pre-packaged kernels for bayesian optimization, including a Scale/Matern
kernel that is well-suited to low-dimensional high-noise problems, and
a dimension-agnostic RBF kernel without outputscale.

References:

.. [Hvarfner2024vanilla]
    C. Hvarfner, E. O. Hellsten, L. Nardi,
    Vanilla Bayesian Optimization Performs Great in High Dimensions.
    In International Conference on Machine Learning, 2024.
"""

from math import log, sqrt
from typing import Optional, Sequence, Union

import torch
from gpytorch.constraints.constraints import GreaterThan
from gpytorch.kernels import MaternKernel, RBFKernel, ScaleKernel
from gpytorch.likelihoods.gaussian_likelihood import GaussianLikelihood
from gpytorch.priors.torch_priors import GammaPrior, LogNormalPrior

MIN_INFERRED_NOISE_LEVEL = 1e-4
SQRT2 = sqrt(2)
SQRT3 = sqrt(3)


# def get_matern_kernel_with_gamma_prior(
#     ard_num_dims: int, batch_shape: Optional[torch.Size] = None
# ) -> ScaleKernel:
#     r"""Constructs the Scale-Matern kernel that is used by default by
#     several models. This uses a Gamma(3.0, 6.0) prior for the lengthscale
#     and a Gamma(2.0, 0.15) prior for the output scale.
#     """
#     return ScaleKernel(
#         base_kernel=MaternKernel(
#             nu=2.5,
#             ard_num_dims=ard_num_dims,
#             batch_shape=batch_shape,
#             lengthscale_prior=GammaPrior(3.0, 6.0),
#         ),
#         batch_shape=batch_shape,
#         outputscale_prior=GammaPrior(2.0, 0.15),
#     )
#
#
# def get_gaussian_likelihood_with_gamma_prior(
#     batch_shape: Optional[torch.Size] = None,
# ) -> GaussianLikelihood:
#     r"""Constructs the GaussianLikelihood that is used by default by
#     several models. This uses a Gamma(1.1, 0.05) prior and constrains the
#     noise level to be greater than MIN_INFERRED_NOISE_LEVEL (=1e-4).
#     """
#     batch_shape = torch.Size() if batch_shape is None else batch_shape
#     noise_prior = GammaPrior(1.1, 0.05)
#     noise_prior_mode = (noise_prior.concentration - 1) / noise_prior.rate
#     return GaussianLikelihood(
#         noise_prior=noise_prior,
#         batch_shape=batch_shape,
#         noise_constraint=GreaterThan(
#             MIN_INFERRED_NOISE_LEVEL,
#             transform=None,
#             initial_value=noise_prior_mode,
#         ),
#     )
#
#
# def get_gaussian_likelihood_with_lognormal_prior(
#     batch_shape: Optional[torch.Size] = None,
# ) -> GaussianLikelihood:
#     """Return Gaussian likelihood with a LogNormal(-4.0, 1.0) prior.
#     This prior is based on [Hvarfner2024vanilla]_.
#
#     Args:
#         batch_shape: Batch shape for the likelihood.
#
#     Returns:
#         GaussianLikelihood with LogNormal(-4.0, 1.0) prior and constrains the
#         noise level to be greater than MIN_INFERRED_NOISE_LEVEL (=1e-4).
#     """
#     batch_shape = torch.Size() if batch_shape is None else batch_shape
#     noise_prior = LogNormalPrior(loc=-4.0, scale=1.0)
#     return GaussianLikelihood(
#         noise_prior=noise_prior,
#         batch_shape=batch_shape,
#         noise_constraint=GreaterThan(
#             MIN_INFERRED_NOISE_LEVEL,
#             transform=None,
#             initial_value=noise_prior.mode,
#         ),
#     )


def get_covar_module_with_dim_scaled_prior_extension(
    ard_num_dims: int,
    batch_shape: Optional[torch.Size] = None,
    use_rbf_kernel: bool = True,
    active_dims: Optional[Sequence[int]] = None,
    loc_coef: float = 1.,
    scale_coef: float = 1.,
) -> Union[MaternKernel, RBFKernel]:
    """Returns an RBF or Matern kernel with priors
    from [Hvarfner2024vanilla]_.

    Args:
        ard_num_dims: Number of feature dimensions for ARD.
        batch_shape: Batch shape for the covariance module.
        use_rbf_kernel: Whether to use an RBF kernel. If False, uses a Matern kernel.
        active_dims: The set of input dimensions to compute the covariances on.
            By default, the covariance is computed using the full input tensor.
            Set this if you'd like to ignore certain dimensions.

        loc_coef (float): The coefficient of loc of LogNormalPrior.
        scale_coef (float): The coefficient of scale of LogNormalPrior.

    Returns:
        A Kernel constructed according to the given arguments. The prior is constrained
        to have lengthscales larger than 0.025 for numerical stability.
    """
    base_class = RBFKernel if use_rbf_kernel else MaternKernel
    lengthscale_prior = LogNormalPrior(loc=(SQRT2 + log(ard_num_dims) * 0.5) * loc_coef, scale=SQRT3 * scale_coef)
    base_kernel = base_class(
        ard_num_dims=ard_num_dims,
        batch_shape=batch_shape,
        lengthscale_prior=lengthscale_prior,
        lengthscale_constraint=GreaterThan(
            2.5e-2, transform=None, initial_value=lengthscale_prior.mode
        ),
        # pyre-ignore[6] GPyTorch type is unnecessarily restrictive.
        active_dims=active_dims,
    )
    return base_kernel
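The function above keeps botorch's dimension-scaled lengthscale prior from [Hvarfner2024vanilla]_ but exposes `loc_coef` and `scale_coef` as multipliers on the prior's loc and scale. Because the prior is LogNormal, its median lengthscale is exp(loc) = exp(sqrt(2)) * sqrt(d) when loc_coef = 1, so higher-dimensional inputs get smoother defaults. A small illustrative sketch follows; the private import path is inferred from the file listing above and is an assumption.

    from math import exp, log, sqrt

    # private module path inferred from the file listing above
    from pyfemtet.opt.prediction._gpytorch_modules_extension import (
        get_covar_module_with_dim_scaled_prior_extension,
    )

    # median of the default lengthscale prior as a function of input dimension d
    for d in (2, 8, 32):
        print(d, round(exp((sqrt(2) + 0.5 * log(d)) * 1.0), 2))

    # loc_coef / scale_coef multiply the prior's loc and scale, respectively
    kernel = get_covar_module_with_dim_scaled_prior_extension(
        ard_num_dims=8, use_rbf_kernel=True, loc_coef=1.0, scale_coef=1.0
    )
    print(type(kernel).__name__, kernel.lengthscale.shape)  # one ARD lengthscale per input dimension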