pymoo-0.6.1.6-cp312-cp312-macosx_10_13_universal2.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (337)
  1. pymoo/__init__.py +3 -0
  2. pymoo/algorithms/__init__.py +0 -0
  3. pymoo/algorithms/base/__init__.py +0 -0
  4. pymoo/algorithms/base/bracket.py +38 -0
  5. pymoo/algorithms/base/genetic.py +110 -0
  6. pymoo/algorithms/base/line.py +62 -0
  7. pymoo/algorithms/base/local.py +39 -0
  8. pymoo/algorithms/base/meta.py +79 -0
  9. pymoo/algorithms/hyperparameters.py +91 -0
  10. pymoo/algorithms/moo/__init__.py +0 -0
  11. pymoo/algorithms/moo/age.py +310 -0
  12. pymoo/algorithms/moo/age2.py +194 -0
  13. pymoo/algorithms/moo/cmopso.py +239 -0
  14. pymoo/algorithms/moo/ctaea.py +305 -0
  15. pymoo/algorithms/moo/dnsga2.py +80 -0
  16. pymoo/algorithms/moo/kgb.py +450 -0
  17. pymoo/algorithms/moo/moead.py +183 -0
  18. pymoo/algorithms/moo/mopso_cd.py +309 -0
  19. pymoo/algorithms/moo/nsga2.py +113 -0
  20. pymoo/algorithms/moo/nsga3.py +361 -0
  21. pymoo/algorithms/moo/pinsga2.py +370 -0
  22. pymoo/algorithms/moo/rnsga2.py +188 -0
  23. pymoo/algorithms/moo/rnsga3.py +246 -0
  24. pymoo/algorithms/moo/rvea.py +214 -0
  25. pymoo/algorithms/moo/sms.py +196 -0
  26. pymoo/algorithms/moo/spea2.py +191 -0
  27. pymoo/algorithms/moo/unsga3.py +49 -0
  28. pymoo/algorithms/soo/__init__.py +0 -0
  29. pymoo/algorithms/soo/convex/__init__.py +0 -0
  30. pymoo/algorithms/soo/nonconvex/__init__.py +0 -0
  31. pymoo/algorithms/soo/nonconvex/brkga.py +162 -0
  32. pymoo/algorithms/soo/nonconvex/cmaes.py +556 -0
  33. pymoo/algorithms/soo/nonconvex/de.py +283 -0
  34. pymoo/algorithms/soo/nonconvex/direct.py +148 -0
  35. pymoo/algorithms/soo/nonconvex/es.py +213 -0
  36. pymoo/algorithms/soo/nonconvex/g3pcx.py +94 -0
  37. pymoo/algorithms/soo/nonconvex/ga.py +95 -0
  38. pymoo/algorithms/soo/nonconvex/ga_niching.py +223 -0
  39. pymoo/algorithms/soo/nonconvex/isres.py +74 -0
  40. pymoo/algorithms/soo/nonconvex/nelder.py +251 -0
  41. pymoo/algorithms/soo/nonconvex/nrbo.py +191 -0
  42. pymoo/algorithms/soo/nonconvex/optuna.py +80 -0
  43. pymoo/algorithms/soo/nonconvex/pattern.py +185 -0
  44. pymoo/algorithms/soo/nonconvex/pso.py +337 -0
  45. pymoo/algorithms/soo/nonconvex/pso_ep.py +307 -0
  46. pymoo/algorithms/soo/nonconvex/random_search.py +25 -0
  47. pymoo/algorithms/soo/nonconvex/sres.py +56 -0
  48. pymoo/algorithms/soo/univariate/__init__.py +0 -0
  49. pymoo/algorithms/soo/univariate/exp.py +46 -0
  50. pymoo/algorithms/soo/univariate/golden.py +65 -0
  51. pymoo/algorithms/soo/univariate/quadr_interp.py +81 -0
  52. pymoo/algorithms/soo/univariate/wolfe.py +163 -0
  53. pymoo/config.py +33 -0
  54. pymoo/constraints/__init__.py +3 -0
  55. pymoo/constraints/adaptive.py +66 -0
  56. pymoo/constraints/as_obj.py +56 -0
  57. pymoo/constraints/as_penalty.py +41 -0
  58. pymoo/constraints/eps.py +34 -0
  59. pymoo/constraints/from_bounds.py +36 -0
  60. pymoo/core/__init__.py +0 -0
  61. pymoo/core/algorithm.py +408 -0
  62. pymoo/core/callback.py +38 -0
  63. pymoo/core/crossover.py +79 -0
  64. pymoo/core/decision_making.py +102 -0
  65. pymoo/core/decomposition.py +76 -0
  66. pymoo/core/duplicate.py +163 -0
  67. pymoo/core/evaluator.py +116 -0
  68. pymoo/core/indicator.py +34 -0
  69. pymoo/core/individual.py +784 -0
  70. pymoo/core/infill.py +65 -0
  71. pymoo/core/initialization.py +44 -0
  72. pymoo/core/mating.py +39 -0
  73. pymoo/core/meta.py +21 -0
  74. pymoo/core/mixed.py +164 -0
  75. pymoo/core/mutation.py +44 -0
  76. pymoo/core/operator.py +46 -0
  77. pymoo/core/parameters.py +134 -0
  78. pymoo/core/plot.py +208 -0
  79. pymoo/core/population.py +180 -0
  80. pymoo/core/problem.py +373 -0
  81. pymoo/core/recorder.py +99 -0
  82. pymoo/core/repair.py +23 -0
  83. pymoo/core/replacement.py +96 -0
  84. pymoo/core/result.py +52 -0
  85. pymoo/core/sampling.py +45 -0
  86. pymoo/core/selection.py +61 -0
  87. pymoo/core/solution.py +10 -0
  88. pymoo/core/survival.py +107 -0
  89. pymoo/core/termination.py +70 -0
  90. pymoo/core/variable.py +415 -0
  91. pymoo/decomposition/__init__.py +0 -0
  92. pymoo/decomposition/aasf.py +24 -0
  93. pymoo/decomposition/asf.py +10 -0
  94. pymoo/decomposition/pbi.py +13 -0
  95. pymoo/decomposition/perp_dist.py +13 -0
  96. pymoo/decomposition/tchebicheff.py +11 -0
  97. pymoo/decomposition/util.py +13 -0
  98. pymoo/decomposition/weighted_sum.py +8 -0
  99. pymoo/docs.py +187 -0
  100. pymoo/experimental/__init__.py +0 -0
  101. pymoo/experimental/algorithms/__init__.py +0 -0
  102. pymoo/experimental/algorithms/gde3.py +57 -0
  103. pymoo/functions/__init__.py +135 -0
  104. pymoo/functions/compiled/__init__.py +0 -0
  105. pymoo/functions/compiled/calc_perpendicular_distance.cpp +27464 -0
  106. pymoo/functions/compiled/calc_perpendicular_distance.cpython-312-darwin.so +0 -0
  107. pymoo/functions/compiled/decomposition.cpp +28853 -0
  108. pymoo/functions/compiled/decomposition.cpython-312-darwin.so +0 -0
  109. pymoo/functions/compiled/info.cpp +7058 -0
  110. pymoo/functions/compiled/info.cpython-312-darwin.so +0 -0
  111. pymoo/functions/compiled/mnn.cpp +30095 -0
  112. pymoo/functions/compiled/mnn.cpython-312-darwin.so +0 -0
  113. pymoo/functions/compiled/non_dominated_sorting.cpp +35692 -0
  114. pymoo/functions/compiled/non_dominated_sorting.cpython-312-darwin.so +0 -0
  115. pymoo/functions/compiled/pruning_cd.cpp +29248 -0
  116. pymoo/functions/compiled/pruning_cd.cpython-312-darwin.so +0 -0
  117. pymoo/functions/compiled/stochastic_ranking.cpp +28042 -0
  118. pymoo/functions/compiled/stochastic_ranking.cpython-312-darwin.so +0 -0
  119. pymoo/functions/standard/__init__.py +1 -0
  120. pymoo/functions/standard/calc_perpendicular_distance.py +20 -0
  121. pymoo/functions/standard/decomposition.py +18 -0
  122. pymoo/functions/standard/hv.py +5 -0
  123. pymoo/functions/standard/mnn.py +78 -0
  124. pymoo/functions/standard/non_dominated_sorting.py +474 -0
  125. pymoo/functions/standard/pruning_cd.py +93 -0
  126. pymoo/functions/standard/stochastic_ranking.py +42 -0
  127. pymoo/gradient/__init__.py +24 -0
  128. pymoo/gradient/automatic.py +85 -0
  129. pymoo/gradient/grad_autograd.py +105 -0
  130. pymoo/gradient/grad_complex.py +35 -0
  131. pymoo/gradient/grad_jax.py +51 -0
  132. pymoo/gradient/numpy.py +22 -0
  133. pymoo/gradient/toolbox/__init__.py +19 -0
  134. pymoo/indicators/__init__.py +0 -0
  135. pymoo/indicators/distance_indicator.py +55 -0
  136. pymoo/indicators/gd.py +7 -0
  137. pymoo/indicators/gd_plus.py +7 -0
  138. pymoo/indicators/hv/__init__.py +59 -0
  139. pymoo/indicators/hv/approximate.py +105 -0
  140. pymoo/indicators/hv/exact.py +68 -0
  141. pymoo/indicators/hv/exact_2d.py +102 -0
  142. pymoo/indicators/igd.py +7 -0
  143. pymoo/indicators/igd_plus.py +7 -0
  144. pymoo/indicators/kktpm.py +151 -0
  145. pymoo/indicators/migd.py +55 -0
  146. pymoo/indicators/rmetric.py +203 -0
  147. pymoo/indicators/spacing.py +52 -0
  148. pymoo/mcdm/__init__.py +0 -0
  149. pymoo/mcdm/compromise_programming.py +19 -0
  150. pymoo/mcdm/high_tradeoff.py +40 -0
  151. pymoo/mcdm/pseudo_weights.py +32 -0
  152. pymoo/operators/__init__.py +0 -0
  153. pymoo/operators/control.py +190 -0
  154. pymoo/operators/crossover/__init__.py +0 -0
  155. pymoo/operators/crossover/binx.py +47 -0
  156. pymoo/operators/crossover/dex.py +125 -0
  157. pymoo/operators/crossover/erx.py +164 -0
  158. pymoo/operators/crossover/expx.py +53 -0
  159. pymoo/operators/crossover/hux.py +37 -0
  160. pymoo/operators/crossover/nox.py +25 -0
  161. pymoo/operators/crossover/ox.py +88 -0
  162. pymoo/operators/crossover/pcx.py +84 -0
  163. pymoo/operators/crossover/pntx.py +49 -0
  164. pymoo/operators/crossover/sbx.py +137 -0
  165. pymoo/operators/crossover/spx.py +5 -0
  166. pymoo/operators/crossover/ux.py +20 -0
  167. pymoo/operators/mutation/__init__.py +0 -0
  168. pymoo/operators/mutation/bitflip.py +17 -0
  169. pymoo/operators/mutation/gauss.py +60 -0
  170. pymoo/operators/mutation/inversion.py +42 -0
  171. pymoo/operators/mutation/nom.py +7 -0
  172. pymoo/operators/mutation/pm.py +96 -0
  173. pymoo/operators/mutation/rm.py +23 -0
  174. pymoo/operators/repair/__init__.py +0 -0
  175. pymoo/operators/repair/bounce_back.py +32 -0
  176. pymoo/operators/repair/bounds_repair.py +97 -0
  177. pymoo/operators/repair/inverse_penalty.py +91 -0
  178. pymoo/operators/repair/rounding.py +18 -0
  179. pymoo/operators/repair/to_bound.py +31 -0
  180. pymoo/operators/repair/vtype.py +11 -0
  181. pymoo/operators/sampling/__init__.py +0 -0
  182. pymoo/operators/sampling/lhs.py +76 -0
  183. pymoo/operators/sampling/rnd.py +52 -0
  184. pymoo/operators/selection/__init__.py +0 -0
  185. pymoo/operators/selection/rnd.py +75 -0
  186. pymoo/operators/selection/tournament.py +78 -0
  187. pymoo/operators/survival/__init__.py +0 -0
  188. pymoo/operators/survival/rank_and_crowding/__init__.py +1 -0
  189. pymoo/operators/survival/rank_and_crowding/classes.py +212 -0
  190. pymoo/operators/survival/rank_and_crowding/metrics.py +208 -0
  191. pymoo/optimize.py +72 -0
  192. pymoo/parallelization/__init__.py +15 -0
  193. pymoo/parallelization/dask.py +25 -0
  194. pymoo/parallelization/joblib.py +28 -0
  195. pymoo/parallelization/ray.py +31 -0
  196. pymoo/parallelization/starmap.py +24 -0
  197. pymoo/problems/__init__.py +157 -0
  198. pymoo/problems/dyn.py +47 -0
  199. pymoo/problems/dynamic/__init__.py +0 -0
  200. pymoo/problems/dynamic/cec2015.py +108 -0
  201. pymoo/problems/dynamic/df.py +451 -0
  202. pymoo/problems/dynamic/misc.py +167 -0
  203. pymoo/problems/functional.py +48 -0
  204. pymoo/problems/many/__init__.py +5 -0
  205. pymoo/problems/many/cdtlz.py +159 -0
  206. pymoo/problems/many/dcdtlz.py +88 -0
  207. pymoo/problems/many/dtlz.py +264 -0
  208. pymoo/problems/many/wfg.py +553 -0
  209. pymoo/problems/multi/__init__.py +14 -0
  210. pymoo/problems/multi/bnh.py +34 -0
  211. pymoo/problems/multi/carside.py +48 -0
  212. pymoo/problems/multi/clutch.py +104 -0
  213. pymoo/problems/multi/csi.py +55 -0
  214. pymoo/problems/multi/ctp.py +198 -0
  215. pymoo/problems/multi/dascmop.py +213 -0
  216. pymoo/problems/multi/kursawe.py +25 -0
  217. pymoo/problems/multi/modact.py +68 -0
  218. pymoo/problems/multi/mw.py +400 -0
  219. pymoo/problems/multi/omnitest.py +48 -0
  220. pymoo/problems/multi/osy.py +32 -0
  221. pymoo/problems/multi/srn.py +28 -0
  222. pymoo/problems/multi/sympart.py +94 -0
  223. pymoo/problems/multi/tnk.py +24 -0
  224. pymoo/problems/multi/truss2d.py +83 -0
  225. pymoo/problems/multi/welded_beam.py +41 -0
  226. pymoo/problems/multi/wrm.py +36 -0
  227. pymoo/problems/multi/zdt.py +151 -0
  228. pymoo/problems/multi_to_single.py +22 -0
  229. pymoo/problems/single/__init__.py +12 -0
  230. pymoo/problems/single/ackley.py +24 -0
  231. pymoo/problems/single/cantilevered_beam.py +34 -0
  232. pymoo/problems/single/flowshop_scheduling.py +113 -0
  233. pymoo/problems/single/g.py +874 -0
  234. pymoo/problems/single/griewank.py +18 -0
  235. pymoo/problems/single/himmelblau.py +15 -0
  236. pymoo/problems/single/knapsack.py +49 -0
  237. pymoo/problems/single/mopta08.py +26 -0
  238. pymoo/problems/single/multimodal.py +20 -0
  239. pymoo/problems/single/pressure_vessel.py +30 -0
  240. pymoo/problems/single/rastrigin.py +20 -0
  241. pymoo/problems/single/rosenbrock.py +22 -0
  242. pymoo/problems/single/schwefel.py +18 -0
  243. pymoo/problems/single/simple.py +13 -0
  244. pymoo/problems/single/sphere.py +19 -0
  245. pymoo/problems/single/traveling_salesman.py +79 -0
  246. pymoo/problems/single/zakharov.py +19 -0
  247. pymoo/problems/static.py +14 -0
  248. pymoo/problems/util.py +42 -0
  249. pymoo/problems/zero_to_one.py +27 -0
  250. pymoo/termination/__init__.py +23 -0
  251. pymoo/termination/collection.py +12 -0
  252. pymoo/termination/cv.py +48 -0
  253. pymoo/termination/default.py +45 -0
  254. pymoo/termination/delta.py +64 -0
  255. pymoo/termination/fmin.py +16 -0
  256. pymoo/termination/ftol.py +144 -0
  257. pymoo/termination/indicator.py +49 -0
  258. pymoo/termination/max_eval.py +14 -0
  259. pymoo/termination/max_gen.py +15 -0
  260. pymoo/termination/max_time.py +20 -0
  261. pymoo/termination/robust.py +34 -0
  262. pymoo/termination/xtol.py +33 -0
  263. pymoo/util/__init__.py +33 -0
  264. pymoo/util/archive.py +152 -0
  265. pymoo/util/cache.py +29 -0
  266. pymoo/util/clearing.py +82 -0
  267. pymoo/util/display/__init__.py +0 -0
  268. pymoo/util/display/column.py +52 -0
  269. pymoo/util/display/display.py +34 -0
  270. pymoo/util/display/multi.py +100 -0
  271. pymoo/util/display/output.py +53 -0
  272. pymoo/util/display/progress.py +54 -0
  273. pymoo/util/display/single.py +67 -0
  274. pymoo/util/dominator.py +67 -0
  275. pymoo/util/hv.py +21 -0
  276. pymoo/util/matlab_engine.py +39 -0
  277. pymoo/util/misc.py +447 -0
  278. pymoo/util/nds/__init__.py +0 -0
  279. pymoo/util/nds/dominance_degree_non_dominated_sort.py +159 -0
  280. pymoo/util/nds/efficient_non_dominated_sort.py +152 -0
  281. pymoo/util/nds/fast_non_dominated_sort.py +70 -0
  282. pymoo/util/nds/find_non_dominated.py +54 -0
  283. pymoo/util/nds/naive_non_dominated_sort.py +36 -0
  284. pymoo/util/nds/non_dominated_sorting.py +94 -0
  285. pymoo/util/nds/tree_based_non_dominated_sort.py +133 -0
  286. pymoo/util/normalization.py +312 -0
  287. pymoo/util/optimum.py +42 -0
  288. pymoo/util/randomized_argsort.py +63 -0
  289. pymoo/util/ref_dirs/__init__.py +24 -0
  290. pymoo/util/ref_dirs/construction.py +89 -0
  291. pymoo/util/ref_dirs/das_dennis.py +52 -0
  292. pymoo/util/ref_dirs/energy.py +317 -0
  293. pymoo/util/ref_dirs/energy_layer.py +119 -0
  294. pymoo/util/ref_dirs/genetic_algorithm.py +64 -0
  295. pymoo/util/ref_dirs/incremental.py +69 -0
  296. pymoo/util/ref_dirs/misc.py +128 -0
  297. pymoo/util/ref_dirs/optimizer.py +59 -0
  298. pymoo/util/ref_dirs/performance.py +162 -0
  299. pymoo/util/ref_dirs/reduction.py +85 -0
  300. pymoo/util/ref_dirs/sample_and_map.py +24 -0
  301. pymoo/util/reference_direction.py +258 -0
  302. pymoo/util/remote.py +55 -0
  303. pymoo/util/roulette.py +29 -0
  304. pymoo/util/running_metric.py +128 -0
  305. pymoo/util/sliding_window.py +25 -0
  306. pymoo/util/value_functions.py +720 -0
  307. pymoo/util/vectors.py +40 -0
  308. pymoo/util/vf_dominator.py +102 -0
  309. pymoo/vendor/__init__.py +0 -0
  310. pymoo/vendor/cec2018.py +398 -0
  311. pymoo/vendor/gta.py +617 -0
  312. pymoo/vendor/vendor_cmaes.py +421 -0
  313. pymoo/vendor/vendor_coco.py +81 -0
  314. pymoo/vendor/vendor_scipy.py +232 -0
  315. pymoo/version.py +1 -0
  316. pymoo/visualization/__init__.py +21 -0
  317. pymoo/visualization/app/__init__.py +0 -0
  318. pymoo/visualization/app/pso.py +61 -0
  319. pymoo/visualization/fitness_landscape.py +128 -0
  320. pymoo/visualization/heatmap.py +123 -0
  321. pymoo/visualization/matplotlib.py +61 -0
  322. pymoo/visualization/pcp.py +121 -0
  323. pymoo/visualization/petal.py +91 -0
  324. pymoo/visualization/radar.py +108 -0
  325. pymoo/visualization/radviz.py +68 -0
  326. pymoo/visualization/scatter.py +150 -0
  327. pymoo/visualization/star_coordinate.py +75 -0
  328. pymoo/visualization/util.py +296 -0
  329. pymoo/visualization/video/__init__.py +0 -0
  330. pymoo/visualization/video/callback_video.py +82 -0
  331. pymoo/visualization/video/one_var_one_obj.py +57 -0
  332. pymoo/visualization/video/two_var_one_obj.py +62 -0
  333. pymoo-0.6.1.6.dist-info/METADATA +209 -0
  334. pymoo-0.6.1.6.dist-info/RECORD +337 -0
  335. pymoo-0.6.1.6.dist-info/WHEEL +6 -0
  336. pymoo-0.6.1.6.dist-info/licenses/LICENSE +191 -0
  337. pymoo-0.6.1.6.dist-info/top_level.txt +1 -0
pymoo/algorithms/soo/nonconvex/nelder.py
@@ -0,0 +1,251 @@
+ import numpy as np
+
+ from pymoo.algorithms.base.local import LocalSearch
+ from pymoo.algorithms.soo.nonconvex.ga import FitnessSurvival
+ from pymoo.core.individual import Individual
+ from pymoo.core.population import Population
+ from pymoo.core.population import pop_from_array_or_individual
+ from pymoo.core.replacement import is_better
+ from pymoo.core.termination import Termination
+ from pymoo.docs import parse_doc_string
+ from pymoo.operators.repair.to_bound import set_to_bounds_if_outside_by_problem
+ from pymoo.util.display.single import SingleObjectiveOutput
+ from pymoo.util.misc import vectorized_cdist
+ from pymoo.util.vectors import max_alpha
+
+
+ # =========================================================================================================
+ # Implementation
+ # =========================================================================================================
+
+
+ class NelderAndMeadTermination(Termination):
+
+     def __init__(self,
+                  x_tol=1e-6,
+                  f_tol=1e-6,
+                  n_max_iter=1e6,
+                  n_max_evals=1e6):
+
+         super().__init__()
+         self.x_tol = x_tol
+         self.f_tol = f_tol
+         self.n_max_iter = n_max_iter
+         self.n_max_evals = n_max_evals
+
+     def _update(self, algorithm):
+         pop, problem = algorithm.pop, algorithm.problem
+
+         if len(pop) <= 1:
+             return 0.0
+
+         X, F = pop.get("X", "F")
+
+         f_delta = np.abs(F[1:] - F[0]).max()
+         f_tol = 1 / (1 + (f_delta - self.f_tol))
+
+         # if the problem has bounds we can normalize the x space to be more accurate
+         if problem.has_bounds():
+             x_delta = np.abs((X[1:] - X[0]) / (problem.xu - problem.xl)).max()
+         else:
+             x_delta = np.abs(X[1:] - X[0]).max()
+
+         x_tol = 1 / (1 + (x_delta - self.x_tol))
+
+         # degenerated simplex - get all edges and the minimum and maximum length
+         D = vectorized_cdist(X, X)
+         val = D[np.triu_indices(len(pop), 1)]
+         min_e, max_e = val.min(), val.max()
+
+         # either the maximum length is very small or the ratio is degenerated
+         is_degenerated = int(max_e < 1e-16 or min_e / max_e < 1e-16)
+
+         max_iter = algorithm.n_iter / self.n_max_iter
+         max_evals = algorithm.evaluator.n_eval / self.n_max_evals
+
+         return max(f_tol, x_tol, max_iter, max_evals, is_degenerated)
+
+
+ def adaptive_params(problem):
+     n = problem.n_var
+     alpha = 1
+     beta = 1 + 2 / n
+     gamma = 0.75 - 1 / (2 * n)
+     delta = 1 - 1 / n
+     return alpha, beta, gamma, delta
+
+
+ def default_params(_):
+     alpha = 1
+     beta = 2.0
+     gamma = 0.5
+     delta = 0.05
+     return alpha, beta, gamma, delta
+
+
+ def initialize_simplex(problem, x0, scale=0.05):
+     n = len(x0)
+
+     if problem.has_bounds():
+         delta = scale * (problem.xu - problem.xl)
+     else:
+         delta = scale * x0
+         delta[delta == 0] = 0.00025
+
+     # repeat x0 and then add the offsets
+     X = x0[None, :].repeat(n, axis=0)
+
+     for k in range(n):
+
+         # if the problem has bounds do the check
+         if problem.has_bounds():
+             if X[k, k] + delta[k] < problem.xu[k]:
+                 X[k, k] = X[k, k] + delta[k]
+             else:
+                 X[k, k] = X[k, k] - delta[k]
+
+         # otherwise just add the init_simplex_scale
+         else:
+             X[k, k] = X[k, k] + delta[k]
+
+     return X
+
+
+ class NelderMead(LocalSearch):
+
+     def __init__(self,
+                  init_simplex_scale=0.05,
+                  func_params=adaptive_params,
+                  output=SingleObjectiveOutput(),
+                  **kwargs):
+
+         super().__init__(output=output, **kwargs)
+
+         # the function returning the parameters
+         self.func_params = func_params
+
+         # the attributes for the simplex operations
+         self.alpha = None
+         self.beta = None
+         self.gamma = None
+         self.delta = None
+
+         # whether the simplex has been initialized or not
+         self.is_simplex_initialized = False
+
+         # the initial simplex scale used
+         self.init_simplex_scale = init_simplex_scale
+
+         # the termination used for Nelder and Mead if nothing else is provided
+         self.termination = NelderAndMeadTermination()
+
+     def _setup(self, problem, **kwargs):
+         self.alpha, self.beta, self.gamma, self.delta = self.func_params(self.problem)
+
+     def _initialize_simplex(self):
+         simplex = pop_from_array_or_individual(initialize_simplex(self.problem, self.x0.X, scale=0.05))
+         return Population.merge(self.x0, simplex)
+
+     def _next(self):
+         if not self.is_simplex_initialized:
+             self.pop = yield self._initialize_simplex()
+             self.is_simplex_initialized = True
+         else:
+             yield from self._step()
+
+     def _step(self):
+
+         # number of variables decreased by one - matches the equations in the paper
+         xl, xu = self.problem.bounds()
+         pop, n = self.pop, self.problem.n_var - 1
+
+         # calculate the centroid
+         centroid = pop[:n + 1].get("X").mean(axis=0)
+
+         # -------------------------------------------------------------------------------------------
+         # REFLECT
+         # -------------------------------------------------------------------------------------------
+
+         # check the maximum alpha until the bounds are hit
+         alphaU = max_alpha(centroid, (centroid - pop[n + 1].X), xl, xu)
+
+         # reflect the point, consider the factor if bounds are given, make sure it stays in bounds (floating point), evaluate
+         x_reflect = centroid + min(self.alpha, alphaU) * (centroid - pop[n + 1].X)
+         x_reflect = set_to_bounds_if_outside_by_problem(self.problem, x_reflect)
+         reflect = yield Individual(X=x_reflect)
+
+         # whether a shrink is necessary or not - decided during this step
+         shrink = False
+
+         better_than_current_best = is_better(reflect, pop[0])
+         better_than_second_worst = is_better(reflect, pop[n])
+         better_than_worst = is_better(reflect, pop[n + 1])
+
+         # if better than the current best - check for expansion
+         if better_than_current_best:
+
+             # -------------------------------------------------------------------------------------------
+             # EXPAND
+             # -------------------------------------------------------------------------------------------
+
+             # the maximum expansion until the bounds are hit
+             betaU = max_alpha(centroid, (x_reflect - centroid), xl, xu)
+
+             # expand using the factor, consider bounds, guard against floating point issues
+             x_expand = centroid + min(self.beta, betaU) * (x_reflect - centroid)
+             x_expand = set_to_bounds_if_outside_by_problem(self.problem, x_expand)
+             expand = yield Individual(X=x_expand)
+
+             # if the expansion improved further take it - otherwise keep the reflection
+             if is_better(expand, reflect):
+                 pop[n + 1] = expand
+             else:
+                 pop[n + 1] = reflect
+
+         # if the new point is not better than the best, but better than the second worst - just keep it
+         elif not better_than_current_best and better_than_second_worst:
+             pop[n + 1] = reflect
+
+         # if not worse than the worst - outside contraction
+         elif not better_than_second_worst and better_than_worst:
+
+             # -------------------------------------------------------------------------------------------
+             # Outside Contraction
+             # -------------------------------------------------------------------------------------------
+
+             x_contract_outside = centroid + self.gamma * (x_reflect - centroid)
+             contract_outside = yield Individual(X=x_contract_outside)
+
+             if is_better(contract_outside, reflect):
+                 pop[n + 1] = contract_outside
+             else:
+                 shrink = True
+
+         # if the reflection was worse than the worst - inside contraction
+         else:
+
+             # -------------------------------------------------------------------------------------------
+             # Inside Contraction
+             # -------------------------------------------------------------------------------------------
+
+             x_contract_inside = centroid - self.gamma * (x_reflect - centroid)
+             contract_inside = yield Individual(X=x_contract_inside)
+
+             if is_better(contract_inside, pop[n + 1]):
+                 pop[n + 1] = contract_inside
+             else:
+                 shrink = True
+
+         # -------------------------------------------------------------------------------------------
+         # Shrink (only if necessary)
+         # -------------------------------------------------------------------------------------------
+
+         if shrink:
+             x_best, x_others = pop[0].X, pop[1:].get("X")
+             x_shrink = x_best + self.delta * (x_others - x_best)
+             pop[1:] = yield Population.new(X=x_shrink)
+
+         self.pop = FitnessSurvival().do(self.problem, pop, n_survive=len(pop))
+
+
+ parse_doc_string(NelderMead.__init__)
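
For orientation, a minimal usage sketch of the NelderMead class added above, using pymoo's standard minimize interface. The choice of the sphere test problem and the seed are illustrative assumptions, not part of this diff:

    from pymoo.algorithms.soo.nonconvex.nelder import NelderMead
    from pymoo.optimize import minimize
    from pymoo.problems import get_problem

    # "sphere" is a single-objective test problem shipped with pymoo
    problem = get_problem("sphere")

    # NelderAndMeadTermination is attached by default, so no explicit termination is required
    res = minimize(problem, NelderMead(), seed=1, verbose=False)
    print(res.X, res.F)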
pymoo/algorithms/soo/nonconvex/nrbo.py
@@ -0,0 +1,191 @@
+ """
+ Newton-Raphson-based optimizer (NRBO)
+
+ -------------------------------- Description -------------------------------
+
+
+
+ -------------------------------- References --------------------------------
+
+ [1]. Sowmya, R., Premkumar, M. & Jangir, P. Newton-Raphson-based optimizer:
+ A new population-based metaheuristic algorithm for continuous optimization problems.
+ Engineering Applications of Artificial Intelligence 128, 107532 (2024).
+
+ ----------------------------------------------------------------------------
+ """
+
+ import numpy as np
+
+ from pymoo.core.algorithm import Algorithm
+ from pymoo.core.initialization import Initialization
+ from pymoo.core.population import Population
+ from pymoo.core.repair import NoRepair
+ from pymoo.core.replacement import ImprovementReplacement
+ from pymoo.core.survival import Survival
+ from pymoo.operators.repair.bounds_repair import repair_random_init
+ from pymoo.operators.sampling.lhs import LHS
+ from pymoo.util import default_random_state
+
+
+ class FitnessSurvival(Survival):
+
+     def __init__(self) -> None:
+         super().__init__(filter_infeasible=False)
+
+     def _do(self, problem, pop, n_survive=None, **kwargs):
+         F, cv = pop.get("F", "cv")
+         assert F.shape[1] == 1, "FitnessSurvival can only be used for single-objective problems!"
+         S = np.lexsort([F[:, 0], cv])
+         pop.set("rank", np.argsort(S))
+         return pop[S[:n_survive]]
+
+
+ @default_random_state
+ def Search_Rule(Xb, Xw, Xn, rho, random_state=None):
+     dim = len(Xn)
+
+     dx = random_state.random(dim) * np.abs(Xb - Xn)
+
+     tmp = Xw + Xb - 2 * Xn
+     idx = np.where(tmp == 0.0)
+     # repair if xj=0
+     if idx:
+         tmp[idx] = tmp[idx] + 1e-12
+     nrsr = random_state.standard_normal() * (((Xw - Xb) * dx) / (2 * tmp))
+     Z = Xn - nrsr
+
+     r1 = random_state.random()
+     # r2 = random_state.random()
+     tmp = np.mean(Z + Xn)
+
+     yw = r1 * (tmp + r1 * dx)
+     yb = r1 * (tmp - r1 * dx)
+
+     NRSR = random_state.standard_normal() * ((yw - yb) * dx) / (2 * (yw + yb - 2 * Xn))
+
+     step = NRSR - rho
+     X1 = Xn - step
+     X2 = Xb - step
+     return X1, X2
+
+
+ class NRBO(Algorithm):
+     def __init__(
+         self,
+         pop_size=50,
+         deciding_factor=0.6,
+         sampling=LHS(),
+         max_iteration=100,
+         repair=NoRepair(),
+         output=None,
+         display=None,
+         callback=None,
+         archive=None,
+         return_least_infeasible=False,
+         save_history=False,
+         verbose=False,
+         seed=None,
+         evaluator=None,
+         **kwargs,
+     ):
+         self.max_iteration = max_iteration
+         termination = ("n_gen", self.max_iteration)
+         self.pop_size = pop_size
+         self.deciding_factor = deciding_factor
+         self.repair = repair
+         self.survial = FitnessSurvival()
+         self.initialization = Initialization(sampling, self.repair)
+         super().__init__(
+             termination,
+             output,
+             display,
+             callback,
+             archive,
+             return_least_infeasible,
+             save_history,
+             verbose,
+             seed,
+             evaluator,
+             **kwargs,
+         )
+
+     def _setup(self, problem, **kwargs):
+         return super()._setup(problem, **kwargs)
+
+     def _initialize_infill(self):
+         return self.initialization.do(self.problem, self.pop_size, algorithm=self, random_state=self.random_state)
+
+     def _initialize_advance(self, infills=None, **kwargs):
+         self.pop = self.survial.do(self.problem, infills)
+
+     def _infill(self):
+         delta = (1 - (2 * self.n_iter) / self.max_iteration) ** 5
+
+         # find the best (Xb) and worst (Xw) individuals
+         rank = self.pop.get("rank")
+         Xb_idx = np.argmin(rank)
+         X = self.pop.get("X")
+         Xb = X[Xb_idx]
+         Xw_idx = np.argmax(rank)
+         Xw = X[Xw_idx]
+
+         off = []
+
+         for i in range(self.pop_size):
+
+             # randomly select r1, r2
+             idx = np.arange(self.pop_size)
+             idx = np.delete(idx, i)
+             r1, r2 = self.random_state.choice(idx, size=2, replace=False)
+
+             a, b = self.random_state.random(2)
+             rho = a * (Xb - X[i]) + b * (X[r1] - X[r2])
+
+             # NRSR
+             X1, X2 = Search_Rule(Xb=Xb, Xw=Xw, Xn=X[i], rho=rho, random_state=self.random_state)
+
+             X3 = X[i] - delta * (X2 - X1)
+
+             r2 = self.random_state.random()
+             Xn_new = r2 * (r2 * X1 + (1 - r2) * X2) + (1 - r2) * X3
+
+             # TAO
+             if self.random_state.random() < self.deciding_factor:
+                 theta1 = self.random_state.uniform(-1, 1, 1)
+                 theta2 = self.random_state.uniform(-0.5, 0.5, 1)
+
+                 beta = 0 if self.random_state.random() > 0.5 else 1
+                 u1 = beta * 3 * self.random_state.random() + (1 - beta)
+                 u2 = beta * self.random_state.random() + (1 - beta)
+
+                 tmp = theta1 * (u1 * Xb - u2 * X[i]) + theta2 * delta * (u1 * np.mean(X[i]) - u2 * X[i])
+                 if u1 < 0.5:
+                     X_tao = Xn_new + tmp
+                 else:
+                     X_tao = Xb + tmp
+
+                 Xn_new = X_tao
+             off.append(Xn_new)
+
+         off = np.array(off)
+         if self.problem.has_bounds():
+             # off = set_to_bounds_if_outside(off, *self.problem.bounds())
+             off = repair_random_init(off, X, *self.problem.bounds(), random_state=self.random_state)
+
+         off = Population.new(X=off)
+
+         off = self.repair.do(self.problem, off)
+         return off
+
+     def _advance(self, infills=None, **kwargs):
+         off = infills
+         has_improved = ImprovementReplacement().do(self.problem, self.pop, off, return_indices=True)
+
+         self.pop[has_improved] = off[has_improved]
+         self.survial.do(self.problem, self.pop)
+
+     def _set_optimum(self):
+         k = self.pop.get("rank") == 0
+         self.opt = self.pop[k]
+
+
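
A hedged usage sketch for the new NRBO algorithm. The rastrigin test problem is an illustrative choice; note that the termination is derived from max_iteration inside the constructor, as shown above:

    from pymoo.algorithms.soo.nonconvex.nrbo import NRBO
    from pymoo.optimize import minimize
    from pymoo.problems import get_problem

    # the constructor sets termination = ("n_gen", max_iteration) internally
    algorithm = NRBO(pop_size=50, deciding_factor=0.6, max_iteration=200)

    res = minimize(get_problem("rastrigin"), algorithm, seed=1, verbose=False)
    print(res.F)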
pymoo/algorithms/soo/nonconvex/optuna.py
@@ -0,0 +1,80 @@
+ from pymoo.util.optimum import filter_optimum
+
+ try:
+     import optuna
+     from optuna.samplers import TPESampler
+     from optuna.logging import get_logger
+ except:
+     raise Exception("Please install optuna: pip install optuna")
+
+ from pymoo.core.algorithm import Algorithm
+ from pymoo.core.individual import Individual
+ from pymoo.core.population import Population
+ from pymoo.core.variable import Real, Integer, Choice, Binary
+ from pymoo.util.display.single import SingleObjectiveOutput
+
+
+ class Optuna(Algorithm):
+
+     def __init__(self, sampler=None, output=SingleObjectiveOutput(), **kwargs):
+         super().__init__(output=output, **kwargs)
+         self.sampler = sampler
+
+     def _setup(self, problem, **kwargs):
+
+         sampler = self.sampler
+         if sampler is None:
+             sampler = TPESampler(seed=self.seed)
+
+         # that disables the warning in the very beginning
+         get_logger('optuna.storages._in_memory').disabled = True
+
+         # create a new study
+         self.study = optuna.create_study(study_name=f"Study@{id(self)}", sampler=sampler, direction='minimize')
+
+         # the current trial for an individual
+         self.trial = None
+
+     def _infill(self):
+         self.trial = self.study.ask()
+
+         vars = self.problem.vars
+         assert vars is not None, "Optuna needs explicitly defined variables."
+
+         x = {}
+         for name, param in vars.items():
+             if isinstance(param, Real):
+                 lower, upper = param.bounds
+                 v = self.trial.suggest_float(name, lower, upper)
+             elif isinstance(param, Integer):
+                 lower, upper = param.bounds
+                 v = self.trial.suggest_int(name, lower, upper)
+             elif isinstance(param, Choice):
+                 options = param.options
+                 v = self.trial.suggest_categorical(name, options)
+             elif isinstance(param, Binary):
+                 v = self.trial.suggest_categorical(name, [False, True])
+             else:
+                 raise Exception("Type not supported yet.")
+             x[name] = v
+
+         return Individual(X=x)
+
+     def _advance(self, infills=None, **kwargs):
+         self.pop = Population.create(infills)
+         self.study.tell(self.trial, infills.f)
+
+     def _initialize_infill(self):
+         return self._infill()
+
+     def _initialize_advance(self, **kwargs):
+         return self._advance(**kwargs)
+
+     def _set_optimum(self):
+         pop = self.pop
+         if self.opt is not None:
+             pop = Population.merge(self.opt, pop)
+         self.opt = filter_optimum(pop, least_infeasible=True)
+
+
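
A sketch of how the Optuna wrapper might be used. It assumes optuna is installed and that the problem declares its variables explicitly via vars, as required by the assertion in _infill; the toy problem, its bounds, and the evaluation budget below are assumptions for illustration only:

    from pymoo.algorithms.soo.nonconvex.optuna import Optuna
    from pymoo.core.problem import ElementwiseProblem
    from pymoo.core.variable import Real, Integer
    from pymoo.optimize import minimize


    class MixedToyProblem(ElementwiseProblem):

        def __init__(self):
            # explicitly defined variables; X is then passed to _evaluate as a dict
            vars = {"x": Real(bounds=(-5.0, 5.0)), "n": Integer(bounds=(0, 10))}
            super().__init__(vars=vars, n_obj=1)

        def _evaluate(self, X, out, *args, **kwargs):
            out["F"] = X["x"] ** 2 + X["n"]


    # Optuna evaluates one trial per iteration, so bound the run by the number of evaluations
    res = minimize(MixedToyProblem(), Optuna(), termination=("n_evals", 100), seed=1, verbose=False)
    print(res.X, res.F)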
pymoo/algorithms/soo/nonconvex/pattern.py
@@ -0,0 +1,185 @@
+ import numpy as np
+
+ from pymoo.algorithms.base.local import LocalSearch
+ from pymoo.core.individual import Individual
+ from pymoo.core.population import Population
+ from pymoo.core.replacement import is_better
+ from pymoo.docs import parse_doc_string
+ from pymoo.operators.repair.to_bound import set_to_bounds_if_outside_by_problem
+ from pymoo.util.display.single import SingleObjectiveOutput
+ from pymoo.util.optimum import filter_optimum
+ from pymoo.util import default_random_state
+
+
+ # =========================================================================================================
+ # Implementation
+ # =========================================================================================================
+
+
+ class PatternSearch(LocalSearch):
+     def __init__(self,
+                  init_delta=0.25,
+                  init_rho=0.5,
+                  step_size=1.0,
+                  output=SingleObjectiveOutput(),
+                  **kwargs):
+         """
+         An implementation of the well-known Hooke and Jeeves Pattern Search.
+
+         Parameters
+         ----------
+
+         x0 : numpy.array
+             The initial value where the local search should be initiated. If not provided, `n_sample_points` are
+             created using Latin hypercube sampling and the best solution found is set to `x0`.
+
+         n_sample_points : int
+             Number of sample points to be used to determine the initial search point. (Only used if `x0` is not provided.)
+
+         delta : float
+             The `delta` value which is used for the exploration move. If lower and upper bounds are provided, the
+             value is relative to the overall search space. For instance, a value of 0.25 means that initially the
+             pattern is created at a distance of 25% from the initial search point.
+
+         rho : float
+             If the move was unsuccessful, the `delta` value is reduced by multiplying it with the value provided.
+             For instance, a value of 0.5 means the search is continued with `delta/2`.
+
+         step_size : float
+             After the exploration move, the new center is determined by following a promising direction.
+             This value defines how large the step in this direction will be.
+
+         """
+
+         super().__init__(output=output, **kwargs)
+         self.init_rho = init_rho
+         self.init_delta = init_delta
+         self.step_size = step_size
+
+         self.n_not_improved = 0
+
+         self._rho = init_rho
+         self._delta = None
+         self._center = None
+         self._current = None
+         self._trial = None
+         self._direction = None
+         self._sign = None
+
+     def _initialize_advance(self, infills=None, **kwargs):
+         super()._initialize_advance(infills=infills, **kwargs)
+         self._center, self._explr = self.x0, self.x0
+         self._sign = np.ones(self.problem.n_var)
+
+         if self.problem.has_bounds():
+             xl, xu = self.problem.bounds()
+             self._delta = self.init_delta * (xu - xl)
+         else:
+             self._delta = np.abs(self.x0.X) / 2.0
+             self._delta[self._delta <= 1.0] = 1.0
+
+     def _next(self):
+
+         # whether the last iteration has resulted in a new optimum or not
+         has_improved = is_better(self._explr, self._center)
+
+         # that means the exploration did not find any new point and was thus unsuccessful
+         if not has_improved:
+
+             # increase the counter (by default this will be initialized to 0 and directly increased to 1)
+             self.n_not_improved += 1
+
+             # keep track of the rho values in the normalized space
+             self._rho = self.init_rho ** self.n_not_improved
+
+             # explore around the current center - try finding a suitable direction
+             self._explr = yield from exploration_move(self.problem, self._center, self._sign, self._delta, self._rho)
+
+         # if we have found a direction in the last iteration worth following
+         else:
+
+             # get the direction which was successful in the last move
+             self._direction = (self._explr.X - self._center.X)
+
+             # declare the exploration point the new center (it has led to an improvement in the last iteration!)
+             self._center = self._explr
+
+             # use the pattern move to get a new trial vector along that given direction
+             self._trial = yield pattern_move(self.problem, self._center, self._direction, self.step_size)
+
+             # get the delta sign adjusted for the exploration
+             self._sign = calc_sign(self._direction)
+
+             # explore around the current center to try finding a suitable direction
+             self._explr = yield from exploration_move(self.problem, self._trial, self._sign, self._delta, self._rho)
+
+         self.pop = Population.create(self._center, self._explr)
+
+     def _set_optimum(self):
+         pop = self.pop if self.opt is None else Population.merge(self.opt, self.pop)
+         self.opt = filter_optimum(pop, least_infeasible=True)
+
+
+ @default_random_state
+ def exploration_move(problem, center, sign, delta, rho, randomize=True, random_state=None):
+     n_var = problem.n_var
+
+     # the order for the variable iteration
+     if randomize:
+         K = random_state.permutation(n_var)
+     else:
+         K = np.arange(n_var)
+
+     # iterate over each variable
+     for k in K:
+
+         # the value to be tried first is given by the amount times the sign
+         _delta = sign[k] * rho * delta
+
+         # make a step of delta on the k-th variable
+         _explr = yield step_along_axis(problem, center.X, _delta, k)
+
+         if is_better(_explr, center, eps=0.0):
+             center = _explr
+
+         # if not successful try the other direction
+         else:
+
+             # now try the negative value of delta and see if we can improve
+             _explr = yield step_along_axis(problem, center.X, -1 * _delta, k)
+
+             if is_better(_explr, center, eps=0.0):
+                 center = _explr
+
+     return center
+
+
+ def pattern_move(problem, current, direction, step_size):
+     # calculate the new X and repair out of bounds if necessary
+     X = current.X + step_size * direction
+     set_to_bounds_if_outside_by_problem(problem, X)
+
+     # create the new center individual
+     return Individual(X=X)
+
+
+ def calc_sign(direction):
+     sign = np.sign(direction)
+     sign[sign == 0] = -1
+     return sign
+
+
+ def step_along_axis(problem, x, delta, axis):
+     # copy the current point before adding delta
+     X = np.copy(x)
+
+     # now add the step to the current solution
+     X[axis] = X[axis] + delta[axis]
+
+     # repair if out of bounds if necessary
+     X = set_to_bounds_if_outside_by_problem(problem, X)
+
+     return Individual(X=X)
+
+
+ parse_doc_string(PatternSearch.__init__)
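
Finally, a minimal sketch of running the new PatternSearch implementation; the problem choice and the evaluation budget are illustrative assumptions:

    from pymoo.algorithms.soo.nonconvex.pattern import PatternSearch
    from pymoo.optimize import minimize
    from pymoo.problems import get_problem

    # "himmelblau" is a small two-variable single-objective test problem in pymoo
    problem = get_problem("himmelblau")

    res = minimize(problem, PatternSearch(), termination=("n_evals", 500), seed=1, verbose=False)
    print(res.X, res.F)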