fcmaes 1.3.17__py3-none-any.whl → 1.6.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,36 @@
+ fcmaes/__init__.py,sha256=nUQ6rXiyzd6oQcfOHbk6uf6hCRXRCHCSuTnzCIsp8s8,507
+ fcmaes/advretry.py,sha256=cDTRgdpnmJXW7Ego3Pz5IOy2dJSVBAF5P9yZVZ1E-Zo,18584
+ fcmaes/astro.py,sha256=ZQek32Q4u9RqFBYFfsA0gTL1ke_ciXDbfcYjDPagKJI,10603
+ fcmaes/bitecpp.py,sha256=Q4omEf38KfFm_gFJAvEX4bVYpCf24ftsThjEH3eG28I,4516
+ fcmaes/cmaes.py,sha256=J66REBCH-hQcHL821f9czh31aaZWnX9n0R6pF8vJtlo,26495
+ fcmaes/cmaescpp.py,sha256=THJN9sJvkl8VrYb6CIA0B-3CBURsHsRPWLYHV8xRX00,13303
+ fcmaes/crfmnes.py,sha256=vgCD0jztApaNs8r7LLgxoWuOF7Gmz8SR8kJpW2H_JR0,15105
+ fcmaes/crfmnescpp.py,sha256=aU7z-JV_2oKmfRxWuwmiosFnjXaTyFoyj0PZa9lniLc,10928
+ fcmaes/dacpp.py,sha256=By1pJ_ZXbOHlNP8uwkgu7dkz4ci00JiK_gYU84HVC1c,3927
+ fcmaes/de.py,sha256=zJkhGAFnl25r4Ac1PCC185PiF3o1RoQCGL7cIpKTl78,19829
+ fcmaes/decpp.py,sha256=JYkhBydeHUzqM5rGffR7BOKj1uM2bu5J1UZfsU4uxms,12498
+ fcmaes/diversifier.py,sha256=K3UlP-eSCiSXkhNVLIxPSrDD4iN540NQNexIU5J97w4,16511
+ fcmaes/evaluator.py,sha256=2MGjDKgEl81sheCNfUD5dBMtGkZCb2ao-2898A-Vn1o,11547
+ fcmaes/mapelites.py,sha256=sFZJ6jYTBwpOUmKrXgRY78fuCxDvhlyYQyOp_uuCd4w,29624
+ fcmaes/mode.py,sha256=9A06iR3ELS0JmYxYKhyNvMLdY9D0W_63fMHj7qjRUi8,29538
+ fcmaes/modecpp.py,sha256=IL_shkzSFmB4lYMNeh361sVEPvIa-JLfEdaU5dO4QJE,20876
+ fcmaes/moretry.py,sha256=9f1i23wqLxGmYFuKYXyeQvhyw5KGoPG3NVVID22ZRYg,11037
+ fcmaes/multiretry.py,sha256=s9AfogJYeJ-HOjkueBq25zXx3AKZ12WuwOGm0mY8Xuw,7047
+ fcmaes/optimizer.py,sha256=zfo8u6Y_JkVO64NyedKDOqInhOWTvlaVzIoO6z6f9LM,46215
+ fcmaes/pgpecpp.py,sha256=HEeL2WkV6ZuckXm_znwGB9zCdcKelS7qg5mYNOFA7rw,14444
+ fcmaes/pygmoretry.py,sha256=dT_EktuUiiNwubqqv-BDUbCNx0xlfF-10LztsAMOB68,4044
+ fcmaes/retry.py,sha256=_bpF8_haBNGsVQjWcpvV5L3a0cqQ7sgcuxVTUAPJCdo,17586
+ fcmaes/test_cma.py,sha256=PsOP2Mw18KEnT3oUJ1FnAHRP_SM4MO5CbBYrvrCdbDk,14332
+ fcmaes/testfun.py,sha256=3T14avHA9sRMuVF2Ua-s9JyXJl1lsvkyyASIYudXBSk,3924
+ fcmaes/lib/libacmalib.dll,sha256=NX56nxX3zeYNxAKUTr8_DHIS7uR1lwmiYKpvaXs7vgE,3804965
+ fcmaes/lib/libacmalib.dylib,sha256=6G6ttXLNkqKpZDNlijZvplJwGGHRfKBKyesefAIL-qk,1004744
+ fcmaes/lib/libacmalib.so,sha256=WZPX4ZNRVJm8RqzuuoTdGh1v7SwqfrGBOKRmLxYWiKM,2750968
+ fcmaes/lib/libhbv.so,sha256=TFhps8YXUHUxenkIP6-rO8Cassx4ZUHKhLXR0byTViI,60504
+ fcmaes/lib/liblrgv.so,sha256=NAEEgJA3R6-8SQbbmykSexe5-2a1t9X3-aIKf79crvs,190912
+ fcmaes/lib/librw_top_trumps.dll,sha256=q-tamxFTwv3X_ByXnFw0SS_CRgAh59xBBJbsp_IXpj8,214992
+ fcmaes/lib/librw_top_trumps.so,sha256=gPgydLnAu6UD34znWtRw33mAd_TWcSea1MwUxpahX0Y,129920
+ fcmaes-1.6.9.dist-info/LICENSE,sha256=QB09uUsI-9MeK34mmjlL1YVz0ofHIbuXUP8fBBIDnnI,1072
+ fcmaes-1.6.9.dist-info/METADATA,sha256=LCzNkQ7asm6bY-OOaCf1wa8ceNPQYIit_RZpxI-UPZM,1693
+ fcmaes-1.6.9.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ fcmaes-1.6.9.dist-info/top_level.txt,sha256=JXYyWNvUB6Q6XokDpYaJWpQvxEYotJFnFjwFJ37tnbA,7
+ fcmaes-1.6.9.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.37.1)
+ Generator: setuptools (75.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
fcmaes/csmacpp.py DELETED
@@ -1,108 +0,0 @@
- # Copyright (c) Dietmar Wolz.
- #
- # This source code is licensed under the MIT license found in the
- # LICENSE file in the root directory.
-
- """ Implements a Sigma Adaptation Evolution Strategy
- similar to CMA-ES, but mainly focuses on sigma adaptation.
- Description is available at https://github.com/avaneev/biteopt
- """
-
- import sys
- import os
- import math
- import ctypes as ct
- import numpy as np
- from numpy.random import MT19937, Generator
- from scipy.optimize import OptimizeResult
- from fcmaes.cmaes import _check_bounds
- from fcmaes.decpp import mo_call_back_type, callback, libcmalib
-
- os.environ['MKL_DEBUG_CPU_TYPE'] = '5'
-
- def minimize(fun,
-              bounds=None,
-              x0=None,
-              input_sigma = 0.166,
-              popsize = 0,
-              max_evaluations = 100000,
-              stop_fitness = None,
-              rg = Generator(MT19937()),
-              runid=0):
-
-     """Minimization of a scalar function of one or more variables using a
-     C++ SCMA implementation called via ctypes.
-
-     Parameters
-     ----------
-     fun : callable
-         The objective function to be minimized.
-             ``fun(x, *args) -> float``
-         where ``x`` is a 1-D array with shape (dim,) and ``args``
-         is a tuple of the fixed parameters needed to completely
-         specify the function.
-     bounds : sequence or `Bounds`, optional
-         Bounds on variables. There are two ways to specify the bounds:
-             1. Instance of the `scipy.Bounds` class.
-             2. Sequence of ``(min, max)`` pairs for each element in `x`. None
-             is used to specify no bound.
-     x0 : ndarray, shape (dim,)
-         Initial guess. Array of real elements of size (dim,),
-         where 'dim' is the number of independent variables.
-     input_sigma : ndarray, shape (dim,) or scalar
-         Initial step size for each dimension.
-     popsize : int, optional
-         CMA-ES population size.
-     max_evaluations : int, optional
-         Forced termination after ``max_evaluations`` function evaluations.
-     stop_fitness : float, optional
-         Limit for fitness value. If reached minimize terminates.
-     rg : numpy.random.Generator, optional
-         Random generator for creating random guesses.
-     runid : int, optional
-         id used to identify the run for debugging / logging.
-
-     Returns
-     -------
-     res : scipy.OptimizeResult
-         The optimization result is represented as an ``OptimizeResult`` object.
-         Important attributes are: ``x`` the solution array,
-         ``fun`` the best function value,
-         ``nfev`` the number of function evaluations,
-         ``nit`` the number of CMA-ES iterations,
-         ``status`` the stopping criteria and
-         ``success`` a Boolean flag indicating if the optimizer exited successfully. """
-
-     lower, upper, guess = _check_bounds(bounds, x0, rg)
-     dim = guess.size
-     if lower is None:
-         lower = [0]*dim
-         upper = [0]*dim
-     if callable(input_sigma):
-         input_sigma=input_sigma()
-     if np.ndim(input_sigma) == 0:
-         input_sigma = [input_sigma] * dim
-     if stop_fitness is None:
-         stop_fitness = -math.inf
-     array_type = ct.c_double * dim
-     c_callback = mo_call_back_type(callback(fun, dim))
-     res = np.empty(dim+4)
-     res_p = res.ctypes.data_as(ct.POINTER(ct.c_double))
-     try:
-         optimizeCsma_C(runid, c_callback, dim, int(rg.uniform(0, 2**32 - 1)),
-             array_type(*guess), array_type(*lower), array_type(*upper),
-             array_type(*input_sigma), max_evaluations, stop_fitness, popsize, res_p)
-         x = res[:dim]
-         val = res[dim]
-         evals = int(res[dim+1])
-         iterations = int(res[dim+2])
-         stop = int(res[dim+3])
-         return OptimizeResult(x=x, fun=val, nfev=evals, nit=iterations, status=stop, success=True)
-     except Exception as ex:
-         return OptimizeResult(x=None, fun=sys.float_info.max, nfev=0, nit=0, status=-1, success=False)
-
- optimizeCsma_C = libcmalib.optimizeCsma_C
- optimizeCsma_C.argtypes = [ct.c_long, mo_call_back_type, ct.c_int, ct.c_int, \
-     ct.POINTER(ct.c_double), ct.POINTER(ct.c_double), ct.POINTER(ct.c_double), \
-     ct.POINTER(ct.c_double), ct.c_int, ct.c_double, ct.c_int, ct.POINTER(ct.c_double)]
-
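
For reference, a minimal sketch of how the removed csmacpp.minimize API was called against fcmaes 1.3.17 (the last version containing the module), reconstructed from the docstring above. The objective function, dimension, bounds and budget below are hypothetical illustrations, not part of the package:

    import numpy as np
    from scipy.optimize import Bounds
    from fcmaes import csmacpp  # present up to fcmaes 1.3.17, removed in 1.6.9

    def rosen(x):  # hypothetical objective; x arrives as a flat sequence of dim floats
        x = np.asarray(x)
        return float(np.sum(100.0 * (x[1:] - x[:-1]**2)**2 + (1.0 - x[:-1])**2))

    # bounds and evaluation budget chosen for illustration only
    ret = csmacpp.minimize(rosen, bounds=Bounds([-5.0] * 4, [5.0] * 4), max_evaluations=20000)
    print(ret.x, ret.fun, ret.nfev)  # OptimizeResult fields as documented above
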
fcmaes/gcldecpp.py DELETED
@@ -1,148 +0,0 @@
- # Copyright (c) Mingcheng Zuo, Dietmar Wolz.
- #
- # This source code is licensed under the MIT license found in the
- # LICENSE file in the root directory.
-
- """Eigen based implementation of differential evolution (GCL-DE) derived from
- "A case learning-based differential evolution algorithm for global optimization of interplanetary trajectory design,
- Mingcheng Zuo, Guangming Dai, Lei Peng, Maocai Wang, Zhengquan Liu", https://doi.org/10.1016/j.asoc.2020.106451
- """
-
- import sys
- import os
- import math
- import ctypes as ct
- import numpy as np
- import multiprocessing as mp
- from numpy.random import MT19937, Generator
- from scipy.optimize import OptimizeResult
- from fcmaes.ldecpp import callback_par, call_back_par
- from fcmaes.decpp import libcmalib
- from fcmaes import de
- from fcmaes.evaluator import Evaluator, eval_parallel
-
- os.environ['MKL_DEBUG_CPU_TYPE'] = '5'
-
- def minimize(fun,
-              dim = None,
-              bounds = None,
-              popsize = None,
-              max_evaluations = 100000,
-              stop_fitness = None,
-              pbest = 0.7,
-              f0 = 0.0,
-              cr0 = 0.0,
-              rg = Generator(MT19937()),
-              runid=0,
-              workers = None):
-
-     """Minimization of a scalar function of one or more variables using a
-     C++ GCL Differential Evolution implementation called via ctypes.
-
-     Parameters
-     ----------
-     fun : callable
-         The objective function to be minimized.
-             ``fun(x, *args) -> float``
-         where ``x`` is a 1-D array with shape (dim,) and ``args``
-         is a tuple of the fixed parameters needed to completely
-         specify the function.
-     dim : int
-         dimension of the argument of the objective function
-     bounds : sequence or `Bounds`
-         Bounds on variables. There are two ways to specify the bounds:
-             1. Instance of the `scipy.Bounds` class.
-             2. Sequence of ``(min, max)`` pairs for each element in `x`.
-     popsize : int, optional
-         Population size.
-     max_evaluations : int, optional
-         Forced termination after ``max_evaluations`` function evaluations.
-     stop_fitness : float, optional
-         Limit for fitness value. If reached minimize terminates.
-     pbest : float, optional
-         use low value 0 < pbest <= 1 to narrow search.
-     f0 : float, optional
-         The initial mutation constant. In the literature this is also known as differential weight,
-         being denoted by F. Should be in the range [0, 2].
-     cr0 : float, optional
-         The initial recombination constant. Should be in the range [0, 1].
-         In the literature this is also known as the crossover probability.
-     rg : numpy.random.Generator, optional
-         Random generator for creating random guesses.
-     runid : int, optional
-         id used to identify the run for debugging / logging.
-     workers : int or None, optional
-         If workers is not None, function evaluation is performed in parallel for the whole population.
-         Useful for costly objective functions but is deactivated for parallel retry.
-
-
-     Returns
-     -------
-     res : scipy.OptimizeResult
-         The optimization result is represented as an ``OptimizeResult`` object.
-         Important attributes are: ``x`` the solution array,
-         ``fun`` the best function value,
-         ``nfev`` the number of function evaluations,
-         ``nit`` the number of iterations,
-         ``success`` a Boolean flag indicating if the optimizer exited successfully. """
-
-     dim, lower, upper = de._check_bounds(bounds, dim)
-     if popsize is None:
-         popsize = int(dim*8.5+150)
-     if lower is None:
-         lower = [0]*dim
-         upper = [0]*dim
-     if stop_fitness is None:
-         stop_fitness = math.inf
-     parfun = None if workers is None else parallel(fun, workers)
-     array_type = ct.c_double * dim
-     c_callback_par = call_back_par(callback_par(fun, parfun))
-     seed = int(rg.uniform(0, 2**32 - 1))
-     res = np.empty(dim+4)
-     res_p = res.ctypes.data_as(ct.POINTER(ct.c_double))
-     try:
-         optimizeGCLDE_C(runid, c_callback_par, dim, seed,
-             array_type(*lower), array_type(*upper),
-             max_evaluations, pbest, stop_fitness,
-             popsize, f0, cr0, res_p)
-         x = res[:dim]
-         val = res[dim]
-         evals = int(res[dim+1])
-         iterations = int(res[dim+2])
-         stop = int(res[dim+3])
-         if not parfun is None:
-             parfun.stop() # stop all parallel evaluation processes
-         return OptimizeResult(x=x, fun=val, nfev=evals, nit=iterations, status=stop, success=True)
-     except Exception as ex:
-         if not workers is None:
-             fun.stop() # stop all parallel evaluation processes
-         return OptimizeResult(x=None, fun=sys.float_info.max, nfev=0, nit=0, status=-1, success=False)
-
- class parallel(object):
-     """Convert an objective function for parallel execution for cmaes.minimize.
-
-     Parameters
-     ----------
-     fun : objective function mapping a list of float arguments to a float value.
-
-     represents a function mapping a list of lists of float arguments to a list of float values
-     by applying the input function using parallel processes. stop needs to be called to avoid
-     a resource leak"""
-
-     def __init__(self, fun, workers = mp.cpu_count()):
-         self.evaluator = Evaluator(fun)
-         self.evaluator.start(workers)
-
-     def __call__(self, xs):
-         return eval_parallel(xs, self.evaluator)
-
-     def stop(self):
-         self.evaluator.stop()
-
- optimizeGCLDE_C = libcmalib.optimizeGCLDE_C
- optimizeGCLDE_C.argtypes = [ct.c_long, call_back_par, ct.c_int, ct.c_int, \
-     ct.POINTER(ct.c_double), ct.POINTER(ct.c_double), \
-     ct.c_int, ct.c_double, ct.c_double, ct.c_int, \
-     ct.c_double, ct.c_double, ct.POINTER(ct.c_double)]
-
-
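
Similarly, a sketch of a call to the removed gcldecpp.minimize, reconstructed from its docstring; note the dim argument and the optional workers argument for parallel evaluation of the whole population. The objective and parameter values are hypothetical:

    import numpy as np
    from scipy.optimize import Bounds
    from fcmaes import gcldecpp  # present up to fcmaes 1.3.17, removed in 1.6.9

    def sphere(x):  # hypothetical objective
        x = np.asarray(x)
        return float(np.sum(x * x))

    dim = 6
    ret = gcldecpp.minimize(sphere, dim=dim, bounds=Bounds([-10.0] * dim, [10.0] * dim),
                            max_evaluations=50000, workers=4)  # workers=None evaluates sequentially
    print(ret.fun, ret.nfev, ret.success)
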
fcmaes/lcldecpp.py DELETED
@@ -1,138 +0,0 @@
- # Copyright (c) Mingcheng Zuo, Dietmar Wolz.
- #
- # This source code is licensed under the MIT license found in the
- # LICENSE file in the root directory.
-
- """Eigen based implementation of differential evolution (LCL-DE) derived from
- "A case learning-based differential evolution algorithm for global optimization of interplanetary trajectory design,
- Mingcheng Zuo, Guangming Dai, Lei Peng, Maocai Wang, Zhengquan Liu", https://doi.org/10.1016/j.asoc.2020.106451
- To be used to further optimize a given solution. Initial population is created using a normal distribution
- with mean=x0 and sdev = input_sigma (normalized over the bounds and can be defined separately for each variable)
- """
-
- import sys
- import os
- import math
- import ctypes as ct
- import numpy as np
- from numpy.random import MT19937, Generator
- from scipy.optimize import OptimizeResult
- from fcmaes.ldecpp import callback_par, call_back_par
- from fcmaes.decpp import libcmalib
- from fcmaes.cmaes import _check_bounds
- from fcmaes.gcldecpp import parallel
-
- os.environ['MKL_DEBUG_CPU_TYPE'] = '5'
-
- def minimize(fun,
-              bounds=None,
-              x0=None,
-              input_sigma = 0.3,
-              popsize = None,
-              max_evaluations = 100000,
-              stop_fitness = None,
-              pbest = 0.7,
-              f0 = 0.0,
-              cr0 = 0.0,
-              rg = Generator(MT19937()),
-              runid=0,
-              workers = None):
-
-     """Minimization of a scalar function of one or more variables using a
-     C++ LCL Differential Evolution implementation called via ctypes.
-
-     Parameters
-     ----------
-     fun : callable
-         The objective function to be minimized.
-             ``fun(x, *args) -> float``
-         where ``x`` is a 1-D array with shape (dim,) and ``args``
-         is a tuple of the fixed parameters needed to completely
-         specify the function.
-     bounds : sequence or `Bounds`
-         Bounds on variables. There are two ways to specify the bounds:
-             1. Instance of the `scipy.Bounds` class.
-             2. Sequence of ``(min, max)`` pairs for each element in `x`.
-     x0 : ndarray, shape (dim,)
-         Initial guess. Array of real elements of size (dim,),
-         where 'dim' is the number of independent variables.
-     input_sigma : ndarray, shape (dim,) or scalar
-         Initial step size for each dimension.
-     popsize : int, optional
-         Population size.
-     max_evaluations : int, optional
-         Forced termination after ``max_evaluations`` function evaluations.
-     stop_fitness : float, optional
-         Limit for fitness value. If reached minimize terminates.
-     pbest : float, optional
-         use low value 0 < pbest <= 1 to narrow search.
-     f0 : float, optional
-         The initial mutation constant. In the literature this is also known as differential weight,
-         being denoted by F. Should be in the range [0, 2].
-     cr0 : float, optional
-         The initial recombination constant. Should be in the range [0, 1].
-         In the literature this is also known as the crossover probability.
-     rg : numpy.random.Generator, optional
-         Random generator for creating random guesses.
-     runid : int, optional
-         id used to identify the run for debugging / logging.
-     workers : int or None, optional
-         If workers is not None, function evaluation is performed in parallel for the whole population.
-         Useful for costly objective functions but is deactivated for parallel retry.
-
-
-     Returns
-     -------
-     res : scipy.OptimizeResult
-         The optimization result is represented as an ``OptimizeResult`` object.
-         Important attributes are: ``x`` the solution array,
-         ``fun`` the best function value,
-         ``nfev`` the number of function evaluations,
-         ``nit`` the number of iterations,
-         ``success`` a Boolean flag indicating if the optimizer exited successfully. """
-
-     lower, upper, guess = _check_bounds(bounds, x0, rg)
-     dim = guess.size
-     if popsize is None:
-         popsize = int(dim*8.5+150)
-     if lower is None:
-         lower = [0]*dim
-         upper = [0]*dim
-     if callable(input_sigma):
-         input_sigma=input_sigma()
-     if np.ndim(input_sigma) == 0:
-         input_sigma = [input_sigma] * dim
-     if stop_fitness is None:
-         stop_fitness = math.inf
-     parfun = None if workers is None else parallel(fun, workers)
-     array_type = ct.c_double * dim
-     c_callback_par = call_back_par(callback_par(fun, parfun))
-     seed = int(rg.uniform(0, 2**32 - 1))
-     res = np.empty(dim+4)
-     res_p = res.ctypes.data_as(ct.POINTER(ct.c_double))
-     try:
-         optimizeLCLDE_C(runid, c_callback_par, dim,
-             array_type(*guess), array_type(*input_sigma), seed,
-             array_type(*lower), array_type(*upper),
-             max_evaluations, pbest, stop_fitness,
-             popsize, f0, cr0, res_p)
-         x = res[:dim]
-         val = res[dim]
-         evals = int(res[dim+1])
-         iterations = int(res[dim+2])
-         stop = int(res[dim+3])
-         if not parfun is None:
-             parfun.stop() # stop all parallel evaluation processes
-         return OptimizeResult(x=x, fun=val, nfev=evals, nit=iterations, status=stop, success=True)
-     except Exception as ex:
-         if not workers is None:
-             fun.stop() # stop all parallel evaluation processes
-         return OptimizeResult(x=None, fun=sys.float_info.max, nfev=0, nit=0, status=-1, success=False)
-
- optimizeLCLDE_C = libcmalib.optimizeLCLDE_C
- optimizeLCLDE_C.argtypes = [ct.c_long, call_back_par, ct.c_int,
-     ct.POINTER(ct.c_double), ct.POINTER(ct.c_double), ct.c_int, \
-     ct.POINTER(ct.c_double), ct.POINTER(ct.c_double), \
-     ct.c_int, ct.c_double, ct.c_double, ct.c_int, \
-     ct.c_double, ct.c_double, ct.POINTER(ct.c_double)]
-
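
lcldecpp was meant to refine an existing solution; a sketch reconstructed from the docstring above, warm-starting from a hypothetical incumbent x0 with a per-variable input_sigma:

    import numpy as np
    from scipy.optimize import Bounds
    from fcmaes import lcldecpp  # present up to fcmaes 1.3.17, removed in 1.6.9

    def sphere(x):  # hypothetical objective
        x = np.asarray(x)
        return float(np.sum(x * x))

    dim = 6
    x0 = np.full(dim, 1.5)     # hypothetical solution to refine
    sigma = np.full(dim, 0.2)  # initial step size per dimension, normalized over the bounds
    ret = lcldecpp.minimize(sphere, bounds=Bounds([-10.0] * dim, [10.0] * dim),
                            x0=x0, input_sigma=sigma, max_evaluations=30000)
    print(ret.x, ret.fun)
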
fcmaes/ldecpp.py DELETED
@@ -1,172 +0,0 @@
- # Copyright (c) Dietmar Wolz.
- #
- # This source code is licensed under the MIT license found in the
- # LICENSE file in the root directory.
-
- """Eigen based implementation of differential evolution using only the DE/best/1 strategy.
- Uses two deviations from the standard DE algorithm:
- a) temporal locality introduced in
- https://www.researchgate.net/publication/309179699_Differential_evolution_for_protein_folding_optimization_based_on_a_three-dimensional_AB_off-lattice_model
- b) reinitialization of individuals based on their age.
- To be used to further optimize a given solution. Initial population is created using a normal distribution
- with mean=x0 and sdev = input_sigma (normalized over the bounds and can be defined separately for each variable)
- """
-
- import sys
- import os
- import math
- import ctypes as ct
- import numpy as np
- from numpy.random import MT19937, Generator
- from scipy.optimize import OptimizeResult
- from fcmaes.decpp import libcmalib
- from fcmaes.cmaes import _check_bounds
-
- os.environ['MKL_DEBUG_CPU_TYPE'] = '5'
-
- def minimize(fun,
-              bounds=None,
-              x0=None,
-              input_sigma = 0.3,
-              popsize = None,
-              max_evaluations = 100000,
-              stop_fitness = None,
-              keep = 200,
-              f = 0.5,
-              cr = 0.9,
-              rg = Generator(MT19937()),
-              runid=0):
-
-     """Minimization of a scalar function of one or more variables using a
-     C++ Differential Evolution implementation called via ctypes.
-
-     Parameters
-     ----------
-     fun : callable
-         The objective function to be minimized.
-             ``fun(x, *args) -> float``
-         where ``x`` is a 1-D array with shape (dim,) and ``args``
-         is a tuple of the fixed parameters needed to completely
-         specify the function.
-     bounds : sequence or `Bounds`, optional
-         Bounds on variables. There are two ways to specify the bounds:
-             1. Instance of the `scipy.Bounds` class.
-             2. Sequence of ``(min, max)`` pairs for each element in `x`. None
-             is used to specify no bound.
-     x0 : ndarray, shape (dim,)
-         Initial guess. Array of real elements of size (dim,),
-         where 'dim' is the number of independent variables.
-     input_sigma : ndarray, shape (dim,) or scalar
-         Initial step size for each dimension.
-     popsize : int, optional
-         Population size.
-     max_evaluations : int, optional
-         Forced termination after ``max_evaluations`` function evaluations.
-     stop_fitness : float, optional
-         Limit for fitness value. If reached minimize terminates.
-     keep : float, optional
-         changes the reinitialization probability of individuals based on their age. Higher value
-         means lower probability of reinitialization.
-     f : float, optional
-         The mutation constant. In the literature this is also known as differential weight,
-         being denoted by F. Should be in the range [0, 2].
-     cr : float, optional
-         The recombination constant. Should be in the range [0, 1].
-         In the literature this is also known as the crossover probability.
-     rg : numpy.random.Generator, optional
-         Random generator for creating random guesses.
-     runid : int, optional
-         id used to identify the run for debugging / logging.
-
-     Returns
-     -------
-     res : scipy.OptimizeResult
-         The optimization result is represented as an ``OptimizeResult`` object.
-         Important attributes are: ``x`` the solution array,
-         ``fun`` the best function value,
-         ``nfev`` the number of function evaluations,
-         ``nit`` the number of iterations,
-         ``success`` a Boolean flag indicating if the optimizer exited successfully. """
-
-     lower, upper, guess = _check_bounds(bounds, x0, rg)
-     dim = guess.size
-     if popsize is None:
-         popsize = 31
-     if lower is None:
-         lower = [0]*dim
-         upper = [0]*dim
-     if callable(input_sigma):
-         input_sigma=input_sigma()
-     if np.ndim(input_sigma) == 0:
-         input_sigma = [input_sigma] * dim
-     if stop_fitness is None:
-         stop_fitness = math.inf
-     array_type = ct.c_double * dim
-     c_callback = call_back_type(callback(fun))
-     seed = int(rg.uniform(0, 2**32 - 1))
-     res = np.empty(dim+4)
-     res_p = res.ctypes.data_as(ct.POINTER(ct.c_double))
-     try:
-         optimizeLDE_C(runid, c_callback, dim,
-             array_type(*guess), array_type(*input_sigma), seed,
-             array_type(*lower), array_type(*upper),
-             max_evaluations, keep, stop_fitness,
-             popsize, f, cr, res_p)
-         x = res[:dim]
-         val = res[dim]
-         evals = int(res[dim+1])
-         iterations = int(res[dim+2])
-         stop = int(res[dim+3])
-         return OptimizeResult(x=x, fun=val, nfev=evals, nit=iterations, status=stop, success=True)
-     except Exception as ex:
-         return OptimizeResult(x=None, fun=sys.float_info.max, nfev=0, nit=0, status=-1, success=False)
-
- class callback(object):
-
-     def __init__(self, fun):
-         self.fun = fun
-
-     def __call__(self, n, x):
-         try:
-             fit = self.fun([x[i] for i in range(n)])
-             return fit if math.isfinite(fit) else sys.float_info.max
-         except Exception as ex:
-             return sys.float_info.max
-
- class callback_par(object):
-
-     def __init__(self, fun, parfun):
-         self.fun = fun
-         self.parfun = parfun
-
-     def __call__(self, popsize, n, xs_, ys_):
-         try:
-             arrType = ct.c_double*(popsize*n)
-             addr = ct.addressof(xs_.contents)
-             xall = np.frombuffer(arrType.from_address(addr))
-
-             if self.parfun is None:
-                 for p in range(popsize):
-                     ys_[p] = self.fun(xall[p*n : (p+1)*n])
-             else:
-                 xs = []
-                 for p in range(popsize):
-                     x = xall[p*n : (p+1)*n]
-                     xs.append(x)
-                 ys = self.parfun(xs)
-                 for p in range(popsize):
-                     ys_[p] = ys[p]
-         except Exception as ex:
-             print (ex)
-
- call_back_type = ct.CFUNCTYPE(ct.c_double, ct.c_int, ct.POINTER(ct.c_double))
- call_back_par = ct.CFUNCTYPE(None, ct.c_int, ct.c_int, \
-     ct.POINTER(ct.c_double), ct.POINTER(ct.c_double))
-
- optimizeLDE_C = libcmalib.optimizeLDE_C
- optimizeLDE_C.argtypes = [ct.c_long, call_back_type, ct.c_int,
-     ct.POINTER(ct.c_double), ct.POINTER(ct.c_double), ct.c_int, \
-     ct.POINTER(ct.c_double), ct.POINTER(ct.c_double), \
-     ct.c_int, ct.c_double, ct.c_double, ct.c_int, \
-     ct.c_double, ct.c_double, ct.POINTER(ct.c_double)]
-
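
And a sketch of the removed ldecpp.minimize (DE/best/1 with temporal locality and age-based reinitialization), reconstructed from the docstring above; keep, f and cr are shown at their documented defaults, and the objective is hypothetical:

    import numpy as np
    from scipy.optimize import Bounds
    from fcmaes import ldecpp  # present up to fcmaes 1.3.17, removed in 1.6.9

    def rastrigin(x):  # hypothetical objective
        x = np.asarray(x)
        return float(10.0 * len(x) + np.sum(x * x - 10.0 * np.cos(2.0 * np.pi * x)))

    dim = 5
    ret = ldecpp.minimize(rastrigin, bounds=Bounds([-5.12] * dim, [5.12] * dim),
                          x0=np.full(dim, 0.5), input_sigma=0.3,
                          keep=200, f=0.5, cr=0.9, max_evaluations=40000)
    print(ret.fun, ret.nit, ret.status)
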
fcmaes/lib/libgtoplib.dll DELETED (binary file)
fcmaes/lib/libgtoplib.so DELETED (binary file)
Binary file
Binary file
Binary file
@@ -1,55 +0,0 @@
- Metadata-Version: 2.1
- Name: fcmaes
- Version: 1.3.17
- Summary: A Python 3 gradient-free optimization library.
- Home-page: https://github.com/dietmarwo/fast-cma-es
- Author: Dietmar Wolz
- Author-email: drdietmarwolz@yahoo.de
- License: MIT
- Keywords: optimization,multi-objective,constraints,CMA-ES,BiteOpt,MO-DE,differential evolution,annealing,stochastic,gradient free,parallel execution,boundary management
- Platform: UNKNOWN
- Classifier: Intended Audience :: Manufacturing
- Classifier: Intended Audience :: Financial and Insurance Industry
- Classifier: Intended Audience :: Healthcare Industry
- Classifier: Intended Audience :: Telecommunications Industry
- Classifier: Intended Audience :: Information Technology
- Classifier: Intended Audience :: Science/Research
- Classifier: Intended Audience :: Education
- Classifier: Topic :: Office/Business :: Financial
- Classifier: Topic :: Office/Business :: Scheduling
- Classifier: Topic :: Scientific/Engineering
- Classifier: Topic :: Scientific/Engineering :: Information Analysis
- Classifier: Topic :: Scientific/Engineering :: Mathematics
- Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
- Classifier: Operating System :: OS Independent
- Classifier: Programming Language :: Python :: 3
- Classifier: Development Status :: 4 - Beta
- Classifier: Environment :: Console
- Classifier: License :: OSI Approved :: MIT License
- Description-Content-Type: text/markdown
- License-File: LICENSE
- Requires-Dist: numpy
- Requires-Dist: scipy
-
- # fcmaes
- A Python 3 gradient-free optimization library.
-
- [README](https://github.com/dietmarwo/fast-cma-es/blob/master/README.adoc)
- [Tutorial](https://github.com/dietmarwo/fast-cma-es/blob/master/tutorials/Tutorial.adoc)
- [Multi-Objective](https://github.com/dietmarwo/fast-cma-es/blob/master/tutorials/MultiObjective.adoc),
- [MO-DE-NSGAII](https://github.com/dietmarwo/fast-cma-es/blob/master/tutorials/NSGAII.adoc),
- [gbea TopTrumps Benchmark](https://github.com/dietmarwo/fast-cma-es/blob/master/tutorials/TopTrumps.adoc),
- [Pagmo results](https://github.com/dietmarwo/fast-cma-es/blob/master/tutorials/PAGMO.adoc),
- [PYKEP Gym Results](https://github.com/dietmarwo/fast-cma-es/blob/master/tutorials/PYKEP.adoc)
- [Constraints](https://github.com/dietmarwo/fast-cma-es/blob/master/tutorials/Constraints.adoc)
- [Expressions](https://github.com/dietmarwo/fast-cma-es/blob/master/tutorials/Expressions.adoc)
- [ODE](https://github.com/dietmarwo/fast-cma-es/blob/master/tutorials/ODE.adoc)
- [MINLP](https://github.com/dietmarwo/fast-cma-es/blob/master/tutorials/MINLP.adoc)
- [HyperParameters](https://github.com/dietmarwo/fast-cma-es/blob/master/tutorials/HyperparameterOptimization.adoc)
- [Delayed Update](https://github.com/dietmarwo/fast-cma-es/blob/master/tutorials/DelayedUpdate.adoc)
-
-
-
-
-
-