gpjax-0.9.3-py3-none-any.whl → gpjax-0.9.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
gpjax/__init__.py CHANGED
@@ -19,7 +19,6 @@ from beartype.roar import BeartypeDecorHintPep585DeprecationWarning
 filterwarnings("ignore", category=BeartypeDecorHintPep585DeprecationWarning)

 from gpjax import (
-    decision_making,
     gps,
     integrators,
     kernels,
@@ -40,11 +39,10 @@ __license__ = "MIT"
 __description__ = "Didactic Gaussian processes in JAX"
 __url__ = "https://github.com/JaxGaussianProcesses/GPJax"
 __contributors__ = "https://github.com/JaxGaussianProcesses/GPJax/graphs/contributors"
-__version__ = "0.9.3"
+__version__ = "0.9.5"

 __all__ = [
     "base",
-    "decision_making",
     "gps",
     "integrators",
     "kernels",
gpjax/citation.py CHANGED
@@ -10,11 +10,6 @@ from beartype.typing import (
 )
 from jaxlib.xla_extension import PjitFunction

-from gpjax.decision_making.test_functions import (
-    Forrester,
-    LogarithmicGoldsteinPrice,
-)
-from gpjax.decision_making.utility_functions import ThompsonSampling
 from gpjax.kernels import (
     RFF,
     ArcCosine,
@@ -149,41 +144,3 @@ def _(tree) -> PaperCitation:
         booktitle="Advances in neural information processing systems",
         citation_type="article",
     )
-
-
-####################
-# Decision making citations
-####################
-@cite.register(ThompsonSampling)
-def _(tree) -> PaperCitation:
-    return PaperCitation(
-        citation_key="wilson2020efficiently",
-        title="Efficiently sampling functions from Gaussian process posteriors",
-        authors="Wilson, James and Borovitskiy, Viacheslav and Terenin, Alexander and Mostowsky, Peter and Deisenroth, Marc",
-        year="2020",
-        booktitle="International Conference on Machine Learning",
-        citation_type="article",
-    )
-
-
-@cite.register(Forrester)
-def _(tree) -> BookCitation:
-    return BookCitation(
-        citation_key="forrester2008engineering",
-        authors="Forrester, Alexander and Sobester, Andras and Keane, Andy",
-        title="Engineering design via surrogate modelling: a practical guide",
-        year="2008",
-        publisher="John Wiley & Sons",
-    )
-
-
-@cite.register(LogarithmicGoldsteinPrice)
-def _(tree) -> PaperCitation:
-    return PaperCitation(
-        citation_key="picheny2013benchmark",
-        authors="Picheny, Victor and Wagner, Tobias and Ginsbourger, David",
-        title="A benchmark of kriging-based infill criteria for noisy optimization",
-        year="2013",
-        booktitle="Structural and multidisciplinary optimization",
-        citation_type="article",
-    )
gpjax/distributions.py CHANGED
@@ -162,7 +162,9 @@ class GaussianDistribution(tfd.Distribution):

         return vmap(affine_transformation)(Z)

-    def sample(self, seed: KeyArray, sample_shape: Tuple[int, ...]):  # pylint: disable=useless-super-delegation
+    def sample(
+        self, seed: KeyArray, sample_shape: Tuple[int, ...]
+    ):  # pylint: disable=useless-super-delegation
         r"""See `Distribution.sample`."""
         return self._sample_n(
             seed, sample_shape[0]
gpjax/gps.py CHANGED
@@ -652,7 +652,8 @@ class NonConjugatePosterior(AbstractPosterior[P, NGL]):
         """
         super().__init__(prior=prior, likelihood=likelihood, jitter=jitter)

-        latent = latent or jr.normal(key, shape=(self.likelihood.num_datapoints, 1))
+        if latent is None:
+            latent = jr.normal(key, shape=(self.likelihood.num_datapoints, 1))

         # TODO: static or intermediate?
         self.latent = latent if isinstance(latent, Parameter) else Real(latent)
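The replaced `latent or jr.normal(...)` pattern evaluates the truthiness of `latent`: for a JAX array with more than one element that raises an error, and a legitimately supplied zero value would be silently discarded in favour of the default. The same fix is applied to the variational families below. A minimal sketch of the difference, using a hypothetical `init_latent` helper rather than GPJax's actual constructor:

    import jax.numpy as jnp

    def init_latent(latent=None, n=10):
        # `latent or default` would call bool(latent): ambiguous for arrays
        # with more than one element, and falsy for a genuine zero value.
        if latent is None:  # only substitute the default when nothing was passed
            latent = jnp.zeros((n, 1))
        return latent

    init_latent()                   # zeros of shape (10, 1)
    init_latent(jnp.ones((10, 1)))  # the supplied array, untouched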
gpjax/variational_families.py CHANGED
@@ -149,12 +149,14 @@ class VariationalGaussian(AbstractVariationalGaussian[L]):
     ):
         super().__init__(posterior, inducing_inputs, jitter)

-        self.variational_mean = Real(
-            variational_mean or jnp.zeros((self.num_inducing, 1))
-        )
-        self.variational_root_covariance = LowerTriangular(
-            variational_root_covariance or jnp.eye(self.num_inducing)
-        )
+        if variational_mean is None:
+            variational_mean = jnp.zeros((self.num_inducing, 1))
+
+        if variational_root_covariance is None:
+            variational_root_covariance = jnp.eye(self.num_inducing)
+
+        self.variational_mean = Real(variational_mean)
+        self.variational_root_covariance = LowerTriangular(variational_root_covariance)

     def prior_kl(self) -> ScalarFloat:
         r"""Compute the prior KL divergence.
@@ -378,12 +380,14 @@ class NaturalVariationalGaussian(AbstractVariationalGaussian[L]):
     ):
         super().__init__(posterior, inducing_inputs, jitter)

-        self.natural_vector = Static(
-            natural_vector or jnp.zeros((self.num_inducing, 1))
-        )
-        self.natural_matrix = Static(
-            natural_matrix or -0.5 * jnp.eye(self.num_inducing)
-        )
+        if natural_vector is None:
+            natural_vector = jnp.zeros((self.num_inducing, 1))
+
+        if natural_matrix is None:
+            natural_matrix = -0.5 * jnp.eye(self.num_inducing)
+
+        self.natural_vector = Static(natural_vector)
+        self.natural_matrix = Static(natural_matrix)

     def prior_kl(self) -> ScalarFloat:
         r"""Compute the KL-divergence between our current variational approximation
@@ -540,13 +544,14 @@ class ExpectationVariationalGaussian(AbstractVariationalGaussian[L]):
     ):
         super().__init__(posterior, inducing_inputs, jitter)

-        # must come after super().__init__
-        self.expectation_vector = Static(
-            expectation_vector or jnp.zeros((self.num_inducing, 1))
-        )
-        self.expectation_matrix = Static(
-            expectation_matrix or jnp.eye(self.num_inducing)
-        )
+        if expectation_vector is None:
+            expectation_vector = jnp.zeros((self.num_inducing, 1))
+
+        if expectation_matrix is None:
+            expectation_matrix = jnp.eye(self.num_inducing)
+
+        self.expectation_vector = Static(expectation_vector)
+        self.expectation_matrix = Static(expectation_matrix)

     def prior_kl(self) -> ScalarFloat:
         r"""Evaluate the prior KL-divergence.
gpjax-0.9.3.dist-info/METADATA → gpjax-0.9.5.dist-info/METADATA CHANGED
@@ -1,13 +1,13 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.4
 Name: gpjax
-Version: 0.9.3
+Version: 0.9.5
 Summary: Gaussian processes in JAX.
 Project-URL: Documentation, https://docs.jaxgaussianprocesses.com/
 Project-URL: Issues, https://github.com/JaxGaussianProcesses/GPJax/issues
 Project-URL: Source, https://github.com/JaxGaussianProcesses/GPJax
 Author-email: Thomas Pinder <tompinder@live.co.uk>
-License-Expression: Apache-2.0
-License-File: LICENSE
+License: MIT
+License-File: LICENSE.txt
 Keywords: gaussian-processes jax machine-learning bayesian
 Classifier: Development Status :: 4 - Beta
 Classifier: Programming Language :: Python
@@ -19,10 +19,9 @@ Classifier: Programming Language :: Python :: Implementation :: PyPy
 Requires-Python: <3.13,>=3.10
 Requires-Dist: beartype>0.16.1
 Requires-Dist: cola-ml==0.0.5
-Requires-Dist: flax>=0.8.5
+Requires-Dist: flax<0.10.0
 Requires-Dist: jax<0.4.28
 Requires-Dist: jaxlib<0.4.28
-Requires-Dist: jaxopt==0.8.2
 Requires-Dist: jaxtyping>0.2.10
 Requires-Dist: numpy<2.0.0
 Requires-Dist: optax>0.2.1
@@ -103,23 +102,23 @@ helped to shape GPJax into the package it is today.

 ## Notebook examples

-> - [**Conjugate Inference**](https://docs.jaxgaussianprocesses.com/examples/regression/)
-> - [**Classification**](https://docs.jaxgaussianprocesses.com/examples/classification/)
-> - [**Sparse Variational Inference**](https://docs.jaxgaussianprocesses.com/examples/collapsed_vi/)
-> - [**Stochastic Variational Inference**](https://docs.jaxgaussianprocesses.com/examples/uncollapsed_vi/)
-> - [**Laplace Approximation**](https://docs.jaxgaussianprocesses.com/examples/classification/#laplace-approximation)
-> - [**Inference on Non-Euclidean Spaces**](https://docs.jaxgaussianprocesses.com/examples/constructing_new_kernels/#custom-kernel)
-> - [**Inference on Graphs**](https://docs.jaxgaussianprocesses.com/examples/graph_kernels/)
-> - [**Pathwise Sampling**](https://docs.jaxgaussianprocesses.com/examples/spatial/)
-> - [**Learning Gaussian Process Barycentres**](https://docs.jaxgaussianprocesses.com/examples/barycentres/)
-> - [**Deep Kernel Regression**](https://docs.jaxgaussianprocesses.com/examples/deep_kernels/)
-> - [**Poisson Regression**](https://docs.jaxgaussianprocesses.com/examples/poisson/)
-> - [**Bayesian Optimisation**](https://docs.jaxgaussianprocesses.com/examples/bayesian_optimisation/)
+> - [**Conjugate Inference**](https://docs.jaxgaussianprocesses.com/_examples/regression/)
+> - [**Classification**](https://docs.jaxgaussianprocesses.com/_examples/classification/)
+> - [**Sparse Variational Inference**](https://docs.jaxgaussianprocesses.com/_examples/collapsed_vi/)
+> - [**Stochastic Variational Inference**](https://docs.jaxgaussianprocesses.com/_examples/uncollapsed_vi/)
+> - [**Laplace Approximation**](https://docs.jaxgaussianprocesses.com/_examples/classification/#laplace-approximation)
+> - [**Inference on Non-Euclidean Spaces**](https://docs.jaxgaussianprocesses.com/_examples/constructing_new_kernels/#custom-kernel)
+> - [**Inference on Graphs**](https://docs.jaxgaussianprocesses.com/_examples/graph_kernels/)
+> - [**Pathwise Sampling**](https://docs.jaxgaussianprocesses.com/_examples/spatial/)
+> - [**Learning Gaussian Process Barycentres**](https://docs.jaxgaussianprocesses.com/_examples/barycentres/)
+> - [**Deep Kernel Regression**](https://docs.jaxgaussianprocesses.com/_examples/deep_kernels/)
+> - [**Poisson Regression**](https://docs.jaxgaussianprocesses.com/_examples/poisson/)
+> - [**Bayesian Optimisation**](https://docs.jaxgaussianprocesses.com/_examples/bayesian_optimisation/)

 ## Guides for customisation
 >
-> - [**Custom kernels**](https://docs.jaxgaussianprocesses.com/examples/constructing_new_kernels/#custom-kernel)
-> - [**UCI regression**](https://docs.jaxgaussianprocesses.com/examples/yacht/)
+> - [**Custom kernels**](https://docs.jaxgaussianprocesses.com/_examples/constructing_new_kernels/#custom-kernel)
+> - [**UCI regression**](https://docs.jaxgaussianprocesses.com/_examples/yacht/)

 ## Conversion between `.ipynb` and `.py`
 Above examples are stored in [examples](docs/examples) directory in the double
@@ -180,7 +179,7 @@ optimiser = ox.adam(learning_rate=1e-2)
 # Obtain Type 2 MLEs of the hyperparameters
 opt_posterior, history = gpx.fit(
     model=posterior,
-    objective=gpx.objectives.conjugate_mll,
+    objective=lambda p, d: -gpx.objectives.conjugate_mll(p, d),
     train_data=D,
     optim=optimiser,
     num_iters=500,
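The README snippet above now negates the conjugate marginal log-likelihood before handing it to `gpx.fit`, which is consistent with `fit` minimising whatever objective it receives. An equivalent, slightly more explicit form of the same lambda, assuming only the `gpx.objectives.conjugate_mll(posterior, data)` signature shown in the snippet:

    import gpjax as gpx

    # Maximising the conjugate marginal log-likelihood is the same as
    # minimising its negation, which is what gpx.fit expects here.
    def negative_conjugate_mll(posterior, data):
        return -gpx.objectives.conjugate_mll(posterior, data)

    # opt_posterior, history = gpx.fit(..., objective=negative_conjugate_mll, ...)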
gpjax-0.9.3.dist-info/RECORD → gpjax-0.9.5.dist-info/RECORD CHANGED
@@ -1,9 +1,9 @@
-gpjax/__init__.py,sha256=UNfvnpRhJEZfz9qYjzWYNSSh3xg1FqNFst_A6xl_nfE,1697
-gpjax/citation.py,sha256=R4Pmvjt0ndA0avEDSvIbxDxKapkRRYXWX7RRWBvZCRQ,5306
+gpjax/__init__.py,sha256=T-2EbsNxg5VcdTeSH_G-mWwNcMTJVqbdI55gl9HMvG8,1653
+gpjax/citation.py,sha256=f2Hzj5MLyCE7l0hHAzsEQoTORZH5hgV_eis4uoBiWvE,3811
 gpjax/dataset.py,sha256=NsToLKq4lOsHnfLfukrUIRKvhOEuoUk8aHTF0oAqRbU,4079
-gpjax/distributions.py,sha256=zxkSEZIlTg0PHvvgj0BQuIFEg-ugx6_NkEwSsbqWUM0,9325
+gpjax/distributions.py,sha256=X48FJr3reop9maherdMVt7-XZOm2f26T8AJt_IKM_oE,9339
 gpjax/fit.py,sha256=OHv8jUHxa1ndpqMERSDRtYtUDzubk9rMPVIhfCiIH5Q,11551
-gpjax/gps.py,sha256=NO18geRfcjo4mA3PGkuGont_Mj_yRqfvWzJqYmoKwiY,31225
+gpjax/gps.py,sha256=97lYGrsmsufQxKEd8qz5wPNvui6FKXTF_Ps-sMFIjnY,31246
 gpjax/integrators.py,sha256=eyJPqWNPKj6pKP5da0fEj4HW7BVyevqeGrurEuy_XPw,5694
 gpjax/likelihoods.py,sha256=DOyV1L0ompkpeImMTiOOiWLJfqSqvDX_acOumuFqPEc,9234
 gpjax/lower_cholesky.py,sha256=3pnHaBrlGckFsrfYJ9Lsbd0pGmO7NIXdyY4aGm48MpY,1952
@@ -12,21 +12,7 @@ gpjax/objectives.py,sha256=XwkPyL_iovTNKpKGVNt0Lt2_OMTJitSPhuyCtUrJpbc,15383
 gpjax/parameters.py,sha256=Z4Wy3gEzPZG23-dtqC437_ZWnd_sPe9LcLCKn21ZBvA,4886
 gpjax/scan.py,sha256=jStQvwkE9MGttB89frxam1kaeXdWih7cVxkGywyaeHQ,5365
 gpjax/typing.py,sha256=M3CvWsYtZ3PFUvBvvbRNjpwerNII0w4yGuP0I-sLeYI,1705
-gpjax/variational_families.py,sha256=JO78dywHNH9__hjJkrP2ASb1L3C9aEBOW7fd0run-e4,27918
-gpjax/decision_making/__init__.py,sha256=SDuPQl80lJ7nhfRsiB_7c22wCMiQO5ehSNohxUGnB7w,2170
-gpjax/decision_making/decision_maker.py,sha256=S4pOXrWcEHy0NDA0gfWzhk7pG0NJfaPpMXvq03yTy0g,13915
-gpjax/decision_making/posterior_handler.py,sha256=UgXf1Gu7GMh2YDSmiSWJIzmWlFW06KTS44HYz3mazZQ,5905
-gpjax/decision_making/search_space.py,sha256=bXwtMOhHZ2klnABpXm5Raxe7b0NTRDjo_cN3ecbk53Y,3545
-gpjax/decision_making/utility_maximizer.py,sha256=VT2amwSJbB64IL_MiWNl9ZgjcqO757qK6NW2gUBKsqs,5965
-gpjax/decision_making/utils.py,sha256=5j1GO5kcmG2laZR39NjhqgEjRekAWWzrnREv_5Zct_Y,2367
-gpjax/decision_making/test_functions/__init__.py,sha256=GDCY9_kaAnxDWwzo1FkdxnDx-80MErAHchbGybT9xYs,1109
-gpjax/decision_making/test_functions/continuous_functions.py,sha256=oL5ZQkvmbC3u9rEvSYI2DRAN3r7Ynf7wRZQlUWjKjt0,5612
-gpjax/decision_making/test_functions/non_conjugate_functions.py,sha256=cfo3xQOzB5ajMjjl0YFfNlJClkAcY7ZbT23UyBYEofQ,2955
-gpjax/decision_making/utility_functions/__init__.py,sha256=xXI-4JKWAfTJ7XZ1vRDpqtb91MNzSPD0lP6xo0tOc7o,1445
-gpjax/decision_making/utility_functions/base.py,sha256=FOqrsRDmtHiCVl6IHr12-AEYBLStzMT5EBs-F92e1Og,3882
-gpjax/decision_making/utility_functions/expected_improvement.py,sha256=H6hjC-lj1oiHf2BomeQqroORQ7vtcOngiDAWxRwkNbg,4481
-gpjax/decision_making/utility_functions/probability_of_improvement.py,sha256=O_rHH1yR34JJlpAueSDJ_yo95fPI2aAGkwphS8snBYk,5220
-gpjax/decision_making/utility_functions/thompson_sampling.py,sha256=S-Yyn-9jsKkaXTvKFBP4sG_eCCKApGbHao5RR5tqXAo,4353
+gpjax/variational_families.py,sha256=s1rk7PtNTjQPabmVu-jBsuJBoqsxAAXwKFZJOEswkNQ,28161
 gpjax/kernels/__init__.py,sha256=WZanH0Tpdkt0f7VfMqnalm_VZAMVwBqeOVaICNj6xQU,1901
 gpjax/kernels/base.py,sha256=abkj3zidsBs7YSkYEfjeJ5jTs1YyDCPoBM2ZzqaqrgI,11561
 gpjax/kernels/approximations/__init__.py,sha256=bK9HlGd-PZeGrqtG5RpXxUTXNUrZTgfjH1dP626yNMA,68
@@ -56,7 +42,7 @@ gpjax/kernels/stationary/rational_quadratic.py,sha256=dYONp3i4rnKj3ET8UyxAKXv6UO
 gpjax/kernels/stationary/rbf.py,sha256=G13gg5phO7ite7D9QgoCy7gB2_y0FM6GZhgFW4RL6Xw,1734
 gpjax/kernels/stationary/utils.py,sha256=Xa9EEnxgFqEi08ZSFAZYYHhJ85_3Ac-ZUyUk18B63M4,2225
 gpjax/kernels/stationary/white.py,sha256=TkdXXZCCjDs7JwR_gj5uvn2s1wyfRbe1vyHhUMJ8jjI,2212
-gpjax-0.9.3.dist-info/METADATA,sha256=A-tRR4wxz_YizCra4tZdOfRD1aUGJ_5sKgzL7Ax81B0,9976
-gpjax-0.9.3.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
-gpjax-0.9.3.dist-info/licenses/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357
-gpjax-0.9.3.dist-info/RECORD,,
+gpjax-0.9.5.dist-info/METADATA,sha256=T-OvGAyBe1N_QW6F9RbV-sx8wBJSAYQjpildBdhotS0,9967
+gpjax-0.9.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+gpjax-0.9.5.dist-info/licenses/LICENSE.txt,sha256=3umwi0h8wmKXOZO8XwRBwSl3vJt2hpWKEqSrSXLR7-I,1084
+gpjax-0.9.5.dist-info/RECORD,,
gpjax-0.9.3.dist-info/WHEEL → gpjax-0.9.5.dist-info/WHEEL CHANGED
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: hatchling 1.25.0
+Generator: hatchling 1.27.0
 Root-Is-Purelib: true
 Tag: py3-none-any
gpjax-0.9.5.dist-info/licenses/LICENSE.txt ADDED
@@ -0,0 +1,19 @@
+(C) Copyright 2019 Hewlett Packard Enterprise Development LP
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
gpjax/decision_making/__init__.py REMOVED
@@ -1,63 +0,0 @@
-# Copyright 2023 The JaxGaussianProcesses Contributors. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# ==============================================================================
-from gpjax.decision_making.decision_maker import (
-    AbstractDecisionMaker,
-    UtilityDrivenDecisionMaker,
-)
-from gpjax.decision_making.posterior_handler import PosteriorHandler
-from gpjax.decision_making.search_space import (
-    AbstractSearchSpace,
-    ContinuousSearchSpace,
-)
-from gpjax.decision_making.test_functions import (
-    AbstractContinuousTestFunction,
-    Forrester,
-    LogarithmicGoldsteinPrice,
-    Quadratic,
-)
-from gpjax.decision_making.utility_functions import (
-    AbstractSinglePointUtilityFunctionBuilder,
-    AbstractUtilityFunctionBuilder,
-    SinglePointUtilityFunction,
-    ThompsonSampling,
-    UtilityFunction,
-)
-from gpjax.decision_making.utility_maximizer import (
-    AbstractSinglePointUtilityMaximizer,
-    AbstractUtilityMaximizer,
-    ContinuousSinglePointUtilityMaximizer,
-)
-from gpjax.decision_making.utils import build_function_evaluator
-
-__all__ = [
-    "AbstractUtilityFunctionBuilder",
-    "AbstractUtilityMaximizer",
-    "AbstractDecisionMaker",
-    "AbstractSearchSpace",
-    "AbstractSinglePointUtilityFunctionBuilder",
-    "AbstractSinglePointUtilityMaximizer",
-    "UtilityFunction",
-    "build_function_evaluator",
-    "ContinuousSinglePointUtilityMaximizer",
-    "ContinuousSearchSpace",
-    "UtilityDrivenDecisionMaker",
-    "AbstractContinuousTestFunction",
-    "Forrester",
-    "LogarithmicGoldsteinPrice",
-    "PosteriorHandler",
-    "Quadratic",
-    "SinglePointUtilityFunction",
-    "ThompsonSampling",
-]