gpjax 0.10.0__tar.gz → 0.10.2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (152)
  1. gpjax-0.10.2/.cursorrules +37 -0
  2. {gpjax-0.10.0 → gpjax-0.10.2}/PKG-INFO +1 -1
  3. {gpjax-0.10.0 → gpjax-0.10.2}/docs/sharp_bits.md +57 -0
  4. gpjax-0.10.2/examples/oak_example.py +216 -0
  5. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/__init__.py +1 -1
  6. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/base.py +4 -1
  7. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/nonstationary/polynomial.py +1 -1
  8. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/mean_functions.py +4 -3
  9. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/parameters.py +88 -26
  10. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_mean_functions.py +25 -32
  11. gpjax-0.10.2/tests/test_parameters.py +113 -0
  12. gpjax-0.10.0/tests/test_parameters.py +0 -56
  13. {gpjax-0.10.0 → gpjax-0.10.2}/.github/CODE_OF_CONDUCT.md +0 -0
  14. {gpjax-0.10.0 → gpjax-0.10.2}/.github/ISSUE_TEMPLATE/01_BUG_REPORT.md +0 -0
  15. {gpjax-0.10.0 → gpjax-0.10.2}/.github/ISSUE_TEMPLATE/02_FEATURE_REQUEST.md +0 -0
  16. {gpjax-0.10.0 → gpjax-0.10.2}/.github/ISSUE_TEMPLATE/03_CODEBASE_IMPROVEMENT.md +0 -0
  17. {gpjax-0.10.0 → gpjax-0.10.2}/.github/ISSUE_TEMPLATE/04_DOCS_IMPROVEMENT.md +0 -0
  18. {gpjax-0.10.0 → gpjax-0.10.2}/.github/ISSUE_TEMPLATE/config.yml +0 -0
  19. {gpjax-0.10.0 → gpjax-0.10.2}/.github/codecov.yml +0 -0
  20. {gpjax-0.10.0 → gpjax-0.10.2}/.github/labels.yml +0 -0
  21. {gpjax-0.10.0 → gpjax-0.10.2}/.github/pull_request_template.md +0 -0
  22. {gpjax-0.10.0 → gpjax-0.10.2}/.github/release-drafter.yml +0 -0
  23. {gpjax-0.10.0 → gpjax-0.10.2}/.github/workflows/build_docs.yml +0 -0
  24. {gpjax-0.10.0 → gpjax-0.10.2}/.github/workflows/integration.yml +0 -0
  25. {gpjax-0.10.0 → gpjax-0.10.2}/.github/workflows/pr_greeting.yml +0 -0
  26. {gpjax-0.10.0 → gpjax-0.10.2}/.github/workflows/ruff.yml +0 -0
  27. {gpjax-0.10.0 → gpjax-0.10.2}/.github/workflows/stale_prs.yml +0 -0
  28. {gpjax-0.10.0 → gpjax-0.10.2}/.github/workflows/test_docs.yml +0 -0
  29. {gpjax-0.10.0 → gpjax-0.10.2}/.github/workflows/tests.yml +0 -0
  30. {gpjax-0.10.0 → gpjax-0.10.2}/.gitignore +0 -0
  31. {gpjax-0.10.0 → gpjax-0.10.2}/CITATION.bib +0 -0
  32. {gpjax-0.10.0 → gpjax-0.10.2}/LICENSE.txt +0 -0
  33. {gpjax-0.10.0 → gpjax-0.10.2}/Makefile +0 -0
  34. {gpjax-0.10.0 → gpjax-0.10.2}/README.md +0 -0
  35. {gpjax-0.10.0 → gpjax-0.10.2}/docs/CODE_OF_CONDUCT.md +0 -0
  36. {gpjax-0.10.0 → gpjax-0.10.2}/docs/GOVERNANCE.md +0 -0
  37. {gpjax-0.10.0 → gpjax-0.10.2}/docs/contributing.md +0 -0
  38. {gpjax-0.10.0 → gpjax-0.10.2}/docs/design.md +0 -0
  39. {gpjax-0.10.0 → gpjax-0.10.2}/docs/index.md +0 -0
  40. {gpjax-0.10.0 → gpjax-0.10.2}/docs/index.rst +0 -0
  41. {gpjax-0.10.0 → gpjax-0.10.2}/docs/installation.md +0 -0
  42. {gpjax-0.10.0 → gpjax-0.10.2}/docs/javascripts/katex.js +0 -0
  43. {gpjax-0.10.0 → gpjax-0.10.2}/docs/refs.bib +0 -0
  44. {gpjax-0.10.0 → gpjax-0.10.2}/docs/scripts/gen_examples.py +0 -0
  45. {gpjax-0.10.0 → gpjax-0.10.2}/docs/scripts/gen_pages.py +0 -0
  46. {gpjax-0.10.0 → gpjax-0.10.2}/docs/scripts/notebook_converter.py +0 -0
  47. {gpjax-0.10.0 → gpjax-0.10.2}/docs/scripts/sharp_bits_figure.py +0 -0
  48. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/GP.pdf +0 -0
  49. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/GP.svg +0 -0
  50. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/bijector_figure.svg +0 -0
  51. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/css/gpjax_theme.css +0 -0
  52. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/favicon.ico +0 -0
  53. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/gpjax.mplstyle +0 -0
  54. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/gpjax_logo.pdf +0 -0
  55. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/gpjax_logo.svg +0 -0
  56. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/jaxkern/lato.ttf +0 -0
  57. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/jaxkern/logo.png +0 -0
  58. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/jaxkern/logo.svg +0 -0
  59. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/jaxkern/main.py +0 -0
  60. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/step_size_figure.png +0 -0
  61. {gpjax-0.10.0 → gpjax-0.10.2}/docs/static/step_size_figure.svg +0 -0
  62. {gpjax-0.10.0 → gpjax-0.10.2}/docs/stylesheets/extra.css +0 -0
  63. {gpjax-0.10.0 → gpjax-0.10.2}/docs/stylesheets/permalinks.css +0 -0
  64. {gpjax-0.10.0 → gpjax-0.10.2}/examples/backend.py +0 -0
  65. {gpjax-0.10.0 → gpjax-0.10.2}/examples/barycentres/barycentre_gp.gif +0 -0
  66. {gpjax-0.10.0 → gpjax-0.10.2}/examples/barycentres.py +0 -0
  67. {gpjax-0.10.0 → gpjax-0.10.2}/examples/classification.py +0 -0
  68. {gpjax-0.10.0 → gpjax-0.10.2}/examples/collapsed_vi.py +0 -0
  69. {gpjax-0.10.0 → gpjax-0.10.2}/examples/constructing_new_kernels.py +0 -0
  70. {gpjax-0.10.0 → gpjax-0.10.2}/examples/data/max_tempeature_switzerland.csv +0 -0
  71. {gpjax-0.10.0 → gpjax-0.10.2}/examples/data/yacht_hydrodynamics.data +0 -0
  72. {gpjax-0.10.0 → gpjax-0.10.2}/examples/deep_kernels.py +0 -0
  73. {gpjax-0.10.0 → gpjax-0.10.2}/examples/gpjax.mplstyle +0 -0
  74. {gpjax-0.10.0 → gpjax-0.10.2}/examples/graph_kernels.py +0 -0
  75. {gpjax-0.10.0 → gpjax-0.10.2}/examples/intro_to_gps/decomposed_mll.png +0 -0
  76. {gpjax-0.10.0 → gpjax-0.10.2}/examples/intro_to_gps/generating_process.png +0 -0
  77. {gpjax-0.10.0 → gpjax-0.10.2}/examples/intro_to_gps.py +0 -0
  78. {gpjax-0.10.0 → gpjax-0.10.2}/examples/intro_to_kernels.py +0 -0
  79. {gpjax-0.10.0 → gpjax-0.10.2}/examples/likelihoods_guide.py +0 -0
  80. {gpjax-0.10.0 → gpjax-0.10.2}/examples/oceanmodelling.py +0 -0
  81. {gpjax-0.10.0 → gpjax-0.10.2}/examples/poisson.py +0 -0
  82. {gpjax-0.10.0 → gpjax-0.10.2}/examples/regression.py +0 -0
  83. {gpjax-0.10.0 → gpjax-0.10.2}/examples/uncollapsed_vi.py +0 -0
  84. {gpjax-0.10.0 → gpjax-0.10.2}/examples/utils.py +0 -0
  85. {gpjax-0.10.0 → gpjax-0.10.2}/examples/yacht.py +0 -0
  86. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/citation.py +0 -0
  87. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/dataset.py +0 -0
  88. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/distributions.py +0 -0
  89. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/fit.py +0 -0
  90. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/gps.py +0 -0
  91. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/integrators.py +0 -0
  92. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/__init__.py +0 -0
  93. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/approximations/__init__.py +0 -0
  94. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/approximations/rff.py +0 -0
  95. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/computations/__init__.py +0 -0
  96. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/computations/base.py +0 -0
  97. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/computations/basis_functions.py +0 -0
  98. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/computations/constant_diagonal.py +0 -0
  99. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/computations/dense.py +0 -0
  100. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/computations/diagonal.py +0 -0
  101. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/computations/eigen.py +0 -0
  102. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/non_euclidean/__init__.py +0 -0
  103. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/non_euclidean/graph.py +0 -0
  104. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/non_euclidean/utils.py +0 -0
  105. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/nonstationary/__init__.py +0 -0
  106. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/nonstationary/arccosine.py +0 -0
  107. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/nonstationary/linear.py +0 -0
  108. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/stationary/__init__.py +0 -0
  109. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/stationary/base.py +0 -0
  110. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/stationary/matern12.py +0 -0
  111. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/stationary/matern32.py +0 -0
  112. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/stationary/matern52.py +0 -0
  113. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/stationary/periodic.py +0 -0
  114. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/stationary/powered_exponential.py +0 -0
  115. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/stationary/rational_quadratic.py +0 -0
  116. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/stationary/rbf.py +0 -0
  117. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/stationary/utils.py +0 -0
  118. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/kernels/stationary/white.py +0 -0
  119. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/likelihoods.py +0 -0
  120. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/lower_cholesky.py +0 -0
  121. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/objectives.py +0 -0
  122. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/scan.py +0 -0
  123. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/typing.py +0 -0
  124. {gpjax-0.10.0 → gpjax-0.10.2}/gpjax/variational_families.py +0 -0
  125. {gpjax-0.10.0 → gpjax-0.10.2}/mkdocs.yml +0 -0
  126. {gpjax-0.10.0 → gpjax-0.10.2}/pyproject.toml +0 -0
  127. {gpjax-0.10.0 → gpjax-0.10.2}/static/CONTRIBUTING.md +0 -0
  128. {gpjax-0.10.0 → gpjax-0.10.2}/static/paper.bib +0 -0
  129. {gpjax-0.10.0 → gpjax-0.10.2}/static/paper.md +0 -0
  130. {gpjax-0.10.0 → gpjax-0.10.2}/static/paper.pdf +0 -0
  131. {gpjax-0.10.0 → gpjax-0.10.2}/tests/__init__.py +0 -0
  132. {gpjax-0.10.0 → gpjax-0.10.2}/tests/conftest.py +0 -0
  133. {gpjax-0.10.0 → gpjax-0.10.2}/tests/integration_tests.py +0 -0
  134. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_citations.py +0 -0
  135. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_dataset.py +0 -0
  136. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_fit.py +0 -0
  137. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_gaussian_distribution.py +0 -0
  138. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_gps.py +0 -0
  139. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_integrators.py +0 -0
  140. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_kernels/__init__.py +0 -0
  141. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_kernels/test_approximations.py +0 -0
  142. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_kernels/test_base.py +0 -0
  143. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_kernels/test_computation.py +0 -0
  144. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_kernels/test_non_euclidean.py +0 -0
  145. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_kernels/test_nonstationary.py +0 -0
  146. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_kernels/test_stationary.py +0 -0
  147. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_kernels/test_utils.py +0 -0
  148. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_likelihoods.py +0 -0
  149. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_lower_cholesky.py +0 -0
  150. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_markdown.py +0 -0
  151. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_objectives.py +0 -0
  152. {gpjax-0.10.0 → gpjax-0.10.2}/tests/test_variational_families.py +0 -0
@@ -0,0 +1,37 @@
+ You are an AI assistant specialized in Python development and machine learning. Your approach emphasizes:
+
+ Clear project structure with separate directories for source code, tests, docs, and config.
+
+ Modular design with distinct files for models, services, controllers, and utilities.
+
+ Configuration management using environment variables.
+
+ Robust error handling and logging, including context capture.
+
+ Comprehensive testing with pytest.
+
+ Detailed documentation using docstrings and README files.
+
+ Code style consistency using Ruff.
+
+ CI/CD implementation with GitHub Actions or GitLab CI.
+
+ AI-friendly coding practices:
+
+ You provide code snippets and explanations tailored to these principles, optimizing for clarity and AI-assisted development.
+
+ Follow the following rules:
+
+ For any python file, be sure to ALWAYS add typing annotations to each function or class. Be sure to include return types when necessary. Add descriptive docstrings to all python functions and classes as well. Please use pep257 convention. Update existing docstrings if need be.
+
+ Make sure you keep any comments that exist in a file.
+
+ When writing tests, make sure that you ONLY use pytest or pytest plugins, do NOT use the unittest module. All tests should have typing annotations as well. All tests should be in ./tests. Be sure to create all necessary files and folders. If you are creating files inside of ./tests or ./src/goob_ai, be sure to make a init.py file if one does not exist.
+
+ All tests should be fully annotated and should contain docstrings. Be sure to import the following if TYPE_CHECKING:
+
+ from _pytest.capture import CaptureFixture
+ from _pytest.fixtures import FixtureRequest
+ from _pytest.logging import LogCaptureFixture
+ from _pytest.monkeypatch import MonkeyPatch
+ from pytest_mock.plugin import MockerFixture
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: gpjax
- Version: 0.10.0
+ Version: 0.10.2
  Summary: Gaussian processes in JAX.
  Project-URL: Documentation, https://docs.jaxgaussianprocesses.com/
  Project-URL: Issues, https://github.com/JaxGaussianProcesses/GPJax/issues
@@ -175,3 +175,60 @@ mini-batch optimisation of the parameters of your sparse Gaussian process model.
  model will scale linearly in the batch size and quadratically in the number of inducing
  points. We demonstrate its use in
  [our sparse stochastic variational inference notebook](_examples/uncollapsed_vi.md).
+
+ ## JIT compilation
+
+ There is a subset of operations in GPJax that are not JIT compatible by default. This
+ is because we have assertions in place to check the properties of the parameters. For
+ example, we check that the lengthscale parameter that a user provides is positive. This
+ makes for a better user experience, as we can provide more informative error messages;
+ however, JIT compiling functions wherein these assertions are made will break the code.
+ As an example, consider the following code:
+
+ ```python
+ import jax
+ import jax.numpy as jnp
+ import gpjax as gpx
+
+ x = jnp.linspace(0, 1, 10)[:, None]
+
+ def compute_gram(lengthscale):
+     k = gpx.kernels.RBF(active_dims=[0], lengthscale=lengthscale, variance=jnp.array(1.0))
+     return k.gram(x)
+
+ compute_gram(1.0)
+ ```
+
+ So far, so good. However, if we try to JIT compile this function, we will get an error:
+
+ ```python
+ jit_compute_gram = jax.jit(compute_gram)
+ try:
+     jit_compute_gram(1.0)
+ except Exception as e:
+     print(e)
+ ```
+
+ This error arises because the `RBF` kernel contains an assertion that checks that the
+ lengthscale is positive. It does not matter that the assertion is satisfied;
+ the very presence of the assertion will break JIT compilation.
+
+ To resolve this, we can wrap the function in `checkify.checkify`, which functionalises
+ the assertion and allows the function to be JIT compiled.
+
+ ```python
+ from jax.experimental import checkify
+
+ jit_compute_gram = jax.jit(checkify.checkify(compute_gram))
+ error, value = jit_compute_gram(1.0)
+ ```
+ By virtue of `checkify.checkify`, a tuple is returned whose first element is the error
+ produced by the assertion (if any), and whose second element is the value of the function.
+
+ This design is not perfect, and in an ideal world we would not force the user to wrap
+ their code in `checkify.checkify`. We are actively looking into cleaner ways to provide
+ guardrails in a less intrusive manner. For now, however, should you try to JIT compile
+ a component of GPJax wherein there is an assertion, you will need to wrap the function
+ in `checkify.checkify` as shown above.
+
+ For more on `checkify`, please see the [JAX Checkify Doc](https://docs.jax.dev/en/latest/debugging/checkify_guide.html).
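
An editorial aside on the pattern documented above: per the JAX checkify guide, `checkify.checkify` and `jax.jit` compose in either order. A minimal self-contained sketch of the checkify-of-jit form, using plain JAX only (no GPJax API assumed):

```python
import jax
import jax.numpy as jnp
from jax.experimental import checkify


def safe_sqrt(x):
    # A functionalised assertion: traced under jit rather than breaking it.
    checkify.check(jnp.all(x >= 0), "x must be non-negative, got {x}", x=x)
    return jnp.sqrt(x)


# checkify-of-jit also works: the wrapped function returns (error, value).
checked_sqrt = checkify.checkify(jax.jit(safe_sqrt))
error, value = checked_sqrt(jnp.array(4.0))
error.throw()  # no-op when the check passed; raises otherwise
```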
@@ -0,0 +1,216 @@
+ # -*- coding: utf-8 -*-
+ # ---
+ # jupyter:
+ #   jupytext:
+ #     cell_metadata_filter: -all
+ #     custom_cell_magics: kql
+ #     text_representation:
+ #       extension: .py
+ #       format_name: percent
+ #       format_version: '1.3'
+ #     jupytext_version: 1.16.7
+ #   kernelspec:
+ #     display_name: docs
+ #     language: python
+ #     name: python3
+ # ---
+
+ # %% [markdown]
+ # Copyright 2022 The JaxGaussianProcesses Contributors. All Rights Reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # ==============================================================================
+
+ # %%
+ """Example of using the OrthogonalAdditiveKernel."""
+
+ # %%
+ import jax
+ from jax import config
+ import jax.numpy as jnp
+ from matplotlib.colors import ListedColormap
+ import matplotlib.pyplot as plt
+ import optax
+
+ import gpjax as gpx
+ from gpjax.dataset import Dataset
+ from gpjax.kernels import (
+     RBF,
+     OrthogonalAdditiveKernel,
+ )
+ from gpjax.typing import KeyArray
+
+ config.update("jax_enable_x64", True)  # Enable Float64 precision
+
+
+ # %%
+ def f(x: jnp.ndarray) -> jnp.ndarray:
+     """Additive function with mixed dependencies:
+     f(x) = sin(π*x₁) + 2*cos(2π*x₂) + 0.5*sin(3π*x₁*x₂)
+
+     Args:
+         x: Input points array with shape (..., 2)
+
+     Returns:
+         Function values at the input points
+     """
+     return (
+         jnp.sin(jnp.pi * x[..., 0])
+         + 2.0 * jnp.cos(2.0 * jnp.pi * x[..., 1])
+         + 0.5 * jnp.sin(3.0 * jnp.pi * x[..., 0] * x[..., 1])
+     )
+
+
+ # %%
+ def generate_data(
+     key: KeyArray, n_train: int = 100, noise_std: float = 0.1
+ ) -> tuple[Dataset, jnp.ndarray, jnp.ndarray]:
+     """Generate synthetic training data.
+
+     Args:
+         key: JAX PRNG key for random number generation
+         n_train: Number of training points to generate
+         noise_std: Standard deviation of Gaussian observation noise
+
+     Returns:
+         Tuple of (training_data, X_test, meshgrid_for_plotting)
+     """
+     key1, key2, key3 = jax.random.split(key, 3)
+
+     # Generate training data
+     X_train = jax.random.uniform(key1, (n_train, 2))
+     y_train = f(X_train) + noise_std * jax.random.normal(key2, (n_train,))
+
+     training_data = Dataset(X=X_train, y=y_train[:, None])
+
+     # Generate test points for prediction
+     n_test = 20
+     x_range = jnp.linspace(0.0, 1.0, n_test)
+     X1, X2 = jnp.meshgrid(x_range, x_range)
+     X_test = jnp.vstack([X1.flatten(), X2.flatten()]).T
+
+     return training_data, X_test, (X1, X2)
+
+
+ # %%
+ def main():
+     # Set random seed for reproducibility
+     key = jax.random.PRNGKey(42)
+
+     # Generate synthetic training data
+     training_data, X_test, (X1, X2) = generate_data(key, n_train=100, noise_std=0.1)
+
+     # Create base kernel (RBF)
+     base_kernel = RBF(lengthscale=0.2)
+
+     # Create OAK kernel with second-order interactions
+     oak_kernel = OrthogonalAdditiveKernel(
+         base_kernel=base_kernel,
+         dim=2,
+         quad_deg=20,
+         second_order=True,
+     )
+
+     # Create a GP prior model
+     prior = gpx.gps.Prior(
+         mean_function=gpx.mean_functions.Zero(),
+         kernel=oak_kernel,
+     )
+
+     # Create a likelihood
+     likelihood = gpx.likelihoods.Gaussian(num_datapoints=training_data.n)
+
+     # Create the posterior
+     posterior = prior * likelihood
+
+     # Create parameter optimizer
+     optimizer = optax.adam(learning_rate=0.01)
+
+     # Define objective function for training
+     def objective(model, data):
+         return -model.mll(model.params, data)
+
+     # Optimize hyperparameters
+     opt_posterior, history = gpx.fit(
+         model=posterior,
+         objective=objective,
+         train_data=training_data,
+         optim=optimizer,
+         num_iters=300,
+         key=key,
+         verbose=True,
+     )
+
+     # Plot training curve
+     plt.figure(figsize=(10, 4))
+     plt.subplot(1, 2, 1)
+     plt.plot(history)
+     plt.title("Negative Log Marginal Likelihood")
+     plt.xlabel("Iteration")
+     plt.ylabel("NLML")
+
+     # Get posterior predictions
+     latent_dist = opt_posterior.predict(params=opt_posterior.params, x=X_test)
+     predictive_dist = opt_posterior.likelihood.condition(
+         latent_dist, opt_posterior.params
+     )
+     mu = predictive_dist.mean().reshape(X1.shape)
+     std = predictive_dist.stddev().reshape(X1.shape)
+
+     # Plot predictions
+     plt.subplot(1, 2, 2)
+     plt.contourf(X1, X2, mu, 50, cmap="viridis")
+     plt.colorbar(label="Predicted Mean")
+     plt.scatter(
+         training_data.X[:, 0],
+         training_data.X[:, 1],
+         c=training_data.y,
+         cmap=ListedColormap(["red", "blue"]),
+         alpha=0.6,
+         s=20,
+         edgecolors="k",
+     )
+     plt.title("OAK GP Predictions")
+     plt.xlabel("$x_1$")
+     plt.ylabel("$x_2$")
+
+     plt.tight_layout()
+     plt.savefig("oak_example.png", dpi=300)
+     plt.show()
+
+     # Print learned kernel parameters
+     print("\nLearned Parameters:")
+     print(f"Offset coefficient: {opt_posterior.params.kernel.offset.value}")
+     print(f"First-order coefficients: {opt_posterior.params.kernel.coeffs_1.value}")
+
+     # Analyze the importance of each dimension
+     importance_1st_order = opt_posterior.params.kernel.coeffs_1.value
+     total_importance = jnp.sum(importance_1st_order)
+     relative_importance = importance_1st_order / total_importance
+
+     print("\nRelative Importance of Input Dimensions:")
+     for i, imp in enumerate(relative_importance):
+         print(f"Dimension {i + 1}: {imp:.4f}")
+
+     if opt_posterior.params.kernel.coeffs_2 is not None:
+         # Analyze second-order interactions
+         coeffs_2 = opt_posterior.params.kernel.coeffs_2
+         print("\nSecond-order Interaction Coefficient:")
+         print(f"{coeffs_2[0, 1]:.4f}")
+
+
+ # %%
+ if __name__ == "__main__":
+     main()
+
+ # %%
@@ -39,7 +39,7 @@ __license__ = "MIT"
  __description__ = "Didactic Gaussian processes in JAX"
  __url__ = "https://github.com/JaxGaussianProcesses/GPJax"
  __contributors__ = "https://github.com/JaxGaussianProcesses/GPJax/graphs/contributors"
- __version__ = "0.10.0"
+ __version__ = "0.10.2"
 
  __all__ = [
      "base",
@@ -32,6 +32,7 @@ from gpjax.kernels.computations import (
  from gpjax.parameters import (
      Parameter,
      Real,
+     Static,
  )
  from gpjax.typing import (
      Array,
@@ -220,7 +221,9 @@ class Constant(AbstractKernel):
      def __init__(
          self,
          active_dims: tp.Union[list[int], slice, None] = None,
-         constant: tp.Union[ScalarFloat, Parameter[ScalarFloat]] = jnp.array(0.0),
+         constant: tp.Union[
+             ScalarFloat, Parameter[ScalarFloat], Static[ScalarFloat]
+         ] = jnp.array(0.0),
          compute_engine: AbstractKernelComputation = DenseKernelComputation(),
      ):
          if isinstance(constant, Parameter):
@@ -46,7 +46,7 @@ class Polynomial(AbstractKernel):
          self,
          active_dims: tp.Union[list[int], slice, None] = None,
          degree: int = 2,
-         shift: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 0.0,
+         shift: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0,
          variance: tp.Union[ScalarFloat, nnx.Variable[ScalarArray]] = 1.0,
          n_dims: tp.Union[int, None] = None,
          compute_engine: AbstractKernelComputation = DenseKernelComputation(),
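
A note on why a non-zero default shift is the conventional choice here (this is standard kernel theory, not a claim about the authors' motivation): expanding the textbook polynomial kernel shows that a positive shift contributes every interaction order up to the degree, whereas a zero shift leaves only the homogeneous degree-d terms.

```latex
% Textbook polynomial kernel; with c > 0 the binomial expansion
% contains all interaction orders j = 0, ..., d.
k(x, x') = (x^\top x' + c)^d = \sum_{j=0}^{d} \binom{d}{j} c^{\,d-j} \, (x^\top x')^{j}
```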
@@ -28,6 +28,7 @@ from jaxtyping import (
  from gpjax.parameters import (
      Parameter,
      Real,
+     Static
  )
  from gpjax.typing import (
      Array,
@@ -130,9 +131,9 @@ class Constant(AbstractMeanFunction):
      """
 
      def __init__(
-         self, constant: tp.Union[ScalarFloat, Float[Array, " O"], Parameter] = 0.0
+         self, constant: tp.Union[ScalarFloat, Float[Array, " O"], Parameter, Static] = 0.0
      ):
-         if isinstance(constant, Parameter):
+         if isinstance(constant, Parameter) or isinstance(constant, Static):
              self.constant = constant
          else:
              self.constant = Real(jnp.array(constant))
@@ -158,7 +159,7 @@ class Zero(Constant):
      """
 
      def __init__(self):
-         super().__init__(constant=jnp.array(0.0))
+         super().__init__(constant=Static(jnp.array(0.0)))
 
 
  class CombinationMeanFunction(AbstractMeanFunction):
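
The thread running through the last three hunks is the `Static` wrapper: unlike a `Parameter`, a `Static` value is excluded from the trainable set, so a `Zero` mean can no longer drift during fitting. A minimal sketch of the distinction, assuming only the `gpjax.parameters` names that appear in these hunks:

```python
import jax.numpy as jnp
from gpjax.parameters import Real, Static

trainable = Real(jnp.array(0.0))  # a Parameter: updated during gradient-based fitting
frozen = Static(jnp.array(0.0))   # a plain nnx.Variable: held fixed during fitting

# Both wrappers expose the underlying array via `.value`.
assert trainable.value == frozen.value == 0.0
```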
@@ -1,6 +1,7 @@
  import typing as tp
 
  from flax import nnx
+ from jax.experimental import checkify
  import jax.numpy as jnp
  import jax.tree_util as jtu
  from jax.typing import ArrayLike
@@ -84,8 +85,7 @@ class PositiveReal(Parameter[T]):
 
      def __init__(self, value: T, tag: ParameterTag = "positive", **kwargs):
          super().__init__(value=value, tag=tag, **kwargs)
-
-         _check_is_positive(self.value)
+         _safe_assert(_check_is_positive, self.value)
 
 
  class Real(Parameter[T]):
@@ -101,7 +101,17 @@ class SigmoidBounded(Parameter[T]):
      def __init__(self, value: T, tag: ParameterTag = "sigmoid", **kwargs):
          super().__init__(value=value, tag=tag, **kwargs)
 
-         _check_in_bounds(self.value, 0.0, 1.0)
+         # Only perform validation in non-JIT contexts
+         if (
+             not isinstance(value, jnp.ndarray)
+             or not getattr(value, "aval", None) is None
+         ):
+             _safe_assert(
+                 _check_in_bounds,
+                 self.value,
+                 low=jnp.array(0.0),
+                 high=jnp.array(1.0),
+             )
 
 
  class Static(nnx.Variable[T]):
@@ -120,8 +130,13 @@ class LowerTriangular(Parameter[T]):
      def __init__(self, value: T, tag: ParameterTag = "lower_triangular", **kwargs):
          super().__init__(value=value, tag=tag, **kwargs)
 
-         _check_is_square(self.value)
-         _check_is_lower_triangular(self.value)
+         # Only perform validation in non-JIT contexts
+         if (
+             not isinstance(value, jnp.ndarray)
+             or not getattr(value, "aval", None) is None
+         ):
+             _safe_assert(_check_is_square, self.value)
+             _safe_assert(_check_is_lower_triangular, self.value)
 
 
  DEFAULT_BIJECTION = {
@@ -132,36 +147,83 @@ DEFAULT_BIJECTION = {
  }
 
 
- def _check_is_arraylike(value: T):
+ def _check_is_arraylike(value: T) -> None:
+     """Check if a value is array-like.
+
+     Args:
+         value: The value to check.
+
+     Raises:
+         TypeError: If the value is not array-like.
+     """
      if not isinstance(value, (ArrayLike, list)):
          raise TypeError(
              f"Expected parameter value to be an array-like type. Got {value}."
          )
 
 
- def _check_is_positive(value: T):
-     if jnp.any(value < 0):
-         raise ValueError(
-             f"Expected parameter value to be strictly positive. Got {value}."
-         )
+ @checkify.checkify
+ def _check_is_positive(value):
+     checkify.check(
+         jnp.all(value > 0), "value needs to be positive, got {value}", value=value
+     )
 
 
- def _check_is_square(value: T):
-     if value.shape[0] != value.shape[1]:
-         raise ValueError(
-             f"Expected parameter value to be a square matrix. Got {value}."
-         )
+ @checkify.checkify
+ def _check_is_square(value: T) -> None:
+     """Check if a value is a square matrix.
+
+     Args:
+         value: The value to check.
+
+     Raises:
+         ValueError: If the value is not a square matrix.
+     """
+     checkify.check(
+         value.shape[0] == value.shape[1],
+         "value needs to be a square matrix, got {value}",
+         value=value,
+     )
 
 
- def _check_is_lower_triangular(value: T):
-     if not jnp.all(jnp.tril(value) == value):
-         raise ValueError(
-             f"Expected parameter value to be a lower triangular matrix. Got {value}."
-         )
+ @checkify.checkify
+ def _check_is_lower_triangular(value: T) -> None:
+     """Check if a value is a lower triangular matrix.
+
+     Args:
+         value: The value to check.
+
+     Raises:
+         ValueError: If the value is not a lower triangular matrix.
+     """
+     checkify.check(
+         jnp.all(jnp.tril(value) == value),
+         "value needs to be a lower triangular matrix, got {value}",
+         value=value,
+     )
 
 
- def _check_in_bounds(value: T, low: float, high: float):
-     if jnp.any((value < low) | (value > high)):
-         raise ValueError(
-             f"Expected parameter value to be bounded between {low} and {high}. Got {value}."
-         )
+ @checkify.checkify
+ def _check_in_bounds(value: T, low: T, high: T) -> None:
+     """Check if a value is bounded between low and high.
+
+     Args:
+         value: The value to check.
+         low: The lower bound.
+         high: The upper bound.
+
+     Raises:
+         ValueError: If any element of value is outside the bounds.
+     """
+     checkify.check(
+         jnp.all((value >= low) & (value <= high)),
+         "value needs to be bounded between {low} and {high}, got {value}",
+         value=value,
+         low=low,
+         high=high,
+     )
+
+
+ def _safe_assert(fn: tp.Callable[[tp.Any], None], value: T, **kwargs) -> None:
+     error, _ = fn(value, **kwargs)
+     checkify.check_error(error)
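
To see how the refactored checks above fit together: `checkify.checkify` turns a function built from `checkify.check` calls into one that returns an `(error, value)` pair, and `_safe_assert` re-raises that error eagerly via `checkify.check_error`. A self-contained sketch of the same idiom with illustrative names (not the GPJax API):

```python
import jax.numpy as jnp
from jax.experimental import checkify


@checkify.checkify
def _is_positive(value):
    # The check is functionalised, so the function stays JIT-compatible.
    checkify.check(jnp.all(value > 0), "value must be positive, got {value}", value=value)


def safe_assert(fn, value):
    # fn returns (error, result); surface the error in eager contexts.
    error, _ = fn(value)
    checkify.check_error(error)


safe_assert(_is_positive, jnp.array(2.0))   # passes silently
# safe_assert(_is_positive, jnp.array(-1.0))  # raises via checkify.check_error
```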
@@ -5,17 +5,20 @@ config.update("jax_enable_x64", True)
 
 
  import jax.numpy as jnp
+ import jax.random as jr
  from jaxtyping import (
      Array,
      Float,
  )
  import pytest
 
+ import gpjax as gpx
  from gpjax.mean_functions import (
      AbstractMeanFunction,
      Constant,
      Zero,
  )
+ from gpjax.parameters import Static
 
 
  def test_abstract() -> None:
@@ -49,38 +52,28 @@ def test_constant(constant: Float[Array, " Q"]) -> None:
      ).all()
 
 
- # TODO: rewrite this test after work on fit
- # def test_zero_mean_remains_zero() -> None:
- #     key = jr.PRNGKey(123)
-
- #     x = jr.uniform(key=key, minval=0, maxval=1, shape=(20, 1))
- #     y = jnp.full((20, 1), 50, dtype=jnp.float64)  # Dataset with non-zero mean
- #     D = gpx.Dataset(X=x, y=y)
-
- #     kernel = gpx.kernels.Constant(constant=jnp.array(0.0))
- #     kernel = kernel.replace_trainable(
- #         constant=False
- #     )  # Prevent kernel from modelling non-zero mean
- #     meanf = Zero()
- #     prior = gpx.gps.Prior(mean_function=meanf, kernel=kernel)
- #     likelihood = gpx.likelihoods.Gaussian(
- #         num_datapoints=D.n, obs_stddev=jnp.array(1e-3)
- #     )
- #     likelihood = likelihood.replace_trainable(obs_stddev=False)
- #     posterior = prior * likelihood
-
- #     negative_mll = gpx.objectives.ConjugateMLL(negative=True)
- #     opt_posterior, _ = gpx.fit(
- #         model=posterior,
- #         objective=negative_mll,
- #         train_data=D,
- #         optim=ox.adam(learning_rate=0.5),
- #         num_iters=1000,
- #         safe=True,
- #         key=key,
- #     )
-
- #     assert opt_posterior.prior.mean_function.constant == 0.0
+ def test_zero_mean_remains_zero() -> None:
+     key = jr.PRNGKey(123)
+
+     x = jr.uniform(key=key, minval=0, maxval=1, shape=(20, 1))
+     y = jnp.full((20, 1), 50, dtype=jnp.float64)  # Dataset with non-zero mean
+     D = gpx.Dataset(X=x, y=y)
+
+     constant = Static(jnp.array(0.0))
+     kernel = gpx.kernels.Constant(constant=constant)
+     meanf = Zero()
+     prior = gpx.gps.Prior(mean_function=meanf, kernel=kernel)
+     likelihood = gpx.likelihoods.Gaussian(
+         num_datapoints=D.n, obs_stddev=jnp.array(1e-3)
+     )
+     posterior = prior * likelihood
+
+     opt_posterior, _ = gpx.fit_scipy(
+         model=posterior,
+         objective=lambda p, d: -gpx.objectives.conjugate_mll(p, d),
+         train_data=D,
+     )
+     assert opt_posterior.prior.mean_function.constant.value == 0.0
 
 
  def test_initialising_zero_mean_with_constant_raises_error():