ennbo 0.0.7__tar.gz → 0.1.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75) hide show
  1. ennbo-0.1.1/.gitignore +6 -0
  2. {ennbo-0.0.7 → ennbo-0.1.1}/PKG-INFO +46 -12
  3. ennbo-0.1.1/README.md +63 -0
  4. ennbo-0.1.1/admin/conda-macos.yml +16 -0
  5. ennbo-0.1.1/examples/demo_enn.ipynb +127 -0
  6. ennbo-0.1.1/examples/demo_morbo.ipynb +320 -0
  7. ennbo-0.1.1/examples/demo_turbo_enn.ipynb +299 -0
  8. {ennbo-0.0.7 → ennbo-0.1.1}/pyproject.toml +3 -2
  9. ennbo-0.1.1/src/enn/__init__.py +24 -0
  10. ennbo-0.1.1/src/enn/enn/__init__.py +4 -0
  11. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/enn/enn.py +18 -11
  12. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/enn/enn_fit.py +13 -2
  13. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/enn/enn_util.py +8 -4
  14. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/proposal.py +10 -48
  15. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/turbo_config.py +1 -0
  16. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/turbo_enn_impl.py +5 -14
  17. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/turbo_gp_base.py +3 -3
  18. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/turbo_optimizer.py +3 -9
  19. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/turbo_trust_region.py +9 -7
  20. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/turbo_utils.py +13 -63
  21. {ennbo-0.0.7 → ennbo-0.1.1}/tests/conftest.py +6 -12
  22. {ennbo-0.0.7 → ennbo-0.1.1}/tests/test_enn_core.py +38 -11
  23. ennbo-0.1.1/tests/test_enn_fit.py +110 -0
  24. {ennbo-0.0.7 → ennbo-0.1.1}/tests/test_enn_util.py +6 -6
  25. {ennbo-0.0.7 → ennbo-0.1.1}/tests/test_turbo.py +65 -287
  26. ennbo-0.0.7/.cursorrules +0 -54
  27. ennbo-0.0.7/.cursorrules~ +0 -51
  28. ennbo-0.0.7/README.md +0 -30
  29. ennbo-0.0.7/assets/image-2b7c2993-a63b-4d7c-bddd-245c066ad6db.png +0 -0
  30. ennbo-0.0.7/assets/image-2bea4cb6-098f-4c0c-9dc2-b04e03808d96.png +0 -0
  31. ennbo-0.0.7/assets/image-40487dc1-d2d3-4603-81d6-7a5dbb4b500e.png +0 -0
  32. ennbo-0.0.7/assets/image-5fd9563a-6797-4835-87c4-72d9e7e92ada.png +0 -0
  33. ennbo-0.0.7/assets/image-a5905007-8b6b-4ce5-8af2-ab9dfc66506d.png +0 -0
  34. ennbo-0.0.7/assets/image-c634c557-87d0-47c1-b9e7-779872808905.png +0 -0
  35. ennbo-0.0.7/assets/image-fbf2c91f-2c75-47de-a7c9-bc2d6dea4263.png +0 -0
  36. ennbo-0.0.7/data/MNIST/raw/t10k-images-idx3-ubyte +0 -0
  37. ennbo-0.0.7/data/MNIST/raw/t10k-images-idx3-ubyte.gz +0 -0
  38. ennbo-0.0.7/data/MNIST/raw/t10k-labels-idx1-ubyte +0 -0
  39. ennbo-0.0.7/data/MNIST/raw/t10k-labels-idx1-ubyte.gz +0 -0
  40. ennbo-0.0.7/data/MNIST/raw/train-images-idx3-ubyte +0 -0
  41. ennbo-0.0.7/data/MNIST/raw/train-images-idx3-ubyte.gz +0 -0
  42. ennbo-0.0.7/data/MNIST/raw/train-labels-idx1-ubyte +0 -0
  43. ennbo-0.0.7/data/MNIST/raw/train-labels-idx1-ubyte.gz +0 -0
  44. ennbo-0.0.7/examples/data/MNIST/raw/t10k-images-idx3-ubyte +0 -0
  45. ennbo-0.0.7/examples/data/MNIST/raw/t10k-images-idx3-ubyte.gz +0 -0
  46. ennbo-0.0.7/examples/data/MNIST/raw/t10k-labels-idx1-ubyte +0 -0
  47. ennbo-0.0.7/examples/data/MNIST/raw/t10k-labels-idx1-ubyte.gz +0 -0
  48. ennbo-0.0.7/examples/data/MNIST/raw/train-images-idx3-ubyte +0 -0
  49. ennbo-0.0.7/examples/data/MNIST/raw/train-images-idx3-ubyte.gz +0 -0
  50. ennbo-0.0.7/examples/data/MNIST/raw/train-labels-idx1-ubyte +0 -0
  51. ennbo-0.0.7/examples/data/MNIST/raw/train-labels-idx1-ubyte.gz +0 -0
  52. ennbo-0.0.7/examples/demo_enn.ipynb +0 -197
  53. ennbo-0.0.7/examples/demo_turbo_enn.ipynb +0 -333
  54. ennbo-0.0.7/requirements.md +0 -16
  55. ennbo-0.0.7/requirements.txt~ +0 -6
  56. ennbo-0.0.7/src/enn/__init__.py +0 -28
  57. ennbo-0.0.7/style.md +0 -89
  58. ennbo-0.0.7/tests/examples/__init__.py +0 -0
  59. ennbo-0.0.7/tests/examples/mnist/__init__.py +0 -0
  60. ennbo-0.0.7/tests/examples/mnist/test_mnist.py +0 -149
  61. ennbo-0.0.7/tests/test_enn_fit.py +0 -40
  62. {ennbo-0.0.7 → ennbo-0.1.1}/.pre-commit-config.yaml +0 -0
  63. {ennbo-0.0.7 → ennbo-0.1.1}/LICENSE +0 -0
  64. {ennbo-0.0.7 → ennbo-0.1.1}/admin/find_forgotten_py.sh +0 -0
  65. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/enn/enn_normal.py +0 -0
  66. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/enn/enn_params.py +0 -0
  67. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/__init__.py +0 -0
  68. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/base_turbo_impl.py +0 -0
  69. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/lhd_only_impl.py +0 -0
  70. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/turbo_gp.py +0 -0
  71. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/turbo_gp_noisy.py +0 -0
  72. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/turbo_mode.py +0 -0
  73. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/turbo_mode_impl.py +0 -0
  74. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/turbo_one_impl.py +0 -0
  75. {ennbo-0.0.7/src → ennbo-0.1.1/src/enn}/turbo/turbo_zero_impl.py +0 -0
ennbo-0.1.1/.gitignore ADDED
@@ -0,0 +1,6 @@
1
+ *~
2
+ dist/
3
+ style.md
4
+ requirements.md
5
+ .cursorrules
6
+ __pycache__/
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ennbo
3
- Version: 0.0.7
3
+ Version: 0.1.1
4
4
  Summary: Epistemic Nearest Neighbors
5
5
  Project-URL: Homepage, https://github.com/yubo-research/enn
6
6
  Project-URL: Source, https://github.com/yubo-research/enn
@@ -36,8 +36,9 @@ Classifier: Topic :: Scientific/Engineering
36
36
  Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
37
37
  Classifier: Topic :: Scientific/Engineering :: Mathematics
38
38
  Requires-Python: >=3.11
39
- Requires-Dist: faiss-cpu==1.9.0
39
+ Requires-Dist: faiss-cpu>=1.9.0
40
40
  Requires-Dist: gpytorch==1.13
41
+ Requires-Dist: nds==0.4.3
41
42
  Requires-Dist: numpy==1.26.4
42
43
  Requires-Dist: scipy==1.15.3
43
44
  Requires-Dist: torch==2.5.1
@@ -49,27 +50,60 @@ A fast, alternative surrogate for Bayesian optimization
49
50
  ENN estimates a function's value and associated epistemic uncertainty using a K-Nearest Neighbors model. Queries take $O(N \ln K)$ time, where $N$ is the number of observations available for KNN lookups. Compare to an exact GP, which takes $O(N^2)$ time. Additionally, measured running times are very small compared to GPs and other alternative surrogates. [1]
50
51
 
51
52
  ## Contents
52
- - ENN model, [`EpistemicNearestNeighbors`](https://github.com/yubo-research/enn/blob/main/src/enn/core.py) [1]
53
- - TuRBO-ENN optimizer, class [`TurboOptimizer`](https://github.com/yubo-research/enn/blob/main/src/enn/turbo_optimizer.py) has four modes
53
+ - ENN model, [`EpistemicNearestNeighbors`](https://github.com/yubo-research/enn/blob/main/src/enn/enn/enn.py) [1]
54
+ - TuRBO-ENN optimizer, class [`TurboOptimizer`](https://github.com/yubo-research/enn/blob/main/src/enn/turbo/turbo_optimizer.py) has four modes
54
55
  - `TURBO_ONE` - A clone of the TuRBO [2] reference [code](https://github.com/uber-research/TuRBO), reworked to have an `ask()`/`tell()` interface.
55
56
  - `TURBO_ENN` - Same as TURBO_ONE, except uses ENN instead of GP and Pareto(mu, se) instead of Thompson sampling.
56
57
  - `TURBO_ZERO` - Same as TURBO_ONE, except randomly-chosen RAASP [3] candidates are picked to be proposals. There is no surrogate.
57
58
  - `LHD_ONLY` - Just creates an LHD design for every `ask()`. Good for a baseline and for testing.
58
59
 
59
- [1] **Sweet, D., & Jadhav, S. A. (2025).** Taking the GP Out of the Loop. *arXiv preprint arXiv:2506.12818*.
60
- https://arxiv.org/abs/2506.12818
61
- [2] **Eriksson, D., Pearce, M., Gardner, J. R., Turner, R., & Poloczek, M. (2020).** Scalable Global Optimization via Local Bayesian Optimization. *Advances in Neural Information Processing Systems, 32*.
62
- https://arxiv.org/abs/1910.01739
63
- [3] **Rashidi, B., Johnstonbaugh, K., & Gao, C. (2024).** Cylindrical Thompson Sampling for High-Dimensional Bayesian Optimization. *Proceedings of The 27th International Conference on Artificial Intelligence and Statistics* (pp. 3502–3510). PMLR.
64
- https://proceedings.mlr.press/v238/rashidi24a.html
60
+ [1] **Sweet, D., & Jadhav, S. A. (2025).** Taking the GP Out of the Loop. *arXiv preprint arXiv:2506.12818*.
61
+ https://arxiv.org/abs/2506.12818
62
+ [2] **Eriksson, D., Pearce, M., Gardner, J. R., Turner, R., & Poloczek, M. (2020).** Scalable Global Optimization via Local Bayesian Optimization. *Advances in Neural Information Processing Systems, 32*.
63
+ https://arxiv.org/abs/1910.01739
64
+ [3] **Rashidi, B., Johnstonbaugh, K., & Gao, C. (2024).** Cylindrical Thompson Sampling for High-Dimensional Bayesian Optimization. *Proceedings of The 27th International Conference on Artificial Intelligence and Statistics* (pp. 3502–3510). PMLR.
65
+ https://proceedings.mlr.press/v238/rashidi24a.html
65
66
 
66
67
 
67
68
  ## Installation
68
69
  `pip install ennbo`
69
70
 
70
71
  ## Demonstration
71
- [`demo_enn.ipynb`](https://github.com/yubo-research/enn/tree/main/examples) - Shows how to use [`EpistemicNearestNeighbors`](https://github.com/yubo-research/enn/blob/main/src/enn/core.py) to build and query an ENN model.
72
- [`demo_turbo_enn.ipynb`](https://github.com/yubo-research/enn/tree/main/examples) - Shows how to use [`TurboOptimizer`](https://github.com/yubo-research/enn/blob/main/src/enn/turbo_optimizer.py) to optimize the Ackley function.
72
+ [`demo_enn.ipynb`](https://github.com/yubo-research/enn/tree/main/examples/demo_enn.ipynb) - Shows how to use [`EpistemicNearestNeighbors`](https://github.com/yubo-research/enn/blob/main/src/enn/enn/enn.py) to build and query an ENN model.
73
+ [`demo_turbo_enn.ipynb`](https://github.com/yubo-research/enn/tree/main/examples/demo_turbo_enn.ipynb) - Shows how to use [`TurboOptimizer`](https://github.com/yubo-research/enn/blob/main/src/enn/turbo/turbo_optimizer.py) to optimize the Ackley function.
73
74
 
74
75
 
75
76
 
77
+ ## Installation, MacOS
78
+
79
+ On my MacBook I can run into problems with dependencies and compatibilities.
80
+
81
+ On MacOS try:
82
+ ```
83
+ micromamba env create -n ennbo -f conda-macos.yml
84
+ micromamba activate ennbo
85
+ pip install --no-deps ennbo
86
+ ```
87
+
88
+ You may replace `micromamba` with `conda` and this will probably still work.
89
+
90
+ The commands above make sure
91
+ - You use the MacOS-specific PyTorch (with `mps`).
92
+ - You avoid having multiple, competing OpenMPs installed [PyTorch issue](https://github.com/pytorch/pytorch/issues/44282) [faiss issue](https://github.com/faiss-wheels/faiss-wheels/issues/40).
93
+ - You use old enough versions of NumPy and PyTorch to be compatible with faiss [faiss issue](https://github.com/faiss-wheels/faiss-wheels/issues/104).
94
+ - You prevent matplotlib's installation from upgrading your NumPy to an incompatible version.
95
+ - `ennbo`'s listed dependencies do not undo any of the above (which is fine because the above commands set them up correctly).
96
+
97
+ Run tests with
98
+ ```
99
+ pytest -x -sv tests
100
+ ```
101
+ and they should all pass fairly quickly (~10s-30s).
102
+
103
+
104
+ If your code still crashes or hangs, try this [hack](https://discuss.pytorch.org/t/ran-into-this-issue-while-executing/101460):
105
+ ```
106
+ export KMP_DUPLICATE_LIB_OK=TRUE
107
+ export OMP_NUM_THREADS=1
108
+ ```
109
+ I don't recommend this, however, as it will slow things down.
ennbo-0.1.1/README.md ADDED
@@ -0,0 +1,63 @@
1
+ # Epistemic Nearest Neighbors
2
+ A fast, alternative surrogate for Bayesian optimization
3
+
4
+ ENN estimates a function's value and associated epistemic uncertainty using a K-Nearest Neighbors model. Queries take $O(N \ln K)$ time, where $N$ is the number of observations available for KNN lookups. Compare to an exact GP, which takes $O(N^2)$ time. Additionally, measured running times are very small compared to GPs and other alternative surrogates. [1]
5
+
6
+ ## Contents
7
+ - ENN model, [`EpistemicNearestNeighbors`](https://github.com/yubo-research/enn/blob/main/src/enn/enn/enn.py) [1]
8
+ - TuRBO-ENN optimizer, class [`TurboOptimizer`](https://github.com/yubo-research/enn/blob/main/src/enn/turbo/turbo_optimizer.py) has four modes
9
+ - `TURBO_ONE` - A clone of the TuRBO [2] reference [code](https://github.com/uber-research/TuRBO), reworked to have an `ask()`/`tell()` interface.
10
+ - `TURBO_ENN` - Same as TURBO_ONE, except uses ENN instead of GP and Pareto(mu, se) instead of Thompson sampling.
11
+ - `TURBO_ZERO` - Same as TURBO_ONE, except randomly-chosen RAASP [3] candidates are picked to be proposals. There is no surrogate.
12
+ - `LHD_ONLY` - Just creates an LHD design for every `ask()`. Good for a baseline and for testing.
13
+
14
+ [1] **Sweet, D., & Jadhav, S. A. (2025).** Taking the GP Out of the Loop. *arXiv preprint arXiv:2506.12818*.
15
+ https://arxiv.org/abs/2506.12818
16
+ [2] **Eriksson, D., Pearce, M., Gardner, J. R., Turner, R., & Poloczek, M. (2020).** Scalable Global Optimization via Local Bayesian Optimization. *Advances in Neural Information Processing Systems, 32*.
17
+ https://arxiv.org/abs/1910.01739
18
+ [3] **Rashidi, B., Johnstonbaugh, K., & Gao, C. (2024).** Cylindrical Thompson Sampling for High-Dimensional Bayesian Optimization. *Proceedings of The 27th International Conference on Artificial Intelligence and Statistics* (pp. 3502–3510). PMLR.
19
+ https://proceedings.mlr.press/v238/rashidi24a.html
20
+
21
+
22
+ ## Installation
23
+ `pip install ennbo`
24
+
25
+ ## Demonstration
26
+ [`demo_enn.ipynb`](https://github.com/yubo-research/enn/tree/main/examples/demo_enn.ipynb) - Shows how to use [`EpistemicNearestNeighbors`](https://github.com/yubo-research/enn/blob/main/src/enn/enn/enn.py) to build and query an ENN model.
27
+ [`demo_turbo_enn.ipynb`](https://github.com/yubo-research/enn/tree/main/examples/demo_turbo_enn.ipynb) - Shows how to use [`TurboOptimizer`](https://github.com/yubo-research/enn/blob/main/src/enn/turbo/turbo_optimizer.py) to optimize the Ackley function.
28
+
29
+
30
+
31
+ ## Installation, MacOS
32
+
33
+ On my MacBook I can run into problems with dependencies and compatibilities.
34
+
35
+ On MacOS try:
36
+ ```
37
+ micromamba env create -n ennbo -f conda-macos.yml
38
+ micromamba activate ennbo
39
+ pip install --no-deps ennbo
40
+ ```
41
+
42
+ You may replace `micromamba` with `conda` and this will probably still work.
43
+
44
+ The commands above make sure
45
+ - You use the MacOS-specific PyTorch (with `mps`).
46
+ - You avoid having multiple, competing OpenMPs installed [PyTorch issue](https://github.com/pytorch/pytorch/issues/44282) [faiss issue](https://github.com/faiss-wheels/faiss-wheels/issues/40).
47
+ - You use old enough versions of NumPy and PyTorch to be compatible with faiss [faiss issue](https://github.com/faiss-wheels/faiss-wheels/issues/104).
48
+ - You prevent matplotlib's installation from upgrading your NumPy to an incompatible version.
49
+ - `ennbo`'s listed dependencies do not undo any of the above (which is fine because the above commands set them up correctly).
50
+
51
+ Run tests with
52
+ ```
53
+ pytest -x -sv tests
54
+ ```
55
+ and they should all pass fairly quickly (~10s-30s).
56
+
57
+
58
+ If your code still crashes or hangs, try this [hack](https://discuss.pytorch.org/t/ran-into-this-issue-while-executing/101460):
59
+ ```
60
+ export KMP_DUPLICATE_LIB_OK=TRUE
61
+ export OMP_NUM_THREADS=1
62
+ ```
63
+ I don't recommend this, however, as it will slow things down.
@@ -0,0 +1,16 @@
1
+ channels:
2
+ - conda-forge
3
+ - pkgs/main
4
+ dependencies:
5
+ - python=3.11.14=h18782d2_2_cpython
6
+ - faiss-cpu=1.9.0=hd610c6a_0
7
+ - nomkl=1.0=h5ca1d4c_0
8
+ - llvm-openmp=20.1.2=hdb05f8b_1
9
+ - gpytorch=1.13=pyh101cb37_1
10
+ - pytorch=2.5.1=gpu_mps_py311h80af30b_202
11
+ - scipy=1.15.3=py311hb81b3df_0
12
+ - pytest=8.3.4=pyhd8ed1ab_1
13
+ - ipykernel=6.29.5=pyh57ce528_0
14
+ - pip:
15
+ - matplotlib==3.10.8
16
+ - nds==0.4.3
@@ -0,0 +1,127 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "markdown",
5
+ "id": "534cb992",
6
+ "metadata": {},
7
+ "source": [
8
+ "# Epistemic Nearest Neighbors (ENN)\n",
9
+ "\n",
10
+ "ENN is a non-parametric surrogate with $O(N)$ computation-time scaling, where $N$ is the number of observations in the data set. ENN can be used in Bayesian optimization as a scalable alternative to a GP (which scales as $O(N^2)$.)\n",
11
+ "\n",
12
+ "**Sweet, D., & Jadhav, S. A. (2025).** Taking the GP Out of the Loop. *arXiv preprint arXiv:2506.12818*. \n",
13
+ " https://arxiv.org/abs/2506.12818\n",
14
+ "\n",
15
+ " ---"
16
+ ]
17
+ },
18
+ {
19
+ "cell_type": "code",
20
+ "execution_count": null,
21
+ "id": "8792c830",
22
+ "metadata": {},
23
+ "outputs": [],
24
+ "source": [
25
+ "import numpy as np\n",
26
+ "\n",
27
+ "from enn import EpistemicNearestNeighbors, enn_fit\n",
28
+ "\n",
29
+ "\n",
30
+ "def plot_enn_demo(ax, num_samples: int, k: int, noise: float, m: int = 1) -> None:\n",
31
+ " x = np.sort(np.random.rand(num_samples + 4))\n",
32
+ " x[-3] = x[-4]\n",
33
+ " x[-2] = x[-4]\n",
34
+ " x[-1] = x[-4]\n",
35
+ " x[1] = x[0] + 0.03\n",
36
+ " eps = np.random.randn(num_samples + 4)\n",
37
+ " y = np.sin(2 * m * np.pi * x) + noise * eps\n",
38
+ " yvar = (noise**2) * np.ones_like(y)\n",
39
+ " train_x = x[:, None]\n",
40
+ " train_y = y[:, None]\n",
41
+ " train_yvar = yvar[:, None]\n",
42
+ " model = EpistemicNearestNeighbors(\n",
43
+ " train_x,\n",
44
+ " train_y,\n",
45
+ " train_yvar,\n",
46
+ " )\n",
47
+ " rng = np.random.default_rng(0)\n",
48
+ " result = enn_fit(\n",
49
+ " model,\n",
50
+ " k=k,\n",
51
+ " num_fit_candidates=100,\n",
52
+ " num_fit_samples=min(10, num_samples),\n",
53
+ " rng=rng,\n",
54
+ " )\n",
55
+ " print(k, noise, result)\n",
56
+ " params = result\n",
57
+ " x_hat = np.linspace(0.0, 1.0, 30)\n",
58
+ " x_hat_2d = x_hat[:, None]\n",
59
+ " posterior = model.posterior(x_hat_2d, params=params, exclude_nearest=False)\n",
60
+ " mu = posterior.mu[:, 0]\n",
61
+ " se = posterior.se[:, 0]\n",
62
+ " marker_size = 3 if num_samples >= 100 else 15\n",
63
+ " ax.scatter(x, y, s=marker_size, color=\"black\", alpha=0.5)\n",
64
+ " ax.plot(x_hat, mu, linestyle=\"--\", color=\"tab:blue\", alpha=0.7)\n",
65
+ " ax.fill_between(x_hat, mu - 2 * se, mu + 2 * se, color=\"tab:blue\", alpha=0.2)\n",
66
+ " ax.set_ylim(-5, 5)\n",
67
+ " ax.set_title(f\"n={num_samples}, noise={noise}\")"
68
+ ]
69
+ },
70
+ {
71
+ "cell_type": "code",
72
+ "execution_count": null,
73
+ "id": "992d16f9",
74
+ "metadata": {},
75
+ "outputs": [],
76
+ "source": [
77
+ "import matplotlib.pyplot as plt\n",
78
+ "\n",
79
+ "# Blue area is the epistemic uncertainty only\n",
80
+ "\n",
81
+ "k = 5\n",
82
+ "fig, axes = plt.subplots(2, 3, figsize=(9, 6), sharex=True, sharey=True)\n",
83
+ "num_samples_list = [5, 10]\n",
84
+ "noise_list = [0.0, 0.1, 0.3]\n",
85
+ "for row_idx, num_samples in enumerate(num_samples_list):\n",
86
+ " for col_idx, noise in enumerate(noise_list):\n",
87
+ " ax = axes[row_idx, col_idx]\n",
88
+ " np.random.seed(4)\n",
89
+ " plot_enn_demo(ax, num_samples=num_samples, k=k, noise=noise)\n",
90
+ "for ax in axes[-1, :]:\n",
91
+ " ax.set_xlabel(\"x\")\n",
92
+ "for ax in axes[:, 0]:\n",
93
+ " ax.set_ylabel(\"y\")\n",
94
+ "fig.tight_layout()"
95
+ ]
96
+ },
97
+ {
98
+ "cell_type": "code",
99
+ "execution_count": null,
100
+ "id": "84968071",
101
+ "metadata": {},
102
+ "outputs": [],
103
+ "source": [
104
+ "import time\n",
105
+ "import matplotlib.pyplot as plt\n",
106
+ "\n",
107
+ "np.random.seed(1)\n",
108
+ "fig, ax = plt.subplots(figsize=(5, 3))\n",
109
+ "t_0 = time.time()\n",
110
+ "plot_enn_demo(ax, num_samples=1_000_000, k=5, noise=0.3, m=3)\n",
111
+ "t_1 = time.time()\n",
112
+ "print(f\"Time taken: {t_1 - t_0:.2f} seconds\")\n",
113
+ "ax.set_xlabel(\"x\")\n",
114
+ "ax.set_ylabel(\"y\")\n",
115
+ "fig.tight_layout()"
116
+ ]
117
+ }
118
+ ],
119
+ "metadata": {
120
+ "language_info": {
121
+ "name": "python",
122
+ "pygments_lexer": "ipython3"
123
+ }
124
+ },
125
+ "nbformat": 4,
126
+ "nbformat_minor": 5
127
+ }