aspire-inference 0.1.0a8__tar.gz → 0.1.0a10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65) hide show
  1. aspire_inference-0.1.0a10/PKG-INFO +130 -0
  2. aspire_inference-0.1.0a10/README.md +90 -0
  3. aspire_inference-0.1.0a10/aspire_inference.egg-info/PKG-INFO +130 -0
  4. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/aspire_inference.egg-info/SOURCES.txt +3 -1
  5. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/aspire_inference.egg-info/requires.txt +2 -1
  6. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/docs/conf.py +14 -0
  7. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/docs/index.rst +11 -3
  8. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/docs/installation.rst +5 -1
  9. aspire_inference-0.1.0a10/docs/multiprocessing.rst +70 -0
  10. aspire_inference-0.1.0a10/docs/recipes.rst +70 -0
  11. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/docs/requirements.txt +1 -0
  12. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/docs/user_guide.rst +51 -7
  13. aspire_inference-0.1.0a10/examples/blackjax_smc_example.py +158 -0
  14. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/pyproject.toml +2 -1
  15. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/__init__.py +2 -0
  16. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/samplers/base.py +6 -2
  17. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/samplers/smc/blackjax.py +17 -5
  18. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/samplers/smc/minipcn.py +16 -4
  19. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/samples.py +10 -6
  20. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/transforms.py +1 -0
  21. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/utils.py +59 -4
  22. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/tests/integration_tests/conftest.py +17 -4
  23. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/tests/integration_tests/test_integration.py +2 -2
  24. aspire_inference-0.1.0a8/PKG-INFO +0 -66
  25. aspire_inference-0.1.0a8/README.md +0 -27
  26. aspire_inference-0.1.0a8/aspire_inference.egg-info/PKG-INFO +0 -66
  27. aspire_inference-0.1.0a8/docs/api.rst +0 -28
  28. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/.github/workflows/lint.yml +0 -0
  29. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/.github/workflows/publish.yml +0 -0
  30. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/.github/workflows/tests.yml +0 -0
  31. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/.gitignore +0 -0
  32. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/.pre-commit-config.yaml +0 -0
  33. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/LICENSE +0 -0
  34. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/aspire_inference.egg-info/dependency_links.txt +0 -0
  35. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/aspire_inference.egg-info/top_level.txt +0 -0
  36. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/docs/Makefile +0 -0
  37. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/docs/entry_points.rst +0 -0
  38. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/docs/examples.rst +0 -0
  39. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/examples/basic_example.py +0 -0
  40. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/examples/smc_example.py +0 -0
  41. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/readthedocs.yml +0 -0
  42. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/setup.cfg +0 -0
  43. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/aspire.py +0 -0
  44. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/flows/__init__.py +0 -0
  45. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/flows/base.py +0 -0
  46. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/flows/jax/__init__.py +0 -0
  47. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/flows/jax/flows.py +0 -0
  48. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/flows/jax/utils.py +0 -0
  49. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/flows/torch/__init__.py +0 -0
  50. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/flows/torch/flows.py +0 -0
  51. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/history.py +0 -0
  52. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/plot.py +0 -0
  53. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/samplers/__init__.py +0 -0
  54. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/samplers/importance.py +0 -0
  55. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/samplers/mcmc.py +0 -0
  56. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/samplers/smc/__init__.py +0 -0
  57. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/samplers/smc/base.py +0 -0
  58. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/src/aspire/samplers/smc/emcee.py +0 -0
  59. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/tests/conftest.py +0 -0
  60. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/tests/test_flows/test_flows_core.py +0 -0
  61. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/tests/test_flows/test_jax_flows/test_flowjax_flows.py +0 -0
  62. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/tests/test_flows/test_torch_flows/test_zuko_flows.py +0 -0
  63. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/tests/test_samples.py +0 -0
  64. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/tests/test_transforms.py +0 -0
  65. {aspire_inference-0.1.0a8 → aspire_inference-0.1.0a10}/tests/test_utils.py +0 -0
@@ -0,0 +1,130 @@
1
+ Metadata-Version: 2.4
2
+ Name: aspire-inference
3
+ Version: 0.1.0a10
4
+ Summary: Accelerate Sequential Posterior Inference via REuse
5
+ Author-email: "Michael J. Williams" <michaeljw1@googlemail.com>
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/mj-will/aspire
8
+ Project-URL: Documentation, https://aspire.readthedocs.io/
9
+ Classifier: Programming Language :: Python :: 3
10
+ Requires-Python: >=3.10
11
+ Description-Content-Type: text/markdown
12
+ License-File: LICENSE
13
+ Requires-Dist: matplotlib
14
+ Requires-Dist: numpy
15
+ Requires-Dist: array-api-compat
16
+ Requires-Dist: wrapt
17
+ Requires-Dist: h5py
18
+ Provides-Extra: scipy
19
+ Requires-Dist: scipy; extra == "scipy"
20
+ Provides-Extra: jax
21
+ Requires-Dist: jax; extra == "jax"
22
+ Requires-Dist: jaxlib; extra == "jax"
23
+ Requires-Dist: flowjax; extra == "jax"
24
+ Provides-Extra: torch
25
+ Requires-Dist: torch; extra == "torch"
26
+ Requires-Dist: zuko; extra == "torch"
27
+ Requires-Dist: tqdm; extra == "torch"
28
+ Provides-Extra: minipcn
29
+ Requires-Dist: minipcn[array-api]>=0.2.0a3; extra == "minipcn"
30
+ Requires-Dist: orng; extra == "minipcn"
31
+ Provides-Extra: emcee
32
+ Requires-Dist: emcee; extra == "emcee"
33
+ Provides-Extra: blackjax
34
+ Requires-Dist: blackjax; extra == "blackjax"
35
+ Provides-Extra: test
36
+ Requires-Dist: pytest; extra == "test"
37
+ Requires-Dist: pytest-requires; extra == "test"
38
+ Requires-Dist: pytest-cov; extra == "test"
39
+ Dynamic: license-file
40
+
41
+ # aspire: Accelerated Sequential Posterior Inference via REuse
42
+
43
+ [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.15658747.svg)](https://doi.org/10.5281/zenodo.15658747)
44
+ [![PyPI](https://img.shields.io/pypi/v/aspire-inference)](https://pypi.org/project/aspire-inference/)
45
+ [![Documentation Status](https://readthedocs.org/projects/aspire/badge/?version=latest)](https://aspire.readthedocs.io/en/latest/?badge=latest)
46
+ ![tests](https://github.com/mj-will/aspire/actions/workflows/tests.yml/badge.svg)
47
+
48
+
49
+ aspire is a framework for reusing existing posterior samples to obtain new results at a reduced cost.
50
+
51
+ ## Installation
52
+
53
+ aspire can be installed from PyPI using `pip`. By default, you need to install
54
+ one of the backends for the normalizing flows, either `torch` or `jax`.
55
+ We also recommend installing `minipcn` if using the `smc` sampler:
56
+
57
+
58
+ **Torch**
59
+
60
+ We recommend installing `torch` manually to ensure correct CPU/CUDA versions are
61
+ installed. See the [PyTorch installation instructions](https://pytorch.org/)
62
+ for more details.
63
+
64
+ ```
65
+ pip install aspire-inference[torch,minipcn]
66
+ ```
67
+
68
+ **Jax**:
69
+
70
+ We recommend installing `jax` manually to ensure the correct GPU/CUDA versions
71
+ are installed. See the [jax documentation for details](https://docs.jax.dev/en/latest/installation.html)
72
+
73
+ ```
74
+ pip install aspire-inference[jax,minipcn]
75
+ ```
76
+
77
+ **Important:** the name of `aspire` on PyPI is `aspire-inference` but once installed
78
+ the package can be imported and used as `aspire`.
79
+
80
+ ## Quickstart
81
+
82
+ ```python
83
+ import numpy as np
84
+ from aspire import Aspire, Samples
85
+
86
+ # Define a log-likelihood and log-prior
87
+ def log_likelihood(samples):
88
+ x = samples.x
89
+ return -0.5 * np.sum(x**2, axis=-1)
90
+
91
+ def log_prior(samples):
92
+ return -0.5 * np.sum(samples.x**2, axis=-1)
93
+
94
+ # Create the initial samples
95
+ init = Samples(np.random.normal(size=(2_000, 4)))
96
+
97
+ # Define the aspire object
98
+ aspire = Aspire(
99
+ log_likelihood=log_likelihood,
100
+ log_prior=log_prior,
101
+ dims=4,
102
+ parameters=[f"x{i}" for i in range(4)],
103
+ )
104
+
105
+ # Fit the normalizing flow
106
+ aspire.fit(init, n_epochs=20)
107
+
108
+ # Sample the posterior
109
+ posterior = aspire.sample_posterior(
110
+ sampler="smc",
111
+ n_samples=500,
112
+ sampler_kwargs=dict(n_steps=100),
113
+ )
114
+
115
+ # Plot the posterior distribution
116
+ posterior.plot_corner()
117
+ ```
118
+
119
+ ## Documentation
120
+
121
+ See the [documentation on ReadTheDocs][docs].
122
+
123
+ ## Citation
124
+
125
+ If you use `aspire` in your work please cite the [DOI][DOI] and [paper][paper].
126
+
127
+
128
+ [docs]: https://aspire.readthedocs.io/
129
+ [DOI]: https://doi.org/10.5281/zenodo.15658747
130
+ [paper]: https://arxiv.org/abs/2511.04218
@@ -0,0 +1,90 @@
1
+ # aspire: Accelerated Sequential Posterior Inference via REuse
2
+
3
+ [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.15658747.svg)](https://doi.org/10.5281/zenodo.15658747)
4
+ [![PyPI](https://img.shields.io/pypi/v/aspire-inference)](https://pypi.org/project/aspire-inference/)
5
+ [![Documentation Status](https://readthedocs.org/projects/aspire/badge/?version=latest)](https://aspire.readthedocs.io/en/latest/?badge=latest)
6
+ ![tests](https://github.com/mj-will/aspire/actions/workflows/tests.yml/badge.svg)
7
+
8
+
9
+ aspire is a framework for reusing existing posterior samples to obtain new results at a reduced cost.
10
+
11
+ ## Installation
12
+
13
+ aspire can be installed from PyPI using `pip`. By default, you need to install
14
+ one of the backends for the normalizing flows, either `torch` or `jax`.
15
+ We also recommend installing `minipcn` if using the `smc` sampler:
16
+
17
+
18
+ **Torch**
19
+
20
+ We recommend installing `torch` manually to ensure correct CPU/CUDA versions are
21
+ installed. See the [PyTorch installation instructions](https://pytorch.org/)
22
+ for more details.
23
+
24
+ ```
25
+ pip install aspire-inference[torch,minipcn]
26
+ ```
27
+
28
+ **Jax**:
29
+
30
+ We recommend installing `jax` manually to ensure the correct GPU/CUDA versions
31
+ are installed. See the [jax documentation for details](https://docs.jax.dev/en/latest/installation.html)
32
+
33
+ ```
34
+ pip install aspire-inference[jax,minipcn]
35
+ ```
36
+
37
+ **Important:** the name of `aspire` on PyPI is `aspire-inference` but once installed
38
+ the package can be imported and used as `aspire`.
39
+
40
+ ## Quickstart
41
+
42
+ ```python
43
+ import numpy as np
44
+ from aspire import Aspire, Samples
45
+
46
+ # Define a log-likelihood and log-prior
47
+ def log_likelihood(samples):
48
+ x = samples.x
49
+ return -0.5 * np.sum(x**2, axis=-1)
50
+
51
+ def log_prior(samples):
52
+ return -0.5 * np.sum(samples.x**2, axis=-1)
53
+
54
+ # Create the initial samples
55
+ init = Samples(np.random.normal(size=(2_000, 4)))
56
+
57
+ # Define the aspire object
58
+ aspire = Aspire(
59
+ log_likelihood=log_likelihood,
60
+ log_prior=log_prior,
61
+ dims=4,
62
+ parameters=[f"x{i}" for i in range(4)],
63
+ )
64
+
65
+ # Fit the normalizing flow
66
+ aspire.fit(init, n_epochs=20)
67
+
68
+ # Sample the posterior
69
+ posterior = aspire.sample_posterior(
70
+ sampler="smc",
71
+ n_samples=500,
72
+ sampler_kwargs=dict(n_steps=100),
73
+ )
74
+
75
+ # Plot the posterior distribution
76
+ posterior.plot_corner()
77
+ ```
78
+
79
+ ## Documentation
80
+
81
+ See the [documentation on ReadTheDocs][docs].
82
+
83
+ ## Citation
84
+
85
+ If you use `aspire` in your work please cite the [DOI][DOI] and [paper][paper].
86
+
87
+
88
+ [docs]: https://aspire.readthedocs.io/
89
+ [DOI]: https://doi.org/10.5281/zenodo.15658747
90
+ [paper]: https://arxiv.org/abs/2511.04218
@@ -0,0 +1,130 @@
1
+ Metadata-Version: 2.4
2
+ Name: aspire-inference
3
+ Version: 0.1.0a10
4
+ Summary: Accelerate Sequential Posterior Inference via REuse
5
+ Author-email: "Michael J. Williams" <michaeljw1@googlemail.com>
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/mj-will/aspire
8
+ Project-URL: Documentation, https://aspire.readthedocs.io/
9
+ Classifier: Programming Language :: Python :: 3
10
+ Requires-Python: >=3.10
11
+ Description-Content-Type: text/markdown
12
+ License-File: LICENSE
13
+ Requires-Dist: matplotlib
14
+ Requires-Dist: numpy
15
+ Requires-Dist: array-api-compat
16
+ Requires-Dist: wrapt
17
+ Requires-Dist: h5py
18
+ Provides-Extra: scipy
19
+ Requires-Dist: scipy; extra == "scipy"
20
+ Provides-Extra: jax
21
+ Requires-Dist: jax; extra == "jax"
22
+ Requires-Dist: jaxlib; extra == "jax"
23
+ Requires-Dist: flowjax; extra == "jax"
24
+ Provides-Extra: torch
25
+ Requires-Dist: torch; extra == "torch"
26
+ Requires-Dist: zuko; extra == "torch"
27
+ Requires-Dist: tqdm; extra == "torch"
28
+ Provides-Extra: minipcn
29
+ Requires-Dist: minipcn[array-api]>=0.2.0a3; extra == "minipcn"
30
+ Requires-Dist: orng; extra == "minipcn"
31
+ Provides-Extra: emcee
32
+ Requires-Dist: emcee; extra == "emcee"
33
+ Provides-Extra: blackjax
34
+ Requires-Dist: blackjax; extra == "blackjax"
35
+ Provides-Extra: test
36
+ Requires-Dist: pytest; extra == "test"
37
+ Requires-Dist: pytest-requires; extra == "test"
38
+ Requires-Dist: pytest-cov; extra == "test"
39
+ Dynamic: license-file
40
+
41
+ # aspire: Accelerated Sequential Posterior Inference via REuse
42
+
43
+ [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.15658747.svg)](https://doi.org/10.5281/zenodo.15658747)
44
+ [![PyPI](https://img.shields.io/pypi/v/aspire-inference)](https://pypi.org/project/aspire-inference/)
45
+ [![Documentation Status](https://readthedocs.org/projects/aspire/badge/?version=latest)](https://aspire.readthedocs.io/en/latest/?badge=latest)
46
+ ![tests](https://github.com/mj-will/aspire/actions/workflows/tests.yml/badge.svg)
47
+
48
+
49
+ aspire is a framework for reusing existing posterior samples to obtain new results at a reduced cost.
50
+
51
+ ## Installation
52
+
53
+ aspire can be installed from PyPI using `pip`. By default, you need to install
54
+ one of the backends for the normalizing flows, either `torch` or `jax`.
55
+ We also recommend installing `minipcn` if using the `smc` sampler:
56
+
57
+
58
+ **Torch**
59
+
60
+ We recommend installing `torch` manually to ensure correct CPU/CUDA versions are
61
+ installed. See the [PyTorch installation instructions](https://pytorch.org/)
62
+ for more details.
63
+
64
+ ```
65
+ pip install aspire-inference[torch,minipcn]
66
+ ```
67
+
68
+ **Jax**:
69
+
70
+ We recommend installing `jax` manually to ensure the correct GPU/CUDA versions
71
+ are installed. See the [jax documentation for details](https://docs.jax.dev/en/latest/installation.html)
72
+
73
+ ```
74
+ pip install aspire-inference[jax,minipcn]
75
+ ```
76
+
77
+ **Important:** the name of `aspire` on PyPI is `aspire-inference` but once installed
78
+ the package can be imported and used as `aspire`.
79
+
80
+ ## Quickstart
81
+
82
+ ```python
83
+ import numpy as np
84
+ from aspire import Aspire, Samples
85
+
86
+ # Define a log-likelihood and log-prior
87
+ def log_likelihood(samples):
88
+ x = samples.x
89
+ return -0.5 * np.sum(x**2, axis=-1)
90
+
91
+ def log_prior(samples):
92
+ return -0.5 * np.sum(samples.x**2, axis=-1)
93
+
94
+ # Create the initial samples
95
+ init = Samples(np.random.normal(size=(2_000, 4)))
96
+
97
+ # Define the aspire object
98
+ aspire = Aspire(
99
+ log_likelihood=log_likelihood,
100
+ log_prior=log_prior,
101
+ dims=4,
102
+ parameters=[f"x{i}" for i in range(4)],
103
+ )
104
+
105
+ # Fit the normalizing flow
106
+ aspire.fit(init, n_epochs=20)
107
+
108
+ # Sample the posterior
109
+ posterior = aspire.sample_posterior(
110
+ sampler="smc",
111
+ n_samples=500,
112
+ sampler_kwargs=dict(n_steps=100),
113
+ )
114
+
115
+ # Plot the posterior distribution
116
+ posterior.plot_corner()
117
+ ```
118
+
119
+ ## Documentation
120
+
121
+ See the [documentation on ReadTheDocs][docs].
122
+
123
+ ## Citation
124
+
125
+ If you use `aspire` in your work please cite the [DOI][DOI] and [paper][paper].
126
+
127
+
128
+ [docs]: https://aspire.readthedocs.io/
129
+ [DOI]: https://doi.org/10.5281/zenodo.15658747
130
+ [paper]: https://arxiv.org/abs/2511.04218
@@ -13,15 +13,17 @@ aspire_inference.egg-info/dependency_links.txt
13
13
  aspire_inference.egg-info/requires.txt
14
14
  aspire_inference.egg-info/top_level.txt
15
15
  docs/Makefile
16
- docs/api.rst
17
16
  docs/conf.py
18
17
  docs/entry_points.rst
19
18
  docs/examples.rst
20
19
  docs/index.rst
21
20
  docs/installation.rst
21
+ docs/multiprocessing.rst
22
+ docs/recipes.rst
22
23
  docs/requirements.txt
23
24
  docs/user_guide.rst
24
25
  examples/basic_example.py
26
+ examples/blackjax_smc_example.py
25
27
  examples/smc_example.py
26
28
  src/aspire/__init__.py
27
29
  src/aspire/aspire.py
@@ -16,7 +16,8 @@ jaxlib
16
16
  flowjax
17
17
 
18
18
  [minipcn]
19
- minipcn
19
+ minipcn[array-api]>=0.2.0a3
20
+ orng
20
21
 
21
22
  [scipy]
22
23
  scipy
@@ -29,6 +29,7 @@ extensions = [
29
29
  "sphinx.ext.autosummary",
30
30
  "sphinx.ext.napoleon",
31
31
  "sphinx.ext.viewcode",
32
+ "autoapi.extension",
32
33
  ]
33
34
 
34
35
  autodoc_typehints = "description"
@@ -41,6 +42,19 @@ napoleon_preprocess_types = True
41
42
  templates_path = ["_templates"]
42
43
  exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
43
44
 
45
+ # -- Configure autoapi -------------------------------------------------------
46
+
47
+ autoapi_type = "python"
48
+ autoapi_dirs = ["../src/aspire/"]
49
+ autoapi_add_toctree_entry = False
50
+ autoapi_options = [
51
+ "members",
52
+ "imported-members",
53
+ "show-inheritance",
54
+ "show-module-summary",
55
+ "undoc-members",
56
+ ]
57
+
44
58
  # -- Options for HTML output -------------------------------------------------
45
59
 
46
60
  html_theme = "sphinx_book_theme"
@@ -22,8 +22,7 @@ Quick start
22
22
  .. code-block:: python
23
23
 
24
24
  import numpy as np
25
- from aspire import Aspire
26
- from aspire.samples import Samples
25
+ from aspire import Aspire, Samples
27
26
 
28
27
  def log_likelihood(samples):
29
28
  x = samples.x
@@ -58,6 +57,15 @@ examples, and the complete API reference.
58
57
 
59
58
  installation
60
59
  user_guide
60
+ recipes
61
+ multiprocessing
61
62
  examples
62
63
  entry_points
63
- api
64
+ API Reference </autoapi/aspire/index>
65
+
66
+
67
+ .. toctree::
68
+ :maxdepth: 2
69
+ :caption: Related Packages
70
+
71
+ aspire-bilby <https://aspire.readthedocs.io/projects/aspire-bilby/en/latest/>
@@ -14,7 +14,11 @@ Install the library from PyPI (note the published name):
14
14
 
15
15
  $ python -m pip install aspire-inference
16
16
 
17
- The installed distribution exposes the ``aspire`` import namespace.
17
+ The installed distribution exposes the ``aspire`` import namespace. By default,
18
+ this doesn't include any optional dependencies beyond the core ones listed above.
19
+ We recommend installing with at least one backend for normalizing flows, e.g.
20
+ ``torch`` (PyTorch + ``zuko``) or ``jax`` (JAX + ``flowjax``),
21
+ and optionally the ``minipcn`` SMC kernel.
18
22
 
19
23
  Optional extras
20
24
  ---------------
@@ -0,0 +1,70 @@
1
+ Multiprocessing
2
+ ===============
3
+
4
+ Use :meth:`aspire.Aspire.enable_pool` to run your likelihood (and optionally
5
+ prior) in parallel across a :class:`multiprocessing.Pool`. The helper swaps the
6
+ ``map_fn`` argument expected by your log-likelihood / log-prior for
7
+ ``pool.map`` while the context is active, then restores the original methods.
8
+
9
+ Prepare a map-aware likelihood
10
+ ------------------------------
11
+
12
+ Your likelihood must accept ``map_fn``. A minimal
13
+ pattern:
14
+
15
+ .. code-block:: python
16
+
17
+ import numpy as np
18
+
19
+
20
+ def _global_log_likelihood(x):
21
+ # Expensive likelihood computation for a single sample `x`
22
+ return -np.sum(x**2) # Example likelihood
23
+
24
+ def log_likelihood(samples, map_fn=map):
25
+ logl = -np.inf * np.ones(len(samples.x))
26
+ if samples.log_prior is None:
27
+ raise RuntimeError("log-prior has not been evaluated!")
28
+ mask = np.isfinite(samples.log_prior, dtype=bool)
29
+ x = np.asarray(samples.x[mask, :], dtype=float)
30
+ logl[mask] = np.fromiter(
31
+ map_fn(_global_log_likelihood, x),
32
+ dtype=float,
33
+ )
34
+ return logl
35
+
36
+ Swap in a multiprocessing pool
37
+ ------------------------------
38
+
39
+ Wrap your sampling call inside ``enable_pool`` to parallelize the map step:
40
+
41
+ .. code-block:: python
42
+
43
+ import multiprocessing as mp
44
+ from aspire import Aspire
45
+
46
+ aspire = Aspire(
47
+ log_likelihood=log_likelihood,
48
+ log_prior=log_prior, # must also accept map_fn if parallelize_prior=True
49
+ dims=4,
50
+ parameters=["a", "b", "c", "d"],
51
+ )
52
+
53
+ with mp.Pool() as pool, aspire.enable_pool(pool):
54
+ samples, history = aspire.sample_posterior(
55
+ sampler="smc",
56
+ n_samples=1_000,
57
+ return_history=True,
58
+ )
59
+
60
+ Notes
61
+ -----
62
+
63
+ - By default only the likelihood is parallelized; set
64
+ ``aspire.enable_pool(pool, parallelize_prior=True)`` if your prior also
65
+ accepts ``map_fn``.
66
+ - ``enable_pool`` closes the pool on exit unless you pass ``close_pool=False``.
67
+ - The context manager itself is implemented by
68
+ :class:`aspire.utils.PoolHandler`; if you need finer control (for example,
69
+ reusing the same pool across multiple ``Aspire`` instances) you can
70
+ instantiate it directly.
@@ -0,0 +1,70 @@
1
+ Practical recipes
2
+ =================
3
+
4
+ Checking the prior when evaluating the likelihood
5
+ -------------------------------------------------
6
+
7
+ By default, Aspire samplers always evaluate the log-prior before the
8
+ log-likelihood. This allows users to check the prior support and skip
9
+ likelihood evaluations for samples that lie outside the prior bounds.
10
+
11
+ .. code-block:: python
12
+
13
+ import aspire
14
+ import numpy as np
15
+
16
+
17
+ def log_likelihood(samples: aspire.Samples) -> np.ndarray:
18
+ if samples.log_prior is None:
19
+ raise RuntimeError("log-prior has not been evaluated!")
20
+ # Return -inf for samples with invalid prior
21
+ logl = np.full(samples.n_samples, -np.inf, dtype=float)
22
+ # Only evaluate the likelihood where the prior is finite
23
+ mask = np.isfinite(samples.log_prior, dtype=bool)
24
+ # Valid samples
25
+ x = samples.x[mask, :]
26
+ logl[mask] = -np.sum(x**2, axis=1) # Example likelihood
27
+ return logl
28
+
29
+
30
+ Checking the flow distribution
31
+ ------------------------------
32
+
33
+ It can be useful to inspect the flow-based proposal distribution before sampling
34
+ from the posterior. You can do this by drawing samples from the flow after fitting
35
+ and comparing them to the initial samples:
36
+
37
+
38
+ .. code-block:: python
39
+
40
+ from aspire import Aspire, Samples
41
+ from aspire.plot import plot_comparison
42
+
43
+ # Define the initial samples
44
+ initial_samples = Samples(...)
45
+
46
+ # Define the Aspire instance
47
+ aspire = Aspire(
48
+ log_likelihood=log_likelihood,
49
+ log_prior=log_prior,
50
+ ...
51
+ )
52
+
53
+ # Fit the flow to the initial samples
54
+ fit_history = aspire.fit(initial_samples)
55
+
56
+ # Draw samples from the flow
57
+ flow_samples = aspire.sample_flow(10_000)
58
+
59
+ # Plot a comparison between initial samples and flow samples
60
+ fig = plot_comparison(
61
+ initial_samples,
62
+ flow_samples,
63
+ per_samples_kwargs=[
64
+ dict(include_weights=False, color="C0"),
65
+ dict(include_weights=False, color="C1"),
66
+ ],
67
+ labels=["Initial samples", "Flow samples"],
68
+ )
69
+ # Save or show the figure
70
+ fig.savefig("flow_comparison.png")
@@ -1,2 +1,3 @@
1
1
  sphinx>=7.2
2
2
  sphinx-book-theme>=1.1
3
+ sphinx-autoapi>=3.2
@@ -44,8 +44,8 @@ switch namespaces or merge multiple runs with
44
44
  Flows and transforms
45
45
  --------------------
46
46
 
47
- Aspire can work with any proposal that implements ``sample_and_log_prob`` and
48
- ``log_prob``; normalising flows remain the default. Flows are defined via
47
+ Aspire can work with any flow that implements ``sample_and_log_prob`` and
48
+ ``log_prob``. Flows are defined via
49
49
  :class:`aspire.flows.base.Flow` and instantiated by
50
50
  :meth:`aspire.Aspire.init_flow`. By default Aspire uses the ``zuko``
51
51
  implementation of Masked Autoregressive Flows on top of PyTorch. The flow is
@@ -61,6 +61,55 @@ estimator (requires the `zuko` backend).
61
61
  External flow implementations can be plugged in via the
62
62
  ``aspire.flows`` entry point group. See :ref:`custom_flows` for details.
63
63
 
64
+ Transform mechanics
65
+ ~~~~~~~~~~~~~~~~~~~
66
+
67
+ Aspire keeps a clear separation between your native parameters and the space
68
+ where flows or kernels operate:
69
+
70
+ * :class:`aspire.transforms.FlowTransform` is attached to every flow created by
71
+ :meth:`aspire.Aspire.init_flow`. By default, it maps bounded parameters to the real line (``probit`` or
72
+ ``logit``), and recentres / rescales dimensions with an affine
73
+ transform learned from the training samples. Log-Jacobian terms are tracked so
74
+ calls to ``log_prob`` or ``sample_and_log_prob`` remain properly normalised.
75
+ ``bounded_to_unbounded`` and ``affine_transform`` can be specified when creating
76
+ the Aspire instance to control this behaviour.
77
+ * The same components are exposed via :class:`aspire.transforms.CompositeTransform`
78
+ if you want to opt out of the bounded-to-unbounded step or the affine
79
+ whitening when building custom transports.
80
+
81
+ Preconditioning inside samplers
82
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
83
+
84
+ SMC and MCMC samplers also work in a transformed space. They fit the chosen
85
+ ``preconditioning`` transform to the initial particles, perform moves there, and
86
+ then call ``inverse(...)`` (including the log-Jacobian) whenever the likelihood
87
+ or prior is evaluated. Configure it via
88
+ :meth:`aspire.Aspire.sample_posterior`:
89
+
90
+ * ``"default"`` / ``"standard"`` uses :class:`aspire.transforms.CompositeTransform`
91
+ with bounded-to-unbounded and affine scaling turned off by default; periodic
92
+ wrapping still applies. To whiten dimensions or map bounds to the real line,
93
+ pass ``preconditioning_kwargs={"affine_transform": True, "bounded_to_unbounded": True}``.
94
+ * ``"flow"`` fits a lightweight :class:`aspire.transforms.FlowPreconditioningTransform`
95
+ to the current particles and treats it as a transport map during SMC/MCMC
96
+ updates. This reuses the same bounded / periodic handling while providing a
97
+ richer geometry for the kernels.
98
+ * ``None`` leaves the sampler in the original parameterisation with an identity
99
+ transform. The importance sampler defaults to this; other samplers default to
100
+ ``"standard"`` so periodic parameters are at least kept consistent with their
101
+ bounds.
102
+
103
+
104
+ .. note::
105
+
106
+ By default, the preconditioning transform does not include bounded-to-unbounded
107
+ steps. This means your log-prior and log-likelihood must handle points that
108
+ lie outside the specified bounds (e.g. by returning ``-inf``). If you want
109
+ the sampler to automatically map bounded parameters to an unconstrained
110
+ space, enable the ``bounded_to_unbounded`` option in
111
+ ``preconditioning_kwargs``.
112
+
64
113
  Sampling strategies
65
114
  -------------------
66
115
 
@@ -101,11 +150,6 @@ Sequential Monte Carlo
101
150
  Replaces the internal MCMC move with the ``emcee`` ensemble sampler,
102
151
  providing a gradient-free option that still benefits from SMC tempering.
103
152
 
104
- You can plug in custom preconditioning by setting ``preconditioning`` to
105
- ``"standard"`` (affine normalisation based on current samples), ``"flow"``
106
- (use the fitted flow as a transport map), or ``None`` to disable additional
107
- transforms.
108
-
109
153
  History, diagnostics, and persistence
110
154
  -------------------------------------
111
155