bayinx 0.2.11__tar.gz → 0.2.12__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. {bayinx-0.2.11 → bayinx-0.2.12}/PKG-INFO +1 -1
  2. {bayinx-0.2.11 → bayinx-0.2.12}/pyproject.toml +2 -2
  3. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/mhx/vi/flows/planar.py +3 -17
  4. {bayinx-0.2.11 → bayinx-0.2.12}/tests/test_variational.py +16 -23
  5. bayinx-0.2.12/uv.lock +596 -0
  6. bayinx-0.2.11/uv.lock +0 -360
  7. {bayinx-0.2.11 → bayinx-0.2.12}/.github/workflows/release_and_publish.yml +0 -0
  8. {bayinx-0.2.11 → bayinx-0.2.12}/.gitignore +0 -0
  9. {bayinx-0.2.11 → bayinx-0.2.12}/README.md +0 -0
  10. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/__init__.py +0 -0
  11. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/core/__init__.py +0 -0
  12. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/core/flow.py +0 -0
  13. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/core/model.py +0 -0
  14. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/core/utils.py +0 -0
  15. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/core/variational.py +0 -0
  16. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/dists/__init__.py +0 -0
  17. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/dists/bernoulli.py +0 -0
  18. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/dists/binomial.py +0 -0
  19. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/dists/gamma.py +0 -0
  20. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/dists/gamma2.py +0 -0
  21. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/dists/normal.py +0 -0
  22. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/mhx/__init__.py +0 -0
  23. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/mhx/vi/__init__.py +0 -0
  24. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/mhx/vi/flows/__init__.py +0 -0
  25. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/mhx/vi/flows/fullaffine.py +0 -0
  26. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/mhx/vi/flows/radial.py +0 -0
  27. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/mhx/vi/flows/sylvester.py +0 -0
  28. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/mhx/vi/meanfield.py +0 -0
  29. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/mhx/vi/normalizing_flow.py +0 -0
  30. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/mhx/vi/standard.py +0 -0
  31. {bayinx-0.2.11 → bayinx-0.2.12}/src/bayinx/py.typed +0 -0
  32. {bayinx-0.2.11 → bayinx-0.2.12}/tests/__init__.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: bayinx
3
- Version: 0.2.11
3
+ Version: 0.2.12
4
4
  Summary: Bayesian Inference with JAX
5
5
  Requires-Python: >=3.12
6
6
  Requires-Dist: equinox>=0.11.12
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "bayinx"
3
- version = "0.2.11"
3
+ version = "0.2.12"
4
4
  description = "Bayesian Inference with JAX"
5
5
  readme = "README.md"
6
6
  requires-python = ">=3.12"
@@ -19,4 +19,4 @@ build-backend = "hatchling.build"
19
19
  addopts = "-q --benchmark-min-rounds=30 --benchmark-columns=rounds,mean,median,stddev --benchmark-group-by=func"
20
20
 
21
21
  [dependency-groups]
22
- dev = ["pytest>=8.3.5", "pytest-benchmark>=5.1.0"]
22
+ dev = ["matplotlib>=3.10.1", "pytest>=8.3.5", "pytest-benchmark>=5.1.0"]
@@ -3,7 +3,6 @@ from typing import Callable, Dict, Tuple
3
3
 
4
4
  import equinox as eqx
5
5
  import jax
6
- import jax.nn as jnn
7
6
  import jax.numpy as jnp
8
7
  import jax.random as jr
9
8
  from jaxtyping import Array, Float, Scalar
@@ -31,25 +30,12 @@ class Planar(Flow):
31
30
  - `dim`: The dimension of the parameter space.
32
31
  """
33
32
  self.params = {
34
- "u": jr.normal(key, (dim,)),
35
- "w": jr.normal(key, (dim,)),
36
- "b": jr.normal(key, (1,)),
33
+ "u": jnp.ones(dim),
34
+ "w": jnp.ones(dim),
35
+ "b": jnp.zeros(1),
37
36
  }
38
37
  self.constraints = {}
39
38
 
40
- def transform_pars(self):
41
- params = self.params
42
-
43
- u = params['u']
44
- w = params['w']
45
- b = params['b']
46
-
47
- m = jnn.softplus(w.dot(u)) - 1.0
48
-
49
- u = u + (m - w.dot(u)) * w / (w**2).sum()
50
-
51
- return {'u': u, 'w': w, 'b': b}
52
-
53
39
  @eqx.filter_jit
54
40
  @partial(jax.vmap, in_axes=(None, 0))
55
41
  def forward(self, draws: Array) -> Array:
@@ -8,7 +8,7 @@ from jaxtyping import Array
8
8
  from bayinx import Model
9
9
  from bayinx.dists import normal
10
10
  from bayinx.mhx.vi import MeanField, NormalizingFlow, Standard
11
- from bayinx.mhx.vi.flows import FullAffine, Planar
11
+ from bayinx.mhx.vi.flows import FullAffine, Planar, Radial
12
12
 
13
13
 
14
14
  # Tests ----
@@ -44,7 +44,7 @@ def test_meanfield(benchmark, var_draws):
44
44
  vari.fit(10000, var_draws=var_draws)
45
45
 
46
46
  benchmark(benchmark_fit)
47
- vari = vari.fit(10000)
47
+ vari = vari.fit(20000)
48
48
 
49
49
  # Assert parameters are roughly correct
50
50
  assert all(abs(10.0 - vari.var_params["mean"]) < 0.1) and all(
@@ -84,7 +84,7 @@ def test_affine(benchmark, var_draws):
84
84
  vari.fit(10000, var_draws=var_draws)
85
85
 
86
86
  benchmark(benchmark_fit)
87
- vari = vari.fit(10000)
87
+ vari = vari.fit(20000)
88
88
 
89
89
  params = vari.flows[0].constrain_pars()
90
90
  assert (abs(10.0 - vari.flows[0].params["shift"]) < 0.1).all() and (
@@ -95,37 +95,30 @@ def test_affine(benchmark, var_draws):
95
95
  @pytest.mark.parametrize("var_draws", [1, 10, 100])
96
96
  def test_flows(benchmark, var_draws):
97
97
  # Construct model definition
98
- class Banana(Model):
98
+ class NormalDist(Model):
99
99
  params: Dict[str, Array]
100
100
  constraints: Dict[str, Callable[[Array], Array]]
101
101
 
102
102
  def __init__(self):
103
- self.params = {
104
- 'x': jnp.array(0.0),
105
- 'y': jnp.array(0.0)
106
- }
103
+ self.params = {"mu": jnp.array([0.0, 0.0])}
107
104
  self.constraints = {}
108
105
 
109
- def eval(self, data = None):
110
- params: Dict[str, Array] = self.params
111
- # Extract parameters
112
- x: Array = params['x']
113
- y: Array = params['y']
114
-
115
- # Initialize target density
116
- target = jnp.array(0.0)
117
-
118
- target += normal.logprob(x, mu = jnp.array(0.0), sigma = jnp.array(1.0))
119
- target += normal.logprob(y, mu = x**2 + x, sigma = jnp.array(1.0))
106
+ @eqx.filter_jit
107
+ def eval(self, data: dict):
108
+ # Get constrained parameters
109
+ params = self.constrain_pars()
120
110
 
121
- return target
111
+ # Evaluate mu ~ N(10,1)
112
+ return jnp.sum(
113
+ normal.logprob(x=params["mu"], mu=jnp.array(10.0), sigma=jnp.array(1.0))
114
+ )
122
115
 
123
116
  # Construct model
124
- model = Banana()
117
+ model = NormalDist()
125
118
 
126
119
  # Construct normalizing flow variational
127
120
  vari = NormalizingFlow(
128
- Standard(model), [FullAffine(2), Planar(2)], model
121
+ Standard(model), [FullAffine(2), Planar(2), Radial(2)], model
129
122
  )
130
123
 
131
124
  # Optimize variational distribution
@@ -133,7 +126,7 @@ def test_flows(benchmark, var_draws):
133
126
  vari.fit(10000, var_draws=var_draws)
134
127
 
135
128
  benchmark(benchmark_fit)
136
- vari = vari.fit(100)
129
+ vari = vari.fit(20000)
137
130
 
138
131
  mean = vari.sample(1000).mean(0)
139
132
  var = vari.sample(1000).var(0)