bayinx 0.2.11__tar.gz → 0.2.13__tar.gz

This diff compares publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Files changed (32)
  1. {bayinx-0.2.11 → bayinx-0.2.13}/PKG-INFO +1 -1
  2. {bayinx-0.2.11 → bayinx-0.2.13}/pyproject.toml +2 -2
  3. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/core/variational.py +1 -1
  4. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/dists/normal.py +9 -14
  5. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/mhx/vi/flows/planar.py +3 -17
  6. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/mhx/vi/normalizing_flow.py +3 -0
  7. {bayinx-0.2.11 → bayinx-0.2.13}/tests/test_variational.py +16 -23
  8. bayinx-0.2.13/uv.lock +596 -0
  9. bayinx-0.2.11/uv.lock +0 -360
  10. {bayinx-0.2.11 → bayinx-0.2.13}/.github/workflows/release_and_publish.yml +0 -0
  11. {bayinx-0.2.11 → bayinx-0.2.13}/.gitignore +0 -0
  12. {bayinx-0.2.11 → bayinx-0.2.13}/README.md +0 -0
  13. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/__init__.py +0 -0
  14. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/core/__init__.py +0 -0
  15. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/core/flow.py +0 -0
  16. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/core/model.py +0 -0
  17. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/core/utils.py +0 -0
  18. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/dists/__init__.py +0 -0
  19. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/dists/bernoulli.py +0 -0
  20. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/dists/binomial.py +0 -0
  21. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/dists/gamma.py +0 -0
  22. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/dists/gamma2.py +0 -0
  23. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/mhx/__init__.py +0 -0
  24. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/mhx/vi/__init__.py +0 -0
  25. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/mhx/vi/flows/__init__.py +0 -0
  26. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/mhx/vi/flows/fullaffine.py +0 -0
  27. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/mhx/vi/flows/radial.py +0 -0
  28. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/mhx/vi/flows/sylvester.py +0 -0
  29. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/mhx/vi/meanfield.py +0 -0
  30. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/mhx/vi/standard.py +0 -0
  31. {bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/py.typed +0 -0
  32. {bayinx-0.2.11 → bayinx-0.2.13}/tests/__init__.py +0 -0
{bayinx-0.2.11 → bayinx-0.2.13}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: bayinx
-Version: 0.2.11
+Version: 0.2.13
 Summary: Bayesian Inference with JAX
 Requires-Python: >=3.12
 Requires-Dist: equinox>=0.11.12
{bayinx-0.2.11 → bayinx-0.2.13}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "bayinx"
-version = "0.2.11"
+version = "0.2.13"
 description = "Bayesian Inference with JAX"
 readme = "README.md"
 requires-python = ">=3.12"
@@ -19,4 +19,4 @@ build-backend = "hatchling.build"
 addopts = "-q --benchmark-min-rounds=30 --benchmark-columns=rounds,mean,median,stddev --benchmark-group-by=func"

 [dependency-groups]
-dev = ["pytest>=8.3.5", "pytest-benchmark>=5.1.0"]
+dev = ["matplotlib>=3.10.1", "pytest>=8.3.5", "pytest-benchmark>=5.1.0"]
{bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/core/variational.py
@@ -115,7 +115,7 @@ class Variational(eqx.Module):

         # Initialize optimizer
         optim: GradientTransformation = opx.chain(
-            opx.scale(-1.0), opx.nadamw(schedule,weight_decay=weight_decay)
+            opx.scale(-1.0), opx.nadamw(schedule, weight_decay=weight_decay)
         )
         opt_state: OptState = optim.init(dyn)

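Context for the whitespace fix above: optax transformations minimize by default, so the chain prepends opx.scale(-1.0) to flip the gradient and turn nadamw's descent step into ascent on the variational objective. A minimal, self-contained sketch of that pattern with a toy objective standing in for the real one (the schedule and weight-decay values here are placeholders, not the package's defaults):

import jax
import jax.numpy as jnp
import optax as opx

def objective(params):
    # Toy stand-in for the variational objective; maximized at params == 3.
    return -jnp.sum((params - 3.0) ** 2)

schedule = opx.cosine_decay_schedule(init_value=1e-2, decay_steps=1_000)
optim = opx.chain(
    opx.scale(-1.0),  # flip the gradient: optax descends, we want to ascend
    opx.nadamw(schedule, weight_decay=1e-4),
)

params = jnp.zeros(3)
opt_state = optim.init(params)
for _ in range(1_000):
    grads = jax.grad(objective)(params)
    updates, opt_state = optim.update(grads, opt_state, params)
    params = opx.apply_updates(params, updates)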
{bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/dists/normal.py
@@ -1,17 +1,12 @@
-# MARK: Imports ----
 import jax.lax as _lax
+from jaxtyping import Array, ArrayLike, Float, Real

-## Typing
-from jaxtyping import Array, Real
-
-# MARK: Constants
 _PI = 3.141592653589793


-# MARK: Functions ----
 def prob(
-    x: Real[Array, "..."], mu: Real[Array, "..."], sigma: Real[Array, "..."]
-) -> Real[Array, "..."]:
+    x: Real[ArrayLike, "..."], mu: Real[ArrayLike, "..."], sigma: Real[ArrayLike, "..."]
+) -> Float[Array, "..."]:
     """
     The probability density function (PDF) for a Normal distribution.

@@ -30,8 +25,8 @@ def prob(


 def logprob(
-    x: Real[Array, "..."], mu: Real[Array, "..."], sigma: Real[Array, "..."]
-) -> Real[Array, "..."]:
+    x: Real[ArrayLike, "..."], mu: Real[ArrayLike, "..."], sigma: Real[ArrayLike, "..."]
+) -> Float[Array, "..."]:
     """
     The log of the probability density function (log PDF) for a Normal distribution.

@@ -48,8 +43,8 @@ def logprob(


 def uprob(
-    x: Real[Array, "..."], mu: Real[Array, "..."], sigma: Real[Array, "..."]
-) -> Real[Array, "..."]:
+    x: Real[ArrayLike, "..."], mu: Real[ArrayLike, "..."], sigma: Real[ArrayLike, "..."]
+) -> Float[Array, "..."]:
     """
     The unnormalized probability density function (uPDF) for a Normal distribution.

@@ -66,8 +61,8 @@ def uprob(


 def ulogprob(
-    x: Real[Array, "..."], mu: Real[Array, "..."], sigma: Real[Array, "..."]
-) -> Real[Array, "..."]:
+    x: Real[ArrayLike, "..."], mu: Real[ArrayLike, "..."], sigma: Real[ArrayLike, "..."]
+) -> Float[Array, "..."]:
     """
     The log of the unnormalized probability density function (log uPDF) for a Normal distribution.

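All four signature changes make the same relaxation: inputs are now typed Real[ArrayLike, "..."], so plain Python floats and ints pass the annotation, while the output stays pinned to a concrete Float[Array, "..."]. A sketch of the pattern (not the package's exact implementation, which computes with jax.lax primitives), using the usual Normal log-density log N(x | mu, sigma) = -log(sigma) - (1/2) log(2*pi) - (x - mu)^2 / (2 sigma^2):

import jax.numpy as jnp
from jaxtyping import Array, ArrayLike, Float, Real

_PI = 3.141592653589793

def logprob(
    x: Real[ArrayLike, "..."], mu: Real[ArrayLike, "..."], sigma: Real[ArrayLike, "..."]
) -> Float[Array, "..."]:
    # ArrayLike admits Python scalars; jnp promotes them to arrays,
    # so the result is always a concrete floating-point Array.
    x, mu, sigma = jnp.asarray(x), jnp.asarray(mu), jnp.asarray(sigma)
    return -jnp.log(sigma) - 0.5 * jnp.log(2.0 * _PI) - 0.5 * jnp.square((x - mu) / sigma)

print(logprob(0.0, 0.0, 1.0))  # scalars now type-check directly; ≈ -0.9189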
{bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/mhx/vi/flows/planar.py
@@ -3,7 +3,6 @@ from typing import Callable, Dict, Tuple

 import equinox as eqx
 import jax
-import jax.nn as jnn
 import jax.numpy as jnp
 import jax.random as jr
 from jaxtyping import Array, Float, Scalar
@@ -31,25 +30,12 @@ class Planar(Flow):
         - `dim`: The dimension of the parameter space.
         """
         self.params = {
-            "u": jr.normal(key, (dim,)),
-            "w": jr.normal(key, (dim,)),
-            "b": jr.normal(key, (1,)),
+            "u": jnp.ones(dim),
+            "w": jnp.ones(dim),
+            "b": jnp.zeros(1),
         }
         self.constraints = {}

-    def transform_pars(self):
-        params = self.params
-
-        u = params['u']
-        w = params['w']
-        b = params['b']
-
-        m = jnn.softplus(w.dot(u)) - 1.0
-
-        u = u + (m - w.dot(u)) * w / (w**2).sum()
-
-        return {'u': u, 'w': w, 'b': b}
-
     @eqx.filter_jit
     @partial(jax.vmap, in_axes=(None, 0))
     def forward(self, draws: Array) -> Array:
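For reference, the deleted transform_pars implemented the standard planar-flow reparameterization from Rezende & Mohamed (2015): the map f(z) = z + u * tanh(w·z + b) is invertible only when w·u >= -1, and replacing u with û = u + (m(w·u) - w·u) w / ||w||², where m(a) = softplus(a) - 1, enforces that. A standalone sketch of the deleted constraint, for comparison:

import jax.nn as jnn
import jax.numpy as jnp

def constrain_u(u: jnp.ndarray, w: jnp.ndarray) -> jnp.ndarray:
    # Reparameterize u so that w @ u_hat >= -1, which keeps the planar
    # map f(z) = z + u_hat * tanh(w @ z + b) invertible.
    m = jnn.softplus(w @ u) - 1.0  # m >= -1 by construction
    return u + (m - w @ u) * w / jnp.sum(w**2)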
{bayinx-0.2.11 → bayinx-0.2.13}/src/bayinx/mhx/vi/normalizing_flow.py
@@ -1,6 +1,8 @@
+from functools import partial
 from typing import Any, Callable, Self, Tuple

 import equinox as eqx
+import jax
 import jax.flatten_util as jfu
 import jax.numpy as jnp
 import jax.random as jr
@@ -59,6 +61,7 @@ class NormalizingFlow(Variational):
         return draws

     @eqx.filter_jit
+    @partial(jax.vmap, in_axes=(None, 0))
     def eval(self, draws: Array) -> Array:
         # Evaluate base density
         variational_evals: Array = self.base.eval(draws)
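The new decorator batches eval without changing its body: in_axes=(None, 0) tells jax.vmap to broadcast the first argument (the bound instance) and map over axis 0 of draws, so the method is written for a single draw but evaluates a whole batch. A minimal sketch of the same decorator pattern on a toy equinox module (the Affine class here is illustrative, not bayinx's API):

from functools import partial

import equinox as eqx
import jax
import jax.numpy as jnp


class Affine(eqx.Module):
    scale: jnp.ndarray
    shift: jnp.ndarray

    # in_axes=(None, 0): don't map over `self` (its parameters are
    # broadcast); map over axis 0 of `x`. The body handles one input,
    # vmap turns it into a batched evaluation.
    @partial(jax.vmap, in_axes=(None, 0))
    def eval(self, x: jnp.ndarray) -> jnp.ndarray:
        return self.scale * x + self.shift


f = Affine(jnp.array(2.0), jnp.array(1.0))
print(f.eval(jnp.arange(3.0)))  # [1. 3. 5.]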
{bayinx-0.2.11 → bayinx-0.2.13}/tests/test_variational.py
@@ -8,7 +8,7 @@ from jaxtyping import Array

 from bayinx import Model
 from bayinx.dists import normal
 from bayinx.mhx.vi import MeanField, NormalizingFlow, Standard
-from bayinx.mhx.vi.flows import FullAffine, Planar
+from bayinx.mhx.vi.flows import FullAffine, Planar, Radial


 # Tests ----
@@ -44,7 +44,7 @@ def test_meanfield(benchmark, var_draws):
         vari.fit(10000, var_draws=var_draws)

     benchmark(benchmark_fit)
-    vari = vari.fit(10000)
+    vari = vari.fit(20000)

     # Assert parameters are roughly correct
     assert all(abs(10.0 - vari.var_params["mean"]) < 0.1) and all(
@@ -84,7 +84,7 @@ def test_affine(benchmark, var_draws):
         vari.fit(10000, var_draws=var_draws)

     benchmark(benchmark_fit)
-    vari = vari.fit(10000)
+    vari = vari.fit(20000)

     params = vari.flows[0].constrain_pars()
     assert (abs(10.0 - vari.flows[0].params["shift"]) < 0.1).all() and (
@@ -95,37 +95,30 @@ def test_affine(benchmark, var_draws):
 @pytest.mark.parametrize("var_draws", [1, 10, 100])
 def test_flows(benchmark, var_draws):
     # Construct model definition
-    class Banana(Model):
+    class NormalDist(Model):
         params: Dict[str, Array]
         constraints: Dict[str, Callable[[Array], Array]]

         def __init__(self):
-            self.params = {
-                'x': jnp.array(0.0),
-                'y': jnp.array(0.0)
-            }
+            self.params = {"mu": jnp.array([0.0, 0.0])}
             self.constraints = {}

-        def eval(self, data = None):
-            params: Dict[str, Array] = self.params
-            # Extract parameters
-            x: Array = params['x']
-            y: Array = params['y']
-
-            # Initialize target density
-            target = jnp.array(0.0)
-
-            target += normal.logprob(x, mu = jnp.array(0.0), sigma = jnp.array(1.0))
-            target += normal.logprob(y, mu = x**2 + x, sigma = jnp.array(1.0))
+        @eqx.filter_jit
+        def eval(self, data: dict):
+            # Get constrained parameters
+            params = self.constrain_pars()

-            return target
+            # Evaluate mu ~ N(10,1)
+            return jnp.sum(
+                normal.logprob(x=params["mu"], mu=jnp.array(10.0), sigma=jnp.array(1.0))
+            )

     # Construct model
-    model = Banana()
+    model = NormalDist()

     # Construct normalizing flow variational
     vari = NormalizingFlow(
-        Standard(model), [FullAffine(2), Planar(2)], model
+        Standard(model), [FullAffine(2), Planar(2), Radial(2)], model
     )

     # Optimize variational distribution
@@ -133,7 +126,7 @@ def test_flows(benchmark, var_draws):
         vari.fit(10000, var_draws=var_draws)

     benchmark(benchmark_fit)
-    vari = vari.fit(100)
+    vari = vari.fit(20000)

     mean = vari.sample(1000).mean(0)
     var = vari.sample(1000).var(0)
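Pieced together, the rewritten test amounts to the following end-to-end usage of the API visible in this diff: a Model whose eval returns the target log-density (each component of mu ~ N(10, 1)), a Standard base wrapped in a NormalizingFlow with a FullAffine → Planar → Radial stack, then fit and sample. A sketch assembled from the hunks above (type annotations abbreviated relative to the test's Dict[str, Array]):

import equinox as eqx
import jax.numpy as jnp

from bayinx import Model
from bayinx.dists import normal
from bayinx.mhx.vi import NormalizingFlow, Standard
from bayinx.mhx.vi.flows import FullAffine, Planar, Radial


class NormalDist(Model):
    params: dict
    constraints: dict

    def __init__(self):
        self.params = {"mu": jnp.array([0.0, 0.0])}
        self.constraints = {}

    @eqx.filter_jit
    def eval(self, data: dict):
        params = self.constrain_pars()
        # Target density: each component of mu ~ N(10, 1)
        return jnp.sum(
            normal.logprob(x=params["mu"], mu=jnp.array(10.0), sigma=jnp.array(1.0))
        )


model = NormalDist()
vari = NormalizingFlow(Standard(model), [FullAffine(2), Planar(2), Radial(2)], model)
vari = vari.fit(20000)
print(vari.sample(1000).mean(0))  # should be close to [10., 10.]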