bartz-0.4.0-py3-none-any.whl → bartz-0.5.0-py3-none-any.whl

bartz/prepcovars.py CHANGED
@@ -1,6 +1,6 @@
  # bartz/src/bartz/prepcovars.py
  #
- # Copyright (c) 2024, Giacomo Petrillo
+ # Copyright (c) 2024-2025, Giacomo Petrillo
  #
  # This file is part of bartz.
  #
@@ -28,7 +28,7 @@ import jax
  from jax import numpy as jnp

  from . import jaxext
- from . import grove
+

  @functools.partial(jax.jit, static_argnums=(1,))
  def quantilized_splits_from_matrix(X, max_bins):
@@ -54,12 +54,15 @@ def quantilized_splits_from_matrix(X, max_bins):
  The number of actually used values in each row of `splits`.
  """
  out_length = min(max_bins, X.shape[1]) - 1
+
  # return _quantilized_splits_from_matrix(X, out_length)
- @functools.partial(jaxext.autobatch, max_io_nbytes=2 ** 29)
+ @functools.partial(jaxext.autobatch, max_io_nbytes=2**29)
  def quantilize(X):
  return _quantilized_splits_from_matrix(X, out_length)
+
  return quantilize(X)

+
  @functools.partial(jax.vmap, in_axes=(0, None))
  def _quantilized_splits_from_matrix(x, out_length):
  huge = jaxext.huge_value(x)
@@ -67,21 +70,28 @@ def _quantilized_splits_from_matrix(x, out_length):
  actual_length -= 1
  if jnp.issubdtype(x.dtype, jnp.integer):
  midpoints = u[:-1] + jaxext.ensure_unsigned(u[1:] - u[:-1]) // 2
- indices = jnp.arange(midpoints.size, dtype=jaxext.minimal_unsigned_dtype(midpoints.size - 1))
+ indices = jnp.arange(
+ midpoints.size, dtype=jaxext.minimal_unsigned_dtype(midpoints.size - 1)
+ )
  midpoints = jnp.where(indices < actual_length, midpoints, huge)
  else:
  midpoints = (u[1:] + u[:-1]) / 2
  indices = jnp.linspace(-1, actual_length, out_length + 2)[1:-1]
- indices = jnp.around(indices).astype(jaxext.minimal_unsigned_dtype(midpoints.size - 1))
- # indices calculation with float rather than int to avoid potential
- # overflow with int32, and to round to nearest instead of rounding down
+ indices = jnp.around(indices).astype(
+ jaxext.minimal_unsigned_dtype(midpoints.size - 1)
+ )
+ # indices calculation with float rather than int to avoid potential
+ # overflow with int32, and to round to nearest instead of rounding down
  decimated_midpoints = midpoints[indices]
  truncated_midpoints = midpoints[:out_length]
- splits = jnp.where(actual_length > out_length, decimated_midpoints, truncated_midpoints)
+ splits = jnp.where(
+ actual_length > out_length, decimated_midpoints, truncated_midpoints
+ )
  max_split = jnp.minimum(actual_length, out_length)
  max_split = max_split.astype(jaxext.minimal_unsigned_dtype(out_length))
  return splits, max_split

+
  @functools.partial(jax.jit, static_argnums=(1,))
  def uniform_splits_from_matrix(X, num_bins):
  """
@@ -110,6 +120,7 @@ def uniform_splits_from_matrix(X, num_bins):
  max_split = jnp.full(*splits.shape, jaxext.minimal_unsigned_dtype(num_bins - 1))
  return splits, max_split

+
  @functools.partial(jax.jit, static_argnames=('method',))
  def bin_predictors(X, splits, **kw):
  """
@@ -135,9 +146,11 @@ def bin_predictors(X, splits, **kw):
  A matrix with `p` predictors and `n` observations, where each predictor
  has been replaced by the index of the bin it falls into.
  """
- @functools.partial(jaxext.autobatch, max_io_nbytes=2 ** 29)
+
+ @functools.partial(jaxext.autobatch, max_io_nbytes=2**29)
  @jax.vmap
  def bin_predictors(x, splits):
  dtype = jaxext.minimal_unsigned_dtype(splits.size)
  return jnp.searchsorted(splits, x, **kw).astype(dtype)
+
  return bin_predictors(X, splits)
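For orientation, below is a minimal usage sketch of the two public helpers touched by this file. The function names and signatures come from the hunks above; the example data and the `max_bins` value are illustrative assumptions, not code from the release.

```python
# Hedged sketch: compute quantile-based split points and bin a predictor matrix
# with bartz.prepcovars. X is laid out as (p predictors, n observations), per the
# docstrings shown in the diff above.
import jax.numpy as jnp

from bartz import prepcovars

X = jnp.stack([
    jnp.linspace(0.0, 1.0, 100),               # a continuous predictor
    jnp.arange(100, dtype=jnp.float32) % 7.0,  # a predictor with few distinct values
])

# Quantile-based cut points, at most max_bins bins per predictor (max_bins is a
# static argument of the jitted function).
splits, max_split = prepcovars.quantilized_splits_from_matrix(X, 256)

# Replace each value with the index of the bin it falls into.
X_binned = prepcovars.bin_predictors(X, splits)
print(X_binned.shape, X_binned.dtype)  # (2, 100), a small unsigned integer dtype
```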
bartz-0.5.0.dist-info/METADATA ADDED
@@ -0,0 +1,48 @@
+ Metadata-Version: 2.4
+ Name: bartz
+ Version: 0.5.0
+ Summary: Super-fast BART (Bayesian Additive Regression Trees) in Python
+ Author: Giacomo Petrillo
+ Author-email: Giacomo Petrillo <info@giacomopetrillo.com>
+ License-Expression: MIT
+ Requires-Dist: jax>=0.4.35,<1
+ Requires-Dist: jaxlib>=0.4.35,<1
+ Requires-Dist: numpy>=1.25.2,<3
+ Requires-Dist: scipy>=1.11.4,<2
+ Requires-Python: >=3.10
+ Project-URL: Documentation, https://gattocrucco.github.io/bartz/docs-dev
+ Project-URL: Homepage, https://github.com/Gattocrucco/bartz
+ Project-URL: Issues, https://github.com/Gattocrucco/bartz/issues
+ Description-Content-Type: text/markdown
+
+ [![PyPI](https://img.shields.io/pypi/v/bartz)](https://pypi.org/project/bartz/)
+ [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.13931477.svg)](https://doi.org/10.5281/zenodo.13931477)
+
+ # BART vectoriZed
+
+ An implementation of Bayesian Additive Regression Trees (BART) in JAX.
+
+ If you don't know what BART is, but know XGBoost, consider BART as a sort of Bayesian XGBoost. bartz makes BART run as fast as XGBoost.
+
+ BART is a nonparametric Bayesian regression technique. Given training predictors $X$ and responses $y$, BART finds a function to predict $y$ given $X$. The result of the inference is a sample of possible functions, representing the uncertainty over the determination of the function.
+
+ This Python module provides an implementation of BART that runs on GPU, to process large datasets faster. It is also good on CPU. Most other implementations of BART are for R, and run on CPU only.
+
+ On CPU, bartz runs at the speed of dbarts (the fastest implementation I know of) if n > 20,000, but using 1/20 of the memory. On GPU, the speed premium depends on sample size; it is convenient over CPU only for n > 10,000. The maximum speedup is currently 200x, on an Nvidia A100 and with at least 2,000,000 observations.
+
+ [This Colab notebook](https://colab.research.google.com/github/Gattocrucco/bartz/blob/main/docs/examples/basic_simdata.ipynb) runs bartz with n = 100,000 observations, p = 1000 predictors, 10,000 trees, for 1000 MCMC iterations, in 5 minutes.
+
+ ## Links
+
+ - [Documentation (latest release)](https://gattocrucco.github.io/bartz/docs)
+ - [Documentation (development version)](https://gattocrucco.github.io/bartz/docs-dev)
+ - [Repository](https://github.com/Gattocrucco/bartz)
+ - [Code coverage](https://gattocrucco.github.io/bartz/coverage)
+ - [Benchmarks](https://gattocrucco.github.io/bartz/benchmarks)
+ - [List of BART packages](https://gattocrucco.github.io/bartz/docs-dev/pkglist.html)
+
+ ## Citing bartz
+
+ Article: Petrillo (2024), "Very fast Bayesian Additive Regression Trees on GPU", [arXiv:2410.23244](https://arxiv.org/abs/2410.23244).
+
+ To cite the software directly, including the specific version, use [zenodo](https://doi.org/10.5281/zenodo.13931477).
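To make the README above concrete, here is a hypothetical fitting sketch. The `bartz.BART.gbart` entry point is inferred from the `bartz/BART.py` module listed in the RECORD below; the keyword names, the predictor layout, and the `yhat_test` attribute are assumptions made for illustration only, so check the linked documentation for the actual interface.

```python
# Hypothetical sketch (not from the diff): fit BART to simulated data with bartz.
# All keyword and attribute names below are assumptions; see
# https://gattocrucco.github.io/bartz/docs for the real API.
import jax

from bartz import BART

key = jax.random.key(0)
kx, ky = jax.random.split(key)

p, n = 10, 1000
X = jax.random.normal(kx, (p, n))          # layout assumed (p, n), as in prepcovars above
y = X[0] ** 2 + X[1] + 0.1 * jax.random.normal(ky, (n,))  # noisy response

# Assumed R-style keyword names (ntree, ndpost, nskip) and seed argument.
fit = BART.gbart(X, y, x_test=X, ntree=200, ndpost=1000, nskip=100, seed=0)

# Assumed attribute: posterior samples of predictions at x_test, averaged here.
posterior_mean = fit.yhat_test.mean(axis=0)
```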
bartz-0.5.0.dist-info/RECORD ADDED
@@ -0,0 +1,13 @@
+ bartz/.DS_Store,sha256=7191af46d7b8c0d4c03c502f94eb01353bc2e615d75c45b3af0e31ab238034b5,6148
+ bartz/BART.py,sha256=50faa27d82bc1c5ab6eb676a006bd7a95e2cbe10a541c5de7a100f068a76a820,17898
+ bartz/__init__.py,sha256=c3b1d04b62e52220f060d28f38403a536fcbe2cea06d0a4087dc0143b05cb1fd,1433
+ bartz/_version.py,sha256=d0f688d9e48e0a9e6490d7292a95126cb1dfebaacbf714331696322c6a44b723,22
+ bartz/debug.py,sha256=0ab2b06e53245b763d4c684653031c912ed794e328d2f427dbfb017c975840af,5359
+ bartz/grove.py,sha256=13d05ca6877b073cfc2d0e887fc03f74be35eadb6b587b83a0a0d41de1accb10,8494
+ bartz/jaxext.py,sha256=a439363fd44be9a26770a65dd30c3d140a877ec0ddcdd65ba8fed4471c629cd0,11552
+ bartz/mcmcloop.py,sha256=15f7de40d1b3a57183f90db2972b66ab7b455f4829cca22c5686736b154c1fac,9216
+ bartz/mcmcstep.py,sha256=48aaf9d4a32aa4b84a73b4ec365fe712204dd872e5df3de5cc3715a37e362430,60504
+ bartz/prepcovars.py,sha256=e76c05c5218a47096c368f28c2df0178b44b428d26ba9c15fa6dd6c09fd2f2a1,5849
+ bartz-0.5.0.dist-info/WHEEL,sha256=11319513b72fe7d4379f6f9f7984b3cca85f688201c09325839a04908e107b75,78
+ bartz-0.5.0.dist-info/METADATA,sha256=d875077205f7d03c4c4f8c84acfe3079c4808be32ea3ab358640fe581b6f3b46,2795
+ bartz-0.5.0.dist-info/RECORD,,
bartz-0.5.0.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
+ Wheel-Version: 1.0
+ Generator: uv 0.7.4
+ Root-Is-Purelib: true
+ Tag: py3-none-any
bartz-0.4.0.dist-info/LICENSE DELETED
@@ -1,21 +0,0 @@
- MIT License
-
- Copyright (c) 2024 Giacomo Petrillo
-
- Permission is hereby granted, free of charge, to any person obtaining a copy
- of this software and associated documentation files (the "Software"), to deal
- in the Software without restriction, including without limitation the rights
- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
- copies of the Software, and to permit persons to whom the Software is
- furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in all
- copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
- SOFTWARE.
bartz-0.4.0.dist-info/METADATA DELETED
@@ -1,77 +0,0 @@
- Metadata-Version: 2.1
- Name: bartz
- Version: 0.4.0
- Summary: A JAX implementation of BART
- Home-page: https://github.com/Gattocrucco/bartz
- License: MIT
- Author: Giacomo Petrillo
- Author-email: info@giacomopetrillo.com
- Requires-Python: >=3.10,<4.0
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
- Requires-Dist: jax (>=0.4.23,<0.5.0)
- Requires-Dist: jaxlib (>=0.4.23,<0.5.0)
- Requires-Dist: numpy (>=1.25.2,<2.0.0)
- Requires-Dist: scipy (>=1.11.4,<2.0.0)
- Project-URL: Bug Tracker, https://github.com/Gattocrucco/bartz/issues
- Project-URL: Repository, https://github.com/Gattocrucco/bartz
- Description-Content-Type: text/markdown
-
- [![PyPI](https://img.shields.io/pypi/v/bartz)](https://pypi.org/project/bartz/)
-
- # BART vectoriZed
-
- A branchless vectorized implementation of Bayesian Additive Regression Trees (BART) in JAX.
-
- BART is a nonparametric Bayesian regression technique. Given predictors $X$ and responses $y$, BART finds a function to predict $y$ given $X$. The result of the inference is a sample of possible functions, representing the uncertainty over the determination of the function.
-
- This Python module provides an implementation of BART that runs on GPU, to process large datasets faster. It is also good on CPU. Most other implementations of BART are for R, and run on CPU only.
-
- On CPU, bartz runs at the speed of dbarts (the fastest implementation I know of), but using half the memory. On GPU, the speed premium depends on sample size; with 50000 datapoints and 5000 trees, on an Nvidia Tesla V100 GPU it's 12 times faster than an Apple M1 CPU, and this factor is linearly proportional to the number of datapoints.
-
- ## Links
-
- - [Documentation (latest release)](https://gattocrucco.github.io/bartz/docs)
- - [Documentation (development version)](https://gattocrucco.github.io/bartz/docs-dev)
- - [Repository](https://github.com/Gattocrucco/bartz)
- - [Code coverage](https://gattocrucco.github.io/bartz/coverage)
-
- ## Other BART packages
-
- - [stochtree](https://github.com/StochasticTree) C++ library with R and Python bindings taylored to researchers who want to make their own BART variants
- - [bnptools](https://github.com/rsparapa/bnptools) Feature-rich R packages for BART and some variants
- - [dbarts](https://github.com/vdorie/dbarts) Fast R package
- - [bartMachine](https://github.com/kapelner/bartMachine) Fast R package, supports missing predictors imputation
- - [SoftBART](https://github.com/theodds/SoftBART) R package with a smooth version of BART
- - [bcf](https://github.com/jaredsmurray/bcf) R package for a version of BART for causal inference
- - [flexBART](https://github.com/skdeshpande91/flexBART) Fast R package, supports categorical predictors
- - [flexBCF](https://github.com/skdeshpande91/flexBCF) R package, version of bcf optimized for large datasets
- - [XBART](https://github.com/JingyuHe/XBART) R/Python package, XBART is a faster variant of BART
- - [BART](https://github.com/JingyuHe/BART) R package, BART warm-started with XBART
- - [XBCF](https://github.com/socket778/XBCF)
- - [BayesTree](https://cran.r-project.org/package=BayesTree) R package, original BART implementation
- - [bartCause](https://github.com/vdorie/bartCause) R package, pre-made BART-based workflows for causal inference
- - [stan4bart](https://github.com/vdorie/stan4bart)
- - [VCBART](https://github.com/skdeshpande91/VCBART)
- - [monbart](https://github.com/jaredsmurray/monbart)
- - [mBART](https://github.com/remcc/mBART_shlib)
- - [SequentialBART](https://github.com/mjdaniels/SequentialBART)
- - [sparseBART](https://github.com/cspanbauer/sparseBART)
- - [pymc-bart](https://github.com/pymc-devs/pymc-bart)
- - [semibart](https://github.com/zeldow/semibart)
- - [CSP-BART](https://github.com/ebprado/CSP-BART)
- - [AMBARTI](https://github.com/ebprado/AMBARTI)
- - [MOTR-BART](https://github.com/ebprado/MOTR-BART)
- - [bcfbma](https://github.com/EoghanONeill/bcfbma)
- - [bartBMAnew](https://github.com/EoghanONeill/bartBMAnew)
- - [BART-BMA](https://github.com/BelindaHernandez/BART-BMA) (superseded by bartBMAnew)
- - [gpbart](https://github.com/MateusMaiaDS/gpbart)
- - [GPBART](https://github.com/nchenderson/GPBART)
- - [bartpy](https://github.com/JakeColtman/bartpy)
- - [BayesTreePrior](https://github.com/AlexiaJM/BayesTreePrior)
- - [BayesTree.jl](https://github.com/mathcg/BayesTree.jl)
- - [longbet](https://github.com/google/longbet)
-
bartz-0.4.0.dist-info/RECORD DELETED
@@ -1,13 +0,0 @@
- bartz/BART.py,sha256=CbGzFWtYw5u38Z9-Hy3CbDXpKOOvPFAAkSqu2HZl8no,16862
- bartz/__init__.py,sha256=E96vsP0bZ8brejpZmEmRoXuMsUdinO_B_SKUUl1rLsg,1448
- bartz/_version.py,sha256=2eiWQI55fd-roDdkt4Hvl9WzrTJ4xQo33VzFud6D03U,22
- bartz/debug.py,sha256=9ZH-JfwZVu5OPhHBEyXQHAU5H9KIu1vxLK7yNv4m4Ew,5314
- bartz/grove.py,sha256=x_6NK_l-hrXfy1PhssYNJkX41-w_WqjDziww0E7YRS8,8500
- bartz/jaxext.py,sha256=RcVWTCGS8lXF7GBsNbKrpuA4MTcokItq0CpWm3s7CGk,12033
- bartz/mcmcloop.py,sha256=lKDszvniNXka99X3e9RCrTgvEAZHA7ZbVXEgxUYvKMY,7634
- bartz/mcmcstep.py,sha256=diI9vHXHMvu_Lk_bSJ-a038OnEbXDpNEikVPhRcxEys,54987
- bartz/prepcovars.py,sha256=mMgfL-LGJ_8QpOL6iy7yfkL8A7FrT7Zfn5M3voyNwSQ,5818
- bartz-0.4.0.dist-info/LICENSE,sha256=heuIJZQK9IexJYC-fYHoLUrgj8HG8yS3G072EvKh-94,1073
- bartz-0.4.0.dist-info/METADATA,sha256=K86CVXT6ayPnc2hjhreYGMEeYWfYJIZdDkKuBB0-FYA,4500
- bartz-0.4.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- bartz-0.4.0.dist-info/RECORD,,
bartz-0.4.0.dist-info/WHEEL DELETED
@@ -1,4 +0,0 @@
- Wheel-Version: 1.0
- Generator: poetry-core 1.9.0
- Root-Is-Purelib: true
- Tag: py3-none-any