arviz 0.22.0__py3-none-any.whl → 0.23.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
arviz/__init__.py CHANGED
@@ -1,9 +1,10 @@
  # pylint: disable=wildcard-import,invalid-name,wrong-import-position
  """ArviZ is a library for exploratory analysis of Bayesian models."""
- __version__ = "0.22.0"
+ __version__ = "0.23.1"

  import logging
  import os
+ import re

  from matplotlib.colors import LinearSegmentedColormap
  from matplotlib.pyplot import style
@@ -11,6 +12,45 @@ import matplotlib as mpl
  from packaging import version


+ def _warn_once_per_day():
+     from .preview import info
+
+     # skip warning if all 3 arviz subpackages are already installed
+     pat = re.compile(r"arviz_(base|stats|plots) available")
+     if len(pat.findall(info)) == 3:
+         return
+
+     import datetime
+     from warnings import warn
+     from pathlib import Path
+
+     warning_dir = Path.home() / "arviz_data"
+     warning_dir.mkdir(exist_ok=True)
+
+     stamp_file = warning_dir / "daily_warning"
+     today = datetime.date.today()
+
+     if stamp_file.exists():
+         last_date = datetime.date.fromisoformat(stamp_file.read_text().strip())
+     else:
+         last_date = None
+
+     if last_date != today:
+         warn(
+             "\nArviZ is undergoing a major refactor to improve flexibility and extensibility "
+             "while maintaining a user-friendly interface."
+             "\nSome upcoming changes may be backward incompatible."
+             "\nFor details and migration guidance, visit: "
+             "https://python.arviz.org/en/latest/user_guide/migration_guide.html",
+             FutureWarning,
+         )
+
+         stamp_file.write_text(today.isoformat())
+
+
+ _warn_once_per_day()
+
+
  class Logger(logging.Logger):
      """Override Logger to avoid repeated messages."""

@@ -333,4 +373,4 @@ except ModuleNotFoundError:


  # clean namespace
- del os, logging, LinearSegmentedColormap, Logger, mpl
+ del os, re, logging, version, LinearSegmentedColormap, Logger, mpl
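The new `_warn_once_per_day()` hook emits the migration notice as a `FutureWarning` at import time, at most once per day (tracked through a stamp file at `~/arviz_data/daily_warning`). A minimal sketch, not part of the diff, of how a script can suppress it; note this blunt filter ignores every `FutureWarning` raised while the import runs:

import warnings

# Suppress FutureWarnings (including the once-per-day migration notice) during import only.
with warnings.catch_warnings():
    warnings.simplefilter("ignore", FutureWarning)
    import arviz as az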
arviz/data/converters.py CHANGED
@@ -2,6 +2,7 @@

  import numpy as np
  import xarray as xr
+ import pandas as pd

  try:
      from tree import is_nested
@@ -44,6 +45,8 @@ def convert_to_inference_data(obj, *, group="posterior", coords=None, dims=None,
          | dict: creates an xarray dataset as the only group
          | numpy array: creates an xarray dataset as the only group, gives the
            array an arbitrary name
+         | object with __array__: converts to numpy array, then creates an xarray dataset as
+           the only group, gives the array an arbitrary name
      group : str
          If `obj` is a dict or numpy array, assigns the resulting xarray
          dataset to this group. Default: "posterior".
@@ -115,6 +118,13 @@ def convert_to_inference_data(obj, *, group="posterior", coords=None, dims=None,
          dataset = dict_to_dataset(obj, coords=coords, dims=dims)
      elif isinstance(obj, np.ndarray):
          dataset = dict_to_dataset({"x": obj}, coords=coords, dims=dims)
+     elif (
+         hasattr(obj, "__array__")
+         and callable(getattr(obj, "__array__"))
+         and (not isinstance(obj, pd.DataFrame))
+     ):
+         obj = obj.__array__()
+         dataset = dict_to_dataset({"x": obj}, coords=coords, dims=dims)
      elif isinstance(obj, (list, tuple)) and isinstance(obj[0], str) and obj[0].endswith(".csv"):
          if group == "sample_stats":
              kwargs["posterior"] = kwargs.pop(group)
@@ -129,6 +139,7 @@ def convert_to_inference_data(obj, *, group="posterior", coords=None, dims=None,
              "pytree (if 'dm-tree' is installed)",
              "netcdf filename",
              "numpy array",
+             "object with __array__",
              "pystan fit",
              "emcee fit",
              "pyro mcmc fit",
arviz/data/inference_data.py CHANGED
@@ -430,11 +430,12 @@ class InferenceData(Mapping[str, xr.Dataset]):
              if re.search(key, group):
                  group_kws = kws
          group_kws.setdefault("engine", engine)
-         with xr.open_dataset(filename, group=f"{base_group}/{group}", **group_kws) as data:
-             if rcParams["data.load"] == "eager":
+         data = xr.open_dataset(filename, group=f"{base_group}/{group}", **group_kws)
+         if rcParams["data.load"] == "eager":
+             with data:
                  groups[group] = data.load()
-         else:
-             groups[group] = data
+         else:
+             groups[group] = data

      with xr.open_dataset(filename, engine=engine, group=base_group) as data:
          attrs.update(data.load().attrs)
@@ -540,7 +541,9 @@ class InferenceData(Mapping[str, xr.Dataset]):
                  "xarray must be have DataTree in order to use InferenceData.to_datatree. "
                  "Update to xarray>=2024.11.0"
              ) from err
-         return DataTree.from_dict({group: ds for group, ds in self.items()})
+         dt = DataTree.from_dict({group: ds for group, ds in self.items()})
+         dt.attrs = self.attrs
+         return dt

      @staticmethod
      def from_datatree(datatree):
@@ -551,7 +554,8 @@ class InferenceData(Mapping[str, xr.Dataset]):
          datatree : DataTree
          """
          return InferenceData(
-             **{group: child.to_dataset() for group, child in datatree.children.items()}
+             attrs=datatree.attrs,
+             **{group: child.to_dataset() for group, child in datatree.children.items()},
          )

      def to_dict(self, groups=None, filter_groups=None):
@@ -811,7 +815,7 @@ class InferenceData(Mapping[str, xr.Dataset]):
          if version.parse(zarr.__version__) >= version.parse("3.0.0.dev0"):
              raise ImportError(
                  "Found zarr>=3, which is not supported by ArviZ. Instead, you can use "
-                 "'dt = InfereceData.to_datatree' followed by 'dt.to_zarr()' "
+                 "'dt = InferenceData.to_datatree' followed by 'dt.to_zarr()' "
                  "(needs xarray>=2024.11.0)"
              )

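The `to_datatree`/`from_datatree` changes above carry top-level `attrs` through the round trip. A short sketch, assuming an xarray version with `DataTree` support (>=2024.11.0):

import arviz as az

idata = az.load_arviz_data("centered_eight")
idata.attrs["note"] = "kept through the round trip"

dt = idata.to_datatree()                         # attrs now copied onto the DataTree
idata_back = az.InferenceData.from_datatree(dt)  # and restored on the way back

assert dt.attrs == idata.attrs
assert idata_back.attrs == idata.attrs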
arviz/data/io_numpyro.py CHANGED
@@ -241,7 +241,10 @@ class NumPyroConverter:
                  continue
              name = rename_key.get(stat, stat)
              value = value.copy()
-             data[name] = value
+             if stat == "potential_energy":
+                 data[name] = -value
+             else:
+                 data[name] = value
              if stat == "num_steps":
                  data["tree_depth"] = np.log2(value).astype(int) + 1
          return dict_to_dataset(
arviz/data/io_pyjags.py CHANGED
@@ -277,7 +277,7 @@ def _extract_arviz_dict_from_inference_data(


  def _convert_arviz_dict_to_pyjags_dict(
-     samples: tp.Mapping[str, np.ndarray]
+     samples: tp.Mapping[str, np.ndarray],
  ) -> tp.Mapping[str, np.ndarray]:
      """
      Convert and ArviZ dictionary to a PyJAGS dictionary.
arviz/plots/bpvplot.py CHANGED
@@ -251,7 +251,7 @@ def plot_bpv(
      total_pp_samples = predictive_dataset.sizes["chain"] * predictive_dataset.sizes["draw"]

      for key in coords.keys():
-         coords[key] = np.where(np.in1d(observed[key], coords[key]))[0]
+         coords[key] = np.where(np.isin(observed[key], coords[key]))[0]

      obs_plotters = filter_plotters_list(
          list(
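The `np.in1d` to `np.isin` swap here (and in `ppcplot.py` below) is the NumPy 2.x replacement for the deprecated function; the call builds the same positional index for the requested coordinate values. A tiny standalone illustration:

import numpy as np

observed_coord = np.array(["a", "b", "c", "d"])
requested = ["b", "d"]

# Positions of the requested coordinate values, as used for the coords lookup above.
idx = np.where(np.isin(observed_coord, requested))[0]
print(idx)  # [1 3]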
arviz/plots/dotplot.py CHANGED
@@ -2,6 +2,7 @@

  import numpy as np

+
  from ..rcparams import rcParams
  from .plot_utils import get_plotting_function

@@ -148,6 +149,7 @@ def plot_dot(
          raise ValueError("marker argument is valid only for matplotlib backend")

      values = np.ravel(values)
+     values = values[np.isfinite(values)]
      values.sort()

      if hdi_prob is None:
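The added `np.isfinite` filter means NaN or infinite draws are dropped before the dot plot bins the sample. A minimal sketch:

import numpy as np
import arviz as az

rng = np.random.default_rng(0)
values = np.concatenate([rng.normal(size=500), [np.nan, np.inf]])

# Non-finite entries are now discarded instead of propagating into the quantile binning.
az.plot_dot(values, show=False)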
arviz/plots/forestplot.py CHANGED
@@ -51,7 +51,7 @@
      data : InferenceData
          Any object that can be converted to an :class:`arviz.InferenceData` object
          Refer to documentation of :func:`arviz.convert_to_dataset` for details.
-     kind : {"foresplot", "ridgeplot"}, default "forestplot"
+     kind : {"forestplot", "ridgeplot"}, default "forestplot"
          Specify the kind of plot:

          * The ``kind="forestplot"`` generates credible intervals, where the central points are the
@@ -75,8 +75,8 @@
          interpret `var_names` as substrings of the real variables names. If "regex",
          interpret `var_names` as regular expressions on the real variables names. See
          :ref:`this section <common_filter_vars>` for usage examples.
-     transform : callable, optional
-         Function to transform data (defaults to None i.e.the identity function).
+     transform : callable or dict, optional
+         Function to transform the data. Defaults to None, i.e., the identity function.
      coords : dict, optional
          Coordinates of ``var_names`` to be plotted. Passed to :meth:`xarray.Dataset.sel`.
          See :ref:`this section <common_coords>` for usage examples.
@@ -228,7 +228,19 @@

      datasets = [convert_to_dataset(datum) for datum in reversed(data)]
      if transform is not None:
-         datasets = [transform(dataset) for dataset in datasets]
+         if callable(transform):
+             datasets = [transform(dataset) for dataset in datasets]
+         elif isinstance(transform, dict):
+             transformed_datasets = []
+             for dataset in datasets:
+                 new_dataset = dataset.copy()
+                 for var_name, func in transform.items():
+                     if var_name in new_dataset:
+                         new_dataset[var_name] = func(new_dataset[var_name])
+                 transformed_datasets.append(new_dataset)
+             datasets = transformed_datasets
+         else:
+             raise ValueError("transform must be either a callable or a dict {var_name: callable}")
      datasets = get_coords(
          datasets, list(reversed(coords)) if isinstance(coords, (list, tuple)) else coords
      )
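`transform` now also accepts a mapping from variable names to callables, applied per variable before plotting. A short usage sketch on a bundled example dataset:

import numpy as np
import arviz as az

idata = az.load_arviz_data("centered_eight")

# Log-transform only "tau"; "mu" is passed through unchanged.
az.plot_forest(idata, var_names=["mu", "tau"], transform={"tau": np.log}, show=False)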
arviz/plots/ppcplot.py CHANGED
@@ -304,7 +304,7 @@ def plot_ppc(
      pp_sample_ix = np.random.choice(total_pp_samples, size=num_pp_samples, replace=False)

      for key in coords.keys():
-         coords[key] = np.where(np.in1d(observed_data[key], coords[key]))[0]
+         coords[key] = np.where(np.isin(observed_data[key], coords[key]))[0]

      obs_plotters = filter_plotters_list(
          list(
arviz/preview.py CHANGED
@@ -8,41 +8,51 @@ info = ""

  try:
      from arviz_base import *
+     import arviz_base as base

-     status = "arviz_base available, exposing its functions as part of arviz.preview"
-     _log.info(status)
+     _status = "arviz_base available, exposing its functions as part of arviz.preview"
+     _log.info(_status)
  except ModuleNotFoundError:
-     status = "arviz_base not installed"
-     _log.info(status)
+     _status = "arviz_base not installed"
+     _log.info(_status)
  except ImportError:
-     status = "Unable to import arviz_base"
-     _log.info(status, exc_info=True)
+     _status = "Unable to import arviz_base"
+     _log.info(_status, exc_info=True)

- info += status + "\n"
+ info += _status + "\n"

  try:
      from arviz_stats import *

-     status = "arviz_stats available, exposing its functions as part of arviz.preview"
-     _log.info(status)
+     # the base computational module fron arviz_stats will override the alias to arviz-base
+     # arviz.stats.base will still be available
+     import arviz_base as base
+     import arviz_stats as stats
+
+     _status = "arviz_stats available, exposing its functions as part of arviz.preview"
+     _log.info(_status)
  except ModuleNotFoundError:
-     status = "arviz_stats not installed"
-     _log.info(status)
+     _status = "arviz_stats not installed"
+     _log.info(_status)
  except ImportError:
-     status = "Unable to import arviz_stats"
-     _log.info(status, exc_info=True)
- info += status + "\n"
+     _status = "Unable to import arviz_stats"
+     _log.info(_status, exc_info=True)
+ info += _status + "\n"

  try:
      from arviz_plots import *
+     import arviz_plots as plots

-     status = "arviz_plots available, exposing its functions as part of arviz.preview"
-     _log.info(status)
+     _status = "arviz_plots available, exposing its functions as part of arviz.preview"
+     _log.info(_status)
  except ModuleNotFoundError:
-     status = "arviz_plots not installed"
-     _log.info(status)
+     _status = "arviz_plots not installed"
+     _log.info(_status)
  except ImportError:
-     status = "Unable to import arviz_plots"
-     _log.info(status, exc_info=True)
+     _status = "Unable to import arviz_plots"
+     _log.info(_status, exc_info=True)
+
+ info += _status + "\n"

- info += status + "\n"
+ # clean namespace
+ del logging, _status, _log
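`arviz.preview.info` collects one status line per optional subpackage, and it is also what the new `_warn_once_per_day()` hook in `__init__.py` inspects. A quick way to check which of the refactored packages are importable:

import arviz.preview as azp

# One line per subpackage, e.g. "arviz_base available, ..." or "arviz_stats not installed".
print(azp.info)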
arviz/rcparams.py CHANGED
@@ -12,11 +12,11 @@ from pathlib import Path
  from typing import Any, Dict
  from typing_extensions import Literal

- NO_GET_ARGS: bool = False
+ NO_GET_ARGS: bool = False  # pylint: disable=invalid-name
  try:
      from typing_extensions import get_args
  except ImportError:
-     NO_GET_ARGS = True
+     NO_GET_ARGS = True  # pylint: disable=invalid-name


  import numpy as np
arviz/stats/stats.py CHANGED
@@ -1,7 +1,6 @@
  # pylint: disable=too-many-lines
  """Statistical functions in ArviZ."""

- import itertools
  import warnings
  from copy import deepcopy
  from typing import List, Optional, Tuple, Union, Mapping, cast, Callable
@@ -11,14 +10,14 @@ import pandas as pd
  import scipy.stats as st
  from xarray_einstats import stats
  import xarray as xr
- from scipy.optimize import minimize
+ from scipy.optimize import minimize, LinearConstraint, Bounds
  from typing_extensions import Literal

- NO_GET_ARGS: bool = False
+ NO_GET_ARGS: bool = False  # pylint: disable=invalid-name
  try:
      from typing_extensions import get_args
  except ImportError:
-     NO_GET_ARGS = True
+     NO_GET_ARGS = True  # pylint: disable=invalid-name

  from .. import _log
  from ..data import InferenceData, convert_to_dataset, convert_to_inference_data, extract
@@ -225,37 +224,23 @@ def compare(
      if method.lower() == "stacking":
          rows, cols, ic_i_val = _ic_matrix(ics, ic_i)
          exp_ic_i = np.exp(ic_i_val / scale_value)
-         km1 = cols - 1
-
-         def w_fuller(weights):
-             return np.concatenate((weights, [max(1.0 - np.sum(weights), 0.0)]))

          def log_score(weights):
-             w_full = w_fuller(weights)
-             score = 0.0
-             for i in range(rows):
-                 score += np.log(np.dot(exp_ic_i[i], w_full))
-             return -score
+             return -np.sum(np.log(exp_ic_i @ weights))

          def gradient(weights):
-             w_full = w_fuller(weights)
-             grad = np.zeros(km1)
-             for k, i in itertools.product(range(km1), range(rows)):
-                 grad[k] += (exp_ic_i[i, k] - exp_ic_i[i, km1]) / np.dot(exp_ic_i[i], w_full)
-             return -grad
-
-         theta = np.full(km1, 1.0 / cols)
-         bounds = [(0.0, 1.0) for _ in range(km1)]
-         constraints = [
-             {"type": "ineq", "fun": lambda x: -np.sum(x) + 1.0},
-             {"type": "ineq", "fun": np.sum},
-         ]
+             denominator = exp_ic_i @ weights
+             return -np.sum(exp_ic_i / denominator[:, np.newaxis], axis=0)
+
+         theta = np.full(cols, 1.0 / cols)
+         bounds = Bounds(lb=np.zeros(cols), ub=np.ones(cols))
+         constraints = LinearConstraint(np.ones(cols), lb=1.0, ub=1.0)

-         weights = minimize(
+         minimize_result = minimize(
              fun=log_score, x0=theta, jac=gradient, bounds=bounds, constraints=constraints
          )

-         weights = w_fuller(weights["x"])
+         weights = minimize_result["x"]
          ses = ics["se"]

      elif method.lower() == "bb-pseudo-bma":
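The stacking weights are now optimized directly over the full K-dimensional simplex: box `Bounds` keep each weight in [0, 1] and a single `LinearConstraint` enforces that the weights sum to one, replacing the earlier K-1 reparameterization with explicit loops. A standalone sketch with toy data (the names and data here are illustrative, not from ArviZ):

import numpy as np
from scipy.optimize import Bounds, LinearConstraint, minimize

rng = np.random.default_rng(0)
exp_ic_i = rng.random((50, 3))  # toy matrix: 50 observations x 3 candidate models


def log_score(weights):
    # Negative stacking objective: -sum_i log(sum_k w_k * exp_ic_i[i, k])
    return -np.sum(np.log(exp_ic_i @ weights))


def gradient(weights):
    denominator = exp_ic_i @ weights
    return -np.sum(exp_ic_i / denominator[:, np.newaxis], axis=0)


cols = exp_ic_i.shape[1]
result = minimize(
    fun=log_score,
    x0=np.full(cols, 1.0 / cols),
    jac=gradient,
    bounds=Bounds(lb=np.zeros(cols), ub=np.ones(cols)),
    constraints=LinearConstraint(np.ones(cols), lb=1.0, ub=1.0),
)
weights = result["x"]
print(weights, weights.sum())  # nonnegative weights summing to ~1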
arviz/tests/base_tests/test_data.py CHANGED
@@ -1510,6 +1510,15 @@ class TestDataTree:
          assert_identical(ds, idata_back[group])
          assert all(group in dt.children for group in idata.groups())

+     def test_datatree_attrs(self):
+         idata = load_arviz_data("centered_eight")
+         idata.attrs = {"not": "empty"}
+         assert idata.attrs
+         dt = idata.to_datatree()
+         idata_back = from_datatree(dt)
+         assert dt.attrs == idata.attrs
+         assert idata_back.attrs == idata.attrs
+

  class TestConversions:
      def test_id_conversion_idempotent(self):
@@ -1652,3 +1661,19 @@ class TestExtractDataset:
          post = extract(idata, num_samples=10)
          assert post.sizes["sample"] == 10
          assert post.attrs == idata.posterior.attrs
+
+
+ def test_convert_to_inference_data_with_array_like():
+     class ArrayLike:
+         def __init__(self, data):
+             self._data = np.asarray(data)
+
+         def __array__(self):
+             return self._data
+
+     array_like = ArrayLike(np.random.randn(4, 100))
+     idata = convert_to_inference_data(array_like, group="posterior")
+
+     assert hasattr(idata, "posterior")
+     assert "x" in idata.posterior.data_vars
+     assert idata.posterior["x"].shape == (4, 100)
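Outside the test suite, the same `__array__` conversion path added in `converters.py` can be exercised directly. A minimal sketch; the `Samples` container here is hypothetical, not part of ArviZ:

import numpy as np
import arviz as az


class Samples:
    """Hypothetical array-like container exposing the __array__ protocol."""

    def __init__(self, values):
        self._values = np.asarray(values)

    def __array__(self):
        return self._values


# 4 chains x 500 draws; the resulting posterior variable gets the default name "x".
idata = az.convert_to_inference_data(Samples(np.random.randn(4, 500)))
print(idata.posterior["x"].shape)  # (4, 500)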
arviz/tests/base_tests/test_plots_matplotlib.py CHANGED
@@ -2176,5 +2176,22 @@ def test_plot_autocorr_coords(coords, expected_vars):
      idata = load_arviz_data("centered_eight")

      axes = plot_autocorr(idata, var_names=expected_vars, coords=coords, show=False)
+     assert axes is not None
+
+
+ def test_plot_forest_with_transform():
+     """Test if plot_forest runs successfully with a transform dictionary."""
+     data = xr.Dataset(
+         {
+             "var1": (["chain", "draw"], np.array([[1, 2, 3], [4, 5, 6]])),
+             "var2": (["chain", "draw"], np.array([[7, 8, 9], [10, 11, 12]])),
+         },
+         coords={"chain": [0, 1], "draw": [0, 1, 2]},
+     )
+     transform_dict = {
+         "var1": lambda x: x + 1,
+         "var2": lambda x: x * 2,
+     }

+     axes = plot_forest(data, transform=transform_dict, show=False)
      assert axes is not None
arviz/tests/base_tests/test_stats_ecdf_utils.py CHANGED
@@ -13,9 +13,9 @@ from ...stats.ecdf_utils import (
  try:
      import numba  # pylint: disable=unused-import

-     numba_options = [True, False]
+     numba_options = [True, False]  # pylint: disable=invalid-name
  except ImportError:
-     numba_options = [False]
+     numba_options = [False]  # pylint: disable=invalid-name


  def test_compute_ecdf():
arviz/tests/external_tests/test_data_numpyro.py CHANGED
@@ -1,4 +1,4 @@
- # pylint: disable=no-member, invalid-name, redefined-outer-name
+ # pylint: disable=no-member, invalid-name, redefined-outer-name, too-many-public-methods
  from collections import namedtuple
  import numpy as np
  import pytest
@@ -409,3 +409,26 @@ class TestDataNumPyro:
          )
          assert inference_data.predictions.obs.dims == ("chain", "draw", "J")
          assert "J" in inference_data.predictions.obs.coords
+
+     def test_potential_energy_sign_conversion(self):
+         """Test that potential energy is converted to log probability (lp) with correct sign."""
+         import numpyro
+         import numpyro.distributions as dist
+         from numpyro.infer import MCMC, NUTS
+
+         num_samples = 10
+
+         def simple_model():
+             numpyro.sample("x", dist.Normal(0, 1))
+
+         nuts_kernel = NUTS(simple_model)
+         mcmc = MCMC(nuts_kernel, num_samples=num_samples, num_warmup=5)
+         mcmc.run(PRNGKey(0), extra_fields=["potential_energy"])
+
+         # Get the raw extra fields from NumPyro
+         extra_fields = mcmc.get_extra_fields(group_by_chain=True)
+         # Convert to ArviZ InferenceData
+         inference_data = from_numpyro(mcmc)
+         arviz_lp = inference_data["sample_stats"]["lp"].values
+
+         np.testing.assert_array_equal(arviz_lp, -extra_fields["potential_energy"])
arviz-0.22.0.dist-info/METADATA → arviz-0.23.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.4
  Name: arviz
- Version: 0.22.0
+ Version: 0.23.1
  Summary: Exploratory analysis of Bayesian models
  Home-page: http://github.com/arviz-devs/arviz
  Author: ArviZ Developers
@@ -29,7 +29,8 @@ Requires-Dist: packaging
  Requires-Dist: pandas>=2.1.0
  Requires-Dist: xarray>=2023.7.0
  Requires-Dist: h5netcdf>=1.0.2
- Requires-Dist: typing-extensions>=4.1.0
+ Requires-Dist: h5py
+ Requires-Dist: typing_extensions>=4.1.0
  Requires-Dist: xarray-einstats>=0.3
  Provides-Extra: all
  Requires-Dist: numba; extra == "all"
@@ -45,6 +46,17 @@ Provides-Extra: preview
  Requires-Dist: arviz-base[h5netcdf]; extra == "preview"
  Requires-Dist: arviz-stats[xarray]; extra == "preview"
  Requires-Dist: arviz-plots; extra == "preview"
+ Dynamic: author
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: home-page
+ Dynamic: license
+ Dynamic: license-file
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary

  <img src="https://raw.githubusercontent.com/arviz-devs/arviz-project/main/arviz_logos/ArviZ.png#gh-light-mode-only" width=200></img>
  <img src="https://raw.githubusercontent.com/arviz-devs/arviz-project/main/arviz_logos/ArviZ_white.png#gh-dark-mode-only" width=200></img>
arviz-0.22.0.dist-info/RECORD → arviz-0.23.1.dist-info/RECORD CHANGED
@@ -1,15 +1,15 @@
- arviz/__init__.py,sha256=IOdU7uIJbHurf--mKztuR4Yq-fVHSLyU_neL4nK89KE,10590
+ arviz/__init__.py,sha256=HBMDwUgE8CTGybvpN9HEjphGDofJi3ywCFFVbzt3KPc,11756
  arviz/labels.py,sha256=w4-t0qdJzjKrqRyhzbtk6ucoMIAxle1HpHYlH7up06Q,6828
- arviz/preview.py,sha256=Fmff8j9Zlvgi5w2PRwnbkEOioJY8fK9p1SWQWjTl4N8,1314
+ arviz/preview.py,sha256=GuAwiStRYSooI4mG9j7JFpr3YxBfg_93DlgroORQBUg,1644
  arviz/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- arviz/rcparams.py,sha256=lQnBprbDihEcsP5FujoJetGna4T8nc20JnuVf1Wn-1k,21031
+ arviz/rcparams.py,sha256=PRQYTnvwgGZrQkb1li3n-ZPdBaowDqfW51VTUa4dOjU,21095
  arviz/sel_utils.py,sha256=xvAYENhGXDTrhaT4Itlk1SJQUUGZ6BGcR04fPFgvzdM,6951
  arviz/utils.py,sha256=-q3eAeficZcLakOt6UXN0eyVx6uIVVpkqA_ky8FRSjE,26699
  arviz/data/__init__.py,sha256=SG2umdZ8uVNYKVBoVYPy5tNxJnzLdyw0spPMjMTae5k,1558
  arviz/data/base.py,sha256=PALdVidyCxJqje_za4XPwXH010qAMJt4VzzeOflUCC8,21365
- arviz/data/converters.py,sha256=4QU5_0OU84HV3gSXXY_N5bd6S9ft81lfpnCT-8E8qyc,7933
+ arviz/data/converters.py,sha256=TeiKpYCk4BSqkBzlSmMwAl3PNYvKaEy5sUMpQlYvt88,8411
  arviz/data/datasets.py,sha256=wPi23KZI-w4HrhjmY1LUg0Rj0aJobSJ4WO3LBrjfZQc,5392
- arviz/data/inference_data.py,sha256=BvZsTROxRIGc6c4Jg_NQrjnlfm_WquPyhJi88KBEkis,94545
+ arviz/data/inference_data.py,sha256=vfueSyWiqYlVOvn_Y3xdYrNvCuVYaP3QX0x74emH9xI,94639
  arviz/data/io_beanmachine.py,sha256=QQVBD6rftvs6_kLIb4Vm1QzQ6BsS0J9DTrzw2Jj4ob8,3745
  arviz/data/io_cmdstan.py,sha256=8YX9Nfkx4LqjfKms4s4GTOkOjZNelb6SukvRZRHY6iM,38994
  arviz/data/io_cmdstanpy.py,sha256=iSr8ciKBFoIa1tJGHEX-2JKkUJRyaTXzRXf-5mu8q5U,42991
@@ -18,8 +18,8 @@ arviz/data/io_dict.py,sha256=HM4ke-NuopsPnIdU-UGMtppAnj3vcbkBti8eROpMRTs,17698
  arviz/data/io_emcee.py,sha256=zsJJqwlyXLN_TfI6hgKz97p4N30NYTVvQSrIIpiLmB0,11844
  arviz/data/io_json.py,sha256=lrSP_9abfUW_5E8TwnG4hsS5HNHzAHZQCJTynl_tXKY,1256
  arviz/data/io_netcdf.py,sha256=cCxVnXSCTzWP3SU7cM4SqBiRRK9txFOsm-MchzNUzM4,2336
- arviz/data/io_numpyro.py,sha256=FgIoHp62eSgTOwNLIdfYhlOWIuPey55pvz949zwyX2I,18567
- arviz/data/io_pyjags.py,sha256=AG2ckAuygSxbjULVFQCJQSM72GnUoTMi3T94aQRJYKQ,13271
+ arviz/data/io_numpyro.py,sha256=zJiL-U6x4hxHYs3mzTXdzF4iT2t4BBM5vh3zfU3I6eE,18668
+ arviz/data/io_pyjags.py,sha256=PqljKXfXN13vxVJsLiNOdz_IGDhT-sTd-6nhn70LKVg,13272
  arviz/data/io_pyro.py,sha256=JYywUGUU1Qil_ahLuDYhYFafQAKB-y1kIipXdfH_vnQ,12740
  arviz/data/io_pystan.py,sha256=nRTU6yujilQCKERxzN7LIVwZplfvFNb-Y9Jk9YVJQLk,41700
  arviz/data/io_zarr.py,sha256=PeSBz-zHDzmwJq3sWzxASnjrfbd-hULJsl8FjK46YQQ,1163
@@ -32,17 +32,17 @@ arviz/data/example_data/data/non_centered_eight.nc,sha256=r7kyd10HyJTTRQs4OlSCXP
  arviz/plots/__init__.py,sha256=atWhfLXHAD6zaTjbdWdTaJrPTNBl3XpcSCY4etgw_cY,1514
  arviz/plots/autocorrplot.py,sha256=veeZNEhHoDBzR-mGNm-JOP1gBoSpilpy2E6lOgcoWKk,5926
  arviz/plots/bfplot.py,sha256=TKCkk60dgIk70CNWz9pHDXb1HGwHT_aLfNgFH6jDz9c,4367
- arviz/plots/bpvplot.py,sha256=Pf0ME21_teSQd_7CcuYE2MxfDJHcy5eW7Ms7yIDZSCA,12505
+ arviz/plots/bpvplot.py,sha256=JcZPn-ON6nCZYiWTxaxGRMkkTkAEZHE4FE3cX2oYa-c,12505
  arviz/plots/compareplot.py,sha256=Z8usSMEeQKs4GmkziDR4dVzSh3Ocd4ySfiNDZVaFOUc,6078
  arviz/plots/densityplot.py,sha256=6477ZljpBCcZRw0SUwcTO4FYjxqw_qYsJupWNo-jCok,10895
  arviz/plots/distcomparisonplot.py,sha256=gVNQUN0VX7hC527fcUk1oxtQRdIl5mrltU95c0Nra9k,7184
  arviz/plots/distplot.py,sha256=xWXOsN-pPBwhHrEjC6lbIJdn-17DtpMueSnj6YzWlX4,8472
- arviz/plots/dotplot.py,sha256=9HTMeT1ZuZ4Vauxvg4TjsvvNnwORG8WWO2HistJwHiU,7736
+ arviz/plots/dotplot.py,sha256=jpXzHhlBxUuyJF6hnAUpaiuyL1zwLOJgq4CYwmqEtWs,7778
  arviz/plots/ecdfplot.py,sha256=eYasPwOYEmzqx82d6SyDg_iPyXkyFuOKnbOBjPrndH0,13112
  arviz/plots/elpdplot.py,sha256=NKqPkwTj9BWDzwMnG0cPeLmYBGMX_meP9F6bqvTwLKY,6433
  arviz/plots/energyplot.py,sha256=znEDPYpWaTIX0XpdVoyhXOITJ4A8BYkI9t1TVhJq4Qo,4797
  arviz/plots/essplot.py,sha256=ch0DjUQDILk4ohpSUR-9VHejGFb6Xmelly-qa-fKb9E,11741
- arviz/plots/forestplot.py,sha256=37Wj4wFGjydZS4SpdqZ9qLxhZBo4rFxV_MWQnZAS7DA,11897
+ arviz/plots/forestplot.py,sha256=Cp8Xfk9CI_ZNOQj2qTMDy5UWkE2wVabOLForDuEcWxY,12505
  arviz/plots/hdiplot.py,sha256=Pii9ZsuejEM-I24dn39muUL-yYKTfe2RWzAuU0W-3SI,7798
  arviz/plots/kdeplot.py,sha256=t-SJt3LIL1nThAsVM5npXZhRxqkGoCsfF1F0Fkj8ZV8,11924
  arviz/plots/khatplot.py,sha256=u03gmBG1xwxG07ASLZEJB-GsRhRHtUCtbglpC1N0aIg,8086
@@ -53,7 +53,7 @@ arviz/plots/pairplot.py,sha256=v-NCJIG6UG9cGIdFUWzW5S7Y29Ag5zE9zucxNSv46ME,10787
  arviz/plots/parallelplot.py,sha256=ZBEsHvnlmSXLRpSwP-KUwzgWBC2S4siKXFGJnLf7uAE,7125
  arviz/plots/plot_utils.py,sha256=VyVR50HrZegdkWa6ZxtRnC_WJstooYvaB-xsDHf6kaQ,18337
  arviz/plots/posteriorplot.py,sha256=pC-5SQZOGq1F4opM_sQLxn4ZG2we4Y9ULV8yhxjGVdo,10952
- arviz/plots/ppcplot.py,sha256=UPTtXDWHf3wFAb-apNPGcz8qw9CQwINGml_2YkYI-iM,13967
+ arviz/plots/ppcplot.py,sha256=QwcgZTuUDijuXK9g3AB8Lc8ShK2URSL8Wc4Jeu5IxZM,13967
  arviz/plots/rankplot.py,sha256=lz0swHs6EBe-gXn4udP1Um3RS-EatsOAmguYqGMlIjU,8648
  arviz/plots/separationplot.py,sha256=Fx_QVeFUcF45fm7nn06pt0qubOzvH8QMU1cw5RLyaik,5491
  arviz/plots/traceplot.py,sha256=dwcF7rsjMAIxZ_LPv7Z8os01uQZHXTkDFWEBtsbzI9k,10216
@@ -140,14 +140,14 @@ arviz/stats/__init__.py,sha256=kvrANzMkqyHMTdry7N5w836E2OP0tJM6bm5-G8OZaA0,721
  arviz/stats/density_utils.py,sha256=wmPFJzEZR7KgKxwQb5pGhY-w-rnFZpMIavrhpt_6u9w,32215
  arviz/stats/diagnostics.py,sha256=COTy2c5ROAirCAK_UNo7kQnggN71maBRPwy54ZdabKE,32656
  arviz/stats/ecdf_utils.py,sha256=Wy38wL-bsHDlpyU9qnDjedYBvbP_6ZrzJuWo6NzD2Xg,11835
- arviz/stats/stats.py,sha256=Q4MMxSJPyElFzBXpCqA21bMb1jQfnmLAHVTPmDHGiSo,90129
+ arviz/stats/stats.py,sha256=vowy6JVX1BkKynlrcBtDGVZ2ws3wWeiiTrMBFC0E6iY,89743
  arviz/stats/stats_refitting.py,sha256=trbPC7LCnsb-n5D6g7J0bzXJCPfcDukJDniB4ud1z9E,5415
  arviz/stats/stats_utils.py,sha256=XG8ILPVs8Jbh_v7jzLfwMkm2HraT2j2-Hxe_kEYlLjQ,20076
  arviz/tests/__init__.py,sha256=TiS6C1IzwAXmNa8u36Y2xzL1CTTZm2PwzAtmZgoqepE,18
  arviz/tests/conftest.py,sha256=6U9WpKmYf38EVRoFZNBpV0CunQvESBFJG2SJ8IBEkL4,1270
  arviz/tests/helpers.py,sha256=qhsOhLtfyz-dC2yuT6ug0frYZlbims06BljJuEVDP6E,23593
  arviz/tests/base_tests/__init__.py,sha256=zg7V5_0DZrCz7ozETqET6bUpAvUUmmkSoLpJLwaIj2E,23
- arviz/tests/base_tests/test_data.py,sha256=vnaYftgX_r58ra-Eo7ivAhUC8cSWAhf-zOLuDlWdZXA,64131
+ arviz/tests/base_tests/test_data.py,sha256=TZ9638lELh_rFqp0RnS5WThELGoqiPd39fSBXgcxB8Y,64910
  arviz/tests/base_tests/test_data_zarr.py,sha256=sPWnIQ7vPhN5Ql3Dq4JENuSwQV5IeignQjy9BAYe1_A,5441
  arviz/tests/base_tests/test_diagnostics.py,sha256=pbuy1-nvTKWSHv0nnhXOhpG4e2uy-4GGZb4lxAdoMpw,20353
  arviz/tests/base_tests/test_diagnostics_numba.py,sha256=2G5O-7Hz66DSaHIZtjs2XL45RezYnXQZH6Dg2Ow-p4Q,2847
@@ -155,10 +155,10 @@ arviz/tests/base_tests/test_helpers.py,sha256=PogHpWCMBEtkuzKt9jGQ8uIPg0cLDwztXx
  arviz/tests/base_tests/test_labels.py,sha256=X08vTMmcgXkYGbE2Qnu_UUDSTAIvSNKdnyqoWwmj008,1686
  arviz/tests/base_tests/test_plot_utils.py,sha256=lwDZYDNrlEOKP-asJv6qu3sH_4y-FiHcFlqnMTpZyhw,11771
  arviz/tests/base_tests/test_plots_bokeh.py,sha256=FDw3dp-M89EVsIAWvl7M17GXWyatRalYQJJHsbT5BzQ,41052
- arviz/tests/base_tests/test_plots_matplotlib.py,sha256=v_GwjrmWV18AyG-1aetUzpx2QqD9nqIrxsfVOKHtGY0,68533
+ arviz/tests/base_tests/test_plots_matplotlib.py,sha256=1MpAZHAlG9hc_vRTfXBy7-qxLLsZQNfns1qAVo6J9X8,69097
  arviz/tests/base_tests/test_rcparams.py,sha256=b9ueOXd9C0xiUIqgS0qnzvalHFgTFK7sUqL8UAzgJNs,10851
  arviz/tests/base_tests/test_stats.py,sha256=QHVa8sSzr5FX8X0D8tGFntCz9aW032gLu-EOuC6RHA4,34322
- arviz/tests/base_tests/test_stats_ecdf_utils.py,sha256=p1FnQzlC0fjjKDFfhbHIrrbwAVhLiygH4J0aarx89A0,6038
+ arviz/tests/base_tests/test_stats_ecdf_utils.py,sha256=1T_9jYyuCWMEwTbps4AFHRDzEF8THcdFSeQN2oZIwVw,6102
  arviz/tests/base_tests/test_stats_numba.py,sha256=wGXgNuSO_gwJajoYtXSgpIe88PcBRyIkRihxC8paR-o,1582
  arviz/tests/base_tests/test_stats_utils.py,sha256=Udkw8tODs8mLt3_hO3HgNczrU0n09IJrML2agXF-upQ,13864
  arviz/tests/base_tests/test_utils.py,sha256=Auggtvwv3Y9STS8Tbram-IQe5IhewkwFN14CTcjRd5M,12533
@@ -168,7 +168,7 @@ arviz/tests/external_tests/test_data_beanmachine.py,sha256=nwOJNJLrk5rY4M5YW-LT6
  arviz/tests/external_tests/test_data_cmdstan.py,sha256=jHy-dZrY4M7F4uYWf71fOxVwfPxgRpM9E3JAvpk03qA,16829
  arviz/tests/external_tests/test_data_cmdstanpy.py,sha256=uCSOJVowKXccCPLpAwCiihghx_WxnUVyR8r801Xhw_0,18753
  arviz/tests/external_tests/test_data_emcee.py,sha256=w-tsP74-n688C9-v_KIf0YxZg7S1WrhOdJUvaHS9e6I,6270
- arviz/tests/external_tests/test_data_numpyro.py,sha256=cdUzv0MKPCmsfNLbvz7IgBxapGSJjG42bgaR_MfDmPg,16758
+ arviz/tests/external_tests/test_data_numpyro.py,sha256=ehI-xkxoub25xPIU1GyAhJDs4BpVwV_DLS0HYiL0Odw,17678
  arviz/tests/external_tests/test_data_pyjags.py,sha256=kqZfV8QRnAngO9obnAq5lKPIuJdVJ82sbkIfSr2tpqY,4547
  arviz/tests/external_tests/test_data_pyro.py,sha256=EaD_hZGALaSKQKK4OFgmuJ_1SsIYKessHQ7Jl9AKbw0,10771
  arviz/tests/external_tests/test_data_pystan.py,sha256=ebg_JXkmAhXRllP0otjyourGF_fUaKMkwRfrQO6Glwk,11792
@@ -176,8 +176,8 @@ arviz/wrappers/__init__.py,sha256=d8GTUuBW_30LyDyk6qn2MAnvg-GZCeUw_i5SUPqaa1w,35
  arviz/wrappers/base.py,sha256=FNgPvd_tLCB5C2tRx1ngYjr4F5tEUuNrrLkStuyRXsE,9134
  arviz/wrappers/wrap_pymc.py,sha256=ltKv55aG0WTWXVDJuff5TXkgJJ_ESLvlT-JPlh3yHAg,1143
  arviz/wrappers/wrap_stan.py,sha256=sIy38fXg4Ln_0CM6xONDwOJg1Y6FwNM_JQErv3a-8_c,5526
- arviz-0.22.0.dist-info/LICENSE,sha256=xllut76FgcGL5zbIRvuRc7aezPbvlMUTWJPsVr2Sugg,11358
- arviz-0.22.0.dist-info/METADATA,sha256=co8xturD_y3pE5nwukUw29h3QPDd4fyVFXylpFidKvY,8851
- arviz-0.22.0.dist-info/WHEEL,sha256=tZoeGjtWxWRfdplE7E3d45VPlLNQnvbKiYnx7gwAy8A,92
- arviz-0.22.0.dist-info/top_level.txt,sha256=5MFvqrTtYRWsIx-SjKuFIUHtrnVJq0Ngd0Nc2_etQhE,6
- arviz-0.22.0.dist-info/RECORD,,
+ arviz-0.23.1.dist-info/licenses/LICENSE,sha256=xllut76FgcGL5zbIRvuRc7aezPbvlMUTWJPsVr2Sugg,11358
+ arviz-0.23.1.dist-info/METADATA,sha256=PYF49qwd_wLFdlbmfyNVwiDg-AzZk3GPxEey5YHi6io,9109
+ arviz-0.23.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ arviz-0.23.1.dist-info/top_level.txt,sha256=5MFvqrTtYRWsIx-SjKuFIUHtrnVJq0Ngd0Nc2_etQhE,6
+ arviz-0.23.1.dist-info/RECORD,,
arviz-0.22.0.dist-info/WHEEL → arviz-0.23.1.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.45.1)
+ Generator: setuptools (80.9.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
