pymc-extras 0.4.1__py3-none-any.whl → 0.6.0__py3-none-any.whl

This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (37)
  1. pymc_extras/deserialize.py +10 -4
  2. pymc_extras/distributions/continuous.py +1 -1
  3. pymc_extras/distributions/histogram_utils.py +6 -4
  4. pymc_extras/distributions/multivariate/r2d2m2cp.py +4 -3
  5. pymc_extras/distributions/timeseries.py +4 -2
  6. pymc_extras/inference/__init__.py +8 -1
  7. pymc_extras/inference/dadvi/__init__.py +0 -0
  8. pymc_extras/inference/dadvi/dadvi.py +351 -0
  9. pymc_extras/inference/fit.py +5 -0
  10. pymc_extras/inference/laplace_approx/find_map.py +32 -47
  11. pymc_extras/inference/laplace_approx/idata.py +27 -6
  12. pymc_extras/inference/laplace_approx/laplace.py +24 -6
  13. pymc_extras/inference/laplace_approx/scipy_interface.py +47 -7
  14. pymc_extras/inference/pathfinder/idata.py +517 -0
  15. pymc_extras/inference/pathfinder/pathfinder.py +61 -7
  16. pymc_extras/model/marginal/graph_analysis.py +2 -2
  17. pymc_extras/model_builder.py +9 -4
  18. pymc_extras/prior.py +203 -8
  19. pymc_extras/statespace/core/compile.py +1 -1
  20. pymc_extras/statespace/filters/kalman_filter.py +12 -11
  21. pymc_extras/statespace/filters/kalman_smoother.py +1 -3
  22. pymc_extras/statespace/filters/utilities.py +2 -5
  23. pymc_extras/statespace/models/DFM.py +834 -0
  24. pymc_extras/statespace/models/ETS.py +190 -198
  25. pymc_extras/statespace/models/SARIMAX.py +9 -21
  26. pymc_extras/statespace/models/VARMAX.py +22 -74
  27. pymc_extras/statespace/models/structural/components/autoregressive.py +4 -4
  28. pymc_extras/statespace/models/structural/components/regression.py +4 -26
  29. pymc_extras/statespace/models/utilities.py +7 -0
  30. pymc_extras/statespace/utils/constants.py +3 -1
  31. pymc_extras/utils/model_equivalence.py +2 -2
  32. pymc_extras/utils/prior.py +10 -14
  33. pymc_extras/utils/spline.py +4 -10
  34. {pymc_extras-0.4.1.dist-info → pymc_extras-0.6.0.dist-info}/METADATA +3 -3
  35. {pymc_extras-0.4.1.dist-info → pymc_extras-0.6.0.dist-info}/RECORD +37 -33
  36. {pymc_extras-0.4.1.dist-info → pymc_extras-0.6.0.dist-info}/WHEEL +1 -1
  37. {pymc_extras-0.4.1.dist-info → pymc_extras-0.6.0.dist-info}/licenses/LICENSE +0 -0
pymc_extras/statespace/models/VARMAX.py

@@ -9,12 +9,12 @@ from pytensor.compile.mode import Mode
 from pytensor.tensor.slinalg import solve_discrete_lyapunov

 from pymc_extras.statespace.core.statespace import PyMCStateSpace
-from pymc_extras.statespace.models.utilities import make_default_coords
+from pymc_extras.statespace.models.utilities import make_default_coords, validate_names
 from pymc_extras.statespace.utils.constants import (
     ALL_STATE_AUX_DIM,
     ALL_STATE_DIM,
     AR_PARAM_DIM,
-    EXOGENOUS_DIM,
+    EXOG_STATE_DIM,
     MA_PARAM_DIM,
     OBS_STATE_AUX_DIM,
     OBS_STATE_DIM,
@@ -99,9 +99,7 @@ class BayesianVARMAX(PyMCStateSpace):
         self,
         order: tuple[int, int],
         endog_names: list[str] | None = None,
-        k_endog: int | None = None,
         exog_state_names: list[str] | dict[str, list[str]] | None = None,
-        k_exog: int | dict[str, int] | None = None,
         stationary_initialization: bool = False,
         filter_type: str = "standard",
         measurement_error: bool = False,
@@ -118,10 +116,7 @@ class BayesianVARMAX(PyMCStateSpace):
             specified order are included. For restricted models, set zeros directly on the priors.

         endog_names: list of str, optional
-            Names of the endogenous variables being modeled. Used to generate names for the state and shock coords. If
-            None, the state names will simply be numbered.
-
-            Exactly one of either ``endog_names`` or ``k_endog`` must be specified.
+            Names of the endogenous variables being modeled. Used to generate names for the state and shock coords.

         exog_state_names : list[str] or dict[str, list[str]], optional
             Names of the exogenous state variables. If a list, all endogenous variables will share the same exogenous
@@ -129,12 +124,6 @@ class BayesianVARMAX(PyMCStateSpace):
             exogenous variable names for that endogenous variable. Endogenous variables not included in the dict will
             be assumed to have no exogenous variables. If None, no exogenous variables will be included.

-        k_exog : int or dict[str, int], optional
-            Number of exogenous variables. If an int, all endogenous variables will share the same number of exogenous
-            variables. If a dict, keys should be the names of the endogenous variables, and values should be the number of
-            exogenous variables for that endogenous variable. Endogenous variables not included in the dict will be
-            assumed to have no exogenous variables. If None, no exogenous variables will be included.
-
         stationary_initialization: bool, default False
             If true, the initial state and initial state covariance will not be assigned priors. Instead, their steady
             state values will be used. If False, the user is responsible for setting priors on the initial state and
@@ -148,15 +137,6 @@ class BayesianVARMAX(PyMCStateSpace):
             The type of Kalman Filter to use. Options are "standard", "single", "univariate", "steady_state",
             and "cholesky". See the docs for kalman filters for more details.

-        state_structure: str, default "fast"
-            How to represent the state-space system. When "interpretable", each element of the state vector will have a
-            precise meaning as either lagged data, innovations, or lagged innovations. This comes at the cost of a larger
-            state vector, which may hurt performance.
-
-            When "fast", states are combined to minimize the dimension of the state vector, but lags and innovations are
-            mixed together as a result. Only the first state (the modeled timeseries) will have an obvious interpretation
-            in this case.
-
         measurement_error: bool, default True
            If true, a measurement error term is added to the model.

@@ -171,57 +151,23 @@ class BayesianVARMAX(PyMCStateSpace):
             to all sampling methods.

         """
-        if (endog_names is None) and (k_endog is None):
-            raise ValueError("Must specify either endog_names or k_endog")
-        if (endog_names is not None) and (k_endog is None):
-            k_endog = len(endog_names)
-        if (endog_names is None) and (k_endog is not None):
-            endog_names = [f"observed_{i}" for i in range(k_endog)]
-        if (endog_names is not None) and (k_endog is not None):
-            if len(endog_names) != k_endog:
-                raise ValueError("Length of provided endog_names does not match provided k_endog")
-
-        if k_exog is not None and not isinstance(k_exog, int | dict):
-            raise ValueError("If not None, k_endog must be either an int or a dict")
+
+        validate_names(endog_names, var_name="endog_names", optional=False)
+        k_endog = len(endog_names)
+
+        needs_exog_data = False
+
         if exog_state_names is not None and not isinstance(exog_state_names, list | dict):
             raise ValueError("If not None, exog_state_names must be either a list or a dict")

-        if k_exog is not None and exog_state_names is not None:
-            if isinstance(k_exog, int) and isinstance(exog_state_names, list):
-                if len(exog_state_names) != k_exog:
-                    raise ValueError("Length of exog_state_names does not match provided k_exog")
-            elif isinstance(k_exog, int) and isinstance(exog_state_names, dict):
-                raise ValueError(
-                    "If k_exog is an int, exog_state_names must be a list of the same length (or None)"
-                )
-            elif isinstance(k_exog, dict) and isinstance(exog_state_names, list):
-                raise ValueError(
-                    "If k_exog is a dict, exog_state_names must be a dict as well (or None)"
-                )
-            elif isinstance(k_exog, dict) and isinstance(exog_state_names, dict):
-                if set(k_exog.keys()) != set(exog_state_names.keys()):
-                    raise ValueError("Keys of k_exog and exog_state_names dicts must match")
-                if not all(
-                    len(names) == k for names, k in zip(exog_state_names.values(), k_exog.values())
-                ):
-                    raise ValueError(
-                        "If both k_endog and exog_state_names are provided, lengths of exog_state_names "
-                        "lists must match corresponding values in k_exog"
-                    )
-
-        if k_exog is not None and exog_state_names is None:
-            if isinstance(k_exog, int):
-                exog_state_names = [f"exogenous_{i}" for i in range(k_exog)]
-            elif isinstance(k_exog, dict):
-                exog_state_names = {
-                    name: [f"{name}_exogenous_{i}" for i in range(k)] for name, k in k_exog.items()
-                }
-
-        if k_exog is None and exog_state_names is not None:
+        if exog_state_names is not None:
             if isinstance(exog_state_names, list):
                 k_exog = len(exog_state_names)
             elif isinstance(exog_state_names, dict):
                 k_exog = {name: len(names) for name, names in exog_state_names.items()}
+            needs_exog_data = True
+        else:
+            k_exog = None

         # If exog_state_names is a dict but 1) all endog variables are among the keys, and 2) all values are the same
         # then we can drop back to the list case.
@@ -254,6 +200,8 @@ class BayesianVARMAX(PyMCStateSpace):
             mode=mode,
         )

+        self._needs_exog_data = needs_exog_data
+
         # Save counts of the number of parameters in each category
         self.param_counts = {
             "x0": k_states * (1 - self.stationary_initialization),
@@ -337,12 +285,12 @@ class BayesianVARMAX(PyMCStateSpace):

     @property
     def data_info(self) -> dict[str, dict[str, Any]]:
-        info = None
+        info = {}

         if isinstance(self.exog_state_names, list):
             info = {
                 "exogenous_data": {
-                    "dims": (TIME_DIM, EXOGENOUS_DIM),
+                    "dims": (TIME_DIM, EXOG_STATE_DIM),
                     "shape": (None, self.k_exog),
                 }
             }
@@ -350,7 +298,7 @@ class BayesianVARMAX(PyMCStateSpace):
         elif isinstance(self.exog_state_names, dict):
             info = {
                 f"{endog_state}_exogenous_data": {
-                    "dims": (TIME_DIM, f"{EXOGENOUS_DIM}_{endog_state}"),
+                    "dims": (TIME_DIM, f"{EXOG_STATE_DIM}_{endog_state}"),
                     "shape": (None, len(exog_names)),
                 }
                 for endog_state, exog_names in self.exog_state_names.items()
@@ -399,10 +347,10 @@ class BayesianVARMAX(PyMCStateSpace):
             coords.update({MA_PARAM_DIM: list(range(1, self.q + 1))})

         if isinstance(self.exog_state_names, list):
-            coords[EXOGENOUS_DIM] = self.exog_state_names
+            coords[EXOG_STATE_DIM] = self.exog_state_names
         elif isinstance(self.exog_state_names, dict):
             for name, exog_names in self.exog_state_names.items():
-                coords[f"{EXOGENOUS_DIM}_{name}"] = exog_names
+                coords[f"{EXOG_STATE_DIM}_{name}"] = exog_names

         return coords

@@ -428,12 +376,12 @@ class BayesianVARMAX(PyMCStateSpace):
             del coord_map["x0"]

         if isinstance(self.exog_state_names, list):
-            coord_map["beta_exog"] = (OBS_STATE_DIM, EXOGENOUS_DIM)
+            coord_map["beta_exog"] = (OBS_STATE_DIM, EXOG_STATE_DIM)
         elif isinstance(self.exog_state_names, dict):
             # If each state has its own exogenous variables, each parameter needs it own dim, since we expect the
             # dim labels to all be different (otherwise we'd be in the list case).
             for name in self.exog_state_names.keys():
-                coord_map[f"beta_{name}"] = (f"{EXOGENOUS_DIM}_{name}",)
+                coord_map[f"beta_{name}"] = (f"{EXOG_STATE_DIM}_{name}",)

         return coord_map

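Note (not part of the diff): the hunks above remove the k_endog / k_exog count arguments from BayesianVARMAX; name lists are now required and the counts are derived from them, with the check delegated to the new validate_names helper. Below is a minimal, hedged sketch of a call under the 0.6.0 signature, using only arguments visible in this diff; the series names are hypothetical and the import path is taken from the file listing above.

    from pymc_extras.statespace.models.VARMAX import BayesianVARMAX

    # Hypothetical example: names replace the removed k_endog/k_exog counts.
    bvar = BayesianVARMAX(
        order=(2, 0),                             # (p, q) AR and MA orders
        endog_names=["gdp", "inflation"],         # required; k_endog is now len(endog_names)
        exog_state_names={"gdp": ["oil_price"]},  # dict form: per-series exogenous regressors
        stationary_initialization=False,
        measurement_error=False,
    )

Passing exog_state_names as a list instead gives every endogenous series the same regressors; in either case the exogenous coordinates now use EXOG_STATE_DIM (renamed from EXOGENOUS_DIM in this release).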
pymc_extras/statespace/models/structural/components/autoregressive.py

@@ -141,19 +141,19 @@ class AutoregressiveComponent(Component):

         self.param_info = {
             f"params_{self.name}": {
-                "shape": (k_states,) if self.k_endog == 1 else (self.k_endog, k_states),
+                "shape": (k_endog_effective, k_states) if k_endog_effective > 1 else (k_states,),
                 "constraints": None,
                 "dims": (AR_PARAM_DIM,)
-                if self.k_endog == 1
+                if k_endog_effective == 1
                 else (
                     f"endog_{self.name}",
                     f"lag_{self.name}",
                 ),
             },
             f"sigma_{self.name}": {
-                "shape": () if self.k_endog == 1 else (self.k_endog,),
+                "shape": (k_endog_effective,) if k_endog_effective > 1 else (),
                 "constraints": "Positive",
-                "dims": None if self.k_endog == 1 else (f"endog_{self.name}",),
+                "dims": (f"endog_{self.name}",) if k_endog_effective > 1 else None,
             },
         }

pymc_extras/statespace/models/structural/components/regression.py

@@ -3,6 +3,7 @@ import numpy as np
 from pytensor import tensor as pt

 from pymc_extras.statespace.models.structural.core import Component
+from pymc_extras.statespace.models.utilities import validate_names
 from pymc_extras.statespace.utils.constants import TIME_DIM


@@ -12,10 +13,6 @@ class RegressionComponent(Component):

    Parameters
    ----------
-    k_exog : int | None, default None
-        Number of exogenous variables to include in the regression. Must be specified if
-        state_names is not provided.
-
    name : str | None, default "regression"
        A name for this regression component. Used to label dimensions and coordinates.

@@ -107,7 +104,6 @@ class RegressionComponent(Component):

     def __init__(
         self,
-        k_exog: int | None = None,
         name: str | None = "regression",
         state_names: list[str] | None = None,
         observed_state_names: list[str] | None = None,
@@ -120,7 +116,9 @@ class RegressionComponent(Component):
             observed_state_names = ["data"]

         self.innovations = innovations
-        k_exog = self._handle_input_data(k_exog, state_names, name)
+        validate_names(state_names, var_name="state_names", optional=False)
+        k_exog = len(state_names)
+        self.state_names = state_names

         k_states = k_exog
         k_endog = len(observed_state_names)
@@ -140,26 +138,6 @@ class RegressionComponent(Component):
             obs_state_idxs=np.ones(k_states),
         )

-    @staticmethod
-    def _get_state_names(k_exog: int | None, state_names: list[str] | None, name: str):
-        if k_exog is None and state_names is None:
-            raise ValueError("Must specify at least one of k_exog or state_names")
-        if state_names is not None and k_exog is not None:
-            if len(state_names) != k_exog:
-                raise ValueError(f"Expected {k_exog} state names, found {len(state_names)}")
-        elif k_exog is None:
-            k_exog = len(state_names)
-        else:
-            state_names = [f"{name}_{i + 1}" for i in range(k_exog)]
-
-        return k_exog, state_names
-
-    def _handle_input_data(self, k_exog: int, state_names: list[str] | None, name) -> int:
-        k_exog, state_names = self._get_state_names(k_exog, state_names, name)
-        self.state_names = state_names
-
-        return k_exog
-
     def make_symbolic_graph(self) -> None:
         k_endog = self.k_endog
         k_endog_effective = 1 if self.share_states else k_endog
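Note (not part of the diff): RegressionComponent drops k_exog in the same way; state_names is now mandatory and the number of regression states is len(state_names). A minimal sketch under the new signature follows; the names below are hypothetical and the import path follows the file listing above.

    from pymc_extras.statespace.models.structural.components.regression import (
        RegressionComponent,
    )

    # Hypothetical example: one regression state per named exogenous regressor.
    exog = RegressionComponent(
        name="regression",
        state_names=["temperature", "holiday"],  # replaces k_exog=2
        observed_state_names=["sales"],
    )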
pymc_extras/statespace/models/utilities.py

@@ -670,3 +670,10 @@ def get_exog_dims_from_idata(exog_name, idata):
         exog_dims = None

     return exog_dims
+
+
+def validate_names(names: list[str], var_name: str, optional: bool = True) -> None:
+    if names is None:
+        if optional:
+            return None
+        raise ValueError(f"Must specify {var_name}")
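Note (not part of the diff): this new validate_names helper is what BayesianVARMAX and RegressionComponent call above with optional=False. An illustrative sketch of its behaviour, based only on the lines added in this hunk:

    from pymc_extras.statespace.models.utilities import validate_names

    validate_names(["gdp", "cpi"], var_name="endog_names", optional=False)  # names given: no error
    validate_names(None, var_name="state_names", optional=True)             # returns None
    validate_names(None, var_name="state_names", optional=False)            # raises ValueError("Must specify state_names")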
pymc_extras/statespace/utils/constants.py

@@ -12,7 +12,9 @@ MA_PARAM_DIM = "lag_ma"
 SEASONAL_AR_PARAM_DIM = "seasonal_lag_ar"
 SEASONAL_MA_PARAM_DIM = "seasonal_lag_ma"
 ETS_SEASONAL_DIM = "seasonal_lag"
-EXOGENOUS_DIM = "exogenous"
+FACTOR_DIM = "factor"
+ERROR_AR_PARAM_DIM = "error_lag_ar"
+EXOG_STATE_DIM = "exogenous"

 NEVER_TIME_VARYING = ["initial_state", "initial_state_cov", "a0", "P0"]
 VECTOR_VALUED = ["initial_state", "state_intercept", "obs_intercept", "a0", "c", "d"]
pymc_extras/utils/model_equivalence.py

@@ -4,8 +4,8 @@ from pymc.model.core import Model
 from pymc.model.fgraph import fgraph_from_model
 from pytensor import Variable
 from pytensor.compile import SharedVariable
-from pytensor.graph import Constant, graph_inputs
-from pytensor.graph.basic import equal_computations
+from pytensor.graph.basic import Constant, equal_computations
+from pytensor.graph.traversal import graph_inputs
 from pytensor.tensor.random.type import RandomType


pymc_extras/utils/prior.py

@@ -176,20 +176,16 @@ def prior_from_idata(

    >>> with pm.Model(coords=dict(test=range(4), options=range(3))) as model2:
    ...     priors = prior_from_idata(
-    ...         trace,  # the old trace (posterior)
-    ...         var_names=["a", "d"],  # take variables as is
-    ...
-    ...         e="new_e",  # assign new name "new_e" for a variable
-    ...         # similar to dict(name="new_e")
-    ...
-    ...         b=("test", ),  # set a dim to "test"
-    ...         # similar to dict(dims=("test", ))
-    ...
-    ...         c=transforms.log,  # apply log transform to a positive variable
-    ...         # similar to dict(transform=transforms.log)
-    ...
-    ...         # set a name, assign a dim and apply simplex transform
-    ...         f=dict(name="new_f", dims="options", transform=transforms.simplex)
+    ...         trace,  # the old trace (posterior)
+    ...         var_names=["a", "d"],  # take variables as is
+    ...         e="new_e",  # assign new name "new_e" for a variable
+    ...         # similar to dict(name="new_e")
+    ...         b=("test",),  # set a dim to "test"
+    ...         # similar to dict(dims=("test", ))
+    ...         c=transforms.log,  # apply log transform to a positive variable
+    ...         # similar to dict(transform=transforms.log)
+    ...         # set a name, assign a dim and apply simplex transform
+    ...         f=dict(name="new_f", dims="options", transform=transforms.simplex),
    ...     )
    ...     trace1 = pm.sample_prior_predictive(100)
    """
pymc_extras/utils/spline.py

@@ -97,19 +97,13 @@ def bspline_interpolation(x, *, n=None, eval_points=None, degree=3, sparse=True)
    --------
    >>> import pymc as pm
    >>> import numpy as np
-    >>> half_months = np.linspace(0, 365, 12*2)
+    >>> half_months = np.linspace(0, 365, 12 * 2)
    >>> with pm.Model(coords=dict(knots_time=half_months, time=np.arange(365))) as model:
-    ...     kernel = pm.gp.cov.ExpQuad(1, ls=365/12)
+    ...     kernel = pm.gp.cov.ExpQuad(1, ls=365 / 12)
    ...     # ready to define gp (a latent process over parameters)
-    ...     gp = pm.gp.gp.Latent(
-    ...         cov_func=kernel
-    ...     )
+    ...     gp = pm.gp.gp.Latent(cov_func=kernel)
    ...     y_knots = gp.prior("y_knots", half_months[:, None], dims="knots_time")
-    ...     y = pm.Deterministic(
-    ...         "y",
-    ...         bspline_interpolation(y_knots, n=365, degree=3),
-    ...         dims="time"
-    ...     )
+    ...     y = pm.Deterministic("y", bspline_interpolation(y_knots, n=365, degree=3), dims="time")
    ...     trace = pm.sample_prior_predictive(1)

    Notes
{pymc_extras-0.4.1.dist-info → pymc_extras-0.6.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pymc-extras
-Version: 0.4.1
+Version: 0.6.0
 Summary: A home for new additions to PyMC, which may include unusual probability distribitions, advanced model fitting algorithms, or any code that may be inappropriate to include in the pymc repository, but may want to be made available to users.
 Project-URL: Documentation, https://pymc-extras.readthedocs.io/
 Project-URL: Repository, https://github.com/pymc-devs/pymc-extras.git
@@ -235,8 +235,8 @@ Requires-Python: >=3.11
 Requires-Dist: better-optimize>=0.1.5
 Requires-Dist: preliz>=0.20.0
 Requires-Dist: pydantic>=2.0.0
-Requires-Dist: pymc>=5.24.1
-Requires-Dist: pytensor>=2.31.4
+Requires-Dist: pymc>=5.26.1
+Requires-Dist: pytensor>=2.35.1
 Requires-Dist: scikit-learn
 Provides-Extra: complete
 Requires-Dist: dask[complete]<2025.1.1; extra == 'complete'
{pymc_extras-0.4.1.dist-info → pymc_extras-0.6.0.dist-info}/RECORD

@@ -1,38 +1,41 @@
 pymc_extras/__init__.py,sha256=YsR6OG72aW73y6dGS7w3nGGMV-V-ImHkmUOXKMPfMRA,1230
-pymc_extras/deserialize.py,sha256=dktK5gsR96X3zAUoRF5udrTiconknH3uupiAWqkZi0M,5937
+pymc_extras/deserialize.py,sha256=lA5Nc3ZMjlq8sXVBzJLdb3ZkK_PsJNkaH-QhBcQZcd4,5924
 pymc_extras/linearmodel.py,sha256=KkvZ_DBXOD6myPgVNzu742YV0OzDK449_pDqNC5yae4,3975
-pymc_extras/model_builder.py,sha256=sAw77fxdiy046BvDPjocuMlbJ0Efj-CDAGtmcwYmoG0,26361
+pymc_extras/model_builder.py,sha256=cypRVbSR2XE7xDU2mL2MfjNXoyruAwtKbuUEhzmWPao,26460
 pymc_extras/printing.py,sha256=bFOANgsOWDk0vbRMvm2h_D5TsT7OiSojdG7tvyfCw28,6506
-pymc_extras/prior.py,sha256=0XbyRRVuS7aKY5gmvJr_iq4fGyHrRDeI_OjWu_O7CTA,39449
+pymc_extras/prior.py,sha256=SyBGmZ6XZKpBd8E2tGGZjWvki0ngh1-_h8rLOTTv4hI,44276
 pymc_extras/distributions/__init__.py,sha256=Cge3AP7gzD6qTJY7v2tYRtSgn-rlnIo7wQBgf3IfKQ8,1377
-pymc_extras/distributions/continuous.py,sha256=530wvcO-QcYVdiVN-iQRveImWfyJzzmxiZLMVShP7w4,11251
+pymc_extras/distributions/continuous.py,sha256=bCXOgnw2Vh_FbYOHCqB0c3ozFVay5Qwua2A211kvWNQ,11251
 pymc_extras/distributions/discrete.py,sha256=HNi-K0_hnNWTcfyBkWGh26sc71FwBgukQ_EjGAaAOjY,13036
-pymc_extras/distributions/histogram_utils.py,sha256=xvCc19nlOmeb9PLZDcsR5PRdmcr5sRefZlPlCvxmGfM,5814
-pymc_extras/distributions/timeseries.py,sha256=M5MZ-nik_tgkaoZ1hdUGEZ9g04DQyVLwszVJqSKwNcY,12719
+pymc_extras/distributions/histogram_utils.py,sha256=kkZHu1F_2qMfOEzwNP4K6QYA_xEKUk9cChImOQ2Nkjs,5847
+pymc_extras/distributions/timeseries.py,sha256=R9htJ24LCFe5zyR9v6nX7S8iFmSMoQKSIGnLWZ1849Y,12747
 pymc_extras/distributions/multivariate/__init__.py,sha256=E8OeLW9tTotCbrUjEo4um76-_WQD56PehsPzkKmhfyA,93
-pymc_extras/distributions/multivariate/r2d2m2cp.py,sha256=bUj9bB-hQi6CpaJfvJjgNPi727uTbvAdxl9fm1zNBqY,16005
+pymc_extras/distributions/multivariate/r2d2m2cp.py,sha256=5SzvD41pu-EWyWlDNz4AR4Sl8MkyC-1dYwkADFh5Avg,16009
 pymc_extras/distributions/transforms/__init__.py,sha256=FUp2vyRE6_2eUcQ_FVt5Dn0-vy5I-puV-Kz13-QtLNc,104
 pymc_extras/distributions/transforms/partial_order.py,sha256=oEZlc9WgnGR46uFEjLzKEUxlhzIo2vrUUbBE3vYrsfQ,8404
 pymc_extras/gp/__init__.py,sha256=sFHw2y3lEl5tG_FDQHZUonQ_k0DF1JRf0Rp8dpHmge0,745
 pymc_extras/gp/latent_approx.py,sha256=cDEMM6H1BL2qyKg7BZU-ISrKn2HJe7hDaM4Y8GgQDf4,6682
-pymc_extras/inference/__init__.py,sha256=sy1JYQGNZNvPs-3jVFfbFQTW0iCIrbjH3aHBpx1HQi0,917
-pymc_extras/inference/fit.py,sha256=U_jfzuyjk5bV6AvOxtOKzBg-q4z-_BOR06Hn38T0W6E,1328
+pymc_extras/inference/__init__.py,sha256=hI3yqfEVzoUNlCpL1z579F9EqM-NlPTzMfHj8IKY-xE,1009
+pymc_extras/inference/fit.py,sha256=hNTqLms_mTdjfnCEVIHMcMiPZ3fkU3HEEkbt6LWWhLw,1443
+pymc_extras/inference/dadvi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pymc_extras/inference/dadvi/dadvi.py,sha256=Ry1Of_zo0eURVKbWE0U7NDmb7HQ2I_PWBFQcyowAXbE,11799
 pymc_extras/inference/laplace_approx/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pymc_extras/inference/laplace_approx/find_map.py,sha256=fP8DQ21OZbkUiBaq-TXGe7CtH0umupFacRC3qReoiKU,14022
-pymc_extras/inference/laplace_approx/idata.py,sha256=P_GyodNJy2yr6FBYBqSoMShW2CKKuljBTFY1jOAHEKE,13332
-pymc_extras/inference/laplace_approx/laplace.py,sha256=V49TdsCYGxt7Evg7Ml2qtHW0xeZYP5YjCOBaewTvJog,18778
-pymc_extras/inference/laplace_approx/scipy_interface.py,sha256=qMxYodmmxaUGsOp1jc7HxBJc6L8NnmFT2Fd4UNNXu2c,8835
+pymc_extras/inference/laplace_approx/find_map.py,sha256=fbK0swDsSBo7pP1TBokREa2wkK1ajL_gLVVuREHH33k,13658
+pymc_extras/inference/laplace_approx/idata.py,sha256=Dxj6A8aJXn8c24vD_PZmMgIgrwEmaYDlbw5UAJq0Nyw,14172
+pymc_extras/inference/laplace_approx/laplace.py,sha256=suSxCibVry1p3VjdvG9I_9QxazKdcKKT-xmCN1qXNRA,19706
+pymc_extras/inference/laplace_approx/scipy_interface.py,sha256=Crhix_dLA8Y_NvuUDmVQnKWAWGjufmQwDLh-bK9dz_o,10235
 pymc_extras/inference/pathfinder/__init__.py,sha256=FhAYrCWNx_dCrynEdjg2CZ9tIinvcVLBm67pNx_Y3kA,101
+pymc_extras/inference/pathfinder/idata.py,sha256=muAPc9JeI8ZmpjzSp9tSj-uNrcsoNkYb4raJqjgf5UQ,18636
 pymc_extras/inference/pathfinder/importance_sampling.py,sha256=NwxepXOFit3cA5zEebniKdlnJ1rZWg56aMlH4MEOcG4,6264
 pymc_extras/inference/pathfinder/lbfgs.py,sha256=GOoJBil5Kft_iFwGNUGKSeqzI5x_shA4KQWDwgGuQtQ,7110
-pymc_extras/inference/pathfinder/pathfinder.py,sha256=wVDbyvE97iqiYLDHLfnl1MFtDdmEmaI5XZS3Lr6f9sE,64475
+pymc_extras/inference/pathfinder/pathfinder.py,sha256=3fYq3cgchwXxTEehT2PLu32r7hP7vAMoPpOS7TYpZ-w,67193
 pymc_extras/inference/smc/__init__.py,sha256=wyaT4NJl1YsSQRLiDy-i0Jq3CbJZ2BQd4nnCk-dIngY,603
 pymc_extras/inference/smc/sampling.py,sha256=AYwmKqGoV6pBtKnh9SUbBKbN7VcoFgb3MmNWV7SivMA,15365
 pymc_extras/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pymc_extras/model/model_api.py,sha256=UHMfQXxWBujeSiUySU0fDUC5Sd_BjT8FoVz3iBxQH_4,2400
 pymc_extras/model/marginal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pymc_extras/model/marginal/distributions.py,sha256=iM1yT7_BmivgUSloQPKE2QXGPgjvLqDMY_OTBGsdAWg,15563
-pymc_extras/model/marginal/graph_analysis.py,sha256=l_WSZHivm82297zMIm8i3G_h2F-4Tq397pQlcuEP-0I,15874
+pymc_extras/model/marginal/graph_analysis.py,sha256=Ft7RZC126R0TW2GuFdgb9uN-JSgDGTeffs-UuPcDHQE,15884
 pymc_extras/model/marginal/marginal_model.py,sha256=oIdikaSnefCkyMxmzAe222qGXNucxZpHYk7548fK6iA,23631
 pymc_extras/model/transforms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pymc_extras/model/transforms/autoreparam.py,sha256=_NltGWmNqi_X9sHCqAvWcBveLTPxVy11-wENFTcN6kk,12377
@@ -40,39 +43,40 @@ pymc_extras/preprocessing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJW
 pymc_extras/preprocessing/standard_scaler.py,sha256=Vajp33ma6OkwlU54JYtSS8urHbMJ3CRiRFxZpvFNuus,600
 pymc_extras/statespace/__init__.py,sha256=PxV8i4aa2XJarRM6aKU14_bEY1AoLu4bNXIBy_E1rRw,431
 pymc_extras/statespace/core/__init__.py,sha256=LEhkqdMZzzcTyzYml45IM4ykWoCdbWWj2c29IpM_ey8,309
-pymc_extras/statespace/core/compile.py,sha256=9FZfE8Bi3VfElxujfOIKRVvmyL9M5R0WfNEqPc5kbVQ,1603
+pymc_extras/statespace/core/compile.py,sha256=GB2H7sE28OdQ6GmNIjtq1R1Oua2GPf6kWJ7IPuYJaNA,1607
 pymc_extras/statespace/core/representation.py,sha256=boY-jjlkd3KuuO2XiSuV-GwEAyEqRJ9267H72AmE3BU,18956
 pymc_extras/statespace/core/statespace.py,sha256=yu7smA5w7l1LFNjTwuKLnGarGLx4HEPJKQ9ZMDbWhDY,108161
 pymc_extras/statespace/filters/__init__.py,sha256=F0EtZUhArp23lj3upy6zB0mDTjLIjwGh0pKmMny0QfY,420
 pymc_extras/statespace/filters/distributions.py,sha256=-s1c5s2zm6FMc0UqKSrWnJzIF4U5bvJT_3mMNTyV_ak,11927
-pymc_extras/statespace/filters/kalman_filter.py,sha256=rgpgF4KZXX5M8yRwblrt2SEINKgoXgiKNfKkbl7ZU9Y,31464
-pymc_extras/statespace/filters/kalman_smoother.py,sha256=5jlSZAPveJzD5Q8omnpn7Gb1jgElBMgixGR7H9zoH8U,4183
-pymc_extras/statespace/filters/utilities.py,sha256=iwdaYnO1cO06t_XUjLLRmqb8vwzzVH6Nx1iyZcbJL2k,1584
-pymc_extras/statespace/models/ETS.py,sha256=08sbiuNvKdxcgKzS7jWj-z4jf-su73WFkYc8sKkGdEs,28538
-pymc_extras/statespace/models/SARIMAX.py,sha256=Yppz_k1ZyZuKPC62WIye6K7luw44cP-dog73VVkw0L4,25096
-pymc_extras/statespace/models/VARMAX.py,sha256=7obJFXES9t9NONlcUQoeJ9TCqyoDlVat9FkPviQhAq0,25947
+pymc_extras/statespace/filters/kalman_filter.py,sha256=0Ei_ZSogSAAC9OQn4v0tvUqvRZFHh6V3Wx8013-POBQ,31542
+pymc_extras/statespace/filters/kalman_smoother.py,sha256=Q1mwKeZPEMQL3BK2IvGwLfblGGQrfJ2t60fzll1Pung,4128
+pymc_extras/statespace/filters/utilities.py,sha256=BBMDeWBcJWZfGc9owuMsOedVIXVDQ8Z2eMiU9vWeVr0,1494
+pymc_extras/statespace/models/DFM.py,sha256=EiZ3x4iFPGeha8bPp1tok4us8Z6UVUu1sFmKIM1i0xc,36458
+pymc_extras/statespace/models/ETS.py,sha256=LEsSKzbfm9Ol8UZQjNurcrM1CLQyozKfJtby7AzsDeI,27667
+pymc_extras/statespace/models/SARIMAX.py,sha256=CNac0LVOqE6qM40YKZ4KdYF6EUR2gZfR5H__AdPDFOs,24558
+pymc_extras/statespace/models/VARMAX.py,sha256=i9r4DcIl2MWH8JWG4u5T3k3Oe8aXHYld43d8sIqO_pg,22374
 pymc_extras/statespace/models/__init__.py,sha256=DUwPrwfnz9AUbgZOFvZeUpWEw5FiPAK5X9x7vZrRWqY,319
-pymc_extras/statespace/models/utilities.py,sha256=jpUYByAy6rMFP7l56uST1SEYchRa-clsFQ-At_1NLSw,27123
+pymc_extras/statespace/models/utilities.py,sha256=D1VMCXzwlNChfk-x4f9cOhfsK_xOoBBhmRzpjdx0tEs,27329
 pymc_extras/statespace/models/structural/__init__.py,sha256=jvbczE1IeNkhW7gMQ2vF2BhhKHeYyfD90mV-Awko-Vs,811
 pymc_extras/statespace/models/structural/core.py,sha256=n0cbP8_-NFLmflFF4x37AyOOIHcY5iylRrgTzjyOAhM,35374
 pymc_extras/statespace/models/structural/utils.py,sha256=Eze34Z0iXJzDC_gZEY2mHrp2VIYu8rHV915vM4U5Sn4,359
 pymc_extras/statespace/models/structural/components/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pymc_extras/statespace/models/structural/components/autoregressive.py,sha256=HkS5an5fuNOBGcjHFNMUVNJrF1BNnlpQxvmPq_5dD0s,8021
+pymc_extras/statespace/models/structural/components/autoregressive.py,sha256=qlV38eJtjejYi9ujiCyUWfeSbFBXdNvi-hgKx57OQ28,8048
 pymc_extras/statespace/models/structural/components/cycle.py,sha256=qEiGFGMEXKS2Tl_zgzKIp77ijGXCVq6UIHEZp_ErHSQ,13931
 pymc_extras/statespace/models/structural/components/level_trend.py,sha256=7glYX_tKOJPq6uB1NBuPQFFZGkhcwK4GMZUBTcU0xIY,11357
 pymc_extras/statespace/models/structural/components/measurement_error.py,sha256=5LHDx3IplNrWSGcsY3xJLywKPosTqr42jlrvm80ZApM,5316
-pymc_extras/statespace/models/structural/components/regression.py,sha256=27PRV9I64_VXIyjUi7pRr_gbk7sSI5DfJ4FBAbq5WCM,9856
+pymc_extras/statespace/models/structural/components/regression.py,sha256=U2zlVY31WbhFCime69aN6R3VKPlNVf5HNTfIjfiPy-M,8949
 pymc_extras/statespace/models/structural/components/seasonality.py,sha256=soXJIZ2xewUhSUb5s2MGnxvnQCcir7ZgbgkSr94xEvc,26987
 pymc_extras/statespace/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pymc_extras/statespace/utils/constants.py,sha256=-4vCXo7-X3IuzdcplWBrAV9m9tm8JngcgoE-8imGmj0,2518
+pymc_extras/statespace/utils/constants.py,sha256=Dj1XpY_u5EliyStGrEFq5jmA5d_EMHCT4teaifxiTko,2577
 pymc_extras/statespace/utils/coord_tools.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pymc_extras/statespace/utils/data_tools.py,sha256=Tomur7d8WCKlMXUCrPqufqVTKUe_nLLCHdipsM9pmaI,6620
 pymc_extras/utils/__init__.py,sha256=yxI9cJ7fCtVQS0GFw0y6mDGZIQZiK53vm3UNKqIuGSk,758
 pymc_extras/utils/linear_cg.py,sha256=KkXhuimFsrKtNd_0By2ApxQQQNm5FdBtmDQJOVbLYkA,10056
-pymc_extras/utils/model_equivalence.py,sha256=8QIftID2HDxD659i0RXHazQ-l2Q5YegCRLcDqb2p9Pc,2187
-pymc_extras/utils/prior.py,sha256=QlWVr7uKIK9VncBw7Fz3YgaASKGDfqpORZHc-vz_9gQ,6841
-pymc_extras/utils/spline.py,sha256=qGq0gcoMG5dpdazKFzG0RXkkCWP8ADPPXN-653-oFn4,4820
-pymc_extras-0.4.1.dist-info/METADATA,sha256=TpuX_8nEFjQfPlC51u_2EvQV3XwHAvgYCQMKYzeVU_E,18898
-pymc_extras-0.4.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-pymc_extras-0.4.1.dist-info/licenses/LICENSE,sha256=WjiLhUKEysJvy5e9jk6WwFv9tmAPtnov1uJ6gcH1kIs,11720
-pymc_extras-0.4.1.dist-info/RECORD,,
+pymc_extras/utils/model_equivalence.py,sha256=9MLwSj7VwxxKupzmEkKBbwGD1X0WM2FGcGIpfb8bViw,2197
+pymc_extras/utils/prior.py,sha256=mnuFpamp04eQJuTU5NyB2PfCG5r-1McSmQGwQXSR_Lg,6670
+pymc_extras/utils/spline.py,sha256=R0u3eAcV5bRmD2YSLqDm0qnaJbEuf3V38OZ7amV7-Tc,4732
+pymc_extras-0.6.0.dist-info/METADATA,sha256=Dn4Xwbebxw8QiFA0veLrEktkowKsAx06h00p7Epec1E,18898
+pymc_extras-0.6.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+pymc_extras-0.6.0.dist-info/licenses/LICENSE,sha256=WjiLhUKEysJvy5e9jk6WwFv9tmAPtnov1uJ6gcH1kIs,11720
+pymc_extras-0.6.0.dist-info/RECORD,,
{pymc_extras-0.4.1.dist-info → pymc_extras-0.6.0.dist-info}/WHEEL

@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: hatchling 1.27.0
+Generator: hatchling 1.28.0
 Root-Is-Purelib: true
 Tag: py3-none-any