pymc-extras 0.4.0__tar.gz → 0.5.0__tar.gz

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only.
Files changed (172)
  1. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/PKG-INFO +1 -1
  2. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/_version.py +16 -3
  3. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/distributions/histogram_utils.py +1 -1
  4. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/__init__.py +8 -1
  5. pymc_extras-0.5.0/pymc_extras/inference/dadvi/dadvi.py +261 -0
  6. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/fit.py +5 -0
  7. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/laplace_approx/find_map.py +16 -8
  8. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/laplace_approx/idata.py +5 -2
  9. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/laplace_approx/laplace.py +1 -0
  10. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/printing.py +1 -1
  11. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/__init__.py +4 -4
  12. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/core/__init__.py +1 -1
  13. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/core/statespace.py +94 -23
  14. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/filters/kalman_filter.py +16 -11
  15. pymc_extras-0.5.0/pymc_extras/statespace/models/DFM.py +849 -0
  16. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/SARIMAX.py +138 -74
  17. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/VARMAX.py +248 -57
  18. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/__init__.py +2 -2
  19. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/structural/components/autoregressive.py +49 -24
  20. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/structural/components/cycle.py +48 -28
  21. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/structural/components/level_trend.py +61 -29
  22. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/structural/components/measurement_error.py +22 -5
  23. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/structural/components/regression.py +47 -18
  24. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/structural/components/seasonality.py +278 -95
  25. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/structural/core.py +27 -8
  26. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/utils/constants.py +19 -14
  27. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/utils/data_tools.py +1 -1
  28. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/distributions/__init__.py +1 -1
  29. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/inference/laplace_approx/test_find_map.py +6 -2
  30. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/core/test_statespace.py +45 -14
  31. pymc_extras-0.5.0/tests/statespace/models/structural/components/test_autoregressive.py +267 -0
  32. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/models/structural/components/test_cycle.py +119 -6
  33. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/models/structural/components/test_level_trend.py +125 -0
  34. pymc_extras-0.5.0/tests/statespace/models/structural/components/test_measurement_error.py +74 -0
  35. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/models/structural/components/test_regression.py +102 -1
  36. pymc_extras-0.5.0/tests/statespace/models/structural/components/test_seasonality.py +716 -0
  37. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/models/structural/test_against_statsmodels.py +13 -13
  38. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/models/structural/test_core.py +12 -5
  39. pymc_extras-0.5.0/tests/statespace/models/test_DFM.py +727 -0
  40. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/models/test_SARIMAX.py +64 -11
  41. pymc_extras-0.5.0/tests/statespace/models/test_VARMAX.py +545 -0
  42. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/utils/test_coord_assignment.py +1 -1
  43. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/test_histogram_approximation.py +2 -2
  44. pymc_extras-0.5.0/tests/utils.py +0 -0
  45. pymc_extras-0.4.0/tests/statespace/models/structural/components/test_autoregressive.py +0 -132
  46. pymc_extras-0.4.0/tests/statespace/models/structural/components/test_measurement_error.py +0 -32
  47. pymc_extras-0.4.0/tests/statespace/models/structural/components/test_seasonality.py +0 -439
  48. pymc_extras-0.4.0/tests/statespace/models/test_VARMAX.py +0 -190
  49. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/.gitignore +0 -0
  50. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/.gitpod.yml +0 -0
  51. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/.pre-commit-config.yaml +0 -0
  52. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/.readthedocs.yaml +0 -0
  53. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/CODE_OF_CONDUCT.md +0 -0
  54. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/CONTRIBUTING.md +0 -0
  55. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/LICENSE +0 -0
  56. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/README.md +0 -0
  57. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/codecov.yml +0 -0
  58. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/conda-envs/environment-test.yml +0 -0
  59. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/docs/.nojekyll +0 -0
  60. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/docs/Makefile +0 -0
  61. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/docs/_templates/autosummary/base.rst +0 -0
  62. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/docs/_templates/autosummary/class.rst +0 -0
  63. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/docs/api_reference.rst +0 -0
  64. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/docs/conf.py +0 -0
  65. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/docs/index.rst +0 -0
  66. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/docs/make.bat +0 -0
  67. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/docs/statespace/core.rst +0 -0
  68. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/docs/statespace/filters.rst +0 -0
  69. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/docs/statespace/models/structural.rst +0 -0
  70. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/docs/statespace/models.rst +0 -0
  71. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/__init__.py +0 -0
  72. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/deserialize.py +0 -0
  73. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/distributions/__init__.py +5 -5
  74. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/distributions/continuous.py +0 -0
  75. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/distributions/discrete.py +0 -0
  76. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/distributions/multivariate/__init__.py +0 -0
  77. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/distributions/multivariate/r2d2m2cp.py +0 -0
  78. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/distributions/timeseries.py +0 -0
  79. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/distributions/transforms/__init__.py +0 -0
  80. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/distributions/transforms/partial_order.py +0 -0
  81. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/gp/__init__.py +0 -0
  82. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/gp/latent_approx.py +0 -0
  83. {pymc_extras-0.4.0/pymc_extras/inference/laplace_approx → pymc_extras-0.5.0/pymc_extras/inference/dadvi}/__init__.py +0 -0
  84. {pymc_extras-0.4.0/pymc_extras/model → pymc_extras-0.5.0/pymc_extras/inference/laplace_approx}/__init__.py +0 -0
  85. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/laplace_approx/scipy_interface.py +0 -0
  86. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/pathfinder/__init__.py +0 -0
  87. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/pathfinder/importance_sampling.py +0 -0
  88. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/pathfinder/lbfgs.py +0 -0
  89. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/pathfinder/pathfinder.py +0 -0
  90. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/smc/__init__.py +0 -0
  91. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/smc/sampling.py +0 -0
  92. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/linearmodel.py +0 -0
  93. {pymc_extras-0.4.0/pymc_extras/model/marginal → pymc_extras-0.5.0/pymc_extras/model}/__init__.py +0 -0
  94. {pymc_extras-0.4.0/pymc_extras/model/transforms → pymc_extras-0.5.0/pymc_extras/model/marginal}/__init__.py +0 -0
  95. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/model/marginal/distributions.py +0 -0
  96. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/model/marginal/graph_analysis.py +0 -0
  97. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/model/marginal/marginal_model.py +0 -0
  98. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/model/model_api.py +0 -0
  99. {pymc_extras-0.4.0/pymc_extras/preprocessing → pymc_extras-0.5.0/pymc_extras/model/transforms}/__init__.py +0 -0
  100. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/model/transforms/autoreparam.py +0 -0
  101. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/model_builder.py +0 -0
  102. {pymc_extras-0.4.0/pymc_extras/statespace/models/structural/components → pymc_extras-0.5.0/pymc_extras/preprocessing}/__init__.py +0 -0
  103. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/preprocessing/standard_scaler.py +0 -0
  104. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/prior.py +0 -0
  105. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/core/compile.py +0 -0
  106. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/core/representation.py +8 -8
  107. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/filters/__init__.py +3 -3
  108. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/filters/distributions.py +0 -0
  109. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/filters/kalman_smoother.py +0 -0
  110. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/filters/utilities.py +0 -0
  111. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/ETS.py +0 -0
  112. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/structural/__init__.py +4 -4
  113. {pymc_extras-0.4.0/pymc_extras/statespace/utils → pymc_extras-0.5.0/pymc_extras/statespace/models/structural/components}/__init__.py +0 -0
  114. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/structural/utils.py +0 -0
  115. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/models/utilities.py +0 -0
  116. {pymc_extras-0.4.0/tests/inference → pymc_extras-0.5.0/pymc_extras/statespace/utils}/__init__.py +0 -0
  117. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/utils/coord_tools.py +0 -0
  118. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/utils/__init__.py +0 -0
  119. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/utils/linear_cg.py +0 -0
  120. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/utils/model_equivalence.py +0 -0
  121. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/utils/prior.py +0 -0
  122. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/utils/spline.py +0 -0
  123. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/pyproject.toml +0 -0
  124. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/__init__.py +0 -0
  125. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/conftest.py +0 -0
  126. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/distributions/test_continuous.py +0 -0
  127. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/distributions/test_discrete.py +0 -0
  128. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/distributions/test_discrete_markov_chain.py +0 -0
  129. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/distributions/test_multivariate.py +0 -0
  130. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/distributions/test_transform.py +0 -0
  131. {pymc_extras-0.4.0/tests/inference/laplace_approx → pymc_extras-0.5.0/tests/inference}/__init__.py +0 -0
  132. {pymc_extras-0.4.0/tests/model → pymc_extras-0.5.0/tests/inference/laplace_approx}/__init__.py +0 -0
  133. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/inference/laplace_approx/test_idata.py +0 -0
  134. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/inference/laplace_approx/test_laplace.py +0 -0
  135. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/inference/laplace_approx/test_scipy_interface.py +0 -0
  136. {pymc_extras-0.4.0/tests/model/marginal → pymc_extras-0.5.0/tests/model}/__init__.py +0 -0
  137. {pymc_extras-0.4.0/tests/statespace → pymc_extras-0.5.0/tests/model/marginal}/__init__.py +0 -0
  138. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/model/marginal/test_distributions.py +0 -0
  139. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/model/marginal/test_graph_analysis.py +0 -0
  140. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/model/marginal/test_marginal_model.py +0 -0
  141. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/model/test_model_api.py +0 -0
  142. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/model/transforms/test_autoreparam.py +0 -0
  143. {pymc_extras-0.4.0/tests/statespace/core → pymc_extras-0.5.0/tests/statespace}/__init__.py +0 -0
  144. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/_data/airpass.csv +0 -0
  145. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/_data/airpassangers.csv +0 -0
  146. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/_data/nile.csv +0 -0
  147. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/_data/statsmodels_macrodata_processed.csv +0 -0
  148. {pymc_extras-0.4.0/tests/statespace/filters → pymc_extras-0.5.0/tests/statespace/core}/__init__.py +0 -0
  149. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/core/test_representation.py +0 -0
  150. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/core/test_statespace_JAX.py +0 -0
  151. {pymc_extras-0.4.0/tests/statespace/models → pymc_extras-0.5.0/tests/statespace/filters}/__init__.py +0 -0
  152. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/filters/test_distributions.py +0 -0
  153. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/filters/test_kalman_filter.py +0 -0
  154. {pymc_extras-0.4.0/tests/statespace/models/structural → pymc_extras-0.5.0/tests/statespace/models}/__init__.py +0 -0
  155. {pymc_extras-0.4.0/tests/statespace/models/structural/components → pymc_extras-0.5.0/tests/statespace/models/structural}/__init__.py +0 -0
  156. {pymc_extras-0.4.0/tests/statespace/utils → pymc_extras-0.5.0/tests/statespace/models/structural/components}/__init__.py +0 -0
  157. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/models/structural/conftest.py +0 -0
  158. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/models/test_ETS.py +0 -0
  159. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/models/test_utilities.py +0 -0
  160. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/shared_fixtures.py +0 -0
  161. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/statsmodel_local_level.py +0 -0
  162. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/statespace/test_utilities.py +0 -0
  163. /pymc_extras-0.4.0/tests/utils.py → /pymc_extras-0.5.0/tests/statespace/utils/__init__.py +0 -0
  164. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/test_blackjax_smc.py +0 -0
  165. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/test_deserialize.py +0 -0
  166. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/test_linearmodel.py +0 -0
  167. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/test_model_builder.py +0 -0
  168. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/test_pathfinder.py +0 -0
  169. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/test_printing.py +0 -0
  170. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/test_prior.py +0 -0
  171. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/test_prior_from_trace.py +0 -0
  172. {pymc_extras-0.4.0 → pymc_extras-0.5.0}/tests/test_splines.py +0 -0
{pymc_extras-0.4.0 → pymc_extras-0.5.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pymc-extras
-Version: 0.4.0
+Version: 0.5.0
 Summary: A home for new additions to PyMC, which may include unusual probability distributions, advanced model fitting algorithms, or any code that may be inappropriate to include in the pymc repository, but may want to be made available to users.
 Project-URL: Documentation, https://pymc-extras.readthedocs.io/
 Project-URL: Repository, https://github.com/pymc-devs/pymc-extras.git
{pymc_extras-0.4.0 → pymc_extras-0.5.0}/_version.py
@@ -1,7 +1,14 @@
 # file generated by setuptools-scm
 # don't change, don't track in version control

-__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+__all__ = [
+    "__version__",
+    "__version_tuple__",
+    "version",
+    "version_tuple",
+    "__commit_id__",
+    "commit_id",
+]

 TYPE_CHECKING = False
 if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
     from typing import Union

     VERSION_TUPLE = Tuple[Union[int, str], ...]
+    COMMIT_ID = Union[str, None]
 else:
     VERSION_TUPLE = object
+    COMMIT_ID = object

 version: str
 __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID

-__version__ = version = '0.4.0'
-__version_tuple__ = version_tuple = (0, 4, 0)
+__version__ = version = '0.5.0'
+__version_tuple__ = version_tuple = (0, 5, 0)
+
+__commit_id__ = commit_id = None
{pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/distributions/histogram_utils.py
@@ -18,7 +18,7 @@ import pymc as pm

 from numpy.typing import ArrayLike

-__all__ = ["quantile_histogram", "discrete_histogram", "histogram_approximation"]
+__all__ = ["discrete_histogram", "histogram_approximation", "quantile_histogram"]


 def quantile_histogram(
{pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/__init__.py
@@ -12,9 +12,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+from pymc_extras.inference.dadvi.dadvi import fit_dadvi
 from pymc_extras.inference.fit import fit
 from pymc_extras.inference.laplace_approx.find_map import find_MAP
 from pymc_extras.inference.laplace_approx.laplace import fit_laplace
 from pymc_extras.inference.pathfinder.pathfinder import fit_pathfinder

-__all__ = ["fit", "fit_pathfinder", "fit_laplace", "find_MAP"]
+__all__ = [
+    "find_MAP",
+    "fit",
+    "fit_laplace",
+    "fit_pathfinder",
+    "fit_dadvi",
+]
pymc_extras-0.5.0/pymc_extras/inference/dadvi/dadvi.py (new file)
@@ -0,0 +1,261 @@
+import arviz as az
+import numpy as np
+import pymc
+import pytensor
+import pytensor.tensor as pt
+import xarray
+
+from better_optimize import minimize
+from better_optimize.constants import minimize_method
+from pymc import DictToArrayBijection, Model, join_nonshared_inputs
+from pymc.backends.arviz import (
+    PointFunc,
+    apply_function_over_dataset,
+    coords_and_dims_for_inferencedata,
+)
+from pymc.util import RandomSeed, get_default_varnames
+from pytensor.tensor.variable import TensorVariable
+
+from pymc_extras.inference.laplace_approx.laplace import unstack_laplace_draws
+from pymc_extras.inference.laplace_approx.scipy_interface import (
+    _compile_functions_for_scipy_optimize,
+)
+
+
+def fit_dadvi(
+    model: Model | None = None,
+    n_fixed_draws: int = 30,
+    random_seed: RandomSeed = None,
+    n_draws: int = 1000,
+    keep_untransformed: bool = False,
+    optimizer_method: minimize_method = "trust-ncg",
+    use_grad: bool = True,
+    use_hessp: bool = True,
+    use_hess: bool = False,
+    **minimize_kwargs,
+) -> az.InferenceData:
+    """
+    Does inference using deterministic ADVI (automatic differentiation
+    variational inference), DADVI for short.
+
+    For full details see the paper cited in the references:
+    https://www.jmlr.org/papers/v25/23-1015.html
+
+    Parameters
+    ----------
+    model : pm.Model
+        The PyMC model to be fit. If None, the current model context is used.
+
+    n_fixed_draws : int
+        The number of fixed draws to use for the optimisation. More
+        draws will result in more accurate estimates, but also
+        increase inference time. Usually, the default of 30 is a good
+        tradeoff between speed and accuracy.
+
+    random_seed : int
+        The random seed to use for the fixed draws. Running the optimisation
+        twice with the same seed should arrive at the same result.
+
+    n_draws : int
+        The number of draws to return from the variational approximation.
+
+    keep_untransformed : bool
+        Whether or not to keep the unconstrained variables (such as
+        logs of positive-constrained parameters) in the output.
+
+    optimizer_method : str
+        Which optimization method to use. The function calls
+        ``scipy.optimize.minimize``, so any of the methods there can
+        be used. The default is trust-ncg, which uses second-order
+        information and is generally very reliable. Other methods such
+        as L-BFGS-B might be faster but potentially more brittle and
+        may not converge exactly to the optimum.
+
+    minimize_kwargs :
+        Additional keyword arguments to pass to the
+        ``scipy.optimize.minimize`` function. See the documentation of
+        that function for details.
+
+    use_grad :
+        If True, pass the gradient function to
+        ``scipy.optimize.minimize`` (where it is referred to as ``jac``).
+
+    use_hessp :
+        If True, pass the hessian vector product to ``scipy.optimize.minimize``.
+
+    use_hess :
+        If True, pass the hessian to ``scipy.optimize.minimize``. Note that
+        this is generally not recommended since its computation can be slow
+        and memory-intensive if there are many parameters.
+
+    Returns
+    -------
+    :class:`~arviz.InferenceData`
+        The inference data containing the results of the DADVI algorithm.
+
+    References
+    ----------
+    Giordano, R., Ingram, M., & Broderick, T. (2024). Black Box
+    Variational Inference with a Deterministic Objective: Faster, More
+    Accurate, and Even More Black Box. Journal of Machine Learning
+    Research, 25(18), 1–39.
+    """
+
+    model = pymc.modelcontext(model) if model is None else model
+
+    initial_point_dict = model.initial_point()
+    n_params = DictToArrayBijection.map(initial_point_dict).data.shape[0]
+
+    var_params, objective = create_dadvi_graph(
+        model,
+        n_fixed_draws=n_fixed_draws,
+        random_seed=random_seed,
+        n_params=n_params,
+    )
+
+    f_fused, f_hessp = _compile_functions_for_scipy_optimize(
+        objective,
+        [var_params],
+        compute_grad=use_grad,
+        compute_hessp=use_hessp,
+        compute_hess=use_hess,
+    )
+
+    derivative_kwargs = {}
+
+    if use_grad:
+        derivative_kwargs["jac"] = True
+    if use_hessp:
+        derivative_kwargs["hessp"] = f_hessp
+    if use_hess:
+        derivative_kwargs["hess"] = True
+
+    result = minimize(
+        f_fused,
+        np.zeros(2 * n_params),
+        method=optimizer_method,
+        **derivative_kwargs,
+        **minimize_kwargs,
+    )
+
+    opt_var_params = result.x
+    opt_means, opt_log_sds = np.split(opt_var_params, 2)
+
+    # Make the draws:
+    generator = np.random.default_rng(seed=random_seed)
+    draws_raw = generator.standard_normal(size=(n_draws, n_params))
+
+    draws = opt_means + draws_raw * np.exp(opt_log_sds)
+    draws_arviz = unstack_laplace_draws(draws, model, chains=1, draws=n_draws)
+
+    transformed_draws = transform_draws(draws_arviz, model, keep_untransformed=keep_untransformed)
+
+    return transformed_draws
+
+
+def create_dadvi_graph(
+    model: Model,
+    n_params: int,
+    n_fixed_draws: int = 30,
+    random_seed: RandomSeed = None,
+) -> tuple[TensorVariable, TensorVariable]:
+    """
+    Sets up the DADVI graph in pytensor and returns it.
+
+    Parameters
+    ----------
+    model : pm.Model
+        The PyMC model to be fit.
+
+    n_params : int
+        The total number of parameters in the model.
+
+    n_fixed_draws : int
+        The number of fixed draws to use.
+
+    random_seed : int
+        The random seed to use for the fixed draws.
+
+    Returns
+    -------
+    Tuple[TensorVariable, TensorVariable]
+        A tuple whose first element contains the variational parameters,
+        and whose second contains the DADVI objective.
+    """
+
+    # Make the fixed draws
+    generator = np.random.default_rng(seed=random_seed)
+    draws = generator.standard_normal(size=(n_fixed_draws, n_params))
+
+    inputs = model.continuous_value_vars + model.discrete_value_vars
+    initial_point_dict = model.initial_point()
+    logp = model.logp()
+
+    # Graph in terms of a flat input
+    [logp], flat_input = join_nonshared_inputs(
+        point=initial_point_dict, outputs=[logp], inputs=inputs
+    )
+
+    var_params = pt.vector(name="eta", shape=(2 * n_params,))
+
+    means, log_sds = pt.split(var_params, axis=0, splits_size=[n_params, n_params], n_splits=2)
+
+    draw_matrix = pt.constant(draws)
+    samples = means + pt.exp(log_sds) * draw_matrix
+
+    logp_vectorized_draws = pytensor.graph.vectorize_graph(logp, replace={flat_input: samples})
+
+    mean_log_density = pt.mean(logp_vectorized_draws)
+    entropy = pt.sum(log_sds)
+
+    objective = -mean_log_density - entropy
+
+    return var_params, objective
+
+
+def transform_draws(
+    unstacked_draws: xarray.Dataset,
+    model: Model,
+    keep_untransformed: bool = False,
+):
+    """
+    Transforms the unconstrained draws back into the constrained space.
+
+    Parameters
+    ----------
+    unstacked_draws : xarray.Dataset
+        The draws to constrain back into the original space.
+
+    model : Model
+        The PyMC model the variables were derived from.
+
+    n_draws : int
+        The number of draws to return from the variational approximation.
+
+    keep_untransformed : bool
+        Whether or not to keep the unconstrained variables in the output.
+
+    Returns
+    -------
+    :class:`~arviz.InferenceData`
+        Draws from the original constrained parameters.
+    """
+
+    filtered_var_names = model.unobserved_value_vars
+    vars_to_sample = list(
+        get_default_varnames(filtered_var_names, include_transformed=keep_untransformed)
+    )
+    fn = pytensor.function(model.value_vars, vars_to_sample)
+    point_func = PointFunc(fn)
+
+    coords, dims = coords_and_dims_for_inferencedata(model)
+
+    transformed_result = apply_function_over_dataset(
+        point_func,
+        unstacked_draws,
+        output_var_names=[x.name for x in vars_to_sample],
+        coords=coords,
+        dims=dims,
+    )
+
+    return transformed_result
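For orientation: the objective assembled in create_dadvi_graph is -mean_m[log p(mu + exp(log_sd) * z_m)] - sum(log_sd), a negative-ELBO estimate in which the Monte Carlo noise z_m is frozen across iterations, minimised over the stacked vector eta = (mu, log_sd) of length 2 * n_params. A minimal usage sketch of the new entry point (the toy model here is hypothetical; only the signature shown above is assumed):

    import numpy as np
    import pymc as pm

    from pymc_extras.inference import fit_dadvi

    # Hypothetical toy model: infer a mean and scale from three observations.
    with pm.Model() as model:
        mu = pm.Normal("mu", 0.0, 1.0)
        sigma = pm.HalfNormal("sigma", 1.0)
        pm.Normal("y", mu, sigma, observed=np.array([0.1, -0.3, 0.7]))

    # Same seed -> same fixed draws -> the optimiser lands on the same result.
    idata = fit_dadvi(model=model, n_fixed_draws=30, n_draws=1000, random_seed=0)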
{pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/fit.py
@@ -40,3 +40,8 @@ def fit(method: str, **kwargs) -> az.InferenceData:
         from pymc_extras.inference import fit_laplace

         return fit_laplace(**kwargs)
+
+    if method == "dadvi":
+        from pymc_extras.inference import fit_dadvi
+
+        return fit_dadvi(**kwargs)
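The generic dispatcher now recognises "dadvi" alongside the existing methods. Reusing the hypothetical model from the sketch above, the following is equivalent, with keyword arguments forwarded unchanged to fit_dadvi:

    from pymc_extras.inference import fit

    idata = fit(method="dadvi", model=model, random_seed=0)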
{pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/laplace_approx/find_map.py
@@ -198,6 +198,7 @@ def find_MAP(
     include_transformed: bool = True,
     gradient_backend: GradientBackend = "pytensor",
     compile_kwargs: dict | None = None,
+    compute_hessian: bool = False,
     **optimizer_kwargs,
 ) -> (
     dict[str, np.ndarray]
@@ -239,6 +240,10 @@ def find_MAP(
         Whether to include transformed variable values in the returned dictionary. Defaults to True.
     gradient_backend: str, default "pytensor"
         Which backend to use to compute gradients. Must be one of "pytensor" or "jax".
+    compute_hessian: bool
+        If True, the inverse Hessian matrix at the optimum will be computed and included in the returned
+        InferenceData object. This is needed for the Laplace approximation, but can be computationally expensive for
+        high-dimensional problems. Defaults to False.
     compile_kwargs: dict, optional
         Additional options to pass to the ``pytensor.function`` function when compiling loss functions.
     **optimizer_kwargs
@@ -316,14 +321,17 @@ def find_MAP(
         **optimizer_kwargs,
     )

-    H_inv = _compute_inverse_hessian(
-        optimizer_result=optimizer_result,
-        optimal_point=None,
-        f_fused=f_fused,
-        f_hessp=f_hessp,
-        use_hess=use_hess,
-        method=method,
-    )
+    if compute_hessian:
+        H_inv = _compute_inverse_hessian(
+            optimizer_result=optimizer_result,
+            optimal_point=None,
+            f_fused=f_fused,
+            f_hessp=f_hessp,
+            use_hess=use_hess,
+            method=method,
+        )
+    else:
+        H_inv = None

     raveled_optimized = RaveledVars(optimizer_result.x, initial_params.point_map_info)
     unobserved_vars = get_default_varnames(model.unobserved_value_vars, include_transformed=True)
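With this change find_MAP skips the inverse-Hessian computation unless asked, and fit_laplace opts back in with compute_hessian=True (see the laplace.py hunk below). A sketch of the standalone call, reusing the hypothetical model from earlier:

    from pymc_extras.inference import find_MAP

    # Point estimate only; no inverse Hessian is computed or attached.
    map_point = find_MAP(model=model, compute_hessian=False)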
{pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/laplace_approx/idata.py
@@ -136,7 +136,10 @@ def map_results_to_inference_data(


 def add_fit_to_inference_data(
-    idata: az.InferenceData, mu: RaveledVars, H_inv: np.ndarray, model: pm.Model | None = None
+    idata: az.InferenceData,
+    mu: RaveledVars,
+    H_inv: np.ndarray | None,
+    model: pm.Model | None = None,
 ) -> az.InferenceData:
     """
     Add the mean vector and covariance matrix of the Laplace approximation to an InferenceData object.
@@ -147,7 +150,7 @@ def add_fit_to_inference_data(
         An InferenceData object containing the approximated posterior samples.
     mu: RaveledVars
         The MAP estimate of the model parameters.
-    H_inv: np.ndarray
+    H_inv: np.ndarray, optional
         The inverse Hessian matrix of the log-posterior evaluated at the MAP estimate.
     model: Model, optional
         A PyMC model. If None, the model is taken from the current model context.
{pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/inference/laplace_approx/laplace.py
@@ -389,6 +389,7 @@ def fit_laplace(
         include_transformed=include_transformed,
         gradient_backend=gradient_backend,
         compile_kwargs=compile_kwargs,
+        compute_hessian=True,
         **optimizer_kwargs,
     )

{pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/printing.py
@@ -166,7 +166,7 @@ def model_table(

         for var in group:
             var_name = var.name
-            sep = f'[b]{" ~" if (var in model.basic_RVs) else " ="}[/b]'
+            sep = f"[b]{' ~' if (var in model.basic_RVs) else ' ='}[/b]"
             var_expr = variable_expression(model, var, truncate_deterministic)
             dims_expr = dims_expression(model, var)
             if dims_expr == "[]":
{pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/__init__.py
@@ -1,13 +1,13 @@
 from pymc_extras.statespace.core.compile import compile_statespace
 from pymc_extras.statespace.models import structural
 from pymc_extras.statespace.models.ETS import BayesianETS
-from pymc_extras.statespace.models.SARIMAX import BayesianSARIMA
+from pymc_extras.statespace.models.SARIMAX import BayesianSARIMAX
 from pymc_extras.statespace.models.VARMAX import BayesianVARMAX

 __all__ = [
-    "compile_statespace",
-    "structural",
     "BayesianETS",
-    "BayesianSARIMA",
+    "BayesianSARIMAX",
     "BayesianVARMAX",
+    "compile_statespace",
+    "structural",
 ]
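Note the breaking rename in this hunk: from 0.5.0 on, BayesianSARIMA is exported as BayesianSARIMAX, so imports written against 0.4.0 need a one-line update:

    # 0.4.0
    from pymc_extras.statespace import BayesianSARIMA

    # 0.5.0
    from pymc_extras.statespace import BayesianSARIMAX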
{pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/core/__init__.py
@@ -4,4 +4,4 @@ from pymc_extras.statespace.core.representation import PytensorRepresentation
 from pymc_extras.statespace.core.statespace import PyMCStateSpace
 from pymc_extras.statespace.core.compile import compile_statespace

-__all__ = ["PytensorRepresentation", "PyMCStateSpace", "compile_statespace"]
+__all__ = ["PyMCStateSpace", "PytensorRepresentation", "compile_statespace"]
{pymc_extras-0.4.0 → pymc_extras-0.5.0}/pymc_extras/statespace/core/statespace.py
@@ -60,7 +60,7 @@ FILTER_FACTORY = {
 def _validate_filter_arg(filter_arg):
     if filter_arg.lower() not in FILTER_OUTPUT_TYPES:
         raise ValueError(
-            f'filter_output should be one of {", ".join(FILTER_OUTPUT_TYPES)}, received {filter_arg}'
+            f"filter_output should be one of {', '.join(FILTER_OUTPUT_TYPES)}, received {filter_arg}"
         )


@@ -233,10 +233,9 @@ class PyMCStateSpace:
         self._fit_coords: dict[str, Sequence[str]] | None = None
         self._fit_dims: dict[str, Sequence[str]] | None = None
         self._fit_data: pt.TensorVariable | None = None
+        self._fit_exog_data: dict[str, dict] = {}

         self._needs_exog_data = None
-        self._exog_names = []
-        self._exog_data_info = {}
         self._name_to_variable = {}
         self._name_to_data = {}

@@ -671,7 +670,7 @@ class PyMCStateSpace:
         pymc_mod = modelcontext(None)
         for data_name in self.data_names:
             data = pymc_mod[data_name]
-            self._exog_data_info[data_name] = {
+            self._fit_exog_data[data_name] = {
                 "name": data_name,
                 "value": data.get_value(),
                 "dims": pymc_mod.named_vars_to_dims.get(data_name, None),
@@ -685,7 +684,7 @@ class PyMCStateSpace:
         --------
         .. code:: python

-            ss_mod = pmss.BayesianSARIMA(order=(2, 0, 2), verbose=False, stationary_initialization=True)
+            ss_mod = pmss.BayesianSARIMAX(order=(2, 0, 2), verbose=False, stationary_initialization=True)
             with pm.Model():
                 x0 = pm.Normal('x0', size=ss_mod.k_states)
                 ar_params = pm.Normal('ar_params', size=ss_mod.p)
@@ -805,16 +804,16 @@ class PyMCStateSpace:
         states, covs = outputs[:4], outputs[4:]

         state_names = [
-            "filtered_state",
-            "predicted_state",
-            "predicted_observed_state",
-            "smoothed_state",
+            "filtered_states",
+            "predicted_states",
+            "predicted_observed_states",
+            "smoothed_states",
         ]
         cov_names = [
-            "filtered_covariance",
-            "predicted_covariance",
-            "predicted_observed_covariance",
-            "smoothed_covariance",
+            "filtered_covariances",
+            "predicted_covariances",
+            "predicted_observed_covariances",
+            "smoothed_covariances",
         ]

         with mod:
@@ -939,7 +938,7 @@ class PyMCStateSpace:
         all_kf_outputs = [*states, smooth_states, *covs, smooth_covariances]
         self._register_kalman_filter_outputs_with_pymc_model(all_kf_outputs)

-        obs_dims = FILTER_OUTPUT_DIMS["predicted_observed_state"]
+        obs_dims = FILTER_OUTPUT_DIMS["predicted_observed_states"]
         obs_dims = obs_dims if all([dim in pm_mod.coords.keys() for dim in obs_dims]) else None

         SequenceMvNormal(
@@ -1082,7 +1081,7 @@ class PyMCStateSpace:

         for name in self.data_names:
             if name not in pm_mod:
-                pm.Data(**self._exog_data_info[name])
+                pm.Data(**self._fit_exog_data[name])

         self._insert_data_variables()

@@ -1229,7 +1228,7 @@ class PyMCStateSpace:
             method=mvn_method,
         )

-        obs_mu = (Z @ mu[..., None]).squeeze(-1)
+        obs_mu = d + (Z @ mu[..., None]).squeeze(-1)
         obs_cov = Z @ cov @ pt.swapaxes(Z, -2, -1) + H

         SequenceMvNormal(
@@ -1351,7 +1350,7 @@ class PyMCStateSpace:
         self._insert_random_variables()

         for name in self.data_names:
-            pm.Data(**self._exog_data_info[name])
+            pm.Data(**self._fit_exog_data[name])

         self._insert_data_variables()

@@ -1651,7 +1650,7 @@ class PyMCStateSpace:
         self._insert_random_variables()

         for name in self.data_names:
-            pm.Data(**self._exog_data_info[name])
+            pm.Data(**self.data_info[name])

         self._insert_data_variables()
         matrices = self.unpack_statespace()

@@ -1678,6 +1677,78 @@ class PyMCStateSpace:

         return matrix_idata

+    def sample_filter_outputs(
+        self, idata, filter_output_names: str | list[str] | None, group: str = "posterior", **kwargs
+    ):
+        if isinstance(filter_output_names, str):
+            filter_output_names = [filter_output_names]
+
+        if filter_output_names is None:
+            filter_output_names = list(FILTER_OUTPUT_DIMS.keys())
+        else:
+            unknown_filter_output_names = np.setdiff1d(
+                filter_output_names, list(FILTER_OUTPUT_DIMS.keys())
+            )
+            if unknown_filter_output_names.size > 0:
+                raise ValueError(f"{unknown_filter_output_names} not a valid filter output name!")
+            filter_output_names = [x for x in FILTER_OUTPUT_DIMS.keys() if x in filter_output_names]
+
+        compile_kwargs = kwargs.pop("compile_kwargs", {})
+        compile_kwargs.setdefault("mode", self.mode)
+
+        with pm.Model(coords=self.coords) as m:
+            self._build_dummy_graph()
+            self._insert_random_variables()
+
+            if self.data_names:
+                for name in self.data_names:
+                    pm.Data(**self._fit_exog_data[name])
+
+            self._insert_data_variables()
+
+            x0, P0, c, d, T, Z, R, H, Q = self.unpack_statespace()
+            data = self._fit_data
+
+            obs_coords = m.coords.get(OBS_STATE_DIM, None)
+
+            data, nan_mask = register_data_with_pymc(
+                data,
+                n_obs=self.ssm.k_endog,
+                obs_coords=obs_coords,
+                register_data=True,
+            )
+
+            filter_outputs = self.kalman_filter.build_graph(
+                data,
+                x0,
+                P0,
+                c,
+                d,
+                T,
+                Z,
+                R,
+                H,
+                Q,
+            )
+
+            smoother_outputs = self.kalman_smoother.build_graph(
+                T, R, Q, filter_outputs[0], filter_outputs[3]
+            )
+
+            filter_outputs = filter_outputs[:-1] + list(smoother_outputs)
+            for output in filter_outputs:
+                if output.name in filter_output_names:
+                    dims = FILTER_OUTPUT_DIMS[output.name]
+                    pm.Deterministic(output.name, output, dims=dims)
+
+        with freeze_dims_and_data(m):
+            return pm.sample_posterior_predictive(
+                idata if group == "posterior" else idata.prior,
+                var_names=filter_output_names,
+                compile_kwargs=compile_kwargs,
+                **kwargs,
+            )
+
     @staticmethod
     def _validate_forecast_args(
         time_index: pd.RangeIndex | pd.DatetimeIndex,
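The new sample_filter_outputs method recompiles the Kalman filter and smoother over existing posterior (or prior) draws and registers only the requested outputs; valid names are the keys of FILTER_OUTPUT_DIMS, which use the plural forms introduced in the renames above. A minimal sketch, assuming ss_mod and idata come from an earlier statespace fit:

    # ss_mod: a fitted PyMCStateSpace model; idata: the result of pm.sample(...)
    kf_idata = ss_mod.sample_filter_outputs(
        idata,
        filter_output_names=["filtered_states", "smoothed_covariances"],
    )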
@@ -1774,7 +1845,7 @@ class PyMCStateSpace:
         }

         if self._needs_exog_data and scenario is None:
-            exog_str = ",".join(self._exog_names)
+            exog_str = ",".join(self.data_names)
             suffix = "s" if len(exog_str) > 1 else ""
             raise ValueError(
                 f"This model was fit using exogenous data. Forecasting cannot be performed without "
@@ -1783,7 +1854,7 @@ class PyMCStateSpace:

         if isinstance(scenario, dict):
             for name, data in scenario.items():
-                if name not in self._exog_names:
+                if name not in self.data_names:
                     raise ValueError(
                         f"Scenario data provided for variable '{name}', which is not an exogenous variable "
                         f"used to fit the model."
@@ -1824,12 +1895,12 @@ class PyMCStateSpace:
         # name should only be None on the first non-recursive call. We only arrive to this branch in that case
         # if a non-dictionary was passed, which in turn should only happen if only a single exogenous data
         # needs to be set.
-        if len(self._exog_names) > 1:
+        if len(self.data_names) > 1:
             raise ValueError(
                 "Multiple exogenous variables were used to fit the model. Provide a dictionary of "
                 "scenario data instead."
             )
-        name = self._exog_names[0]
+        name = self.data_names[0]

         # Omit dataframe from this basic shape check so we can give more detailed information about missing columns
         # in the next check
@@ -2031,7 +2102,7 @@ class PyMCStateSpace:
             return scenario

         # This was already checked as valid
-        name = self._exog_names[0] if name is None else name
+        name = self.data_names[0] if name is None else name

         # Small tidying up in the case we just have a single scenario that's already a dataframe.
         if isinstance(scenario, pd.DataFrame | pd.Series):