pymc-extras 0.2.0 (pymc_extras-0.2.0-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pymc_extras/__init__.py +29 -0
- pymc_extras/distributions/__init__.py +40 -0
- pymc_extras/distributions/continuous.py +351 -0
- pymc_extras/distributions/discrete.py +399 -0
- pymc_extras/distributions/histogram_utils.py +163 -0
- pymc_extras/distributions/multivariate/__init__.py +3 -0
- pymc_extras/distributions/multivariate/r2d2m2cp.py +446 -0
- pymc_extras/distributions/timeseries.py +356 -0
- pymc_extras/gp/__init__.py +18 -0
- pymc_extras/gp/latent_approx.py +183 -0
- pymc_extras/inference/__init__.py +18 -0
- pymc_extras/inference/find_map.py +431 -0
- pymc_extras/inference/fit.py +44 -0
- pymc_extras/inference/laplace.py +570 -0
- pymc_extras/inference/pathfinder.py +134 -0
- pymc_extras/inference/smc/__init__.py +13 -0
- pymc_extras/inference/smc/sampling.py +451 -0
- pymc_extras/linearmodel.py +130 -0
- pymc_extras/model/__init__.py +0 -0
- pymc_extras/model/marginal/__init__.py +0 -0
- pymc_extras/model/marginal/distributions.py +276 -0
- pymc_extras/model/marginal/graph_analysis.py +372 -0
- pymc_extras/model/marginal/marginal_model.py +595 -0
- pymc_extras/model/model_api.py +56 -0
- pymc_extras/model/transforms/__init__.py +0 -0
- pymc_extras/model/transforms/autoreparam.py +434 -0
- pymc_extras/model_builder.py +759 -0
- pymc_extras/preprocessing/__init__.py +0 -0
- pymc_extras/preprocessing/standard_scaler.py +17 -0
- pymc_extras/printing.py +182 -0
- pymc_extras/statespace/__init__.py +13 -0
- pymc_extras/statespace/core/__init__.py +7 -0
- pymc_extras/statespace/core/compile.py +48 -0
- pymc_extras/statespace/core/representation.py +438 -0
- pymc_extras/statespace/core/statespace.py +2268 -0
- pymc_extras/statespace/filters/__init__.py +15 -0
- pymc_extras/statespace/filters/distributions.py +453 -0
- pymc_extras/statespace/filters/kalman_filter.py +820 -0
- pymc_extras/statespace/filters/kalman_smoother.py +126 -0
- pymc_extras/statespace/filters/utilities.py +59 -0
- pymc_extras/statespace/models/ETS.py +670 -0
- pymc_extras/statespace/models/SARIMAX.py +536 -0
- pymc_extras/statespace/models/VARMAX.py +393 -0
- pymc_extras/statespace/models/__init__.py +6 -0
- pymc_extras/statespace/models/structural.py +1651 -0
- pymc_extras/statespace/models/utilities.py +387 -0
- pymc_extras/statespace/utils/__init__.py +0 -0
- pymc_extras/statespace/utils/constants.py +74 -0
- pymc_extras/statespace/utils/coord_tools.py +0 -0
- pymc_extras/statespace/utils/data_tools.py +182 -0
- pymc_extras/utils/__init__.py +23 -0
- pymc_extras/utils/linear_cg.py +290 -0
- pymc_extras/utils/pivoted_cholesky.py +69 -0
- pymc_extras/utils/prior.py +200 -0
- pymc_extras/utils/spline.py +131 -0
- pymc_extras/version.py +11 -0
- pymc_extras/version.txt +1 -0
- pymc_extras-0.2.0.dist-info/LICENSE +212 -0
- pymc_extras-0.2.0.dist-info/METADATA +99 -0
- pymc_extras-0.2.0.dist-info/RECORD +101 -0
- pymc_extras-0.2.0.dist-info/WHEEL +5 -0
- pymc_extras-0.2.0.dist-info/top_level.txt +2 -0
- tests/__init__.py +13 -0
- tests/distributions/__init__.py +19 -0
- tests/distributions/test_continuous.py +185 -0
- tests/distributions/test_discrete.py +210 -0
- tests/distributions/test_discrete_markov_chain.py +258 -0
- tests/distributions/test_multivariate.py +304 -0
- tests/model/__init__.py +0 -0
- tests/model/marginal/__init__.py +0 -0
- tests/model/marginal/test_distributions.py +131 -0
- tests/model/marginal/test_graph_analysis.py +182 -0
- tests/model/marginal/test_marginal_model.py +867 -0
- tests/model/test_model_api.py +29 -0
- tests/statespace/__init__.py +0 -0
- tests/statespace/test_ETS.py +411 -0
- tests/statespace/test_SARIMAX.py +405 -0
- tests/statespace/test_VARMAX.py +184 -0
- tests/statespace/test_coord_assignment.py +116 -0
- tests/statespace/test_distributions.py +270 -0
- tests/statespace/test_kalman_filter.py +326 -0
- tests/statespace/test_representation.py +175 -0
- tests/statespace/test_statespace.py +818 -0
- tests/statespace/test_statespace_JAX.py +156 -0
- tests/statespace/test_structural.py +829 -0
- tests/statespace/utilities/__init__.py +0 -0
- tests/statespace/utilities/shared_fixtures.py +9 -0
- tests/statespace/utilities/statsmodel_local_level.py +42 -0
- tests/statespace/utilities/test_helpers.py +310 -0
- tests/test_blackjax_smc.py +222 -0
- tests/test_find_map.py +98 -0
- tests/test_histogram_approximation.py +109 -0
- tests/test_laplace.py +238 -0
- tests/test_linearmodel.py +208 -0
- tests/test_model_builder.py +306 -0
- tests/test_pathfinder.py +45 -0
- tests/test_pivoted_cholesky.py +24 -0
- tests/test_printing.py +98 -0
- tests/test_prior_from_trace.py +172 -0
- tests/test_splines.py +77 -0
- tests/utils.py +31 -0

pymc_extras/statespace/models/VARMAX.py
@@ -0,0 +1,393 @@
+from collections.abc import Sequence
+from typing import Any
+
+import numpy as np
+import pytensor
+import pytensor.tensor as pt
+
+from pytensor.tensor.slinalg import solve_discrete_lyapunov
+
+from pymc_extras.statespace.core.statespace import PyMCStateSpace
+from pymc_extras.statespace.models.utilities import make_default_coords
+from pymc_extras.statespace.utils.constants import (
+    ALL_STATE_AUX_DIM,
+    ALL_STATE_DIM,
+    AR_PARAM_DIM,
+    MA_PARAM_DIM,
+    OBS_STATE_AUX_DIM,
+    OBS_STATE_DIM,
+    SHOCK_AUX_DIM,
+    SHOCK_DIM,
+)
+
+floatX = pytensor.config.floatX
+
+
+class BayesianVARMAX(PyMCStateSpace):
+    r"""
+    Vector AutoRegressive Moving Average with eXogenous Regressors
+
+    Parameters
+    ----------
+    order: tuple of (int, int)
+        Number of autoregressive (AR) and moving average (MA) terms to include in the model. All terms up to the
+        specified order are included. For restricted models, set zeros directly on the priors.
+
+    endog_names: list of str, optional
+        Names of the endogenous variables being modeled. Used to generate names for the state and shock coords. If
+        None, the state names will simply be numbered.
+
+        Exactly one of either ``endog_names`` or ``k_endog`` must be specified.
+
+    k_endog: int, optional
+        Number of endogenous states to be modeled.
+
+        Exactly one of either ``endog_names`` or ``k_endog`` must be specified.
+
+    stationary_initialization: bool, default False
+        If true, the initial state and initial state covariance will not be assigned priors. Instead, their steady
+        state values will be used. If False, the user is responsible for setting priors on the initial state and
+        initial covariance.
+
+        .. warning:: This option is very sensitive to the priors placed on the AR and MA parameters. If the model dynamics
+            for a given sample are not stationary, sampling will fail with a "covariance is not positive semi-definite"
+            error.
+
+    filter_type: str, default "standard"
+        The type of Kalman Filter to use. Options are "standard", "single", "univariate", "steady_state",
+        and "cholesky". See the docs for kalman filters for more details.
+
+    state_structure: str, default "fast"
+        How to represent the state-space system. When "interpretable", each element of the state vector will have a
+        precise meaning as either lagged data, innovations, or lagged innovations. This comes at the cost of a larger
+        state vector, which may hurt performance.
+
+        When "fast", states are combined to minimize the dimension of the state vector, but lags and innovations are
+        mixed together as a result. Only the first state (the modeled timeseries) will have an obvious interpretation
+        in this case.
+
+    measurement_error: bool, default False
+        If true, a measurement error term is added to the model.
+
+    verbose: bool, default True
+        If true, a message will be logged to the terminal explaining the variable names, dimensions, and supports.
+
+    Notes
+    -----
+    The VARMA model is a multivariate extension of the SARIMAX model. Given a set of timeseries :math:`\{x_t\}_{t=0}^T`,
+    with :math:`x_t = \begin{bmatrix} x_{1,t} & x_{2,t} & \cdots & x_{k,t} \end{bmatrix}^T`, a VARMA models each series
+    as a function of the histories of all series. Specifically, denoting the AR-MA order as (p, q), a VARMA can be
+    written:
+
+    .. math::
+        x_t = A_1 x_{t-1} + A_2 x_{t-2} + \cdots + A_p x_{t-p} + B_1 \varepsilon_{t-1} + \cdots
+            + B_q \varepsilon_{t-q} + \varepsilon_t
+
+    Where :math:`\varepsilon_t = \begin{bmatrix} \varepsilon_{1,t} & \varepsilon_{2,t} & \cdots &
+    \varepsilon_{k,t}\end{bmatrix}^T \sim N(0, \Sigma)` is a vector of i.i.d. stochastic innovations or shocks that drive
+    intertemporal variation in the data. Matrices :math:`A_i, B_i` are :math:`k \times k` coefficient matrices:
+
+    .. math::
+        A_i = \begin{bmatrix} \rho_{1,i,1} & \rho_{1,i,2} & \cdots & \rho_{1,i,k} \\
+                              \rho_{2,i,1} & \rho_{2,i,2} & \cdots & \rho_{2,i,k} \\
+                              \vdots       & \vdots       & \ddots & \vdots       \\
+                              \rho_{k,i,1} & \rho_{k,i,2} & \cdots & \rho_{k,i,k} \end{bmatrix}
+
+    Internally, this representation is not used. Instead, the vectors :math:`x_t, x_{t-1}, \cdots, x_{t-p},
+    \varepsilon_{t-1}, \cdots, \varepsilon_{t-q}` are concatenated into a single column vector of length ``k * (p+q)``,
+    while the coefficient matrices are likewise concatenated into a single coefficient matrix, :math:`T`.
+
+    As the dimensionality of the VARMA system increases -- either because there are a large number of timeseries
+    included in the analysis, or because the order is large -- the probability of sampling a stationary matrix :math:`T`
+    goes to zero. This has two implications for applied work. First, a non-stationary system will exhibit explosive
+    behavior, potentially rendering impulse response functions and long-term forecasts useless. Second, it is not
+    possible to do stationary initialization. Stationary initialization significantly speeds up sampling, and should be
+    preferred when possible.
+
+    Examples
+    --------
+    The following code snippet estimates a VARMA(1, 1):
+
+    .. code:: python
+
+        import pymc_extras.statespace as pmss
+        import pymc as pm
+
+        # Create VAR Statespace Model
+        bvar_mod = pmss.BayesianVARMAX(endog_names=data.columns, order=(2, 0),
+                                       stationary_initialization=False, measurement_error=False,
+                                       filter_type="standard", verbose=True)
+
+        # Unpack dims and coords
+        x0_dims, P0_dims, state_cov_dims, ar_dims = bvar_mod.param_dims.values()
+        coords = bvar_mod.coords
+
+        # Estimate PyMC model
+        with pm.Model(coords=coords) as var_mod:
+            x0 = pm.Normal("x0", dims=x0_dims)
+            P0_diag = pm.Gamma("P0_diag", alpha=2, beta=1, size=data.shape[1] * 2, dims=P0_dims[0])
+            P0 = pm.Deterministic("P0", pt.diag(P0_diag), dims=P0_dims)
+
+            state_chol, _, _ = pm.LKJCholeskyCov(
+                "state_chol", eta=1, n=bvar_mod.k_posdef, sd_dist=pm.Exponential.dist(lam=1)
+            )
+
+            ar_params = pm.Normal("ar_params", mu=0, sigma=1, dims=ar_dims)
+            state_cov = pm.Deterministic("state_cov", state_chol @ state_chol.T, dims=state_cov_dims)
+
+            bvar_mod.build_statespace_graph(data, mode="JAX")
+            idata = pm.sample(nuts_sampler="numpyro")
+    """
+
+    def __init__(
+        self,
+        order: tuple[int, int],
+        endog_names: list[str] | None = None,
+        k_endog: int | None = None,
+        stationary_initialization: bool = False,
+        filter_type: str = "standard",
+        measurement_error: bool = False,
+        verbose=True,
+    ):
+        if (endog_names is None) and (k_endog is None):
+            raise ValueError("Must specify either endog_names or k_endog")
+        if (endog_names is not None) and (k_endog is None):
+            k_endog = len(endog_names)
+        if (endog_names is None) and (k_endog is not None):
+            endog_names = [f"state.{i + 1}" for i in range(k_endog)]
+        if (endog_names is not None) and (k_endog is not None):
+            if len(endog_names) != k_endog:
+                raise ValueError("Length of provided endog_names does not match provided k_endog")
+
+        self.endog_names = list(endog_names)
+        self.p, self.q = order
+        self.stationary_initialization = stationary_initialization
+
+        k_order = max(self.p, 1) + self.q
+        k_states = int(k_endog * k_order)
+        k_posdef = k_endog
+
+        super().__init__(
+            k_endog,
+            k_states,
+            k_posdef,
+            filter_type,
+            verbose=verbose,
+            measurement_error=measurement_error,
+        )
+
+        # Save counts of the number of parameters in each category
+        self.param_counts = {
+            "x0": k_states * (1 - self.stationary_initialization),
+            "P0": k_states**2 * (1 - self.stationary_initialization),
+            "AR": k_endog**2 * self.p,
+            "MA": k_endog**2 * self.q,
+            "state_cov": k_posdef**2,
+            "sigma_obs": k_endog * self.measurement_error,
+        }
+
+    @property
+    def param_names(self):
+        names = ["x0", "P0", "ar_params", "ma_params", "state_cov", "sigma_obs"]
+        if self.stationary_initialization:
+            names.remove("P0")
+            names.remove("x0")
+        if not self.measurement_error:
+            names.remove("sigma_obs")
+        if self.p == 0:
+            names.remove("ar_params")
+        if self.q == 0:
+            names.remove("ma_params")
+        return names
+
+    @property
+    def param_info(self) -> dict[str, dict[str, Any]]:
+        info = {
+            "x0": {
+                "shape": (self.k_states,),
+                "constraints": None,
+            },
+            "P0": {
+                "shape": (self.k_states, self.k_states),
+                "constraints": "Positive Semi-definite",
+            },
+            "sigma_obs": {
+                "shape": (self.k_endog, self.k_endog),
+                "constraints": "Positive Semi-definite",
+            },
+            "state_cov": {
+                "shape": (self.k_posdef, self.k_posdef),
+                "constraints": "Positive Semi-definite",
+            },
+            "ar_params": {
+                "shape": (self.k_endog, self.p, self.k_endog),
+                "constraints": "None",
+            },
+            "ma_params": {
+                "shape": (self.k_endog, self.q, self.k_endog),
+                "constraints": "None",
+            },
+        }
+
+        for name in self.param_names:
+            info[name]["dims"] = self.param_dims[name]
+
+        return {name: info[name] for name in self.param_names}
+
+    @property
+    def state_names(self):
+        state_names = self.endog_names.copy()
+        state_names += [
+            f"L{i + 1}.{state}" for i in range(self.p - 1) for state in self.endog_names
+        ]
+        state_names += [
+            f"L{i + 1}.{state}_innov" for i in range(self.q) for state in self.endog_names
+        ]
+
+        return state_names
+
+    @property
+    def observed_states(self):
+        return self.endog_names
+
+    @property
+    def shock_names(self):
+        return self.endog_names
+
+    @property
+    def default_priors(self):
+        raise NotImplementedError
+
+    @property
+    def coords(self) -> dict[str, Sequence]:
+        coords = make_default_coords(self)
+        if self.p > 0:
+            coords.update({AR_PARAM_DIM: list(range(1, self.p + 1))})
+        if self.q > 0:
+            coords.update({MA_PARAM_DIM: list(range(1, self.q + 1))})
+
+        return coords
+
+    @property
+    def param_dims(self):
+        coord_map = {
+            "x0": (ALL_STATE_DIM,),
+            "P0": (ALL_STATE_DIM, ALL_STATE_AUX_DIM),
+            "sigma_obs": (OBS_STATE_DIM,),
+            "state_cov": (SHOCK_DIM, SHOCK_AUX_DIM),
+            "ar_params": (OBS_STATE_DIM, AR_PARAM_DIM, OBS_STATE_AUX_DIM),
+            "ma_params": (OBS_STATE_DIM, MA_PARAM_DIM, OBS_STATE_AUX_DIM),
+        }
+
+        if not self.measurement_error:
+            del coord_map["sigma_obs"]
+        if self.p == 0:
+            del coord_map["ar_params"]
+        if self.q == 0:
+            del coord_map["ma_params"]
+        if self.stationary_initialization:
+            del coord_map["P0"]
+            del coord_map["x0"]
+
+        return coord_map
+
+    def add_default_priors(self):
+        raise NotImplementedError
+
+    def make_symbolic_graph(self) -> None:
+        # Initialize the matrices
+        if not self.stationary_initialization:
+            # initial states
+            x0 = self.make_and_register_variable("x0", shape=(self.k_states,), dtype=floatX)
+            self.ssm["initial_state", :] = x0
+
+            # initial covariance
+            P0 = self.make_and_register_variable(
+                "P0", shape=(self.k_states, self.k_states), dtype=floatX
+            )
+            self.ssm["initial_state_cov", :, :] = P0
+
+        # Design matrix is a truncated identity (first k_obs states observed)
+        self.ssm[("design", *np.diag_indices(self.k_endog))] = 1
+
+        # Transition matrix has 4 blocks:
+        # Upper left: AR coefs (k_obs, k_obs * min(p, 1))
+        # Upper right: MA coefs (k_obs, k_obs * q)
+        # Lower left: Truncated identity (k_obs * min(p, 1), k_obs * min(p, 1))
+        # Lower right: Shifted identity (k_obs * p, k_obs * q)
+        self.ssm["transition"] = np.zeros((self.k_states, self.k_states))
+        if self.p > 1:
+            idx = (
+                slice(self.k_endog, self.k_endog * self.p),
+                slice(0, self.k_endog * (self.p - 1)),
+            )
+            self.ssm[("transition", *idx)] = np.eye(self.k_endog * (self.p - 1))
+
+        if self.q > 1:
+            idx = (
+                slice(-self.k_endog * (self.q - 1), None),
+                slice(-self.k_endog * self.q, -self.k_endog),
+            )
+            self.ssm[("transition", *idx)] = np.eye(self.k_endog * (self.q - 1))
+
+        if self.p > 0:
+            ar_param_idx = ("transition", slice(0, self.k_endog), slice(0, self.k_endog * self.p))
+
+            # Register the AR parameter matrix as a (k, p, k), then reshape it and allocate it in the transition matrix
+            # This way the user can use 3 dimensions in the prior (clearer?)
+            ar_params = self.make_and_register_variable(
+                "ar_params", shape=(self.k_endog, self.p, self.k_endog), dtype=floatX
+            )
+
+            ar_params = ar_params.reshape((self.k_endog, self.k_endog * self.p))
+            self.ssm[ar_param_idx] = ar_params
+
+        # The selection matrix is (k_states, k_obs), with two (k_obs, k_obs) identity
+        # matrix blocks inside. One is always on top, the other starts after (k_obs * p) rows
+        self.ssm["selection"] = np.zeros((self.k_states, self.k_endog))
+        self.ssm["selection", slice(0, self.k_endog), :] = np.eye(self.k_endog)
+        if self.q > 0:
+            ma_param_idx = (
+                "transition",
+                slice(0, self.k_endog),
+                slice(self.k_endog * max(1, self.p), None),
+            )
+
+            # Same as above, register with 3 dimensions then reshape
+            ma_params = self.make_and_register_variable(
+                "ma_params", shape=(self.k_endog, self.q, self.k_endog), dtype=floatX
+            )
+
+            ma_params = ma_params.reshape((self.k_endog, self.k_endog * self.q))
+            self.ssm[ma_param_idx] = ma_params
+
+            end = -self.k_endog * (self.q - 1) if self.q > 1 else None
+            self.ssm["selection", slice(self.k_endog * -self.q, end), :] = np.eye(self.k_endog)
+
+        if self.measurement_error:
+            obs_cov_idx = ("obs_cov", *np.diag_indices(self.k_endog))
+            sigma_obs = self.make_and_register_variable(
+                "sigma_obs", shape=(self.k_endog,), dtype=floatX
+            )
+            self.ssm[obs_cov_idx] = sigma_obs
+
+        state_cov = self.make_and_register_variable(
+            "state_cov", shape=(self.k_posdef, self.k_posdef), dtype=floatX
+        )
+        self.ssm["state_cov", :, :] = state_cov
+
+        if self.stationary_initialization:
+            # Solve the matrix quadratic for P0
+            T = self.ssm["transition"]
+            R = self.ssm["selection"]
+            Q = self.ssm["state_cov"]
+            c = self.ssm["state_intercept"]
+
+            x0 = pt.linalg.solve(pt.eye(T.shape[0]) - T, c, assume_a="gen", check_finite=False)
+            P0 = solve_discrete_lyapunov(
+                T,
+                pt.linalg.matrix_dot(R, Q, R.T),
+                method="direct" if self.k_states < 10 else "bilinear",
+            )
+            self.ssm["initial_state", :] = x0
+            self.ssm["initial_state_cov", :, :] = P0
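
To connect the docstring's Notes with the block comments in ``make_symbolic_graph``, the following sketch writes out the "fast" state-space form the class assembles, for the concrete case of ``k`` observed series and ``order=(2, 2)``. The hidden-state symbol :math:`z_t` and the matrix names :math:`T, R, Z` are introduced here only for illustration (they mirror ``transition``, ``selection``, and ``design``); the block layout is reconstructed from the indexing logic in the diff, not quoted from the package documentation.

.. math::

    z_t = \begin{bmatrix} x_t \\ x_{t-1} \\ \varepsilon_t \\ \varepsilon_{t-1} \end{bmatrix}
        = \underbrace{\begin{bmatrix}
              A_1 & A_2 & B_1 & B_2 \\
              I_k & 0   & 0   & 0   \\
              0   & 0   & 0   & 0   \\
              0   & 0   & I_k & 0
          \end{bmatrix}}_{T \,=\, \texttt{transition}} z_{t-1}
        + \underbrace{\begin{bmatrix} I_k \\ 0 \\ I_k \\ 0 \end{bmatrix}}_{R \,=\, \texttt{selection}} \varepsilon_t,
    \qquad
    x_t = \underbrace{\begin{bmatrix} I_k & 0 & 0 & 0 \end{bmatrix}}_{Z \,=\, \texttt{design}} z_t

With ``stationary_initialization=True``, the final branch of ``make_symbolic_graph`` then sets the initial moments to their steady-state values by solving :math:`x_0 = (I - T)^{-1} c` and the discrete Lyapunov equation :math:`P_0 = T P_0 T^\top + R Q R^\top` via ``solve_discrete_lyapunov``.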

pymc_extras/statespace/models/__init__.py
@@ -0,0 +1,6 @@
+from pymc_extras.statespace.models import structural
+from pymc_extras.statespace.models.ETS import BayesianETS
+from pymc_extras.statespace.models.SARIMAX import BayesianSARIMA
+from pymc_extras.statespace.models.VARMAX import BayesianVARMAX
+
+__all__ = ["structural", "BayesianSARIMA", "BayesianVARMAX", "BayesianETS"]
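
As a quick orientation to the exports in ``pymc_extras/statespace/models/__init__.py`` and the parameter bookkeeping implemented by ``param_names``, ``param_info``, and ``coords`` above, here is a small, hypothetical smoke test. The object name ``mod`` and the values in the comments are illustrative, traced from the property logic in the diff (and from the docstring example, which likewise constructs the model before any data is attached), rather than from running the package.

.. code:: python

    from pymc_extras.statespace.models import BayesianVARMAX
    from pymc_extras.statespace.utils.constants import AR_PARAM_DIM, MA_PARAM_DIM

    # Two observed series, VARMA(2, 1). stationary_initialization defaults to
    # False, so x0 and P0 remain free parameters that need priors.
    mod = BayesianVARMAX(order=(2, 1), k_endog=2, measurement_error=False, verbose=False)

    # "sigma_obs" is dropped because measurement_error=False
    print(mod.param_names)
    # expected: ['x0', 'P0', 'ar_params', 'ma_params', 'state_cov']

    # AR/MA blocks are registered as (k_endog, lags, k_endog) tensors
    print(mod.param_info["ar_params"]["shape"])
    # expected: (2, 2, 2)

    # coords labels the AR and MA lags starting from 1
    print(mod.coords[AR_PARAM_DIM], mod.coords[MA_PARAM_DIM])
    # expected: [1, 2] [1]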