pymc-extras 0.2.4__py3-none-any.whl → 0.2.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pymc_extras/__init__.py +6 -4
- pymc_extras/distributions/__init__.py +2 -0
- pymc_extras/distributions/continuous.py +3 -2
- pymc_extras/distributions/discrete.py +3 -1
- pymc_extras/distributions/transforms/__init__.py +3 -0
- pymc_extras/distributions/transforms/partial_order.py +227 -0
- pymc_extras/inference/__init__.py +4 -2
- pymc_extras/inference/find_map.py +62 -17
- pymc_extras/inference/fit.py +6 -4
- pymc_extras/inference/laplace.py +14 -8
- pymc_extras/inference/pathfinder/lbfgs.py +49 -13
- pymc_extras/inference/pathfinder/pathfinder.py +89 -103
- pymc_extras/statespace/core/statespace.py +191 -52
- pymc_extras/statespace/filters/distributions.py +15 -16
- pymc_extras/statespace/filters/kalman_filter.py +1 -18
- pymc_extras/statespace/filters/kalman_smoother.py +2 -6
- pymc_extras/statespace/models/ETS.py +10 -0
- pymc_extras/statespace/models/SARIMAX.py +26 -5
- pymc_extras/statespace/models/VARMAX.py +12 -2
- pymc_extras/statespace/models/structural.py +18 -5
- pymc_extras/statespace/utils/data_tools.py +24 -9
- pymc_extras-0.2.6.dist-info/METADATA +318 -0
- pymc_extras-0.2.6.dist-info/RECORD +65 -0
- {pymc_extras-0.2.4.dist-info → pymc_extras-0.2.6.dist-info}/WHEEL +1 -2
- pymc_extras/version.py +0 -11
- pymc_extras/version.txt +0 -1
- pymc_extras-0.2.4.dist-info/METADATA +0 -110
- pymc_extras-0.2.4.dist-info/RECORD +0 -105
- pymc_extras-0.2.4.dist-info/top_level.txt +0 -2
- tests/__init__.py +0 -13
- tests/distributions/__init__.py +0 -19
- tests/distributions/test_continuous.py +0 -185
- tests/distributions/test_discrete.py +0 -210
- tests/distributions/test_discrete_markov_chain.py +0 -258
- tests/distributions/test_multivariate.py +0 -304
- tests/model/__init__.py +0 -0
- tests/model/marginal/__init__.py +0 -0
- tests/model/marginal/test_distributions.py +0 -132
- tests/model/marginal/test_graph_analysis.py +0 -182
- tests/model/marginal/test_marginal_model.py +0 -967
- tests/model/test_model_api.py +0 -38
- tests/statespace/__init__.py +0 -0
- tests/statespace/test_ETS.py +0 -411
- tests/statespace/test_SARIMAX.py +0 -405
- tests/statespace/test_VARMAX.py +0 -184
- tests/statespace/test_coord_assignment.py +0 -116
- tests/statespace/test_distributions.py +0 -270
- tests/statespace/test_kalman_filter.py +0 -326
- tests/statespace/test_representation.py +0 -175
- tests/statespace/test_statespace.py +0 -872
- tests/statespace/test_statespace_JAX.py +0 -156
- tests/statespace/test_structural.py +0 -836
- tests/statespace/utilities/__init__.py +0 -0
- tests/statespace/utilities/shared_fixtures.py +0 -9
- tests/statespace/utilities/statsmodel_local_level.py +0 -42
- tests/statespace/utilities/test_helpers.py +0 -310
- tests/test_blackjax_smc.py +0 -222
- tests/test_find_map.py +0 -103
- tests/test_histogram_approximation.py +0 -109
- tests/test_laplace.py +0 -265
- tests/test_linearmodel.py +0 -208
- tests/test_model_builder.py +0 -306
- tests/test_pathfinder.py +0 -203
- tests/test_pivoted_cholesky.py +0 -24
- tests/test_printing.py +0 -98
- tests/test_prior_from_trace.py +0 -172
- tests/test_splines.py +0 -77
- tests/utils.py +0 -0
- {pymc_extras-0.2.4.dist-info → pymc_extras-0.2.6.dist-info/licenses}/LICENSE +0 -0
|
@@ -1,836 +0,0 @@
|
|
|
1
|
-
import functools as ft
|
|
2
|
-
import warnings
|
|
3
|
-
|
|
4
|
-
from collections import defaultdict
|
|
5
|
-
from copyreg import remove_extension
|
|
6
|
-
from typing import Optional
|
|
7
|
-
|
|
8
|
-
import numpy as np
|
|
9
|
-
import pandas as pd
|
|
10
|
-
import pymc as pm
|
|
11
|
-
import pytensor
|
|
12
|
-
import pytensor.tensor as pt
|
|
13
|
-
import pytest
|
|
14
|
-
import statsmodels.api as sm
|
|
15
|
-
|
|
16
|
-
from numpy.testing import assert_allclose
|
|
17
|
-
from scipy import linalg
|
|
18
|
-
|
|
19
|
-
from pymc_extras.statespace import structural as st
|
|
20
|
-
from pymc_extras.statespace.utils.constants import (
|
|
21
|
-
ALL_STATE_AUX_DIM,
|
|
22
|
-
ALL_STATE_DIM,
|
|
23
|
-
AR_PARAM_DIM,
|
|
24
|
-
OBS_STATE_AUX_DIM,
|
|
25
|
-
OBS_STATE_DIM,
|
|
26
|
-
SHOCK_AUX_DIM,
|
|
27
|
-
SHOCK_DIM,
|
|
28
|
-
SHORT_NAME_TO_LONG,
|
|
29
|
-
)
|
|
30
|
-
from tests.statespace.utilities.shared_fixtures import ( # pylint: disable=unused-import
|
|
31
|
-
rng,
|
|
32
|
-
)
|
|
33
|
-
from tests.statespace.utilities.test_helpers import (
|
|
34
|
-
assert_pattern_repeats,
|
|
35
|
-
simulate_from_numpy_model,
|
|
36
|
-
unpack_symbolic_matrices_with_params,
|
|
37
|
-
)
|
|
38
|
-
|
|
39
|
-
floatX = pytensor.config.floatX
|
|
40
|
-
ATOL = 1e-8 if floatX.endswith("64") else 1e-4
|
|
41
|
-
RTOL = 0 if floatX.endswith("64") else 1e-6
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
def _assert_all_statespace_matrices_match(mod, params, sm_mod):
|
|
45
|
-
x0, P0, c, d, T, Z, R, H, Q = unpack_symbolic_matrices_with_params(mod, params)
|
|
46
|
-
|
|
47
|
-
sm_x0, sm_H0, sm_P0 = sm_mod.initialization()
|
|
48
|
-
|
|
49
|
-
if len(x0) > 0:
|
|
50
|
-
assert_allclose(x0, sm_x0)
|
|
51
|
-
|
|
52
|
-
for name, matrix in zip(["T", "R", "Z", "Q"], [T, R, Z, Q]):
|
|
53
|
-
long_name = SHORT_NAME_TO_LONG[name]
|
|
54
|
-
if np.any([x == 0 for x in matrix.shape]):
|
|
55
|
-
continue
|
|
56
|
-
assert_allclose(
|
|
57
|
-
sm_mod.ssm[long_name],
|
|
58
|
-
matrix,
|
|
59
|
-
err_msg=f"matrix {name} does not match statsmodels",
|
|
60
|
-
atol=ATOL,
|
|
61
|
-
rtol=RTOL,
|
|
62
|
-
)
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
def _assert_coord_shapes_match_matrices(mod, params):
|
|
66
|
-
if "initial_state_cov" not in params:
|
|
67
|
-
params["initial_state_cov"] = np.eye(mod.k_states)
|
|
68
|
-
|
|
69
|
-
x0, P0, c, d, T, Z, R, H, Q = unpack_symbolic_matrices_with_params(mod, params)
|
|
70
|
-
|
|
71
|
-
n_states = len(mod.coords[ALL_STATE_DIM])
|
|
72
|
-
|
|
73
|
-
# There will always be one shock dimension -- dummies are inserted into fully deterministic models to avoid errors
|
|
74
|
-
# in the state space representation.
|
|
75
|
-
n_shocks = max(1, len(mod.coords[SHOCK_DIM]))
|
|
76
|
-
n_obs = len(mod.coords[OBS_STATE_DIM])
|
|
77
|
-
|
|
78
|
-
assert x0.shape[-1:] == (
|
|
79
|
-
n_states,
|
|
80
|
-
), f"x0 expected to have shape (n_states, ), found {x0.shape[-1:]}"
|
|
81
|
-
assert P0.shape[-2:] == (
|
|
82
|
-
n_states,
|
|
83
|
-
n_states,
|
|
84
|
-
), f"P0 expected to have shape (n_states, n_states), found {P0.shape[-2:]}"
|
|
85
|
-
assert c.shape[-1:] == (
|
|
86
|
-
n_states,
|
|
87
|
-
), f"c expected to have shape (n_states, ), found {c.shape[-1:]}"
|
|
88
|
-
assert d.shape[-1:] == (n_obs,), f"d expected to have shape (n_obs, ), found {d.shape[-1:]}"
|
|
89
|
-
assert T.shape[-2:] == (
|
|
90
|
-
n_states,
|
|
91
|
-
n_states,
|
|
92
|
-
), f"T expected to have shape (n_states, n_states), found {T.shape[-2:]}"
|
|
93
|
-
assert Z.shape[-2:] == (
|
|
94
|
-
n_obs,
|
|
95
|
-
n_states,
|
|
96
|
-
), f"Z expected to have shape (n_obs, n_states), found {Z.shape[-2:]}"
|
|
97
|
-
assert R.shape[-2:] == (
|
|
98
|
-
n_states,
|
|
99
|
-
n_shocks,
|
|
100
|
-
), f"R expected to have shape (n_states, n_shocks), found {R.shape[-2:]}"
|
|
101
|
-
assert H.shape[-2:] == (
|
|
102
|
-
n_obs,
|
|
103
|
-
n_obs,
|
|
104
|
-
), f"H expected to have shape (n_obs, n_obs), found {H.shape[-2:]}"
|
|
105
|
-
assert Q.shape[-2:] == (
|
|
106
|
-
n_shocks,
|
|
107
|
-
n_shocks,
|
|
108
|
-
), f"Q expected to have shape (n_shocks, n_shocks), found {Q.shape[-2:]}"
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
def _assert_basic_coords_correct(mod):
|
|
112
|
-
assert mod.coords[ALL_STATE_DIM] == mod.state_names
|
|
113
|
-
assert mod.coords[ALL_STATE_AUX_DIM] == mod.state_names
|
|
114
|
-
assert mod.coords[SHOCK_DIM] == mod.shock_names
|
|
115
|
-
assert mod.coords[SHOCK_AUX_DIM] == mod.shock_names
|
|
116
|
-
assert mod.coords[OBS_STATE_DIM] == ["data"]
|
|
117
|
-
assert mod.coords[OBS_STATE_AUX_DIM] == ["data"]
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
def _assert_keys_match(test_dict, expected_dict):
|
|
121
|
-
expected_keys = list(expected_dict.keys())
|
|
122
|
-
param_keys = list(test_dict.keys())
|
|
123
|
-
key_diff = set(expected_keys) - set(param_keys)
|
|
124
|
-
assert len(key_diff) == 0, f'{", ".join(key_diff)} were not found in the test_dict keys.'
|
|
125
|
-
|
|
126
|
-
key_diff = set(param_keys) - set(expected_keys)
|
|
127
|
-
assert (
|
|
128
|
-
len(key_diff) == 0
|
|
129
|
-
), f'{", ".join(key_diff)} were keys of the tests_dict not in expected_dict.'
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
def _assert_param_dims_correct(param_dims, expected_dims):
|
|
133
|
-
if len(expected_dims) == 0 and len(param_dims) == 0:
|
|
134
|
-
return
|
|
135
|
-
|
|
136
|
-
_assert_keys_match(param_dims, expected_dims)
|
|
137
|
-
for param, dims in expected_dims.items():
|
|
138
|
-
assert dims == param_dims[param], f"dims for parameter {param} do not match"
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
def _assert_coords_correct(coords, expected_coords):
|
|
142
|
-
if len(coords) == 0 and len(expected_coords) == 0:
|
|
143
|
-
return
|
|
144
|
-
|
|
145
|
-
_assert_keys_match(coords, expected_coords)
|
|
146
|
-
for dim, labels in expected_coords.items():
|
|
147
|
-
assert labels == coords[dim], f"labels on dimension {dim} do not match"
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
def _assert_params_info_correct(param_info, coords, param_dims):
|
|
151
|
-
for param in param_info.keys():
|
|
152
|
-
info = param_info[param]
|
|
153
|
-
|
|
154
|
-
dims = info["dims"]
|
|
155
|
-
labels = [coords[dim] for dim in dims] if dims is not None else None
|
|
156
|
-
if labels is not None:
|
|
157
|
-
assert param in param_dims.keys()
|
|
158
|
-
inferred_dims = param_dims[param]
|
|
159
|
-
else:
|
|
160
|
-
inferred_dims = None
|
|
161
|
-
|
|
162
|
-
shape = tuple(len(label) for label in labels) if labels is not None else ()
|
|
163
|
-
|
|
164
|
-
assert info["shape"] == shape
|
|
165
|
-
assert dims == inferred_dims
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
def create_structural_model_and_equivalent_statsmodel(
|
|
169
|
-
rng,
|
|
170
|
-
level: bool | None = False,
|
|
171
|
-
trend: bool | None = False,
|
|
172
|
-
seasonal: int | None = None,
|
|
173
|
-
freq_seasonal: list[dict] | None = None,
|
|
174
|
-
cycle: bool = False,
|
|
175
|
-
autoregressive: int | None = None,
|
|
176
|
-
exog: np.ndarray | None = None,
|
|
177
|
-
irregular: bool | None = False,
|
|
178
|
-
stochastic_level: bool | None = True,
|
|
179
|
-
stochastic_trend: bool | None = False,
|
|
180
|
-
stochastic_seasonal: bool | None = True,
|
|
181
|
-
stochastic_freq_seasonal: list[bool] | None = None,
|
|
182
|
-
stochastic_cycle: bool | None = False,
|
|
183
|
-
damped_cycle: bool | None = False,
|
|
184
|
-
):
|
|
185
|
-
with warnings.catch_warnings():
|
|
186
|
-
warnings.simplefilter("ignore")
|
|
187
|
-
mod = ft.partial(
|
|
188
|
-
sm.tsa.UnobservedComponents,
|
|
189
|
-
level=level,
|
|
190
|
-
trend=trend,
|
|
191
|
-
seasonal=seasonal,
|
|
192
|
-
freq_seasonal=freq_seasonal,
|
|
193
|
-
cycle=cycle,
|
|
194
|
-
autoregressive=autoregressive,
|
|
195
|
-
exog=exog,
|
|
196
|
-
irregular=irregular,
|
|
197
|
-
stochastic_level=stochastic_level,
|
|
198
|
-
stochastic_trend=stochastic_trend,
|
|
199
|
-
stochastic_seasonal=stochastic_seasonal,
|
|
200
|
-
stochastic_freq_seasonal=stochastic_freq_seasonal,
|
|
201
|
-
stochastic_cycle=stochastic_cycle,
|
|
202
|
-
damped_cycle=damped_cycle,
|
|
203
|
-
mle_regression=False,
|
|
204
|
-
)
|
|
205
|
-
|
|
206
|
-
params = {}
|
|
207
|
-
sm_params = {}
|
|
208
|
-
sm_init = {}
|
|
209
|
-
expected_param_dims = defaultdict(tuple)
|
|
210
|
-
expected_coords = defaultdict(list)
|
|
211
|
-
expected_param_dims["P0"] += ("state", "state_aux")
|
|
212
|
-
|
|
213
|
-
default_states = [
|
|
214
|
-
ALL_STATE_DIM,
|
|
215
|
-
ALL_STATE_AUX_DIM,
|
|
216
|
-
OBS_STATE_DIM,
|
|
217
|
-
OBS_STATE_AUX_DIM,
|
|
218
|
-
SHOCK_DIM,
|
|
219
|
-
SHOCK_AUX_DIM,
|
|
220
|
-
]
|
|
221
|
-
default_values = [[], [], ["data"], ["data"], [], []]
|
|
222
|
-
for dim, value in zip(default_states, default_values):
|
|
223
|
-
expected_coords[dim] += value
|
|
224
|
-
|
|
225
|
-
components = []
|
|
226
|
-
|
|
227
|
-
if irregular:
|
|
228
|
-
sigma2 = np.abs(rng.normal()).astype(floatX).item()
|
|
229
|
-
params["sigma_irregular"] = np.sqrt(sigma2)
|
|
230
|
-
sm_params["sigma2.irregular"] = sigma2
|
|
231
|
-
|
|
232
|
-
comp = st.MeasurementError("irregular")
|
|
233
|
-
components.append(comp)
|
|
234
|
-
|
|
235
|
-
level_trend_order = [0, 0]
|
|
236
|
-
level_trend_innov_order = [0, 0]
|
|
237
|
-
|
|
238
|
-
if level:
|
|
239
|
-
level_trend_order[0] = 1
|
|
240
|
-
expected_coords["trend_state"] += [
|
|
241
|
-
"level",
|
|
242
|
-
]
|
|
243
|
-
expected_coords[ALL_STATE_DIM] += [
|
|
244
|
-
"level",
|
|
245
|
-
]
|
|
246
|
-
expected_coords[ALL_STATE_AUX_DIM] += [
|
|
247
|
-
"level",
|
|
248
|
-
]
|
|
249
|
-
if stochastic_level:
|
|
250
|
-
level_trend_innov_order[0] = 1
|
|
251
|
-
expected_coords["trend_shock"] += ["level"]
|
|
252
|
-
expected_coords[SHOCK_DIM] += [
|
|
253
|
-
"level",
|
|
254
|
-
]
|
|
255
|
-
expected_coords[SHOCK_AUX_DIM] += [
|
|
256
|
-
"level",
|
|
257
|
-
]
|
|
258
|
-
|
|
259
|
-
if trend:
|
|
260
|
-
level_trend_order[1] = 1
|
|
261
|
-
expected_coords["trend_state"] += [
|
|
262
|
-
"trend",
|
|
263
|
-
]
|
|
264
|
-
expected_coords[ALL_STATE_DIM] += [
|
|
265
|
-
"trend",
|
|
266
|
-
]
|
|
267
|
-
expected_coords[ALL_STATE_AUX_DIM] += [
|
|
268
|
-
"trend",
|
|
269
|
-
]
|
|
270
|
-
|
|
271
|
-
if stochastic_trend:
|
|
272
|
-
level_trend_innov_order[1] = 1
|
|
273
|
-
expected_coords["trend_shock"] += ["trend"]
|
|
274
|
-
expected_coords[SHOCK_DIM] += ["trend"]
|
|
275
|
-
expected_coords[SHOCK_AUX_DIM] += ["trend"]
|
|
276
|
-
|
|
277
|
-
if level or trend:
|
|
278
|
-
expected_param_dims["initial_trend"] += ("trend_state",)
|
|
279
|
-
level_value = np.where(
|
|
280
|
-
level_trend_order,
|
|
281
|
-
rng.normal(
|
|
282
|
-
size=2,
|
|
283
|
-
).astype(floatX),
|
|
284
|
-
np.zeros(2, dtype=floatX),
|
|
285
|
-
)
|
|
286
|
-
sigma_level_value2 = np.abs(rng.normal(size=(2,)))[
|
|
287
|
-
np.array(level_trend_innov_order, dtype="bool")
|
|
288
|
-
]
|
|
289
|
-
max_order = np.flatnonzero(level_value)[-1].item() + 1
|
|
290
|
-
level_trend_order = level_trend_order[:max_order]
|
|
291
|
-
|
|
292
|
-
params["initial_trend"] = level_value[:max_order]
|
|
293
|
-
sm_init["level"] = level_value[0]
|
|
294
|
-
sm_init["trend"] = level_value[1]
|
|
295
|
-
|
|
296
|
-
if sum(level_trend_innov_order) > 0:
|
|
297
|
-
expected_param_dims["sigma_trend"] += ("trend_shock",)
|
|
298
|
-
params["sigma_trend"] = np.sqrt(sigma_level_value2)
|
|
299
|
-
|
|
300
|
-
sigma_level_value = sigma_level_value2.tolist()
|
|
301
|
-
if stochastic_level:
|
|
302
|
-
sigma = sigma_level_value.pop(0)
|
|
303
|
-
sm_params["sigma2.level"] = sigma
|
|
304
|
-
if stochastic_trend:
|
|
305
|
-
sigma = sigma_level_value.pop(0)
|
|
306
|
-
sm_params["sigma2.trend"] = sigma
|
|
307
|
-
|
|
308
|
-
comp = st.LevelTrendComponent(
|
|
309
|
-
name="level", order=level_trend_order, innovations_order=level_trend_innov_order
|
|
310
|
-
)
|
|
311
|
-
components.append(comp)
|
|
312
|
-
|
|
313
|
-
if seasonal is not None:
|
|
314
|
-
state_names = [f"seasonal_{i}" for i in range(seasonal)][1:]
|
|
315
|
-
seasonal_coefs = rng.normal(size=(seasonal - 1,)).astype(floatX)
|
|
316
|
-
params["seasonal_coefs"] = seasonal_coefs
|
|
317
|
-
expected_param_dims["seasonal_coefs"] += ("seasonal_state",)
|
|
318
|
-
|
|
319
|
-
expected_coords["seasonal_state"] += tuple(state_names)
|
|
320
|
-
expected_coords[ALL_STATE_DIM] += state_names
|
|
321
|
-
expected_coords[ALL_STATE_AUX_DIM] += state_names
|
|
322
|
-
|
|
323
|
-
seasonal_dict = {
|
|
324
|
-
"seasonal" if i == 0 else f"seasonal.L{i}": c for i, c in enumerate(seasonal_coefs)
|
|
325
|
-
}
|
|
326
|
-
sm_init.update(seasonal_dict)
|
|
327
|
-
|
|
328
|
-
if stochastic_seasonal:
|
|
329
|
-
sigma2 = np.abs(rng.normal()).astype(floatX)
|
|
330
|
-
params["sigma_seasonal"] = np.sqrt(sigma2)
|
|
331
|
-
sm_params["sigma2.seasonal"] = sigma2
|
|
332
|
-
expected_coords[SHOCK_DIM] += [
|
|
333
|
-
"seasonal",
|
|
334
|
-
]
|
|
335
|
-
expected_coords[SHOCK_AUX_DIM] += [
|
|
336
|
-
"seasonal",
|
|
337
|
-
]
|
|
338
|
-
|
|
339
|
-
comp = st.TimeSeasonality(
|
|
340
|
-
name="seasonal", season_length=seasonal, innovations=stochastic_seasonal
|
|
341
|
-
)
|
|
342
|
-
components.append(comp)
|
|
343
|
-
|
|
344
|
-
if freq_seasonal is not None:
|
|
345
|
-
state_count = 0
|
|
346
|
-
for d, has_innov in zip(freq_seasonal, stochastic_freq_seasonal):
|
|
347
|
-
n = d["harmonics"]
|
|
348
|
-
s = d["period"]
|
|
349
|
-
last_state_not_identified = (s / n) == 2.0
|
|
350
|
-
n_states = 2 * n - int(last_state_not_identified)
|
|
351
|
-
state_names = [f"seasonal_{s}_{f}_{i}" for i in range(n) for f in ["Cos", "Sin"]]
|
|
352
|
-
|
|
353
|
-
seasonal_params = rng.normal(size=n_states).astype(floatX)
|
|
354
|
-
|
|
355
|
-
params[f"seasonal_{s}"] = seasonal_params
|
|
356
|
-
expected_param_dims[f"seasonal_{s}"] += (f"seasonal_{s}_state",)
|
|
357
|
-
expected_coords[ALL_STATE_DIM] += state_names
|
|
358
|
-
expected_coords[ALL_STATE_AUX_DIM] += state_names
|
|
359
|
-
expected_coords[f"seasonal_{s}_state"] += (
|
|
360
|
-
tuple(state_names[:-1]) if last_state_not_identified else tuple(state_names)
|
|
361
|
-
)
|
|
362
|
-
|
|
363
|
-
for param in seasonal_params:
|
|
364
|
-
sm_init[f"freq_seasonal.{state_count}"] = param
|
|
365
|
-
state_count += 1
|
|
366
|
-
if last_state_not_identified:
|
|
367
|
-
sm_init[f"freq_seasonal.{state_count}"] = 0.0
|
|
368
|
-
state_count += 1
|
|
369
|
-
|
|
370
|
-
if has_innov:
|
|
371
|
-
sigma2 = np.abs(rng.normal()).astype(floatX)
|
|
372
|
-
params[f"sigma_seasonal_{s}"] = np.sqrt(sigma2)
|
|
373
|
-
sm_params[f"sigma2.freq_seasonal_{s}({n})"] = sigma2
|
|
374
|
-
expected_coords[SHOCK_DIM] += state_names
|
|
375
|
-
expected_coords[SHOCK_AUX_DIM] += state_names
|
|
376
|
-
|
|
377
|
-
comp = st.FrequencySeasonality(
|
|
378
|
-
name=f"seasonal_{s}", season_length=s, n=n, innovations=has_innov
|
|
379
|
-
)
|
|
380
|
-
components.append(comp)
|
|
381
|
-
|
|
382
|
-
if cycle:
|
|
383
|
-
cycle_length = np.random.choice(np.arange(2, 12)).astype(floatX)
|
|
384
|
-
|
|
385
|
-
# Statsmodels takes the frequency not the cycle length, so convert it.
|
|
386
|
-
sm_params["frequency.cycle"] = 2.0 * np.pi / cycle_length
|
|
387
|
-
params["cycle_length"] = cycle_length
|
|
388
|
-
|
|
389
|
-
init_cycle = rng.normal(size=(2,)).astype(floatX)
|
|
390
|
-
params["cycle"] = init_cycle
|
|
391
|
-
expected_param_dims["cycle"] += ("cycle_state",)
|
|
392
|
-
|
|
393
|
-
state_names = ["cycle_Cos", "cycle_Sin"]
|
|
394
|
-
expected_coords["cycle_state"] += state_names
|
|
395
|
-
expected_coords[ALL_STATE_DIM] += state_names
|
|
396
|
-
expected_coords[ALL_STATE_AUX_DIM] += state_names
|
|
397
|
-
|
|
398
|
-
sm_init["cycle"] = init_cycle[0]
|
|
399
|
-
sm_init["cycle.auxilliary"] = init_cycle[1]
|
|
400
|
-
|
|
401
|
-
if stochastic_cycle:
|
|
402
|
-
sigma2 = np.abs(rng.normal()).astype(floatX)
|
|
403
|
-
params["sigma_cycle"] = np.sqrt(sigma2)
|
|
404
|
-
expected_coords[SHOCK_DIM] += state_names
|
|
405
|
-
expected_coords[SHOCK_AUX_DIM] += state_names
|
|
406
|
-
|
|
407
|
-
sm_params["sigma2.cycle"] = sigma2
|
|
408
|
-
|
|
409
|
-
if damped_cycle:
|
|
410
|
-
rho = rng.beta(1, 1)
|
|
411
|
-
params["cycle_dampening_factor"] = rho
|
|
412
|
-
sm_params["damping.cycle"] = rho
|
|
413
|
-
|
|
414
|
-
comp = st.CycleComponent(
|
|
415
|
-
name="cycle",
|
|
416
|
-
dampen=damped_cycle,
|
|
417
|
-
innovations=stochastic_cycle,
|
|
418
|
-
estimate_cycle_length=True,
|
|
419
|
-
)
|
|
420
|
-
|
|
421
|
-
components.append(comp)
|
|
422
|
-
|
|
423
|
-
if autoregressive is not None:
|
|
424
|
-
ar_names = [f"L{i+1}.data" for i in range(autoregressive)]
|
|
425
|
-
ar_params = rng.normal(size=(autoregressive,)).astype(floatX)
|
|
426
|
-
if autoregressive == 1:
|
|
427
|
-
ar_params = ar_params.item()
|
|
428
|
-
sigma2 = np.abs(rng.normal()).astype(floatX)
|
|
429
|
-
|
|
430
|
-
params["ar_params"] = ar_params
|
|
431
|
-
params["sigma_ar"] = np.sqrt(sigma2)
|
|
432
|
-
expected_param_dims["ar_params"] += (AR_PARAM_DIM,)
|
|
433
|
-
expected_coords[AR_PARAM_DIM] += tuple(list(range(1, autoregressive + 1)))
|
|
434
|
-
expected_coords[ALL_STATE_DIM] += ar_names
|
|
435
|
-
expected_coords[ALL_STATE_AUX_DIM] += ar_names
|
|
436
|
-
expected_coords[SHOCK_DIM] += ["ar_innovation"]
|
|
437
|
-
expected_coords[SHOCK_AUX_DIM] += ["ar_innovation"]
|
|
438
|
-
|
|
439
|
-
sm_params["sigma2.ar"] = sigma2
|
|
440
|
-
for i, rho in enumerate(ar_params):
|
|
441
|
-
sm_init[f"ar.L{i+1}"] = 0
|
|
442
|
-
sm_params[f"ar.L{i+1}"] = rho
|
|
443
|
-
|
|
444
|
-
comp = st.AutoregressiveComponent(name="ar", order=autoregressive)
|
|
445
|
-
components.append(comp)
|
|
446
|
-
|
|
447
|
-
if exog is not None:
|
|
448
|
-
names = [f"x{i + 1}" for i in range(exog.shape[1])]
|
|
449
|
-
betas = rng.normal(size=(exog.shape[1],)).astype(floatX)
|
|
450
|
-
params["beta_exog"] = betas
|
|
451
|
-
params["data_exog"] = exog
|
|
452
|
-
expected_param_dims["beta_exog"] += ("exog_state",)
|
|
453
|
-
expected_param_dims["data_exog"] += ("time", "exog_data")
|
|
454
|
-
|
|
455
|
-
expected_coords["exog_state"] += tuple(names)
|
|
456
|
-
|
|
457
|
-
for i, beta in enumerate(betas):
|
|
458
|
-
sm_params[f"beta.x{i + 1}"] = beta
|
|
459
|
-
sm_init[f"beta.x{i+1}"] = beta
|
|
460
|
-
comp = st.RegressionComponent(name="exog", state_names=names)
|
|
461
|
-
components.append(comp)
|
|
462
|
-
|
|
463
|
-
st_mod = components.pop(0)
|
|
464
|
-
for comp in components:
|
|
465
|
-
st_mod += comp
|
|
466
|
-
return mod, st_mod, params, sm_params, sm_init, expected_param_dims, expected_coords
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
@pytest.mark.parametrize(
|
|
470
|
-
"level, trend, stochastic_level, stochastic_trend, irregular",
|
|
471
|
-
[
|
|
472
|
-
(False, False, False, False, True),
|
|
473
|
-
(True, True, True, True, True),
|
|
474
|
-
(True, True, False, True, False),
|
|
475
|
-
],
|
|
476
|
-
)
|
|
477
|
-
@pytest.mark.parametrize("autoregressive", [None, 3])
|
|
478
|
-
@pytest.mark.parametrize("seasonal, stochastic_seasonal", [(None, False), (12, False), (12, True)])
|
|
479
|
-
@pytest.mark.parametrize(
|
|
480
|
-
"freq_seasonal, stochastic_freq_seasonal",
|
|
481
|
-
[
|
|
482
|
-
(None, None),
|
|
483
|
-
([{"period": 12, "harmonics": 2}], [False]),
|
|
484
|
-
([{"period": 12, "harmonics": 6}], [True]),
|
|
485
|
-
],
|
|
486
|
-
)
|
|
487
|
-
@pytest.mark.parametrize(
|
|
488
|
-
"cycle, damped_cycle, stochastic_cycle",
|
|
489
|
-
[(False, False, False), (True, False, True), (True, True, True)],
|
|
490
|
-
)
|
|
491
|
-
@pytest.mark.filterwarnings("ignore::statsmodels.tools.sm_exceptions.ConvergenceWarning")
|
|
492
|
-
@pytest.mark.filterwarnings("ignore::statsmodels.tools.sm_exceptions.SpecificationWarning")
|
|
493
|
-
def test_structural_model_against_statsmodels(
|
|
494
|
-
level,
|
|
495
|
-
trend,
|
|
496
|
-
stochastic_level,
|
|
497
|
-
stochastic_trend,
|
|
498
|
-
irregular,
|
|
499
|
-
autoregressive,
|
|
500
|
-
seasonal,
|
|
501
|
-
stochastic_seasonal,
|
|
502
|
-
freq_seasonal,
|
|
503
|
-
stochastic_freq_seasonal,
|
|
504
|
-
cycle,
|
|
505
|
-
damped_cycle,
|
|
506
|
-
stochastic_cycle,
|
|
507
|
-
rng,
|
|
508
|
-
):
|
|
509
|
-
retvals = create_structural_model_and_equivalent_statsmodel(
|
|
510
|
-
rng,
|
|
511
|
-
level=level,
|
|
512
|
-
trend=trend,
|
|
513
|
-
seasonal=seasonal,
|
|
514
|
-
freq_seasonal=freq_seasonal,
|
|
515
|
-
cycle=cycle,
|
|
516
|
-
damped_cycle=damped_cycle,
|
|
517
|
-
autoregressive=autoregressive,
|
|
518
|
-
irregular=irregular,
|
|
519
|
-
stochastic_level=stochastic_level,
|
|
520
|
-
stochastic_trend=stochastic_trend,
|
|
521
|
-
stochastic_seasonal=stochastic_seasonal,
|
|
522
|
-
stochastic_freq_seasonal=stochastic_freq_seasonal,
|
|
523
|
-
stochastic_cycle=stochastic_cycle,
|
|
524
|
-
)
|
|
525
|
-
f_sm_mod, mod, params, sm_params, sm_init, expected_dims, expected_coords = retvals
|
|
526
|
-
|
|
527
|
-
data = rng.normal(size=(100,)).astype(floatX)
|
|
528
|
-
sm_mod = f_sm_mod(data)
|
|
529
|
-
|
|
530
|
-
if len(sm_init) > 0:
|
|
531
|
-
init_array = np.concatenate(
|
|
532
|
-
[np.atleast_1d(sm_init[k]).ravel() for k in sm_mod.state_names if k != "dummy"]
|
|
533
|
-
)
|
|
534
|
-
sm_mod.initialize_known(init_array, np.eye(sm_mod.k_states))
|
|
535
|
-
else:
|
|
536
|
-
sm_mod.initialize_default()
|
|
537
|
-
|
|
538
|
-
if len(sm_params) > 0:
|
|
539
|
-
param_array = np.concatenate(
|
|
540
|
-
[np.atleast_1d(sm_params[k]).ravel() for k in sm_mod.param_names]
|
|
541
|
-
)
|
|
542
|
-
sm_mod.update(param_array, transformed=True)
|
|
543
|
-
|
|
544
|
-
_assert_all_statespace_matrices_match(mod, params, sm_mod)
|
|
545
|
-
|
|
546
|
-
built_model = mod.build(verbose=False)
|
|
547
|
-
|
|
548
|
-
_assert_coord_shapes_match_matrices(built_model, params)
|
|
549
|
-
_assert_param_dims_correct(built_model.param_dims, expected_dims)
|
|
550
|
-
_assert_coords_correct(built_model.coords, expected_coords)
|
|
551
|
-
_assert_params_info_correct(built_model.param_info, built_model.coords, built_model.param_dims)
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
def test_level_trend_model(rng):
|
|
555
|
-
mod = st.LevelTrendComponent(order=2, innovations_order=0)
|
|
556
|
-
params = {"initial_trend": [0.0, 1.0]}
|
|
557
|
-
x, y = simulate_from_numpy_model(mod, rng, params)
|
|
558
|
-
|
|
559
|
-
assert_allclose(np.diff(y), 1, atol=ATOL, rtol=RTOL)
|
|
560
|
-
|
|
561
|
-
# Check coords
|
|
562
|
-
mod = mod.build(verbose=False)
|
|
563
|
-
_assert_basic_coords_correct(mod)
|
|
564
|
-
assert mod.coords["trend_state"] == ["level", "trend"]
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
def test_measurement_error(rng):
|
|
568
|
-
mod = st.MeasurementError("obs") + st.LevelTrendComponent(order=2)
|
|
569
|
-
mod = mod.build(verbose=False)
|
|
570
|
-
|
|
571
|
-
_assert_basic_coords_correct(mod)
|
|
572
|
-
assert "sigma_obs" in mod.param_names
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
@pytest.mark.parametrize("order", [1, 2, [1, 0, 1]], ids=["AR1", "AR2", "AR(1,0,1)"])
|
|
576
|
-
def test_autoregressive_model(order, rng):
|
|
577
|
-
ar = st.AutoregressiveComponent(order=order)
|
|
578
|
-
params = {
|
|
579
|
-
"ar_params": np.full((sum(ar.order),), 0.5, dtype=floatX),
|
|
580
|
-
"sigma_ar": 0.0,
|
|
581
|
-
}
|
|
582
|
-
|
|
583
|
-
x, y = simulate_from_numpy_model(ar, rng, params, steps=100)
|
|
584
|
-
|
|
585
|
-
# Check coords
|
|
586
|
-
ar.build(verbose=False)
|
|
587
|
-
_assert_basic_coords_correct(ar)
|
|
588
|
-
lags = np.arange(len(order) if isinstance(order, list) else order, dtype="int") + 1
|
|
589
|
-
if isinstance(order, list):
|
|
590
|
-
lags = lags[np.flatnonzero(order)]
|
|
591
|
-
assert_allclose(ar.coords["ar_lag"], lags)
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
@pytest.mark.parametrize("s", [10, 25, 50])
|
|
595
|
-
@pytest.mark.parametrize("innovations", [True, False])
|
|
596
|
-
@pytest.mark.parametrize("remove_first_state", [True, False])
|
|
597
|
-
def test_time_seasonality(s, innovations, remove_first_state, rng):
|
|
598
|
-
def random_word(rng):
|
|
599
|
-
return "".join(rng.choice(list("abcdefghijklmnopqrstuvwxyz")) for _ in range(5))
|
|
600
|
-
|
|
601
|
-
state_names = [random_word(rng) for _ in range(s)]
|
|
602
|
-
mod = st.TimeSeasonality(
|
|
603
|
-
season_length=s,
|
|
604
|
-
innovations=innovations,
|
|
605
|
-
name="season",
|
|
606
|
-
state_names=state_names,
|
|
607
|
-
remove_first_state=remove_first_state,
|
|
608
|
-
)
|
|
609
|
-
x0 = np.zeros(mod.k_states, dtype=floatX)
|
|
610
|
-
x0[0] = 1
|
|
611
|
-
|
|
612
|
-
params = {"season_coefs": x0}
|
|
613
|
-
if mod.innovations:
|
|
614
|
-
params["sigma_season"] = 0.0
|
|
615
|
-
|
|
616
|
-
x, y = simulate_from_numpy_model(mod, rng, params)
|
|
617
|
-
y = y.ravel()
|
|
618
|
-
if not innovations:
|
|
619
|
-
assert_pattern_repeats(y, s, atol=ATOL, rtol=RTOL)
|
|
620
|
-
|
|
621
|
-
# Check coords
|
|
622
|
-
mod.build(verbose=False)
|
|
623
|
-
_assert_basic_coords_correct(mod)
|
|
624
|
-
test_slice = slice(1, None) if remove_first_state else slice(None)
|
|
625
|
-
assert mod.coords["season_state"] == state_names[test_slice]
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
def get_shift_factor(s):
|
|
629
|
-
s_str = str(s)
|
|
630
|
-
if "." not in s_str:
|
|
631
|
-
return 1
|
|
632
|
-
_, decimal = s_str.split(".")
|
|
633
|
-
return 10 ** len(decimal)
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
@pytest.mark.parametrize("n", [*np.arange(1, 6, dtype="int").tolist(), None])
|
|
637
|
-
@pytest.mark.parametrize("s", [5, 10, 25, 25.2])
|
|
638
|
-
def test_frequency_seasonality(n, s, rng):
|
|
639
|
-
mod = st.FrequencySeasonality(season_length=s, n=n, name="season")
|
|
640
|
-
x0 = rng.normal(size=mod.n_coefs).astype(floatX)
|
|
641
|
-
params = {"season": x0, "sigma_season": 0.0}
|
|
642
|
-
k = get_shift_factor(s)
|
|
643
|
-
T = int(s * k)
|
|
644
|
-
|
|
645
|
-
x, y = simulate_from_numpy_model(mod, rng, params, steps=2 * T)
|
|
646
|
-
assert_pattern_repeats(y, T, atol=ATOL, rtol=RTOL)
|
|
647
|
-
|
|
648
|
-
# Check coords
|
|
649
|
-
mod.build(verbose=False)
|
|
650
|
-
_assert_basic_coords_correct(mod)
|
|
651
|
-
if n is None:
|
|
652
|
-
n = int(s // 2)
|
|
653
|
-
states = [f"season_{f}_{i}" for i in range(n) for f in ["Cos", "Sin"]]
|
|
654
|
-
|
|
655
|
-
# Remove the last state when the model is completely saturated
|
|
656
|
-
if s / n == 2.0:
|
|
657
|
-
states.pop()
|
|
658
|
-
assert mod.coords["season_state"] == states
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
cycle_test_vals = zip([None, None, 3, 5, 10], [False, True, True, False, False])
|
|
662
|
-
|
|
663
|
-
|
|
664
|
-
def test_cycle_component_deterministic(rng):
|
|
665
|
-
cycle = st.CycleComponent(
|
|
666
|
-
name="cycle", cycle_length=12, estimate_cycle_length=False, innovations=False
|
|
667
|
-
)
|
|
668
|
-
params = {"cycle": np.array([1.0, 1.0], dtype=floatX)}
|
|
669
|
-
x, y = simulate_from_numpy_model(cycle, rng, params, steps=12 * 12)
|
|
670
|
-
|
|
671
|
-
assert_pattern_repeats(y, 12, atol=ATOL, rtol=RTOL)
|
|
672
|
-
|
|
673
|
-
|
|
674
|
-
def test_cycle_component_with_dampening(rng):
|
|
675
|
-
cycle = st.CycleComponent(
|
|
676
|
-
name="cycle", cycle_length=12, estimate_cycle_length=False, innovations=False, dampen=True
|
|
677
|
-
)
|
|
678
|
-
params = {"cycle": np.array([10.0, 10.0], dtype=floatX), "cycle_dampening_factor": 0.75}
|
|
679
|
-
x, y = simulate_from_numpy_model(cycle, rng, params, steps=100)
|
|
680
|
-
|
|
681
|
-
# Check that the cycle dampens to zero over time
|
|
682
|
-
assert_allclose(y[-1], 0.0, atol=ATOL, rtol=RTOL)
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
def test_cycle_component_with_innovations_and_cycle_length(rng):
|
|
686
|
-
cycle = st.CycleComponent(
|
|
687
|
-
name="cycle", estimate_cycle_length=True, innovations=True, dampen=True
|
|
688
|
-
)
|
|
689
|
-
params = {
|
|
690
|
-
"cycle": np.array([1.0, 1.0], dtype=floatX),
|
|
691
|
-
"cycle_length": 12.0,
|
|
692
|
-
"cycle_dampening_factor": 0.95,
|
|
693
|
-
"sigma_cycle": 1.0,
|
|
694
|
-
}
|
|
695
|
-
|
|
696
|
-
x, y = simulate_from_numpy_model(cycle, rng, params)
|
|
697
|
-
|
|
698
|
-
cycle.build(verbose=False)
|
|
699
|
-
_assert_basic_coords_correct(cycle)
|
|
700
|
-
|
|
701
|
-
|
|
702
|
-
def test_exogenous_component(rng):
    """A pure regression component should reproduce an exact (noise-free) linear regression."""
    features = rng.normal(size=(100, 2)).astype(floatX)
    component = st.RegressionComponent(state_names=["feature_1", "feature_2"], name="exog")

    betas = np.array([1.0, 2.0], dtype=floatX)
    _, simulated = simulate_from_numpy_model(
        component, rng, {"beta_exog": betas}, {"data_exog": features}
    )

    # With no noise terms, the observations are exactly X @ beta.
    assert_allclose(simulated, features @ betas, atol=ATOL, rtol=RTOL)

    component.build(verbose=False)
    _assert_basic_coords_correct(component)
    assert component.coords["exog_state"] == ["feature_1", "feature_2"]
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
def test_adding_exogenous_component(rng):
    """Adding a regression component should place the exogenous data into the design matrix."""
    features = rng.normal(size=(100, 2)).astype(floatX)
    regression = st.RegressionComponent(state_names=["a", "b"], name="exog")
    trend = st.LevelTrendComponent(name="level")
    seasonal = st.FrequencySeasonality(name="annual", season_length=12, n=4)

    combined = regression + trend + seasonal

    # 2 regression + 2 trend + 8 seasonal states; design matrix is time-varying.
    design = combined.ssm["design"].eval({"data_exog": features})
    assert design.shape == (100, 1, 2 + 2 + 8)
    # The first two design columns at any time step are that step's exogenous row.
    assert_allclose(combined.ssm["design", 5, 0, :2].eval({"data_exog": features}), features[5])
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
def test_add_components():
    """Adding two components should merge metadata, block-diagonalize (T, R, Q),
    and concatenate (x0, c) along axis 0 and Z along axis 1."""
    ll = st.LevelTrendComponent(order=2)
    se = st.TimeSeasonality(name="seasonal", season_length=12)
    mod = ll + se

    ll_params = {
        "initial_trend": np.zeros(2, dtype=floatX),
        "sigma_trend": np.ones(2, dtype=floatX),
    }
    se_params = {
        "seasonal_coefs": np.ones(11, dtype=floatX),
        "sigma_seasonal": 1.0,
    }
    all_params = ll_params.copy()
    all_params.update(se_params)

    (ll_x0, ll_P0, ll_c, ll_d, ll_T, ll_Z, ll_R, ll_H, ll_Q) = unpack_symbolic_matrices_with_params(
        ll, ll_params
    )
    (se_x0, se_P0, se_c, se_d, se_T, se_Z, se_R, se_H, se_Q) = unpack_symbolic_matrices_with_params(
        se, se_params
    )
    x0, P0, c, d, T, Z, R, H, Q = unpack_symbolic_matrices_with_params(mod, all_params)

    # BUG FIX: the original wrote `assert [x in ... for x in ...]`, which asserts the
    # list itself -- truthy whenever non-empty -- so the membership checks could never
    # fail. Use all(...) so each element is actually verified. Also renamed the loop
    # variable, which shadowed the builtin `property`.
    for attr in ["param_names", "shock_names", "param_info", "coords", "param_dims"]:
        assert all(x in getattr(mod, attr) for x in getattr(ll, attr))
        assert all(x in getattr(mod, attr) for x in getattr(se, attr))

    # Transition, selection, and state-covariance matrices combine block-diagonally.
    ll_mats = [ll_T, ll_R, ll_Q]
    se_mats = [se_T, se_R, se_Q]
    all_mats = [T, R, Q]

    for ll_mat, se_mat, all_mat in zip(ll_mats, se_mats, all_mats):
        assert_allclose(all_mat, linalg.block_diag(ll_mat, se_mat), atol=ATOL, rtol=RTOL)

    # Initial state and intercept concatenate along axis 0; design matrix along axis 1.
    ll_mats = [ll_x0, ll_c, ll_Z]
    se_mats = [se_x0, se_c, se_Z]
    all_mats = [x0, c, Z]
    axes = [0, 0, 1]

    for ll_mat, se_mat, all_mat, axis in zip(ll_mats, se_mats, all_mats, axes):
        assert_allclose(all_mat, np.concatenate([ll_mat, se_mat], axis=axis), atol=ATOL, rtol=RTOL)
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
def test_filter_scans_time_varying_design_matrix(rng):
    """The statespace graph should carry exogenous data through a time-varying Z matrix."""
    dates = pd.date_range(start="2000-01-01", freq="D", periods=100)
    exog_df = pd.DataFrame(rng.normal(size=(100, 2)), columns=["a", "b"], index=dates)
    obs_df = pd.DataFrame(rng.normal(size=(100, 1)), columns=["data"], index=dates)

    regression = st.RegressionComponent(state_names=["a", "b"], name="exog")
    mod = regression.build(verbose=False)

    with pm.Model(coords=mod.coords):
        pm.Data("data_exog", exog_df.values)

        pm.Normal("x0", dims=["state"])
        pm.Deterministic("P0", pt.eye(mod.k_states), dims=["state", "state_aux"])
        pm.Normal("beta_exog", dims=["exog_state"])

        mod.build_statespace_graph(obs_df)
        # Only the design matrix Z is needed; record it so the prior captures it.
        _x0, _P0, _c, _d, _T, Z, _R, _H, _Q = mod.unpack_statespace()
        pm.Deterministic("Z", Z)

        prior = pm.sample_prior_predictive(draws=10)

    prior_Z = prior.prior.Z.values
    # (chain, draw, time, obs, exog-state)
    assert prior_Z.shape == (1, 10, 100, 1, 2)
    # Every draw's design matrix must equal the exogenous data, repeated per draw.
    assert_allclose(prior_Z[0, :, :, 0, :], exog_df.values[None].repeat(10, axis=0))
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
@pytest.mark.skipif(floatX.endswith("32"), reason="Prior covariance not PSD at half-precision")
def test_extract_components_from_idata(rng):
    """Component extraction should relabel hidden states with per-component names."""
    dates = pd.date_range(start="2000-01-01", freq="D", periods=100)
    exog_df = pd.DataFrame(rng.normal(size=(100, 2)), columns=["a", "b"], index=dates)
    obs_df = pd.DataFrame(rng.normal(size=(100, 1)), columns=["data"], index=dates)

    structural_model = (
        st.LevelTrendComponent()
        + st.FrequencySeasonality(name="seasonal", season_length=12, n=2, innovations=False)
        + st.RegressionComponent(state_names=["a", "b"], name="exog")
        + st.MeasurementError("obs")
    )
    mod = structural_model.build(verbose=False)

    with pm.Model(coords=mod.coords):
        pm.Data("data_exog", exog_df.values)

        pm.Normal("x0", dims=["state"])
        pm.Deterministic("P0", pt.eye(mod.k_states), dims=["state", "state_aux"])
        pm.Normal("beta_exog", dims=["exog_state"])
        pm.Normal("initial_trend", dims=["trend_state"])
        pm.Exponential("sigma_trend", 1, dims=["trend_shock"])
        pm.Normal("seasonal", dims=["seasonal_state"])
        pm.Exponential("sigma_obs", 1)

        mod.build_statespace_graph(obs_df)

        # Return values were unused in the original too; the call is kept as-is.
        mod.unpack_statespace()
        prior = pm.sample_prior_predictive(draws=10)

    conditional_prior = mod.sample_conditional_prior(prior)
    components_idata = mod.extract_components_from_idata(conditional_prior)
    found_states = components_idata.filtered_prior.coords["state"].values
    expected_states = ["LevelTrend[level]", "LevelTrend[trend]", "seasonal", "exog[a]", "exog[b]"]

    unexpected = set(found_states) - set(expected_states)
    assert len(unexpected) == 0, unexpected
|