Commit c73b785

REF: Rename ar steps (#766)
1 parent b239132 · commit c73b785

File tree

2 files changed (+130, -175 lines)

examples/time_series/Time_Series_Generative_Graph.ipynb (+109, -154)

Large diffs are not rendered by default.

examples/time_series/Time_Series_Generative_Graph.myst.md (+21, -21)

@@ -5,15 +5,15 @@ jupytext:
     format_name: myst
     format_version: 0.13
 kernelspec:
-  display_name: pymc_env
+  display_name: Python 3 (ipykernel)
   language: python
   name: python3
 ---
 
 (time_series_generative_graph)=
 # Time Series Models Derived From a Generative Graph
 
-:::{post} July, 2024
+:::{post} January, 2025
 :tags: time-series,
 :category: intermediate, reference
 :author: Jesse Grabowski, Juan Orduz and Ricardo Vieira

@@ -109,15 +109,15 @@ def ar_step(x_tm2, x_tm1, rho, sigma):
 
 # Here we actually "loop" over the time series.
 def ar_dist(ar_init, rho, sigma, size):
-    ar_innov, _ = pytensor.scan(
+    ar_steps, _ = pytensor.scan(
         fn=ar_step,
         outputs_info=[{"initial": ar_init, "taps": range(-lags, 0)}],
         non_sequences=[rho, sigma],
         n_steps=timeseries_length - lags,
         strict=True,
     )
 
-    return ar_innov
+    return ar_steps
 ```
 
 ## Generate AR(2) Graph

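Editorial aside (not part of the commit): the scan above just unrolls the AR(2) recursion, and the rename makes clear that it returns the generated steps rather than the innovations. A minimal NumPy sketch of the same idea follows, with illustrative stand-ins for the notebook's `lags`, `rho`, `sigma`, and `timeseries_length`, and with the lag/coefficient ordering assumed rather than taken from `ar_step`:

```python
import numpy as np

# Illustrative stand-ins for the notebook's objects (values assumed, not from the commit).
lags = 2
timeseries_length = 100
rho = np.array([-0.3, 0.6])  # assumed ordering of the coefficients on x[t-2] and x[t-1]
sigma = 0.5
rng = np.random.default_rng(0)

# The first `lags` values play the role of ar_init (Normal with sigma=0.5 in the model).
x = list(rng.normal(scale=0.5, size=lags))

# Each iteration mirrors one ar_step: a Normal centred on a linear function of the last two values.
for _ in range(timeseries_length - lags):
    mu = rho[0] * x[-2] + rho[1] * x[-1]
    x.append(rng.normal(loc=mu, scale=sigma))

ar_steps = np.array(x[lags:])              # what the scan returns after the rename
ar = np.concatenate([x[:lags], ar_steps])  # the full series, as in pm.Deterministic("ar", ...)
```

The renamed `ar_steps` corresponds to the `x[lags:]` part here: the steps generated after the initial values, which the model later concatenates with `ar_init`.
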
@@ -136,8 +136,8 @@ with pm.Model(coords=coords, check_bounds=False) as model:
 
     ar_init = pm.Normal(name="ar_init", sigma=0.5, dims=("lags",))
 
-    ar_innov = pm.CustomDist(
-        "ar_dist",
+    ar_steps = pm.CustomDist(
+        "ar_steps",
         ar_init,
         rho,
         sigma,

@@ -146,7 +146,7 @@ with pm.Model(coords=coords, check_bounds=False) as model:
     )
 
     ar = pm.Deterministic(
-        name="ar", var=pt.concatenate([ar_init, ar_innov], axis=-1), dims=("timeseries_length",)
+        name="ar", var=pt.concatenate([ar_init, ar_steps], axis=-1), dims=("timeseries_length",)
     )
 

@@ -208,9 +208,9 @@ Next, we want to condition the AR(2) model on some observed data so that we can
 ```{code-cell} ipython3
 # select a random draw from the prior
 prior_draw = prior.prior.isel(chain=0, draw=chosen_draw)
-test_data = prior_draw["ar_dist"].to_numpy()
+test_data = prior_draw["ar_steps"].to_numpy()
 
-with pm.observe(model, {"ar_dist": test_data}) as observed_model:
+with pm.observe(model, {"ar_steps": test_data}) as observed_model:
     trace = pm.sample(chains=4, random_seed=rng)
 ```
 

@@ -321,15 +321,15 @@ Let's see how to do this in PyMC! The key observation is that we need to pass th
 ```{code-cell} ipython3
 def conditional_ar_dist(y_data, rho, sigma, size):
     # Here we condition on the observed data by passing it through the `sequences` argument.
-    ar_innov, _ = pytensor.scan(
+    ar_steps, _ = pytensor.scan(
         fn=ar_step,
         sequences=[{"input": y_data, "taps": list(range(-lags, 0))}],
         non_sequences=[rho, sigma],
         n_steps=timeseries_length - lags,
         strict=True,
     )
 
-    return ar_innov
+    return ar_steps
 ```
 
 Then we can simply generate samples from the posterior predictive distribution. Observe that we need to "rewrite" the generative graph to include the conditioned transition step. When you call {meth}`~pm.sample_posterior_predictive`, PyMC will attempt to match the names of random variables in the active model context to names in the provided `idata.posterior`. If a match is found, the specified model prior is ignored and replaced with draws from the posterior. This means we can put any prior we want on these parameters, because it will be ignored. We choose {class}`~pymc.distributions.continuous.Flat` because you cannot sample from it. This way, if PyMC does not find a match for one of our priors, we will get an error to let us know something isn't right. For a detailed explanation of this type of cross-model prediction, see the great blog post [Out of model predictions with PyMC](https://www.pymc-labs.com/blog-posts/out-of-model-predictions-with-pymc/).

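Editorial aside: the name-matching mechanism described in that paragraph can be illustrated with a minimal, self-contained toy sketch; the names `fit_model`, `pred_model`, `mu`, and `y_new` are illustrative only and not part of the notebook:

```python
import numpy as np
import pymc as pm

y = np.random.default_rng(1).normal(loc=2.0, size=50)

# Fit a simple model and keep its posterior.
with pm.Model() as fit_model:
    mu = pm.Normal("mu", sigma=10.0)
    pm.Normal("y", mu=mu, sigma=1.0, observed=y)
    idata = pm.sample(random_seed=1)

# A second model that reuses the *name* "mu". The Flat prior is never sampled:
# sample_posterior_predictive matches "mu" against idata.posterior and swaps in
# the posterior draws, then samples "y_new" forward from those draws.
with pm.Model() as pred_model:
    mu = pm.Flat("mu")
    pm.Normal("y_new", mu=mu, sigma=1.0)
    ppc = pm.sample_posterior_predictive(idata, var_names=["y_new"], random_seed=1)
```

If the second model used a name that is not present in `idata.posterior`, the `Flat` prior could not be sampled and PyMC would raise an error, which is exactly the safety property the notebook relies on.
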
@@ -351,8 +351,8 @@ with pm.Model(coords=coords, check_bounds=False) as conditional_model:
     rho = pm.Flat(name="rho", dims=("lags",))
     sigma = pm.Flat(name="sigma")
 
-    ar_innov = pm.CustomDist(
-        "ar_dist",
+    ar_steps = pm.CustomDist(
+        "ar_steps",
         y_data,
         rho,
         sigma,

@@ -361,7 +361,7 @@ with pm.Model(coords=coords, check_bounds=False) as conditional_model:
     )
 
     ar = pm.Deterministic(
-        name="ar", var=pt.concatenate([ar_init, ar_innov], axis=-1), dims=("timeseries_length",)
+        name="ar", var=pt.concatenate([ar_init, ar_steps], axis=-1), dims=("timeseries_length",)
     )
 
     post_pred_conditional = pm.sample_posterior_predictive(trace, var_names=["ar"], random_seed=rng)

@@ -447,17 +447,17 @@ with pm.Model(coords=coords, check_bounds=False) as forecasting_model:
     sigma = pm.Flat(name="sigma")
 
     def ar_dist_forecasting(forecast_initial_state, rho, sigma, size):
-        ar_innov, _ = pytensor.scan(
+        ar_steps, _ = pytensor.scan(
             fn=ar_step,
             outputs_info=[{"initial": forecast_initial_state, "taps": range(-lags, 0)}],
             non_sequences=[rho, sigma],
             n_steps=forecast_steps,
             strict=True,
         )
-        return ar_innov
+        return ar_steps
 
-    ar_innov = pm.CustomDist(
-        "ar_dist",
+    ar_steps = pm.CustomDist(
+        "ar_steps",
         forecast_initial_state,
         rho,
         sigma,

@@ -466,14 +466,14 @@ with pm.Model(coords=coords, check_bounds=False) as forecasting_model:
     )
 
     post_pred_forecast = pm.sample_posterior_predictive(
-        trace, var_names=["ar_dist"], random_seed=rng
+        trace, var_names=["ar_steps"], random_seed=rng
     )
 ```
 
 We can visualize the out-of-sample predictions and compare the results with those from `statsmodels` (a brief `statsmodels` sketch follows this hunk).
 
 ```{code-cell} ipython3
-forecast_post_pred_ar = post_pred_forecast.posterior_predictive["ar_dist"]
+forecast_post_pred_ar = post_pred_forecast.posterior_predictive["ar_steps"]
 
 _, ax = plt.subplots()
 for i, hdi_prob in enumerate((0.94, 0.64), 1):

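Editorial aside (not part of the commit): the `statsmodels` comparison mentioned above can be sketched roughly as follows; `ar_obs` and `forecast_steps` are placeholders for the notebook's observed series and forecast horizon:

```python
import numpy as np
from statsmodels.tsa.ar_model import AutoReg

# Placeholder data; in the notebook this would be the simulated AR(2) draw.
rng = np.random.default_rng(0)
ar_obs = rng.normal(size=200)

# Fit an AR(2) and produce out-of-sample point forecasts for comparison with the PyMC forecast.
res = AutoReg(ar_obs, lags=2).fit()
forecast_steps = 10  # stand-in for the notebook's `forecast_steps`
point_forecast = res.predict(start=len(ar_obs), end=len(ar_obs) + forecast_steps - 1)
print(res.params)
print(point_forecast)
```
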
@@ -497,7 +497,7 @@ ax.plot(
 )
 
 for i, hdi_prob in enumerate((0.94, 0.64), 1):
-    hdi = az.hdi(forecast_post_pred_ar, hdi_prob=hdi_prob)["ar_dist"]
+    hdi = az.hdi(forecast_post_pred_ar, hdi_prob=hdi_prob)["ar_steps"]
     lower = hdi.sel(hdi="lower")
     upper = hdi.sel(hdi="higher")
     ax.fill_between(
