Skip to content

Commit

Permalink
notebooks
Browse files Browse the repository at this point in the history
  • Loading branch information
LegrandNico committed Oct 2, 2023
1 parent eef13f7 commit 914fc8c
Show file tree
Hide file tree
Showing 15 changed files with 1,017 additions and 730 deletions.
161 changes: 83 additions & 78 deletions docs/source/notebooks/1.1-Binary_HGF.ipynb

Large diffs are not rendered by default.

29 changes: 17 additions & 12 deletions docs/source/notebooks/1.1-Binary_HGF.md
Original file line number Diff line number Diff line change
Expand Up @@ -208,11 +208,11 @@ slideshow:
with pm.Model() as two_levels_binary_hgf:
# Set a prior over the evolution rate at the second level.
omega_2 = pm.Uniform("omega_2", -3.5, 0.0)
tonic_volatility_2 = pm.Uniform("tonic_volatility_2", -3.5, 0.0)
# Call the pre-parametrized HGF distribution here.
# All parameters are set to their default value, except tonic_volatility_2.
pm.Potential("hgf_loglike", hgf_logp_op(omega_2=omega_2))
pm.Potential("hgf_loglike", hgf_logp_op(tonic_volatility_2=tonic_volatility_2))
```

#### Visualizing the model
Expand All @@ -229,15 +229,15 @@ with two_levels_binary_hgf:
```

```{code-cell} ipython3
az.plot_trace(two_level_hgf_idata, var_names=["omega_2"]);
az.plot_trace(two_level_hgf_idata, var_names=["tonic_volatility_2"]);
plt.tight_layout()
```

### Using the learned parameters
To visualize how the model would behave under the most probable values, we average the `tonic_volatility_2` samples and use this value in a new model.

```{code-cell} ipython3
omega_2 = az.summary(two_level_hgf_idata)["mean"]["omega_2"]
tonic_volatility_2 = az.summary(two_level_hgf_idata)["mean"]["tonic_volatility_2"]
```

```{code-cell} ipython3
Expand All @@ -246,7 +246,7 @@ hgf_mcmc = HGF(
model_type="binary",
initial_mean={"1": jnp.inf, "2": 0.5},
initial_precision={"1": 0.0, "2": 1.0},
tonic_volatility={"1": jnp.inf, "2": omega_2},
tonic_volatility={"1": jnp.inf, "2": tonic_volatility_2},
tonic_drift={"1": 0.0, "2": 0.0},
volatility_coupling={"1": 1.0}).input_data(
input_data=u
Expand Down Expand Up @@ -287,12 +287,17 @@ slideshow:
with pm.Model() as three_levels_binary_hgf:
# Set a prior over the evolution rate at the second and third levels.
omega_2 = pm.Uniform("omega_2", -4.0, 0.0)
omega_3 = pm.Normal("omega_3", -11.0, 2)
tonic_volatility_2 = pm.Uniform("tonic_volatility_2", -4.0, 0.0)
tonic_volatility_3 = pm.Normal("tonic_volatility_3", -11.0, 2)
# Call the pre-parametrized HGF distribution here.
# All parameters are set to their default value except tonic_volatility_2 and tonic_volatility_3.
pm.Potential("hgf_loglike", hgf_logp_op(omega_2=omega_2, omega_3=omega_3))
pm.Potential(
"hgf_loglike",
hgf_logp_op(
tonic_volatility_2=tonic_volatility_2,
tonic_volatility_3=tonic_volatility_3)
)
```

#### Visualizing the model
Expand All @@ -314,16 +319,16 @@ with three_levels_binary_hgf:
```

```{code-cell} ipython3
az.plot_trace(three_level_hgf_idata, var_names=["omega_2", "omega_3"]);
az.plot_trace(three_level_hgf_idata, var_names=["tonic_volatility_2", "tonic_volatility_3"]);
plt.tight_layout()
```

### Using the learned parameters
To visualize how the model would behave under the most probable values, we average the `tonic_volatility_2` and `tonic_volatility_3` samples and use these values in a new model.

```{code-cell} ipython3
omega_2 = az.summary(three_level_hgf_idata)["mean"]["omega_2"]
omega_3 = az.summary(three_level_hgf_idata)["mean"]["omega_3"]
tonic_volatility_2 = az.summary(three_level_hgf_idata)["mean"]["tonic_volatility_2"]
tonic_volatility_3 = az.summary(three_level_hgf_idata)["mean"]["tonic_volatility_3"]
```

```{code-cell} ipython3
Expand All @@ -332,7 +337,7 @@ hgf_mcmc = HGF(
model_type="binary",
initial_mean={"1": jnp.inf, "2": 0.5, "3": 0.0},
initial_precision={"1": 0.0, "2": 1e4, "3": 1e1},
tonic_volatility={"1": jnp.inf, "2": omega_2, "3": omega_3},
tonic_volatility={"1": jnp.inf, "2": tonic_volatility_2, "3": tonic_volatility_3},
tonic_drift={"1": 0.0, "2": 0.0, "3": 0.0},
volatility_coupling={"1": 1.0, "2": 1.0}).input_data(
input_data=u
Expand Down
Loading

0 comments on commit 914fc8c

Please sign in to comment.