Skip to article frontmatterSkip to article content
Site not loading correctly?

This may be due to an incorrect BASE_URL configuration. See the MyST Documentation for reference.

import numpy as np
import plotly.graph_objects as go
from plotly.subplots import make_subplots
from scipy.stats import multivariate_normal
from sklearn.datasets import fetch_california_housing
from sklearn.preprocessing import StandardScaler

# Set random seed for reproducibility
# NOTE(review): `rng` is only consumed by the (currently commented-out)
# subsetting cell further down; with that cell disabled, nothing below is random.
rng = np.random.default_rng(42)

1. Data Laden en Voorbereiden

We laden eerst de California Housing dataset en selecteren een subset van features.

# Fetch the California Housing dataset as a pandas DataFrame
housing = fetch_california_housing(as_frame=True)
df = housing.frame

# Predictors (median income, median house age) and target (median house value)
features = ["MedInc", "HouseAge"]
X = df[features].to_numpy()
y = df["MedHouseVal"].to_numpy()

print(f"Dataset shape: X={X.shape}, y={y.shape}")
print("\nFirst 5 samples:")
print(df[["MedInc", "HouseAge", "MedHouseVal"]].head())
Dataset shape: X=(20640, 2), y=(20640,)

First 5 samples:
   MedInc  HouseAge  MedHouseVal
0  8.3252      41.0        4.526
1  8.3014      21.0        3.585
2  7.2574      52.0        3.521
3  5.6431      52.0        3.413
4  3.8462      52.0        3.422

Standardisatie

Voor Bayesiaanse regressie is het belangrijk om de data te standaardiseren (mean=0, std=1). Dit maakt het makkelijker om informatieve priors te kiezen.

# Rescale predictors and target to zero mean / unit variance; this makes
# zero-centered priors on the coefficients a sensible default choice.
scaler_X = StandardScaler()
scaler_y = StandardScaler()

X_scaled = scaler_X.fit_transform(X)
y_scaled = scaler_y.fit_transform(y[:, None]).ravel()

print(f"Scaled X mean: {X_scaled.mean(axis=0)}")
print(f"Scaled X std: {X_scaled.std(axis=0)}")
print(f"Scaled y mean: {y_scaled.mean():.6f}")
print(f"Scaled y std: {y_scaled.std():.6f}")
Scaled X mean: [6.60969987e-17 5.50808322e-18]
Scaled X std: [1. 1.]
Scaled y mean: 0.000000
Scaled y std: 1.000000

Subset voor Visualisatie

Voor de visualisatie zou je een kleine subset van 50 observaties kunnen nemen; in deze versie is die stap uitgecommentarieerd en werken we met de volledige dataset.

# NOTE(review): this subsetting step is DISABLED; every cell below therefore
# operates on the full standardized dataset (20640 rows), not on 50 samples.
# # Take a small subset for visualization
# n_samples = 50
# indices = rng.choice(len(X_scaled), size=n_samples, replace=False)
# X_subset = X_scaled[indices]
# y_subset = y_scaled[indices]

# print(f"Working with {n_samples} samples for visualization")

2. Bayesiaanse Parameterschatting Setup

We schatten drie parameters: $b_0$ (intercept), $b_1$ (effect van inkomen), $b_2$ (effect van leeftijd).

Prior Distributie

We kiezen een trivariate normaalverdeling als prior:

$$p(\pmb{b}) = \mathcal{N}(\pmb{\mu}_0, \pmb{\Sigma}_0)$$

Voor gestandaardiseerde data kiezen we:

  • Mean: $\pmb{\mu}_0 = [0, 0, 0]^T$

  • Covariance: $\pmb{\Sigma}_0 = 0.5^2 \pmb{I}$ (redelijk brede prior)

# Trivariate Gaussian prior over [b0, b1, b2]: independent components,
# zero mean, standard deviation 0.5 (a reasonably wide prior for scaled data)
prior_mean = np.zeros(3)
prior_cov = (0.5**2) * np.identity(3)

print("Prior mean:")
print(prior_mean)
print("\nPrior covariance:")
print(prior_cov)
Prior mean:
[0. 0. 0.]

Prior covariance:
[[0.25 0.   0.  ]
 [0.   0.25 0.  ]
 [0.   0.   0.25]]

Likelihood Parameters

We nemen aan dat de observatieruis $\epsilon$ normaal verdeeld is met bekende standaarddeviatie $\sigma = 0.8$ (in de gestandaardiseerde ruimte).

# The observation noise std is assumed known (standardized space);
# beta is the corresponding precision, 1/sigma^2.
sigma = 0.8
beta = 1.0 / sigma**2

print(f"Noise std (σ): {sigma}")
print(f"Precision (β): {beta}")
Noise std (σ): 0.8
Precision (β): 1.5624999999999998

3. Visualisatie Grid Setup

Voor visualisatie maken we een 2D grid in de $(b_1, b_2)$ ruimte, waarbij we $b_0$ op zijn prior mean (0) fixeren.

# 100x100 evaluation grid over the (b1, b2) plane, spanning [-2, 2] each way
b1_range = np.linspace(-2.0, 2.0, 100)
b2_range = np.linspace(-2.0, 2.0, 100)
B1, B2 = np.meshgrid(b1_range, b2_range)

# The intercept b0 is pinned to its prior mean so the density is a 2D slice
b0_fixed = 0.0

4. Prior Visualisatie

We visualiseren eerst de prior distributie voor $(b_1, b_2)$.

# 2D slice of the prior at b0 = 0: drop the first entry/row/column of mean & cov
prior_2d_mean = prior_mean[1:]
prior_2d_cov = prior_cov[1:, 1:]

# Evaluate the bivariate normal pdf on every (b1, b2) grid point at once
prior_dist_2d = multivariate_normal(mean=prior_2d_mean, cov=prior_2d_cov)
pos = np.stack((B1, B2), axis=-1)
prior_density = prior_dist_2d.pdf(pos)

# Heatmap of the prior density over the (b1, b2) grid
prior_heatmap = go.Heatmap(
    z=prior_density,
    x=b1_range,
    y=b2_range,
    colorscale="RdYlBu_r",
    colorbar={"title": "Density"},
)
fig = go.Figure(data=prior_heatmap)

# Equal aspect ratio so the symmetric prior looks circular
fig.update_layout(
    title="Prior: p(b)",
    xaxis_title="b1 (MedInc coefficient)",
    yaxis_title="b2 (HouseAge coefficient)",
    width=700,
    height=600,
    yaxis={"scaleanchor": "x", "scaleratio": 1},
)

fig.show()
Loading...

✍️ Oefening 1: Likelihood Functie Implementeren

Implementeer de likelihood functie voor een enkele observatie $(x_i, y_i)$.

De likelihood voor een enkele observatie is:

$$p(y_i | \pmb{x}_i, \pmb{b}, \sigma) = \frac{1}{\sqrt{2\pi\sigma^2}} \exp\left(-\frac{1}{2\sigma^2}(y_i - \pmb{x}_i^T\pmb{b})^2\right)$$

Voor numerieke stabiliteit berekenen we de log-likelihood en nemen daar de exponentiaal van:

$$\log p(y_i | \pmb{x}_i, \pmb{b}, \sigma) = -\frac{1}{2\sigma^2}(y_i - \pmb{x}_i^T\pmb{b})^2 - \frac{1}{2}\log(2\pi\sigma^2)$$

Hints:

  • De predictie is: $\hat{y}_i = b_0 + b_1 x_{i1} + b_2 x_{i2}$

  • De residual is: $r_i = y_i - \hat{y}_i$

  • De likelihood is proportioneel aan: $\exp\left(-\frac{\beta}{2} r_i^2\right)$, waarbij $\beta = 1/\sigma^2$

def compute_likelihood(x_obs, y_obs, B0, B1, B2, beta):
    """
    Evaluate the Gaussian likelihood of one observation over a parameter grid.

    The returned values are proportional to the likelihood: the constant
    factor 1/sqrt(2*pi*sigma^2) is omitted, since it cancels when the
    posterior is normalized on the grid.

    Parameters
    ----------
    x_obs : array-like, shape (2,)
        Feature values [x1, x2] for the observation
    y_obs : float
        Target value for the observation
    B0 : float or array
        Intercept parameter value(s)
    B1 : array
        Grid of b1 values (coefficient for MedInc)
    B2 : array
        Grid of b2 values (coefficient for HouseAge)
    beta : float
        Precision parameter (1/sigma^2)

    Returns
    -------
    likelihood : array
        Unnormalized likelihood values, one per grid point (max 1 at a
        perfect fit).
    """
    # Linear prediction at every grid point, then the Gaussian kernel of the error
    prediction = B0 + x_obs[0] * B1 + x_obs[1] * B2
    error = y_obs - prediction
    return np.exp(-beta / 2 * error**2)


# Sanity check on the first standardized sample: the surface should be
# (100, 100) and peak at 1 wherever the residual is (near) zero.
x_test = X_scaled[0]
y_test = y_scaled[0]
likelihood_test = compute_likelihood(x_test, y_test, b0_fixed, B1, B2, beta)
print(f"Likelihood shape: {likelihood_test.shape}")
print(f"Likelihood max: {likelihood_test.max():.6f}")
Likelihood shape: (100, 100)
Likelihood max: 1.000000

✍️ Oefening 2: Posterior na 1 Observatie

Bereken de posterior distributie na het observeren van het eerste datapunt.

Volgens de regel van Bayes:

$$p(\pmb{b}|y_1) \propto p(y_1|\pmb{b}) \cdot p(\pmb{b})$$

Stappen:

  1. Bereken de likelihood voor de eerste observatie

  2. Vermenigvuldig met de prior

  3. Normaliseer zodat de posterior integreert tot 1

# First data point (standardized)
x_obs_1, y_obs_1 = X_scaled[0], y_scaled[0]

# Likelihood surface contributed by this single observation
likelihood_1 = compute_likelihood(x_obs_1, y_obs_1, b0_fixed, B1, B2, beta)

# Bayes' rule: posterior is proportional to likelihood times prior
posterior_1 = likelihood_1 * prior_density

# Divide by (sum x cell area) so the grid density integrates to 1
grid_spacing = (b1_range[1] - b1_range[0]) * (b2_range[1] - b2_range[0])
posterior_1_normalized = posterior_1 / (np.sum(posterior_1) * grid_spacing)

Visualisatie: Prior → Likelihood → Posterior (1 observatie)

# Three side-by-side panels: prior, single-observation likelihood, posterior
fig = make_subplots(
    rows=1,
    cols=3,
    subplot_titles=("Prior: p(b)", "Likelihood: p(y1|b)", "Posterior: p(b|y1)"),
)

# First two panels share styling and hide their color scales
for panel_col, surface in enumerate((prior_density, likelihood_1), start=1):
    fig.add_trace(
        go.Heatmap(z=surface, x=b1_range, y=b2_range, colorscale="RdYlBu_r", showscale=False),
        row=1,
        col=panel_col,
    )

# Posterior panel carries the single shared colorbar
fig.add_trace(
    go.Heatmap(
        z=posterior_1_normalized,
        x=b1_range,
        y=b2_range,
        colorscale="RdYlBu_r",
        colorbar={"title": "Density"},
    ),
    row=1,
    col=3,
)

# Identical axis labels and equal aspect ratio on all three panels
for panel_col in (1, 2, 3):
    fig.update_xaxes(title_text="b1 (MedInc)", row=1, col=panel_col)
    fig.update_yaxes(
        title_text="b2 (HouseAge)", row=1, col=panel_col, scaleanchor="x", scaleratio=1
    )

fig.update_layout(width=1400, height=450)
fig.show()
Loading...

✍️ Oefening 3: Sequentiële Update (5 observaties)

Nu gaan we sequentieel meer data toevoegen. Telkens gebruiken we de vorige posterior als nieuwe prior.

Dit illustreert het incrementele leren aspect van Bayesiaanse methoden.

Algoritme:

posterior = prior
for i in range(n_observations):
    likelihood = compute_likelihood(x[i], y[i], ...)
    posterior = posterior * likelihood
    posterior = normalize(posterior)
# Sequential Bayesian updating over the first five observations:
# each step's posterior becomes the prior for the next observation.
n_obs = 5

current_posterior = prior_density.copy()

for i in range(n_obs):
    x_i, y_i = X_scaled[i], y_scaled[i]

    # Likelihood of observation i across the (b1, b2) grid
    likelihood_i = compute_likelihood(x_i, y_i, b0_fixed, B1, B2, beta)

    # Multiply in the new evidence, then renormalize on the grid
    current_posterior *= likelihood_i
    current_posterior /= np.sum(current_posterior) * grid_spacing

posterior_5 = current_posterior

Visualisatie na 5 observaties

# MAP estimate = grid cell with the highest posterior density
# (rows of the grid index b2, columns index b1)
map_idx = np.unravel_index(posterior_5.argmax(), posterior_5.shape)
b1_map = b1_range[map_idx[1]]
b2_map = b2_range[map_idx[0]]

posterior_heat = go.Heatmap(
    z=posterior_5,
    x=b1_range,
    y=b2_range,
    colorscale="RdYlBu_r",
    colorbar={"title": "Density"},
)
map_marker = go.Scatter(
    x=[b1_map],
    y=[b2_map],
    mode="markers",
    marker={"color": "red", "size": 12, "symbol": "x"},
    name="MAP estimate",
)
fig = go.Figure(data=[posterior_heat, map_marker])

fig.update_layout(
    title="Posterior after 5 observations: p(b|y_1:5)",
    xaxis_title="b1 (MedInc coefficient)",
    yaxis_title="b2 (HouseAge coefficient)",
    width=700,
    height=600,
    yaxis={"scaleanchor": "x", "scaleratio": 1},
)

fig.show()

print("\nMAP estimate after 5 observations:")
print(f"  b1 (MedInc): {b1_map:.3f}")
print(f"  b2 (HouseAge): {b2_map:.3f}")
Loading...

MAP estimate after 5 observations:
  b1 (MedInc): 0.545
  b2 (HouseAge): 0.343

✍️ Oefening 4: Finale Posterior (alle observaties)

Verwerk nu alle observaties sequentieel en vergelijk de finale posterior met de OLS oplossing.

# Process ALL observations sequentially.
# NOTE(review): despite the section mentioning 50 samples, the subsetting cell
# above is commented out, so this loops over the full dataset (20640 rows).
n_obs_full = len(X_scaled)

# Start from the prior and fold in one likelihood at a time
current_posterior = prior_density.copy()

# Each iteration: multiply by the observation's likelihood, then renormalize
# on the grid so the density keeps integrating to 1.
for i in range(n_obs_full):
    x_i = X_scaled[i]
    y_i = y_scaled[i]
    likelihood_i = compute_likelihood(x_i, y_i, b0_fixed, B1, B2, beta)
    current_posterior = current_posterior * likelihood_i
    current_posterior = current_posterior / (np.sum(current_posterior) * grid_spacing)

posterior_final = current_posterior

Vergelijking met OLS

Bereken de OLS oplossing en vergelijk met de MAP schatting.

# Reference solution: ordinary least squares on the design matrix [1, x1, x2]
intercept_col = np.ones((len(X_scaled), 1))
X_design = np.hstack([intercept_col, X_scaled])
b_ols, *_ = np.linalg.lstsq(X_design, y_scaled, rcond=None)

print("OLS estimates:")
print(f"  b0 (intercept): {b_ols[0]:.3f}")
print(f"  b1 (MedInc): {b_ols[1]:.3f}")
print(f"  b2 (HouseAge): {b_ols[2]:.3f}")

# Grid-based MAP from the final posterior (b0 held fixed at 0)
map_idx_final = np.unravel_index(posterior_final.argmax(), posterior_final.shape)
b1_map_final = b1_range[map_idx_final[1]]
b2_map_final = b2_range[map_idx_final[0]]

print("\nMAP estimates (Bayesian):")
print(f"  b0 (intercept): {b0_fixed:.3f} (fixed)")
print(f"  b1 (MedInc): {b1_map_final:.3f}")
print(f"  b2 (HouseAge): {b2_map_final:.3f}")

print("\nDifferences (OLS - MAP):")
print(f"  b1: {b_ols[1] - b1_map_final:.3f}")
print(f"  b2: {b_ols[2] - b2_map_final:.3f}")
OLS estimates:
  b0 (intercept): 0.000
  b1 (MedInc): 0.711
  b2 (HouseAge): 0.190

MAP estimates (Bayesian):
  b0 (intercept): 0.000 (fixed)
  b1 (MedInc): 0.707
  b2 (HouseAge): 0.182

Differences (OLS - MAP):
  b1: 0.004
  b2: 0.008

Finale Visualisatie

# Prior vs. final posterior, with OLS and MAP estimates marked on both panels
fig = make_subplots(
    rows=1,
    cols=2,
    subplot_titles=(
        "Prior: p(b)",
        f"Posterior after {n_obs_full} observations: p(b|y_1:{n_obs_full})",
    ),
)

# Shared marker style for the OLS point in both panels
ols_style = {"color": "black", "size": 12, "symbol": "x"}

# Left panel: prior density plus the OLS reference point
fig.add_trace(
    go.Heatmap(z=prior_density, x=b1_range, y=b2_range, colorscale="RdYlBu_r", showscale=False),
    row=1,
    col=1,
)
fig.add_trace(
    go.Scatter(x=[b_ols[1]], y=[b_ols[2]], mode="markers", marker=ols_style, name="OLS"),
    row=1,
    col=1,
)

# Right panel: final posterior with MAP and OLS points overlaid
fig.add_trace(
    go.Heatmap(
        z=posterior_final,
        x=b1_range,
        y=b2_range,
        colorscale="RdYlBu_r",
        colorbar={"title": "Density"},
    ),
    row=1,
    col=2,
)
fig.add_trace(
    go.Scatter(
        x=[b1_map_final],
        y=[b2_map_final],
        mode="markers",
        marker={"color": "red", "size": 12, "symbol": "x"},
        name="MAP",
    ),
    row=1,
    col=2,
)
fig.add_trace(
    go.Scatter(x=[b_ols[1]], y=[b_ols[2]], mode="markers", marker=ols_style, name="OLS"),
    row=1,
    col=2,
)

# Same labels and equal aspect ratio on both panels
for panel_col in (1, 2):
    fig.update_xaxes(title_text="b1 (MedInc)", row=1, col=panel_col)
    fig.update_yaxes(
        title_text="b2 (HouseAge)", row=1, col=panel_col, scaleanchor="x", scaleratio=1
    )

fig.update_layout(width=1300, height=550)
fig.show()
Loading...

🎯 Bonus Oefening: Convergentie Visualiseren

Maak een plot die toont hoe de MAP schattingen evolueren als functie van het aantal observaties.

Doel:

  • Toon op de x-as het aantal observaties (1, 2, 3, ..., tot het totale aantal verwerkte observaties)

  • Toon op de y-as de MAP schattingen voor $b_1$ en $b_2$

  • Voeg horizontale lijnen toe voor de OLS waarden

# Track how the grid MAP estimates move as observations accumulate
map_b1_history = []
map_b2_history = []

current_posterior = prior_density.copy()

for i in range(n_obs_full):
    # Fold in observation i and renormalize on the grid
    likelihood_i = compute_likelihood(X_scaled[i], y_scaled[i], b0_fixed, B1, B2, beta)
    current_posterior = current_posterior * likelihood_i
    current_posterior = current_posterior / (np.sum(current_posterior) * grid_spacing)

    # Record the current MAP coordinates (rows index b2, columns index b1)
    map_idx = np.unravel_index(current_posterior.argmax(), current_posterior.shape)
    b1_map = b1_range[map_idx[1]]
    b2_map = b2_range[map_idx[0]]

    map_b1_history.append(b1_map)
    map_b2_history.append(b2_map)

# Two panels: MAP trajectory for b1 and b2, with OLS values as dashed references
fig = make_subplots(rows=1, cols=2, subplot_titles=("Convergence of b1", "Convergence of b2"))

observation_counts = list(range(1, n_obs_full + 1))

# Left: b1 trajectory
fig.add_trace(
    go.Scatter(x=observation_counts, y=map_b1_history, mode="lines+markers", name="MAP estimate"),
    row=1,
    col=1,
)
fig.add_hline(y=b_ols[1], line_dash="dash", line_color="red", annotation_text="OLS", row=1, col=1)

# Right: b2 trajectory (legend entry suppressed to avoid duplication)
fig.add_trace(
    go.Scatter(
        x=observation_counts,
        y=map_b2_history,
        mode="lines+markers",
        name="MAP estimate",
        showlegend=False,
    ),
    row=1,
    col=2,
)
fig.add_hline(y=b_ols[2], line_dash="dash", line_color="red", annotation_text="OLS", row=1, col=2)

fig.update_xaxes(title_text="Number of observations", row=1, col=1)
fig.update_xaxes(title_text="Number of observations", row=1, col=2)
fig.update_yaxes(title_text="b1 (MedInc coefficient)", row=1, col=1)
fig.update_yaxes(title_text="b2 (HouseAge coefficient)", row=1, col=2)

fig.update_layout(width=1200, height=450)
fig.show()
Loading...