import torch
import torch.autograd.functional as F
import torch.distributions as dist
import numpy as np
import matplotlib.pyplot as plt
plt.rcParams['font.family'] = 'sans-serif'
plt.rcParams['font.sans-serif'] = ['Liberation Sans']
import pandas as pd
%matplotlib inline
Tue Plots
from tueplots import bundles
plt.rcParams.update(bundles.beamer_moml())
# plt.rcParams.update(bundles.icml2022())
# Also add despine to the bundle using rcParams
plt.rcParams['axes.spines.right'] = False
plt.rcParams['axes.spines.top'] = False
# Increase font size to match the Beamer template
plt.rcParams['font.size'] = 16
# Make the background transparent
plt.rcParams['figure.facecolor'] = 'none'
try:
    import hamiltorch
except ImportError:
    %pip install git+https://github.com/AdamCobb/hamiltorch
    import hamiltorch

hamiltorch.set_random_seed(123)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
device
device(type='cpu')
gt_distribution = torch.distributions.Normal(0, 1)

# Samples from the ground truth distribution
def sample_gt(n):
    return gt_distribution.sample((n,))

samples = sample_gt(1000)
x_lin = torch.linspace(-3, 3, 1000)
y_lin = torch.exp(gt_distribution.log_prob(x_lin))

plt.plot(x_lin, y_lin, label='Ground truth')
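The 1000 draws stored in samples are not visualised in the original cell; a minimal sketch (using only names defined above) that overlays their normalised histogram on the analytic density:

# Overlay a normalised histogram of the draws on the analytic density
plt.hist(samples.numpy(), bins=50, density=True, alpha=0.5, label='Samples')
plt.legend()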
# Log-prob function to be passed to the hamiltorch sampler
def logprob(x):
    return gt_distribution.log_prob(x).sum()

# Initial state and sampler settings
x0 = torch.tensor([0.0])
num_samples = 5000
step_size = 0.3
num_steps_per_sample = 5

hamiltorch.set_random_seed(123)
params_hmc = hamiltorch.sample(log_prob_func=logprob, params_init=x0,
                               num_samples=num_samples, step_size=step_size,
                               num_steps_per_sample=num_steps_per_sample)
Sampling (Sampler.HMC; Integrator.IMPLICIT)
Time spent | Time remain.| Progress | Samples | Samples/sec
0d:00:00:16 | 0d:00:00:00 | #################### | 5000/5000 | 308.91
Acceptance Rate 0.99
# hamiltorch returns a list of one-element tensors; concatenate them into a
# flat tensor (avoids the copy-construct warning raised by torch.tensor)
params_hmc = torch.cat(params_hmc).detach()

# Trace plot
plt.plot(params_hmc, label='Trace')
plt.xlabel('Iteration')
plt.ylabel('Parameter value')
Text(0, 0.5, 'Parameter value')
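One optional diagnostic not in the original notebook: the early part of the chain can still reflect the initialisation, so it is common to discard a burn-in prefix before density estimation (hamiltorch.sample also exposes a burn argument for this). A minimal sketch with an untuned, hypothetical burn-in length:

# Hypothetical burn-in: drop the first 500 draws. The chain here mixes
# quickly, so this makes little visual difference.
burn_in = 500
params_post = params_hmc[burn_in:]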
# KDE plot of the HMC samples against the ground-truth density
import seaborn as sns

plt.figure()
sns.kdeplot(params_hmc.numpy(), label='Samples', fill=True, color='C1')
plt.plot(x_lin, y_lin, label='Ground truth')
plt.xlabel('Parameter value')
plt.ylabel('Density')
plt.legend()
<matplotlib.legend.Legend at 0x7f51dfedd4b0>
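As a quick quantitative check (not part of the original run), the first two moments of the chain should be close to those of the ground-truth N(0, 1):

# Sample moments vs. the ground-truth N(0, 1)
print(f'sample mean: {params_hmc.mean().item():.3f} (target 0)')
print(f'sample std:  {params_hmc.std().item():.3f} (target 1)')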
# Linear regression for 1-dimensional input using HMC
x_lin = torch.linspace(-3, 3, 90)
theta_0_true = torch.tensor([2.0])
theta_1_true = torch.tensor([3.0])
f = lambda x: theta_0_true + theta_1_true * x
eps = torch.randn_like(x_lin) * 1.0  # observation noise, std = 1.0
y_lin = f(x_lin) + eps

plt.scatter(x_lin, y_lin, label='Data', color='C0')
plt.plot(x_lin, f(x_lin), label='Ground truth')
plt.xlabel('x')
plt.ylabel('y')
Text(0, 0.5, 'y')
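The section ends before the sampler for this model is defined. A minimal sketch of the log-posterior one could pass to hamiltorch, assuming broad N(0, 10) priors on both parameters and the known noise scale of 1.0 (both assumptions of this sketch, not taken from the notebook):

# theta is a flat tensor: theta[0] = intercept, theta[1] = slope
def logprob_regression(theta):
    y_pred = theta[0] + theta[1] * x_lin
    log_lik = dist.Normal(y_pred, 1.0).log_prob(y_lin).sum()    # Gaussian likelihood, sigma = 1
    log_prior = dist.Normal(0.0, 10.0).log_prob(theta).sum()    # assumed broad N(0, 10) priors
    return log_lik + log_prior

theta_init = torch.zeros(2)
params_reg = hamiltorch.sample(log_prob_func=logprob_regression, params_init=theta_init,
                               num_samples=num_samples, step_size=step_size,
                               num_steps_per_sample=num_steps_per_sample)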