# Imports used by the snippets below.
import numpy as np
from bokeh.plotting import figure, show
from bokeh.palettes import Category10
from bokeh.models import LinearColorMapper, ColorBar, BasicTicker

def plot_unit_gaussian_samples(D):
    p = figure(plot_width=800, plot_height=500,
               title='Samples from a unit {}D Gaussian'.format(D))
    # Each sample is a D-dimensional draw from N(0, I), plotted against evenly spaced x values.
    xs = np.linspace(0, 1, D)
    for color in Category10[10]:
        ys = np.random.multivariate_normal(np.zeros(D), np.eye(D))
        p.line(xs, ys, line_width=1, color=color)
    return p
show(plot_unit_gaussian_samples(2))
show(plot_unit_gaussian_samples(20))
def k(xs, ys, sigma=1, l=1):
    """Squared exponential kernel as above, but designed to return the whole
    covariance matrix - i.e. the pairwise covariance of the vectors xs & ys.
    Also takes the two parameters sigma and l, which are discussed at the end."""
    # Pairwise difference matrix.
    dx = np.expand_dims(xs, 1) - np.expand_dims(ys, 0)
    return (sigma ** 2) * np.exp(-((dx / l) ** 2) / 2)

def m(x):
    """The mean function. As discussed, we can let the mean always be zero."""
    return np.zeros_like(x)
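As a quick sanity check (this snippet is mine, not from the original code), k returns sigma² = 1 when the two inputs coincide and decays smoothly as they move apart:

# Not from the original post: a quick check of the kernel's behaviour.
print(k(np.array([0.0]), np.array([0.0])))  # [[1.]]          -> maximal when x = x'
print(k(np.array([0.0]), np.array([1.0])))  # [[0.60653066]]  -> exp(-0.5)
print(k(np.array([0.0]), np.array([3.0])))  # [[0.011109]]    -> exp(-4.5)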
We can plot this kernel to show how it’s maximised when x = x′ and then smoothly falls off as the two inputs start to differ.
N = 100
x = np.linspace(-2, 2, N)
y = np.linspace(-2, 2, N)
d = k(x, y)  # N x N matrix of pairwise covariances.

color_mapper = LinearColorMapper(palette="Plasma256", low=0, high=1)

p = figure(plot_width=400, plot_height=400, x_range=(-2, 2), y_range=(-2, 2),
           title='Visualisation of k(x, x\')', x_axis_label='x',
           y_axis_label='x\'', toolbar_location=None)
p.image(image=[d], color_mapper=color_mapper, x=-2, y=-2, dw=4, dh=4)

color_bar = ColorBar(color_mapper=color_mapper, ticker=BasicTicker(),
                     label_standoff=12, border_line_color=None, location=(0, 0))
p.add_layout(color_bar, 'right')

show(p)
p = figure(plot_width=800, plot_height=500)

D = 20
xs = np.linspace(0, 1, D)
for color in Category10[10]:
    # Draw a function from the GP prior: a multivariate Gaussian with our
    # mean function and covariance given by the kernel.
    ys = np.random.multivariate_normal(m(xs), k(xs, xs))
    p.circle(xs, ys, size=3, color=color)
    p.line(xs, ys, line_width=1, color=color)

show(p)
# Evaluating the prior at many more points gives smoother-looking sample functions.
n = 100
xs = np.linspace(-5, 5, n)
K = k(xs, xs)
mu = m(xs)

p = figure(plot_width=800, plot_height=500)
for color in Category10[5]:
    ys = np.random.multivariate_normal(mu, K)
    p.line(xs, ys, line_width=2, color=color)

show(p)
# coefs[i] is the coefficient of x^i
coefs = [6, -2.5, -2.4, -0.1, 0.2, 0.03]

def f(x):
    total = 0
    for exp, coef in enumerate(coefs):
        total += coef * (x ** exp)
    return total
xs = np.linspace(-5.0, 3.5, 100)
ys = f(xs)

p = figure(plot_width=800, plot_height=400, x_axis_label='x',
           y_axis_label='f(x)', title='The hidden function f(x)')
p.line(xs, ys, line_width=2)
show(p)
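The plot below uses x_obs, y_obs, x_s, mu_s and Sigma_s. The conditioning code that produces them isn't shown in this excerpt, but under the standard GP posterior equations a minimal sketch could look like the following (the particular observation points and the jitter term are assumptions of mine, not taken from the post):

# A sketch of the missing conditioning step (assumed, not the original code).
# Noise-free observations of the hidden function at a few input locations.
x_obs = np.array([-4.0, -1.5, 0.0, 1.5, 2.5, 2.7])
y_obs = f(x_obs)

# Test points at which we want the posterior over f.
x_s = np.linspace(-8, 7, 80)

# Covariance blocks: train/train, train/test and test/test.
K = k(x_obs, x_obs)
K_s = k(x_obs, x_s)
K_ss = k(x_s, x_s)

# Standard GP posterior:
#   mu_s    = K_s^T K^{-1} y_obs
#   Sigma_s = K_ss - K_s^T K^{-1} K_s
# A tiny jitter on the diagonal keeps the inversion numerically stable.
K_inv = np.linalg.inv(K + 1e-8 * np.eye(len(x_obs)))
mu_s = K_s.T.dot(K_inv).dot(y_obs)
Sigma_s = K_ss - K_s.T.dot(K_inv).dot(K_s)

With these in place, the plotting code below overlays the true function, the training points, a ±2 standard deviation band, and a few samples from the posterior.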
p = figure(plot_width=800, plot_height=600, y_range=(-7, 8))

y_true = f(x_s)
p.line(x_s, y_true, line_width=3, color='black', alpha=0.4,
       line_dash='dashed', legend='True f(x)')

p.cross(x_obs, y_obs, size=20, legend='Training data')

# Shade the +/- 2 standard deviation band around the posterior mean.
stds = np.sqrt(Sigma_s.diagonal())
err_xs = np.concatenate((x_s, np.flip(x_s, 0)))
err_ys = np.concatenate((mu_s + 2 * stds, np.flip(mu_s - 2 * stds, 0)))
p.patch(err_xs, err_ys, alpha=0.2, line_width=0, color='grey',
        legend='Uncertainty')

for color in Category10[3]:
    y_s = np.random.multivariate_normal(mu_s, Sigma_s)
    p.line(x_s, y_s, line_width=1, color=color)

p.line(x_s, mu_s, line_width=3, color='blue', alpha=0.4, legend='Mean')
show(p)
Resources
- Machine Learning: A Probabilistic Perspective, Chapter 15, by Kevin P. Murphy
- Introduction to Gaussian Processes on YouTube, by Nando de Freitas