
Illustration of Prior and Posterior Gaussian Process for Different Kernels in Scikit-learn

This example illustrates the prior and posterior of a Gaussian process regressor (GPR) with different kernels. The mean, the ±1 standard deviation band, and 10 sampled functions are shown for both the prior and the posterior.
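
In the prior, the GP is constrained only by its kernel; once the regressor is fit, samples are drawn from the posterior conditioned on the observations. A minimal sketch of that idea (hypothetical toy training points, RBF kernel assumed):

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import RBF

X_plot = np.linspace(0, 5, 100)[:, np.newaxis]
gp = GaussianProcessRegressor(kernel=1.0 * RBF(length_scale=1.0))

prior_samples = gp.sample_y(X_plot, n_samples=3)      # draws from the GP prior (no data yet)

X_train = np.array([[1.0], [3.0], [4.5]])             # hypothetical observations
y_train = np.sin((X_train[:, 0] - 2.5) ** 2)          # same target function used later in this notebook
gp.fit(X_train, y_train)

posterior_samples = gp.sample_y(X_plot, n_samples=3)  # draws conditioned on the observations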

New to Plotly?

Plotly's Python library is free and open source! Get started by downloading the client and reading the primer.
You can set up Plotly to work in online or offline mode, or in Jupyter notebooks.
We also have a quick-reference cheatsheet (new!) to help you get started!
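
For example, the figures below could also be rendered inline without a Plotly account via the offline module (a minimal sketch, assuming a Plotly release from the same era as this notebook):

import plotly.offline as pyo
import plotly.graph_objs as go

pyo.init_notebook_mode(connected=True)    # load plotly.js into the notebook

trace = go.Scatter(x=[0, 1, 2], y=[0, 1, 4], mode='lines')
pyo.iplot([trace])                        # renders inline instead of uploading to Plotly Cloud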

Version

In [1]:
import sklearn
sklearn.__version__
Out[1]:
'0.18.1'

Imports

In [2]:
import plotly.plotly as py
import plotly.graph_objs as go
from plotly import tools

import numpy as np
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import (RBF, Matern, RationalQuadratic,
                                              ExpSineSquared, DotProduct,
                                              ConstantKernel)

Calculations

In [3]:
kernels = [1.0 * RBF(length_scale=1.0, length_scale_bounds=(1e-1, 10.0)),
           1.0 * RationalQuadratic(length_scale=1.0, alpha=0.1),
           1.0 * ExpSineSquared(length_scale=1.0, periodicity=3.0,
                                length_scale_bounds=(0.1, 10.0),
                                periodicity_bounds=(1.0, 10.0)),
           ConstantKernel(0.1, (0.01, 10.0))
               * (DotProduct(sigma_0=1.0, sigma_0_bounds=(0.0, 10.0)) ** 2),
           1.0 * Matern(length_scale=1.0, length_scale_bounds=(1e-1, 10.0),
                        nu=1.5)]
 
color = 2 * ['red', 'green', 'blue', 'cyan', 'magenta', 'orange']
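
Note that 1.0 * RBF(...) is shorthand for multiplying the base kernel by a ConstantKernel whose value acts as the signal variance, and DotProduct(...) ** 2 is an Exponentiation kernel; the hyperparameters of these composites are tuned during fitting. A small sketch of this kernel arithmetic (kernel settings here are illustrative only):

from sklearn.gaussian_process.kernels import RBF, ConstantKernel, DotProduct

k1 = 1.0 * RBF(length_scale=1.0)       # equivalent to ConstantKernel(1.0) * RBF(length_scale=1.0)
print(k1)                              # e.g. 1**2 * RBF(length_scale=1)
print(type(k1).__name__)               # Product

k2 = DotProduct(sigma_0=1.0) ** 2      # the squared dot-product kernel used above
print(type(k2).__name__)               # Exponentiation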

Plot Results

In [11]:
plots = []
titles = []

for fig_index, kernel in enumerate(kernels):
    plots.append([[], []])

    # Specify Gaussian Process
    gp = GaussianProcessRegressor(kernel=kernel)

    # Plot prior
    X_ = np.linspace(0, 5, 100)
    y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True)
    
    p1 = go.Scatter(x=X_, y=y_mean, 
                    showlegend=False,
                    mode='lines',
                    line=dict(color='black')
                   )
    
    p2 = go.Scatter(x=X_, y=y_mean + y_std,
                    mode='lines',
                    showlegend=False,
                    line=dict(color='black')
                   )
    
    p3 = go.Scatter(x=X_, y=y_mean - y_std,
                    mode='lines',
                    showlegend=False,
                    line=dict(color='black'),
                    fill='tonexty'
                   )
    plots[fig_index][0].append(p2)
    plots[fig_index][0].append(p3)
    plots[fig_index][0].append(p1)
    
    y_samples = gp.sample_y(X_[:, np.newaxis], 10)
    
    # Transpose so that each row of k is one sampled function evaluated over X_
    k = y_samples.T

    for l in range(0, 10):
        p4 = go.Scatter(x=X_, y=k[l],
                        showlegend=False,
                        mode='lines',
                        line=dict(color=color[l], width=1),
                       )
        
        plots[fig_index][0].append(p4)
        
    titles.append("Prior <br>(kernel:  %s)" % kernel)

    # Generate data and fit GP
    rng = np.random.RandomState(4)
    X = rng.uniform(0, 5, 10)[:, np.newaxis]
    y = np.sin((X[:, 0] - 2.5) ** 2)
    gp.fit(X, y)

    # Plot posterior
    y_mean, y_std = gp.predict(X_[:, np.newaxis], return_std=True)
    p1 = go.Scatter(x=X_, y=y_mean,
                    showlegend=False,
                    mode='lines',
                    line=dict(color='black')
                   )
    
    p2 = go.Scatter(x=X_, y=y_mean + y_std,
                    showlegend=False,
                    mode='lines',
                    line=dict(color='black')
                   )
    
    p3 = go.Scatter(x=X_, y=y_mean - y_std,
                    mode='lines',
                    showlegend=False,
                    line=dict(color='black'),
                    fill='tonexty'
                   )
    plots[fig_index][1].append(p2)
    plots[fig_index][1].append(p3)
    plots[fig_index][1].append(p1)
    
    y_samples = gp.sample_y(X_[:, np.newaxis], 10)
    
    # Transpose so that each row of k is one sampled function evaluated over X_
    k = y_samples.T

    for l in range(0, 10):
        p4 = go.Scatter(x=X_, y=k[l],
                        showlegend=False,
                        mode='lines',
                        line=dict(color=color[l], width=1),
                       )
        
        plots[fig_index][1].append(p4)
        
    p5 = go.Scatter(x=X[:, 0], y=y,
                    showlegend=False,
                    mode='markers',
                    marker=dict(color='red'),
                   )
    plots[fig_index][1].append(p5)
    
    titles.append("Posterior <br>(kernel: %s)<br>Log-Likelihood: %.3f"
                  % (gp.kernel_, gp.log_marginal_likelihood(gp.kernel_.theta)))
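
A note on the shaded band above: fill='tonexty' fills the area between a trace and the trace added immediately before it, which is why the upper band (y_mean + y_std) is appended first, the lower band (y_mean - y_std) with the fill second, and the mean line last so it draws on top. A stripped-down version of that pattern (toy arrays, names are illustrative):

import numpy as np
import plotly.graph_objs as go

x = np.linspace(0, 5, 100)
mean = np.sin(x)                         # stand-in for y_mean
std = 0.2 * np.ones_like(x)              # stand-in for y_std

upper = go.Scatter(x=x, y=mean + std, mode='lines',
                   showlegend=False, line=dict(color='black'))
lower = go.Scatter(x=x, y=mean - std, mode='lines',
                   showlegend=False, line=dict(color='black'),
                   fill='tonexty')       # fills to the previous trace (the upper band)
center = go.Scatter(x=x, y=mean, mode='lines',
                    showlegend=False, line=dict(color='black'))

band_traces = [upper, lower, center]     # same ordering as in the loop above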

Create Plotly subplots

In [5]:
def create_subplots(plots, titles):
    fig = tools.make_subplots(rows=1, cols=2,
                              subplot_titles=tuple(titles),
                              print_grid=False)
    
    for j in range(0, len(plots[0])):
        fig.append_trace(plots[0][j], 1, 1)
    for k in range(0, len(plots[1])):
        fig.append_trace(plots[1][k], 1, 2)
        
    for i in map(str, range(1, 3)):
        y = 'yaxis' + i
        x = 'xaxis' + i
        fig['layout'][y].update(showticklabels=False, ticks='', 
                                zeroline=False, showgrid=False)
        fig['layout'][x].update(showticklabels=False, ticks='',
                                zeroline=False, showgrid=False)

    fig['layout'].update(hovermode='closest',
                         margin=dict(l=0, b=10,
                                     r=0, ))
    return fig
    

RBF Kernel

In [6]:
fig = create_subplots(plots[0], titles[0 : 2])
py.iplot(fig)
Out[6]:

RationalQuadratic Kernel

In [7]:
fig = create_subplots(plots[1], titles[2 : 4])
py.iplot(fig)
Out[7]:

ExpSineSquared Kernel

In [8]:
fig = create_subplots(plots[2], titles[4 : 6])
py.iplot(fig)
Out[8]:

DotProduct Kernel

In [9]:
fig = create_subplots(plots[3], titles[6 : 8])
py.iplot(fig)
Out[9]:

Matern Kernel

In [10]:
fig = create_subplots(plots[4], titles[8 : 10])
py.iplot(fig)
Out[10]:

License

Authors:

    Jan Hendrik Metzen <jhm@informatik.uni-bremen.de>

License:

    BSD 3 clause