I've installed the GPy module.

In [22]:
import GPy
import numpy as np
from matplotlib import pyplot as plt

First, just reproduce the tutorial's 1-D regression example.

In [23]:
X = np.random.uniform(-3.,3.,(20,1))
Y = np.sin(X) + np.random.randn(20,1)*0.05
kernel = GPy.kern.rbf(input_dim=1, variance=1., lengthscale=1.)
In [24]:
m = GPy.models.GPRegression(X,Y,kernel)
In [25]:
print m

Log-likelihood: -2.273e+01

       Name        |  Value   |  Constraints  |  Ties  |  prior  
-----------------------------------------------------------------
   rbf_variance    |  1.0000  |     (+ve)     |        |         
  rbf_lengthscale  |  1.0000  |     (+ve)     |        |         
  noise_variance   |  1.0000  |     (+ve)     |        |         


In [26]:
m.plot()
In [27]:
m.constrain_positive('.*rbf_variance')
m.constrain_positive('.*lengthscale' )
#m.constrain_fixed('.*noise',0.0025)
m.constrain_positive('.*noise')
m.optimize_restarts(num_restarts=5)
print m
m.plot()
plt.axis([-4,4,-1.5,1.5])
Warning: re-constraining these parameters
rbf_variance
Warning: re-constraining these parameters
rbf_lengthscale
Warning: re-constraining these parameters
noise_variance
Optimization restart 1/5, f = -15.3970821084
Warning: adding jitter of 1.6414455761e-10
Optimization restart 2/5, f = -15.3970769236
Optimization restart 3/5, f = -15.3970816743
Optimization restart 4/5, f = -15.3970821332
Optimization restart 5/5, f = -15.3970821075

Log-likelihood: 1.540e+01

       Name        |  Value   |  Constraints  |  Ties  |  prior  
-----------------------------------------------------------------
   rbf_variance    |  0.7071  |     (+ve)     |        |         
  rbf_lengthscale  |  1.8050  |     (+ve)     |        |         
  noise_variance   |  0.0030  |     (+ve)     |        |         


Out[27]:
[-4, 4, -1.5, 1.5]
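
As an aside, the commented-out constrain_fixed line in the cell above would pin the noise variance instead of learning it. A rough, untested sketch of that variant, assuming constrain_fixed takes the same regexp-plus-value arguments shown in that comment:

# untested sketch: pin the noise variance rather than learning it
m.constrain_fixed('.*noise', 0.0025)
m.optimize_restarts(num_restarts=5)
print m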

Now try getting the predictions out in numerical form to replicate the built-in plot (a simpler way to build Xplot is sketched after this cell).

In [28]:
#Xplot=np.linspace(-4,4,10)  # this doesn't work on its own: linspace returns a 1-D array of shape (10,),
#Xplot.shape=[10,1]          # not (10,1), so the dimension assertion in m.predict fails without a reshape
size=100
tmp=np.linspace(-4,4,size)
Xplot=np.ones([size,1])
for i,t in enumerate(tmp):
    Xplot[i]=t
[mean,cov,lower,upper]=m.predict(Xplot, full_cov=False)  # with full_cov=False, cov is the pointwise predictive variance


lower =mean-2*np.sqrt(cov)  # recompute the +/- 2 sigma band from the variance
upper =mean+2*np.sqrt(cov)

plt.fill_between(Xplot[:,0],lower[:,0],upper[:,0],color='AliceBlue',edgecolor='LightSkyBlue')
plt.plot(Xplot,mean,'b')
plt.axis([-4,4,-1.5,1.5])
Out[28]:
[-4, 4, -1.5, 1.5]
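
For the record, a simpler way to get test inputs with the (size, 1) shape that m.predict expects is to reshape the linspace output directly. A minimal sketch (nothing beyond numpy; it should give the same Xplot as the loop above):

# sketch: reshape the 1-D linspace output into a column vector
Xplot = np.linspace(-4, 4, 100).reshape(-1, 1)   # shape (100, 1) instead of (100,)
mean, cov, lower, upper = m.predict(Xplot, full_cov=False)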

Now try a 2-D input example.

In [29]:
X = np.random.uniform(-3.,3.,(50,2))
Y = np.sin(X[:,0:1])**2 * np.sin(X[:,1:2])+np.random.randn(50,1)*0.05
ker = GPy.kern.rbf(input_dim=2, variance=1., lengthscale=1.)
m = GPy.models.GPRegression(X,Y,ker)
print Y.shape
print m
m.plot()
(50, 1)

Log-likelihood: -5.828e+01

       Name        |  Value   |  Constraints  |  Ties  |  prior  
-----------------------------------------------------------------
   rbf_variance    |  1.0000  |     (+ve)     |        |         
  rbf_lengthscale  |  1.0000  |     (+ve)     |        |         
  noise_variance   |  1.0000  |     (+ve)     |        |         


Out[29]:
(<matplotlib.contour.QuadContourSet instance at 0x4817e60>,
 <matplotlib.collections.PathCollection at 0x4c98250>)
In [30]:
m.constrain_positive('')   # an empty pattern matches every parameter
m.optimize_restarts(num_restarts=5)
print m
m.plot()
Warning: re-constraining these parameters
rbf_variance
rbf_lengthscale
noise_variance
Optimization restart 1/5, f = -22.6737257461
Optimization restart 2/5, f = -22.6737258129
Optimization restart 3/5, f = -22.6737259529
Optimization restart 4/5, f = -22.6737261991
Optimization restart 5/5, f = -22.6737262532

Log-likelihood: 2.267e+01

       Name        |  Value   |  Constraints  |  Ties  |  prior  
-----------------------------------------------------------------
   rbf_variance    |  0.1969  |     (+ve)     |        |         
  rbf_lengthscale  |  1.0680  |     (+ve)     |        |         
  noise_variance   |  0.0014  |     (+ve)     |        |         


Out[30]:
(<matplotlib.contour.QuadContourSet instance at 0x3c17050>,
 <matplotlib.collections.PathCollection at 0x48162d0>)

Get the predictions out over a grid and make my own contour plots (a meshgrid version is sketched after the contour cell below).

In [31]:
size=80
xaxis=np.linspace(-8,8,size)
yaxis=np.linspace(-8,8,size)
x=np.ndarray(0)
y=np.ndarray(0)

# build every (x, y) grid combination: x is held at each xaxis value while y runs through yaxis
for a in xaxis:
    x=np.hstack([x, [a]*size])
    y=np.hstack([y,yaxis])
X=np.vstack([x,y])  # shape (2, size*size); transposed before prediction below
print X
[[-8.         -8.         -8.         ...,  8.          8.          8.        ]
 [-8.         -7.79746835 -7.59493671 ...,  7.59493671  7.79746835  8.        ]]

In [32]:
[mean,cov,lower,upper]=m.predict(X.T, full_cov=False)  # X.T has shape (size*size, 2), one row per grid point
In [33]:
mean_grid= np.hstack(np.split(mean,size))  # regroup the flat predictions into a (size, size) grid
cov_grid= np.hstack(np.split(cov,size))
In [34]:
plt.contour(xaxis,yaxis,mean_grid,18)
plt.figure()
plt.contour(xaxis,yaxis,cov_grid,18)
Out[34]:
<matplotlib.contour.QuadContourSet instance at 0x4a00128>
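
The same grid construction and re-gridding can be written more compactly with np.meshgrid and reshape. A sketch, assuming the model m and the xaxis/yaxis arrays defined above (it should reproduce the same contours):

# sketch: build the prediction grid with meshgrid instead of the hstack loop
xx, yy = np.meshgrid(xaxis, yaxis)                  # both (size, size)
Xgrid = np.column_stack([xx.ravel(), yy.ravel()])   # (size*size, 2), one row per test point
mean_g, cov_g, _, _ = m.predict(Xgrid, full_cov=False)
plt.contour(xaxis, yaxis, mean_g.reshape(size, size), 18)
plt.figure()
plt.contour(xaxis, yaxis, cov_g.reshape(size, size), 18)
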
In [35]:
m.kern   # the kernel with the hyperparameters learned above, reused for a new model below
X = np.random.uniform(-3.,3.,(50,2))   # draw a fresh dataset from the same underlying function
Y = np.sin(X[:,0:1])**2 * np.sin(X[:,1:2])+np.random.randn(50,1)*0.05
In [36]:
m = GPy.models.GPRegression(X,Y,m.kern)
In [37]:
print m

Log-likelihood: -5.265e+01

       Name        |  Value   |  Constraints  |  Ties  |  prior  
-----------------------------------------------------------------
   rbf_variance    |  0.1969  |     (+ve)     |        |         
  rbf_lengthscale  |  1.0680  |     (+ve)     |        |         
  noise_variance   |  1.0000  |     (+ve)     |        |         
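
The kernel hyperparameters carried over from the optimized model, but the noise variance is back at 1.0, so presumably the new model needs re-optimizing in the same way as before. A sketch using only the calls already used above:

# sketch: re-optimize the new model that reuses the learned kernel
m.constrain_positive('')
m.optimize_restarts(num_restarts=5)
print m
m.plot()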

