diff --git a/doc/modules/gpml.rst b/doc/modules/gaussian_process.rst
similarity index 100%
rename from doc/modules/gpml.rst
rename to doc/modules/gaussian_process.rst
diff --git a/doc/supervised_learning.rst b/doc/supervised_learning.rst
index 55bcc6f12fc17cfd82b4ae7febe772f2afb227ec..68dec57fca109bb27ead90f9ec00f1722a41bbc6 100644
--- a/doc/supervised_learning.rst
+++ b/doc/supervised_learning.rst
@@ -32,7 +32,7 @@ Supervised learning
     modules/logistic
     modules/ann
     modules/feature_selection
-    modules/gpml
+    modules/gaussian_process
 
 
 
diff --git a/examples/gpml/plot_gpml_probabilistic_classification_after_regression.py b/examples/gaussian_process/plot_gp_probabilistic_classification_after_regression.py
similarity index 68%
rename from examples/gpml/plot_gpml_probabilistic_classification_after_regression.py
rename to examples/gaussian_process/plot_gp_probabilistic_classification_after_regression.py
index 553ca21f97851e528730d7285954daf18a6ed9f9..b8943a34136c069af15a7f978c5be45c8be1707e 100644
--- a/examples/gpml/plot_gpml_probabilistic_classification_after_regression.py
+++ b/examples/gaussian_process/plot_gp_probabilistic_classification_after_regression.py
@@ -10,12 +10,13 @@ A two-dimensional regression exercise with a post-processing
 allowing for probabilistic classification thanks to the
 Gaussian property of the prediction.
 """
+print __doc__
 # Author: Vincent Dubourg <vincent.dubourg@gmail.com>
 # License: BSD style
 
 import numpy as np
 from scipy import stats
-from scikits.learn.gpml import GaussianProcessModel
+from scikits.learn.gaussian_process import GaussianProcess
 from matplotlib import pyplot as pl
 from matplotlib import cm
 from mpl_toolkits.mplot3d import Axes3D
@@ -50,10 +51,10 @@ X = np.array([[-4.61611719, -6.00099547],
 Y = g(X)
 
 # Instantiate and fit Gaussian Process Model
-aGaussianProcessModel = GaussianProcessModel(theta0=5e-1)
+gp = GaussianProcess(theta0=5e-1)
 
 # Don't perform MLE or you'll get a perfect prediction for this simple example!
-aGaussianProcessModel.fit(X, Y)
+gp.fit(X, Y)
 
 # Evaluate real function, the prediction and its MSE on a grid
 res = 50
@@ -61,7 +62,7 @@ x1, x2 = np.meshgrid(np.linspace(-beta0, beta0, res), np.linspace(-beta0, beta0,
 xx = np.vstack([x1.reshape(x1.size), x2.reshape(x2.size)]).T
 
 YY = g(xx)
-yy, MSE = aGaussianProcessModel.predict(xx, eval_MSE=True)
+yy, MSE = gp.predict(xx, eval_MSE=True)
 sigma = np.sqrt(MSE)
 yy = yy.reshape((res,res))
 YY = YY.reshape((res,res))
@@ -70,6 +71,7 @@ k = PHIinv(.975)
 
 # Plot the probabilistic classification iso-values using the Gaussian property of the prediction
 fig = pl.figure(1)
+
 ax = fig.add_subplot(111)
 ax.axes.set_aspect('equal')
 cax = pl.imshow(np.flipud(PHI(-yy/sigma)), cmap=cm.gray_r, alpha=.8, extent=(-beta0,beta0,-beta0,beta0))
@@ -84,30 +86,13 @@ ax.set_xticklabels([])
 ax.set_yticklabels([])
 pl.xlabel('$x_1$')
 pl.ylabel('$x_2$')
-print u'Click to place label: $g(\mathbf{x})=0$'
 cs = pl.contour(x1, x2, YY, [0.], colors='k', linestyles='dashdot')
-pl.clabel(cs,fmt=FormatFaker(u'$g(\mathbf{x})=0$'),fontsize=11,manual=True)
-print u'Click to place label: ${\\rm \mathbb{P}}\left[{\widehat{G}}(\mathbf{x}) \leq 0\\right]= 2.5\%$'
+pl.clabel(cs, fmt=FormatFaker(u'$g(\mathbf{x})=0$'), fontsize=11)
 cs = pl.contour(x1, x2, PHI(-yy/sigma), [0.025], colors='b', linestyles='solid')
-pl.clabel(cs,fmt=FormatFaker(u'${\\rm \mathbb{P}}\left[{\widehat{G}}(\mathbf{x}) \leq 0\\right]= 2.5\%$'),fontsize=11,manual=True)
-print u'Click to place label: $\mu_{\widehat{G}}(\mathbf{x})=0$'
+pl.clabel(cs, fmt=FormatFaker(u'${\\rm \mathbb{P}}\left[{\widehat{G}}(\mathbf{x}) \leq 0\\right]= 2.5\%$'), fontsize=11)
 cs = pl.contour(x1, x2, PHI(-yy/sigma), [0.5], colors='k', linestyles='dashed')
-pl.clabel(cs,fmt=FormatFaker(u'$\mu_{\widehat{G}}(\mathbf{x})=0$'),fontsize=11,manual=True)
-print u'Click to place label: ${\\rm \mathbb{P}}\left[{\widehat{G}}(\mathbf{x}) \leq 0\\right]= 97.5\%$'
+pl.clabel(cs, fmt=FormatFaker(u'$\mu_{\widehat{G}}(\mathbf{x})=0$'), fontsize=11)
 cs = pl.contour(x1, x2, PHI(-yy/sigma), [0.975], colors='r', linestyles='solid')
-pl.clabel(cs,fmt=FormatFaker(u'${\\rm \mathbb{P}}\left[{\widehat{G}}(\mathbf{x}) \leq 0\\right]= 97.5\%$'),fontsize=11,manual=True)
-
-# Plot the prediction and the bounds of the 95% confidence interval
-fig = pl.figure(2)
-ax = Axes3D(fig)
-ax.axes.set_aspect('equal')
-ax.plot_surface(x1, x2, yy, linewidth = 0.5, rstride = 1, cstride = 1, color = 'k', alpha = .8)
-ax.plot_surface(x1, x2, yy - k*sigma, linewidth = 0.5, rstride = 1, cstride = 1, color = 'b', alpha = .8)
-ax.plot_surface(x1, x2, yy + k*sigma, linewidth = 0.5, rstride = 1, cstride = 1, color = 'r', alpha = .8)
-ax.scatter3D(X[Y <= 0, 0], X[Y <= 0, 1], Y[Y <= 0], 'r.', s = 20)
-ax.scatter3D(X[Y > 0, 0], X[Y > 0, 1], Y[Y > 0], 'b.', s = 20)
-ax.set_xlabel(u'$x_1$')
-ax.set_ylabel(u'$x_2$')
-ax.set_zlabel(u'$\widehat{G}(x_1,\,x_2)$')
+pl.clabel(cs, fmt=FormatFaker(u'${\\rm \mathbb{P}}\left[{\widehat{G}}(\mathbf{x}) \leq 0\\right]= 97.5\%$'), fontsize=11)
 
 pl.show()
\ No newline at end of file
diff --git a/examples/gaussian_process/plot_gp_regression.py b/examples/gaussian_process/plot_gp_regression.py
new file mode 100644
index 0000000000000000000000000000000000000000..a10379fe5b5cd8b572d987a33432ef2d239ef5f6
--- /dev/null
+++ b/examples/gaussian_process/plot_gp_regression.py
@@ -0,0 +1,55 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+"""
+===============================================
+Gaussian Processes for Machine Learning example
+===============================================
+
+A simple one-dimensional regression exercise with a
+cubic correlation model whose parameters are estimated
+using the maximum likelihood principle.
+"""
+print __doc__
+# Author: Vincent Dubourg <vincent.dubourg@gmail.com>
+# License: BSD style
+
+import numpy as np
+from scipy import stats
+from scikits.learn.gaussian_process import GaussianProcess, corrcubic
+from matplotlib import pyplot as pl
+
+# The function to predict
+f = lambda x: x*np.sin(x)
+
+# The design of experiments
+X = np.array([1., 3., 5., 6., 7., 8.])
+
+# Observations
+Y = f(X)
+
+# Mesh the input space for evaluations of the real function, the prediction and its MSE
+x = np.linspace(0,10,1000)
+
+# Instantiate a Gaussian Process model with a cubic correlation model
+gp = GaussianProcess(corr=corrcubic, theta0=1e-2, thetaL=1e-4, thetaU=1e-1, random_start=100)
+
+# Fit to data using Maximum Likelihood Estimation of the parameters
+gp.fit(X, Y)
+
+# Make the prediction on the meshed x-axis (ask for MSE as well)
+y, MSE = gp.predict(x, eval_MSE=True)
+sigma = np.sqrt(MSE)
+
+# Plot the function, the prediction and the 95% confidence interval based on the MSE
+fig = pl.figure()
+pl.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
+pl.plot(X, Y, 'r.', markersize=10, label=u'Observations')
+pl.plot(x, y, 'b-', label=u'Prediction')
+pl.fill(np.concatenate([x, x[::-1]]), np.concatenate([y - 1.9600 * sigma, (y + 1.9600 * sigma)[::-1]]), alpha=.5, fc='b', ec='None', label='95% confidence interval')
+pl.xlabel('$x$')
+pl.ylabel('$f(x)$')
+pl.ylim(-10, 20)
+pl.legend(loc='upper left')
+
+pl.show()
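A minimal sketch (illustration only, not part of the patch): the other correlation models looped over by the multi-model example deleted below remain importable under the new module name, and swapping one into the new script only changes the constructor call. This reuses the wider upper bound the old example applied to the exponential models, and assumes X and Y as defined in the script above:

    # assumption: the correlation module exports correxp2 as before the rename
    from scikits.learn.gaussian_process import GaussianProcess, correxp2

    # exponential correlation model with the old example's wider theta bounds
    gp = GaussianProcess(corr=correxp2, theta0=1e-2, thetaL=1e-4, thetaU=1e+1,
                         random_start=100)
    gp.fit(X, Y)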
diff --git a/examples/gpml/plot_gpml_regression.py b/examples/gpml/plot_gpml_regression.py
deleted file mode 100644
index 3d2fc342cf4d487acbd9541ccce559111a819d85..0000000000000000000000000000000000000000
--- a/examples/gpml/plot_gpml_regression.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-
-"""
-===============================================
-Gaussian Processes for Machine Learning example
-===============================================
-
-A simple one-dimensional regression exercise with
-different correlation models and maximum likelihood
-estimation of the Gaussian Process Model parameters.
-"""
-# Author: Vincent Dubourg <vincent.dubourg@gmail.com
-# License: BSD style
-
-import numpy as np
-from scipy import stats
-from scikits.learn.gpml import GaussianProcessModel, correxp1, correxp2, corrlin, corrcubic
-from matplotlib import pyplot as pl
-
-# The function to predict
-f = lambda x: x*np.sin(x)
-
-# The design of experiments
-X = np.array([1., 3., 5., 6., 7., 8.])
-
-# Observations
-Y = f(X)
-
-# Mesh the input space for evaluations of the real function, the prediction and its MSE
-x = np.linspace(0,10,1000)
-
-# Loop correlation models
-corrs = (correxp1, correxp2, corrlin, corrcubic)
-colors = ('b', 'g', 'y', 'm')
-for k in range(len(corrs)):
-    
-    # Instanciate a Gaussian Process Model with the k-th correlation model
-    if corrs[k] == corrlin or corrs[k] == corrcubic:
-        aGaussianProcessModel = GaussianProcessModel(corr=corrs[k], theta0=1e-2, thetaL=1e-4, thetaU=1e-1, random_start=100)
-    else:
-        aGaussianProcessModel = GaussianProcessModel(corr=corrs[k], theta0=1e-2, thetaL=1e-4, thetaU=1e+1, random_start=100)
-    
-    # Fit to data using Maximum Likelihood Estimation of the parameters
-    aGaussianProcessModel.fit(X, Y)
-    
-    # Make the prediction on the meshed x-axis (ask for MSE as well)
-    y, MSE = aGaussianProcessModel.predict(x, eval_MSE=True)
-    sigma = np.sqrt(MSE)
-    
-    # Compute the reduced likelihood function on a grid of the autocorrelation parameter space
-    theta_values = np.logspace(np.log10(aGaussianProcessModel.thetaL[0,0]), np.log10(aGaussianProcessModel.thetaU[0,0]), 100)
-    reduced_likelihood_function_values = []
-    for t in theta_values:
-        reduced_likelihood_function_values.append(aGaussianProcessModel.reduced_likelihood_function(theta=t)[0])
-    
-    fig = pl.figure()
-    
-    # Plot the function, the prediction and the 95% confidence interval based on the MSE
-    ax = fig.add_subplot(211)
-    pl.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
-    pl.plot(X, Y, 'r.', markersize=10, label=u'Observations')
-    pl.plot(x, y, colors[k]+'-', label=u'Prediction (%s)' % corrs[k].__name__)
-    pl.fill(np.concatenate([x, x[::-1]]), np.concatenate([y - 1.9600 * sigma, (y + 1.9600 * sigma)[::-1]]), alpha=.5, fc=colors[k], ec='None', label='95% confidence interval')
-    pl.xlabel('$x$')
-    pl.ylabel('$f(x)$')
-    pl.ylim(-10, 20)
-    pl.legend(loc='upper left')
-    
-    # Plot the reduced likelihood function
-    ax = fig.add_subplot(212)
-    pl.plot(theta_values, reduced_likelihood_function_values, colors[k]+'-')
-    pl.xlabel(u'$\\theta$')
-    pl.ylabel(u'Score')
-    pl.xscale('log')
-    pl.xlim(aGaussianProcessModel.thetaL[0,0], aGaussianProcessModel.thetaU[0,0])
-
-pl.show()
diff --git a/scikits/learn/__init__.py b/scikits/learn/__init__.py
index 13cbe8d4bc9b24dc29721aa36a3d49205743ec83..f99cd34e11cd88a9af684041e26816689c9d7eb3 100644
--- a/scikits/learn/__init__.py
+++ b/scikits/learn/__init__.py
@@ -24,7 +24,7 @@ from . import logistic
 from . import lda
 from . import metrics
 from . import svm
-from . import gpml
+from . import gaussian_process
 from . import features
 
 try:
@@ -44,8 +44,8 @@ except:
     pass
 
 __all__ = ['cross_val', 'ball_tree', 'cluster', 'covariance', 'gmm', 'glm',
-           'logistic', 'lda', 'metrics', 'svm', 'gpml', 'features', 'clone', 
-           'test']
+           'logistic', 'lda', 'metrics', 'svm', 'gaussian_process', 'features',
+           'clone', 'test']
 
 __version__ = '0.5-git'
 
diff --git a/scikits/learn/gpml/__init__.py b/scikits/learn/gaussian_process/__init__.py
similarity index 73%
rename from scikits/learn/gpml/__init__.py
rename to scikits/learn/gaussian_process/__init__.py
index bcba115a3744cd3478e15a22bfdd14285266bfe9..51cb685e185b200aaa9fcaf3af599ec2451a9b42 100644
--- a/scikits/learn/gpml/__init__.py
+++ b/scikits/learn/gaussian_process/__init__.py
@@ -9,6 +9,6 @@
     Matlab toolbox <http://www2.imm.dtu.dk/~hbn/dace/>.
 """
 
-from .gaussian_process_model import GaussianProcessModel
-from .correlation_models import *
-from .regression_models import *
\ No newline at end of file
+from .gaussian_process import GaussianProcess
+from .correlation import *
+from .regression import *
\ No newline at end of file
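A minimal migration sketch for downstream code (illustration only, not part of the patch), using only names that appear in this diff; theta0=1e-1 is the value from the class docstring:

    # before this patch
    from scikits.learn.gpml import GaussianProcessModel
    gp = GaussianProcessModel(theta0=1e-1)

    # after this patch (fit/predict signatures are unchanged)
    from scikits.learn.gaussian_process import GaussianProcess
    gp = GaussianProcess(theta0=1e-1)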
diff --git a/scikits/learn/gpml/correlation_models.py b/scikits/learn/gaussian_process/correlation.py
similarity index 100%
rename from scikits/learn/gpml/correlation_models.py
rename to scikits/learn/gaussian_process/correlation.py
diff --git a/scikits/learn/gpml/gaussian_process_model.py b/scikits/learn/gaussian_process/gaussian_process.py
similarity index 94%
rename from scikits/learn/gpml/gaussian_process_model.py
rename to scikits/learn/gaussian_process/gaussian_process.py
index 66ad5610ebd23882b147e94053a00e7810bf43c2..0d07a05ceaa4bfb9638eeb687cdc6ea806d6d1a4 100644
--- a/scikits/learn/gpml/gaussian_process_model.py
+++ b/scikits/learn/gaussian_process/gaussian_process.py
@@ -8,8 +8,8 @@
 import numpy as np
 from scipy import linalg, optimize, random
 from ..base import BaseEstimator
-from .regression_models import regpoly0
-from .correlation_models import correxp2
+from .regression import regpoly0
+from .correlation import correxp2
 machine_epsilon = np.finfo(np.double).eps
 
 def col_sum(x):
@@ -36,29 +36,29 @@ def col_sum(x):
     
     return s
 
-####################################
-# The Gaussian Process Model class #
-####################################
+##############################
+# The Gaussian Process class #
+##############################
 
-class GaussianProcessModel(BaseEstimator):
+class GaussianProcess(BaseEstimator):
     """
     A class that implements scalar Gaussian Process based prediction (also known as Kriging).
 
     Example
     -------
     import numpy as np
-    from scikits.learn.gpml import GaussianProcessModel
+    from scikits.learn.gaussian_process import GaussianProcess, regpoly0, correxp2
     import pylab as pl
 
     f = lambda x: x*np.sin(x)
     X = np.array([1., 3., 5., 6., 7., 8.])
     Y = f(X)
-    aGaussianProcessModel = GaussianProcessModel(regr=regpoly0, corr=correxp2, theta0=1e-1, thetaL=1e-3, thetaU=1e0, random_start=100)
-    aGaussianProcessModel.fit(X, Y)
+    gp = GaussianProcess(regr=regpoly0, corr=correxp2, theta0=1e-1, thetaL=1e-3, thetaU=1e0, random_start=100)
+    gp.fit(X, Y)
 
     pl.figure(1)
     x = np.linspace(0,10,1000)
-    y, MSE = aGaussianProcessModel.predict(x, eval_MSE=True)
+    y, MSE = gp.predict(x, eval_MSE=True)
     sigma = np.sqrt(MSE)
     pl.plot(x, f(x), 'r:', label=u'$f(x) = x\,\sin(x)$')
     pl.plot(X, Y, 'r.', markersize=10, label=u'Observations')
@@ -69,10 +69,10 @@ class GaussianProcessModel(BaseEstimator):
     pl.legend(loc='upper left')
 
     pl.figure(2)
-    theta_values = np.logspace(np.log10(aGaussianProcessModel.thetaL),np.log10(aGaussianProcessModel.thetaU),100)
+    theta_values = np.logspace(np.log10(gp.thetaL),np.log10(gp.thetaU),100)
     reduced_likelihood_function_values = []
     for t in theta_values:
-            reduced_likelihood_function_values.append(aGaussianProcessModel.reduced_likelihood_function(theta=t)[0])
+            reduced_likelihood_function_values.append(gp.reduced_likelihood_function(theta=t)[0])
     pl.plot(theta_values, reduced_likelihood_function_values)
     pl.xlabel(u'$\\theta$')
     pl.ylabel(u'Score')
@@ -103,7 +103,7 @@ class GaussianProcessModel(BaseEstimator):
     
     def __init__(self, regr = regpoly0, corr = correxp2, beta0 = None, storage_mode = 'full', verbose = True, theta0 = 1e-1, thetaL = None, thetaU = None, optimizer = 'fmin_cobyla', random_start = 1, normalize = True, nugget = 10.*machine_epsilon):
         """
-        The Gaussian Process Model constructor.
+        The Gaussian Process model constructor.
 
         Parameters
         ----------
@@ -169,8 +169,8 @@ class GaussianProcessModel(BaseEstimator):
 
         Returns
         -------
-        aGaussianProcessModel : self
-            A Gaussian Process Model object awaiting data to be fitted to.
+        gp : self
+            A Gaussian Process model object awaiting data to be fitted to.
         """
         
         self.regr = regr
@@ -220,7 +220,7 @@ class GaussianProcessModel(BaseEstimator):
         
     def fit(self, X, y):
         """
-        The Gaussian Process Model fitting method.
+        The Gaussian Process model fitting method.
 
         Parameters
         ----------
@@ -232,8 +232,8 @@ class GaussianProcessModel(BaseEstimator):
             
         Returns
         -------
-        aGaussianProcessModel : self
-            A fitted Gaussian Process Model object awaiting data to perform predictions.
+        gp : self
+            A fitted Gaussian Process model object awaiting data to perform predictions.
         """
         
         # Force data to numpy.array type (from coding guidelines)
@@ -312,7 +312,7 @@ class GaussianProcessModel(BaseEstimator):
         self.X_sc = np.concatenate([[mean_X],[std_X]])
         self.y_sc = np.concatenate([[mean_y],[std_y]])
         
-        # Determine Gaussian Process Model parameters
+        # Determine Gaussian Process model parameters
         if self.thetaL is not None and self.thetaU is not None:
             # Maximum Likelihood Estimation of the parameters
             if self.verbose:
@@ -323,7 +323,7 @@ class GaussianProcessModel(BaseEstimator):
         else:
             # Given parameters
             if self.verbose:
-                print "Given autocorrelation parameters. Computing Gaussian Process Model parameters..."
+                print "Given autocorrelation parameters. Computing Gaussian Process model parameters..."
             self.theta = self.theta0
             self.reduced_likelihood_function_value, self.par = self.reduced_likelihood_function()
             if np.isinf(self.reduced_likelihood_function_value):
@@ -345,7 +345,7 @@ class GaussianProcessModel(BaseEstimator):
         
     def predict(self, X, eval_MSE=False, batch_size=None):
         """
-        This function evaluates the Gaussian Process Model at x.
+        This function evaluates the Gaussian Process model at x.
         
         Parameters
         ----------
@@ -371,7 +371,7 @@ class GaussianProcessModel(BaseEstimator):
         
         # Check
         if np.any(np.isnan(self.par['beta'])):
-            raise Exception, "Not a valid GaussianProcessModel. Try fitting it again with different parameters theta"
+            raise Exception, "Not a valid GaussianProcess. Try fitting it again with different parameters theta"
         
         # Check design & trial sites
         X = np.asanyarray(X, dtype=np.float)
@@ -423,7 +423,7 @@ class GaussianProcessModel(BaseEstimator):
                 if par['C'] is None:
                     # Light storage mode (need to recompute C, F, Ft and G)
                     if self.verbose:
-                        print "This GaussianProcessModel used light storage mode at instanciation. Need to recompute autocorrelation matrix..."
+                        print "This GaussianProcess used light storage mode at instanciation. Need to recompute autocorrelation matrix..."
                     reduced_likelihood_function_value, par = self.reduced_likelihood_function()
                 
                 rt = linalg.solve(np.matrix(par['C']), np.matrix(r))
@@ -480,13 +480,13 @@ class GaussianProcessModel(BaseEstimator):
         Parameters
         ----------
         theta : array_like, optional
-            An array containing the autocorrelation parameters at which the Gaussian Process Model parameters should be determined.
+            An array containing the autocorrelation parameters at which the Gaussian Process model parameters should be determined.
             Default uses the built-in autocorrelation parameters (ie theta = self.theta).
         
         Returns
         -------
         par : dict
-            A dictionary containing the requested Gaussian Process Model parameters:
+            A dictionary containing the requested Gaussian Process model parameters:
             par['sigma2'] : Gaussian Process variance.
             par['beta'] : Generalized least-squares regression weights for Universal Kriging or given beta0 for Ordinary Kriging.
             par['gamma'] : Gaussian Process weights.
@@ -589,7 +589,7 @@ class GaussianProcessModel(BaseEstimator):
         
         Parameters
         ----------
-        self : All parameters are stored in the Gaussian Process Model object.
+        self : All parameters are stored in the Gaussian Process model object.
         
         Returns
         -------
@@ -654,7 +654,7 @@ class GaussianProcessModel(BaseEstimator):
     
     def score(self, X_test, y_test):
         """
-        This score function returns the deviations of the Gaussian Process Model evaluated onto a test dataset.
+        This score function returns the deviations of the Gaussian Process model evaluated onto a test dataset.
         
         Parameters
         ----------
diff --git a/scikits/learn/gpml/regression_models.py b/scikits/learn/gaussian_process/regression.py
similarity index 100%
rename from scikits/learn/gpml/regression_models.py
rename to scikits/learn/gaussian_process/regression.py
diff --git a/scikits/learn/tests/test_gpml.py b/scikits/learn/tests/test_gaussian_process.py
similarity index 64%
rename from scikits/learn/tests/test_gpml.py
rename to scikits/learn/tests/test_gaussian_process.py
index a7963e65e8601cb7d2cc80912840043b5dd31e7e..bb5ec0c32c5aa8478826b1e51851cb79086e071f 100644
--- a/scikits/learn/tests/test_gpml.py
+++ b/scikits/learn/tests/test_gaussian_process.py
@@ -1,12 +1,12 @@
 """
-Testing for Gaussian Process for Machine Learning module (scikits.learn.gpml)
+Testing for Gaussian Process module (scikits.learn.gaussian_process)
 """
 
 import numpy as np
 from numpy.testing import assert_array_equal, assert_array_almost_equal, \
                           assert_almost_equal, assert_raises, assert_
 
-from .. import gpml, datasets, cross_val, metrics
+from .. import gaussian_process, datasets, cross_val, metrics
 
 diabetes = datasets.load_diabetes()
 
@@ -21,8 +21,8 @@ def test_regression_1d_x_sinx():
     f = lambda x: x * np.sin(x)
     X = np.array([1., 3., 5., 6., 7., 8.])
     y = f(X)
-    gpm = gpml.GaussianProcessModel(theta0=1e-2, thetaL=1e-4, thetaU=1e-1, random_start=10, verbose=False).fit(X, y)
-    y_pred, MSE = gpm.predict(X, eval_MSE=True)
+    gp = gaussian_process.GaussianProcess(theta0=1e-2, thetaL=1e-4, thetaU=1e-1, random_start=10, verbose=False).fit(X, y)
+    y_pred, MSE = gp.predict(X, eval_MSE=True)
     
     assert (np.all(np.abs((y_pred - y) / y)  < 1e-6) and np.all(MSE  < 1e-6))
 
@@ -35,15 +35,14 @@ def test_regression_diabetes(n_jobs = 1, verbose = 0):
     
     Poor performance: Leave-one-out estimate of explained variance is about 0.5
     at best... To be investigated!
-    TODO: find a dataset that would prove GPML performance!
+    TODO: find a dataset that would prove GP performance!
     """
     
     X, y = diabetes['data'], diabetes['target']
     
-    gpm = gpml.GaussianProcessModel(corr=gpml.correxp2, theta0=1e-4, nugget=1e-2, verbose=False).fit(X, y)
+    gp = gaussian_process.GaussianProcess(corr=gaussian_process.correxp2, theta0=1e-4, nugget=1e-2, verbose=False).fit(X, y)
 
-    gpm.thetaL, gpm.thetaU = None, None
-    y_pred = cross_val.cross_val_score(gpm, X, y=y, cv=cross_val.LeaveOneOut(y.size), n_jobs=n_jobs, verbose=verbose) + y
+    y_pred = cross_val.cross_val_score(gp, X, y=y, cv=cross_val.LeaveOneOut(y.size), n_jobs=n_jobs, verbose=verbose) + y
     Q2 = metrics.explained_variance(y_pred, y)
     
     assert Q2 > 0.45
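A minimal sketch of exercising the renamed test module directly (illustration only, not part of the patch; assumes scikits.learn is importable, e.g. from an in-place build). Both function names appear in the test file above:

    from scikits.learn.tests import test_gaussian_process

    # fast analytical case, then the slower leave-one-out diabetes benchmark
    test_gaussian_process.test_regression_1d_x_sinx()
    test_gaussian_process.test_regression_diabetes()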