diff --git a/doc/modules/gaussian_process.rst b/doc/modules/gaussian_process.rst
index d8bdf0f2794ce7a12e56dff85ef1e2e1ae46d373..f8e129cd36a6c10a05fb44f530eff3002044ed01 100644
--- a/doc/modules/gaussian_process.rst
+++ b/doc/modules/gaussian_process.rst
@@ -76,7 +76,7 @@ parameters or alternatively it uses the given parameters.
     >>> gp.fit(X, y)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
     GaussianProcess(beta0=None, corr=<function squared_exponential at 0x...>,
             normalize=True, nugget=array(2.22...-15),
-            optimizer='fmin_cobyla', random_start=1,
+            optimizer='fmin_cobyla', random_start=1, random_state=...
             regr=<function constant at 0x...>, storage_mode='full',
             theta0=array([[ 0.01]]), thetaL=array([[ 0.0001]]),
             thetaU=array([[ 0.1]]), verbose=False)
diff --git a/sklearn/gaussian_process/gaussian_process.py b/sklearn/gaussian_process/gaussian_process.py
index e0d48888540b0ceb9fa9d64c3109dcb41053cdba..c6e6ebca263e8883edcd380899c48e140c92d26b 100644
--- a/sklearn/gaussian_process/gaussian_process.py
+++ b/sklearn/gaussian_process/gaussian_process.py
@@ -10,7 +10,7 @@ from scipy import linalg, optimize, rand
 
 from ..base import BaseEstimator, RegressorMixin
 from ..metrics.pairwise import manhattan_distances
-from ..utils import array2d
+from ..utils import array2d, check_random_state
 from . import regression_models as regression
 from . import correlation_models as correlation
 
@@ -163,6 +163,11 @@ class GaussianProcess(BaseEstimator, RegressorMixin):
         exponential distribution (log-uniform on [thetaL, thetaU]).
         Default does not use random starting point (random_start = 1).
 
+    random_state : integer or numpy.RandomState, optional
+        The generator used to shuffle the sequence of coordinates of theta in
+        the Welch optimizer. If an integer is given, it fixes the seed.
+        Defaults to the global numpy random number generator.
+
     Examples
     --------
     >>> import numpy as np
@@ -212,7 +217,7 @@ class GaussianProcess(BaseEstimator, RegressorMixin):
                  storage_mode='full', verbose=False, theta0=1e-1,
                  thetaL=None, thetaU=None, optimizer='fmin_cobyla',
                  random_start=1, normalize=True,
-                 nugget=10. * MACHINE_EPSILON):
+                 nugget=10. * MACHINE_EPSILON, random_state=None):
 
         self.regr = regr
         self.corr = corr
@@ -226,6 +231,7 @@ class GaussianProcess(BaseEstimator, RegressorMixin):
         self.nugget = nugget
         self.optimizer = optimizer
         self.random_start = random_start
+        self.random_state = random_state
 
         # Run input checks
         self._check_params()
@@ -250,6 +256,7 @@ class GaussianProcess(BaseEstimator, RegressorMixin):
             A fitted Gaussian Process model object awaiting data to perform
             predictions.
         """
+        self.random_state = check_random_state(self.random_state)
 
         # Force data to 2D numpy.array
         X = array2d(np.asarray(X))
@@ -748,7 +755,7 @@ class GaussianProcess(BaseEstimator, RegressorMixin):
             # Iterate over all dimensions of theta allowing for anisotropy
             if verbose:
                 print("Now improving allowing for anisotropy...")
-            for i in np.random.permutation(range(theta0.size)):
+            for i in self.random_state.permutation(theta0.size):
                 if verbose:
                     print "Proceeding along dimension %d..." % (i + 1)
                 self.theta0 = array2d(theta_iso)
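
A minimal usage sketch of the patched estimator (toy data and hyperparameter
values are illustrative only, not part of the patch): an integer seed is
passed through the new constructor argument and converted to a
numpy.random.RandomState by check_random_state inside fit(), so the order in
which the Welch optimizer visits the coordinates of theta becomes
reproducible.

    import numpy as np
    from sklearn.gaussian_process import GaussianProcess

    # Toy 1-D regression problem (illustrative values only).
    X = np.atleast_2d([1., 3., 5., 6., 7., 8.]).T
    y = (X * np.sin(X)).ravel()

    # An int seed (or a numpy.random.RandomState instance) can now be
    # passed; fit() validates it with check_random_state before any
    # randomized step is taken.
    gp = GaussianProcess(theta0=1e-2, thetaL=1e-4, thetaU=1e-1,
                         random_state=0)
    gp.fit(X, y)

    y_pred, mse = gp.predict(np.array([[4.]]), eval_MSE=True)
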