diff --git a/examples/plot_face_recognition.py b/examples/plot_face_recognition.py
index fb6f23118791cb8fad36a7eb43afdbcdd61bfb47..e869733a3a75817b4cf5e54d52b935a7177d09ff 100644
--- a/examples/plot_face_recognition.py
+++ b/examples/plot_face_recognition.py
@@ -110,7 +110,7 @@ y_train, y_test = y[:split], y[split:]
 
 ################################################################################
 # Compute a PCA (eigenfaces) on the training set
-n_components = 100
+n_components = 200
 
 print "Extracting the top %d eigenfaces" % n_components
 pca = PCA(n_comp=n_components, do_fast_svd=True).fit(X_train)
diff --git a/scikits/learn/utils/extmath.py b/scikits/learn/utils/extmath.py
index c4e91b278c88daca8b9a9fdc5633079d9f38e8b3..53b45f9d06640afe3f00d0169bef3f0b79995760 100644
--- a/scikits/learn/utils/extmath.py
+++ b/scikits/learn/utils/extmath.py
@@ -86,7 +86,7 @@ def density(w, **kwargs):
     return d
 
 
-def fast_svd(M, k, p=10):
+def fast_svd(M, k, p=10, rng=0):
     """Computes the k-truncated SVD using random projections
 
     Parameters
@@ -101,6 +101,9 @@ def fast_svd(M, k, p=10):
         Additional number of samples of the range of M to ensure proper
         conditioning. See the notes below.
 
+    rng: RandomState, an int seed or None (0 by default)
+        A random number generator instance or seed to make results reproducible.
+
     Notes
     =====
     This algorithm finds the exact truncated singular value decomposition
@@ -117,11 +120,16 @@ def fast_svd(M, k, p=10):
     Halko et al., 2009 (arXiv:0909.4061)
 
     """
+    if rng is None:
+        rng = np.random.RandomState()
+    elif isinstance(rng, int):
+        rng = np.random.RandomState(rng)
+
     # lazy import of scipy sparse, because it is very slow.
     from scipy import sparse
 
     # generating random gaussian vectors r with shape: (M.shape[1], k + p)
-    r = np.random.normal(size=(M.shape[1], k + p))
+    r = rng.normal(size=(M.shape[1], k + p))
 
     # sampling the range of M by linear projection of r
     if sparse.issparse(M):
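
Note: a minimal usage sketch of the new rng parameter follows. It assumes
fast_svd returns a (U, s, V) triple like numpy.linalg.svd and that the import
path matches the file touched above (scikits.learn.utils.extmath); neither is
shown in these hunks, so treat it as illustrative only.

    import numpy as np
    from scikits.learn.utils.extmath import fast_svd

    # arbitrary toy matrix; only reproducibility is being checked here
    X = np.random.RandomState(42).randn(50, 30)

    # the same integer seed builds the same RandomState, so the random
    # Gaussian projection (and hence the truncated SVD) is identical
    U1, s1, V1 = fast_svd(X, 10, rng=0)
    U2, s2, V2 = fast_svd(X, 10, rng=0)
    assert np.allclose(s1, s2)

    # a pre-constructed RandomState instance is accepted as well;
    # rng=None falls back to an unseeded, non-reproducible generator
    U3, s3, V3 = fast_svd(X, 10, rng=np.random.RandomState(0))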