diff --git a/examples/decomposition/plot_faces_decomposition.py b/examples/decomposition/plot_faces_decomposition.py
index fcab10be6dcdf40385b4096cb95f1cc859602142..46b29e21a643e4677780fe5c68bf9adddbf25f92 100644
--- a/examples/decomposition/plot_faces_decomposition.py
+++ b/examples/decomposition/plot_faces_decomposition.py
@@ -17,6 +17,7 @@ print __doc__
 import logging
 from time import time
 
+from numpy.random import RandomState
 import pylab as pl
 
 from sklearn.datasets import fetch_olivetti_faces
@@ -29,10 +30,11 @@ logging.basicConfig(level=logging.INFO,
 n_row, n_col = 2, 3
 n_components = n_row * n_col
 image_shape = (64, 64)
+rng = RandomState(0)
 
 ###############################################################################
 # Load faces data
-dataset = fetch_olivetti_faces(shuffle=True)
+dataset = fetch_olivetti_faces(shuffle=True, random_state=rng)
 faces = dataset.data
 
 n_samples, n_features = faces.shape
@@ -79,16 +81,19 @@ estimators = [
 
     ('Sparse comp. - MiniBatchSparsePCA',
      decomposition.MiniBatchSparsePCA(n_components=n_components, alpha=1e-3,
-                                      n_iter=100, chunk_size=3),
+                                      n_iter=100, chunk_size=3,
+                                      random_state=rng),
      True, False),
 
     ('MiniBatchDictionaryLearning',
-     decomposition.MiniBatchDictionaryLearning(n_atoms=n_components, alpha=5e-3,
-                                               n_iter=100, chunk_size=3),
+     decomposition.MiniBatchDictionaryLearning(n_atoms=15, alpha=5e-3,
+                                               n_iter=50, chunk_size=3,
+                                               random_state=rng),
      True, False),
 
     ('Cluster centers - MiniBatchKMeans',
-     MiniBatchKMeans(k=n_components, tol=1e-3, chunk_size=20, max_iter=50),
+     MiniBatchKMeans(k=n_components, tol=1e-3, chunk_size=20, max_iter=50,
+                     random_state=rng),
      True, False)
 ]
 
@@ -117,6 +122,7 @@ for name, estimator, center, transpose in estimators:
         components_ = estimator.components_
     if transpose:
         components_ = components_.T
-    plot_gallery('%s - Train time %.1fs' % (name, train_time), components_)
+    plot_gallery('%s - Train time %.1fs' % (name, train_time),
+                 components_[:n_components])
 
 pl.show()
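
Note (not part of the patch): the change above threads one numpy RandomState instance through the data loader and every estimator so repeated runs of the example shuffle and decompose the faces identically. A minimal sketch of why that works, using only numpy (nothing here is taken from the example itself):

    import numpy as np
    from numpy.random import RandomState

    # Two generators built from the same seed emit identical streams of
    # draws; this is what makes the seeded example reproducible across runs.
    rng_a = RandomState(0)
    rng_b = RandomState(0)
    assert np.array_equal(rng_a.permutation(10), rng_b.permutation(10))

    # A single shared instance, like the rng passed to fetch_olivetti_faces
    # and to each estimator in the patch, advances its internal state with
    # every draw, so each consumer sees different numbers but the run as a
    # whole is still deterministic.
    shared = RandomState(0)
    print(shared.permutation(10))
    print(shared.permutation(10))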