Skip to content
Snippets Groups Projects
Commit 33a0f4e0 authored by Gael Varoquaux's avatar Gael Varoquaux
Browse files

DOC: Better plotting in RFE example

parent 320ee33a
No related branches found
No related tags found
No related merge requests found
......@@ -7,6 +7,7 @@ Recursive feature elimination with automatic tuning of the
number of features selected with cross-validation
"""
print __doc__
import numpy as np
from scikits.learn.svm import SVC
from scikits.learn.cross_val import StratifiedKFold
......@@ -30,6 +31,14 @@ print 'Optimal number of features : %d' % rfecv.support_.sum()
# Plot the cross-validation score as a function of the number of
# selected features.  NOTE(review): `rfecv` is defined earlier in this
# example (outside this excerpt); presumably a fitted RFECV instance —
# confirm against the full file.
import pylab as pl
pl.figure()
# NOTE(review): this linear-axis plot is immediately redrawn on a log
# x-axis by the semilogx call below; in the original diff this line may
# have been the removed side of the hunk — verify before keeping both.
pl.plot(rfecv.cv_scores_)
# One CV score per candidate feature-subset size, log-scaled x-axis.
pl.semilogx(rfecv.n_features_, rfecv.cv_scores_)
pl.xlabel('Number of features selected')
pl.ylabel('Cross validation score (nb of misclassifications)')
# 15 ticks regularly spaced in log scale, from 2 up to the largest
# feature count; np.unique removes duplicates introduced by rounding
# the log-spaced values down to integers.
# NOTE(review): `np.int` was removed in NumPy >= 1.24 (alias of the
# builtin `int`); fine for the NumPy of this code's era.
x_ticks = np.unique(np.logspace(np.log10(2),
np.log10(rfecv.n_features_.max()),
15,
).astype(np.int))
# Use the tick positions themselves as the tick labels.
pl.xticks(x_ticks, x_ticks)
pl.show()
......@@ -181,6 +181,7 @@ class RFECV(RFE):
clf = self.estimator
n_models = np.max(self.ranking_)
self.cv_scores_ = np.zeros(n_models)
self.n_features_ = np.bincount(self.ranking_)[::-1].cumsum()[-2::-1]
for train, test in cv:
ranking_ = rfe.fit(X[train], y[train]).ranking_
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment