diff --git a/examples/plot_svm.py b/examples/plot_svm.py
deleted file mode 100644
index cd56a5b492b5b9b2c778e389d8a694b2443dbae7..0000000000000000000000000000000000000000
--- a/examples/plot_svm.py
+++ /dev/null
@@ -1,46 +0,0 @@
-"""
-==========================
-Linear SVM classifier
-==========================
-
-Simple usage of a Support Vector Machine to classify samples. The example
-plots the decision surface and the support vectors.
-
-"""
-import numpy as np
-import pylab as pl
-from scikits.learn import svm, datasets
-
-# import some data to play with
-iris = datasets.load_iris()
-X = iris.data[:, :2] # we only take the first two features. We could
-                     # avoid this ugly slicing by using a two-dim dataset
-Y = iris.target
-
-h=.02 # step size in the mesh
-
-# we create an instance of SVM and fit our data. We do not scale our
-# data since we want to plot the support vectors
-clf = svm.SVC(kernel='linear')
-clf.fit(X, Y)
-
-# Plot the decision boundary. For that, we will assign a color to each
-# point in the mesh [x_min, x_max]x[y_min, y_max].
-x_min, x_max = X[:,0].min()-1, X[:,0].max()+1
-y_min, y_max = X[:,1].min()-1, X[:,1].max()+1
-xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
-Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
-
-# Put the result into a color plot
-Z = Z.reshape(xx.shape)
-pl.set_cmap(pl.cm.Paired)
-pl.pcolormesh(xx, yy, Z)
-
-# Plot also the training points
-pl.scatter(X[:,0], X[:,1], c=Y)
-# and the support vectors
-pl.scatter(clf.support_[:,0], clf.support_[:, 1], marker='+')
-pl.title('3-Class classification using a Support Vector Machine. \n' + \
-         'Support vectors are highlighted with a +')
-pl.axis('tight')
-pl.show()
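
For reference, a sketch of the same decision-surface example written against the current package layout, assuming a modern scikit-learn release (sklearn in place of scikits.learn, matplotlib.pyplot in place of pylab, and support_vectors_ in place of the old support_ attribute); this is not the removed file's code:

    import numpy as np
    import matplotlib.pyplot as plt
    from sklearn import svm, datasets

    # import some data to play with; keep only two features so the surface is 2-D
    iris = datasets.load_iris()
    X = iris.data[:, :2]
    y = iris.target

    h = 0.02  # step size in the mesh

    # fit an unscaled linear SVM so the support vectors stay in data units
    clf = svm.SVC(kernel='linear').fit(X, y)

    # evaluate the classifier on a grid covering the data
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    # colored decision regions, training points, and support vectors
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.Paired)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='k')
    plt.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], marker='+', c='k')
    plt.title('3-class classification with a linear SVM\n(support vectors marked with +)')
    plt.axis('tight')
    plt.show()
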
diff --git a/examples/plot_svm_hyperplane.py b/examples/plot_svm_hyperplane.py
deleted file mode 100644
index f2e930552ffb6c30658063d5a1a2b5d7881d8253..0000000000000000000000000000000000000000
--- a/examples/plot_svm_hyperplane.py
+++ /dev/null
@@ -1,44 +0,0 @@
-"""
-===========================================
-SVM: Maximum separating margin hyperplane
-===========================================
-
-"""
-
-import numpy as np
-import pylab as pl
-from scikits.learn import svm
-
-# we create 40 separable points
-np.random.seed(0)
-X = np.r_[np.random.randn(20, 2) - [2,2], np.random.randn(20, 2) + [2, 2]]
-Y = [0]*20 + [1]*20
-
-# fit the model
-clf = svm.SVC(kernel='linear')
-clf.fit(X, Y)
-
-# get the separating hyperplane
-w = np.dot(clf.dual_coef_[0], clf.support_)
-a = -w[0]/w[1]
-xx = np.linspace(-5, 5)
-yy = a*xx + (clf.rho_[0])/w[1]
-
-# plot the parallels to the separating hyperplane that pass through the
-# support vectors
-b = clf.support_[0]
-yy_down = a*xx + (b[1] - a*b[0])
-b = clf.support_[-1]
-yy_up = a*xx + (b[1] - a*b[0])
-
-# plot the line, the points, and the nearest vectors to the plane
-pl.set_cmap(pl.cm.Paired)
-pl.plot(xx, yy, 'k-')
-pl.plot(xx, yy_down, 'k--')
-pl.plot(xx, yy_up, 'k--')
-pl.scatter(X[:,0], X[:,1], c=Y)
-pl.scatter(clf.support_[:,0], clf.support_[:,1], marker='+')
-
-pl.axis('tight')
-pl.show()
-
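
The removed script reconstructs the weight vector by hand from dual_coef_ and the (then) support_ attribute and uses rho_ for the offset. A sketch of the same maximum-margin plot under the current API, assuming coef_, intercept_, and support_vectors_ (note the sign: the modern decision function is w.x + b, so the offset enters with a minus sign):

    import numpy as np
    import matplotlib.pyplot as plt
    from sklearn import svm

    # 40 linearly separable points in two clusters
    np.random.seed(0)
    X = np.r_[np.random.randn(20, 2) - [2, 2], np.random.randn(20, 2) + [2, 2]]
    Y = [0] * 20 + [1] * 20

    clf = svm.SVC(kernel='linear').fit(X, Y)

    # separating hyperplane: w . x + b = 0  =>  x1 = -(w0 * x0 + b) / w1
    w = clf.coef_[0]
    a = -w[0] / w[1]
    xx = np.linspace(-5, 5)
    yy = a * xx - clf.intercept_[0] / w[1]

    # margins: parallels to the hyperplane through a support vector of each class
    sv = clf.support_vectors_[0]
    yy_down = a * xx + (sv[1] - a * sv[0])
    sv = clf.support_vectors_[-1]
    yy_up = a * xx + (sv[1] - a * sv[0])

    # plot the line, the margins, the points, and the support vectors
    plt.plot(xx, yy, 'k-')
    plt.plot(xx, yy_down, 'k--')
    plt.plot(xx, yy_up, 'k--')
    plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired, edgecolors='k')
    plt.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], marker='+', c='k')
    plt.axis('tight')
    plt.show()
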
diff --git a/examples/plot_svm_nonlinear.py b/examples/plot_svm_nonlinear.py
deleted file mode 100644
index 8b33c28b0a9a8958718074fe908ec2927959866b..0000000000000000000000000000000000000000
--- a/examples/plot_svm_nonlinear.py
+++ /dev/null
@@ -1,31 +0,0 @@
-"""
-=================
-Non-linear SVM
-=================
-
-"""
-
-import numpy as np
-import pylab as pl
-from scikits.learn import svm
-
-xx, yy = np.meshgrid(np.linspace(-5, 5, 500), np.linspace(-5, 5, 500))
-np.random.seed(0)
-X = np.random.randn(300, 2)
-Y = np.logical_xor(X[:,0]>0, X[:,1]>0)
-
-# fit the model
-clf = svm.SVC(impl='nu_svc', kernel='rbf', C=100)
-clf.fit(X, Y)
-
-# plot the line, the points, and the nearest vectors to the plane
-Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
-Z = Z.reshape(xx.shape)
-
-pl.set_cmap(pl.cm.Paired)
-pl.pcolormesh(xx, yy, Z)
-pl.scatter(X[:,0], X[:,1], c=Y)
-
-pl.axis('tight')
-pl.show()
-
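
A sketch of the XOR example under the current API, assuming the old impl='nu_svc' switch maps to the separate NuSVC class (which is parametrized by nu rather than C):

    import numpy as np
    import matplotlib.pyplot as plt
    from sklearn import svm

    xx, yy = np.meshgrid(np.linspace(-5, 5, 500), np.linspace(-5, 5, 500))
    np.random.seed(0)
    X = np.random.randn(300, 2)
    Y = np.logical_xor(X[:, 0] > 0, X[:, 1] > 0).astype(int)

    # nu-SVM with an RBF kernel; the XOR labels are not linearly separable
    clf = svm.NuSVC(kernel='rbf').fit(X, Y)

    # color the plane by the predicted class
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.Paired)
    plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired, edgecolors='k')
    plt.axis('tight')
    plt.show()
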
diff --git a/examples/plot_svm_oneclass.py b/examples/plot_svm_oneclass.py
deleted file mode 100644
index 6b437b0dc24563dd1b64705d6ebcdd21d207f25c..0000000000000000000000000000000000000000
--- a/examples/plot_svm_oneclass.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""
-==================
-One-class SVM
-==================
-"""
-
-import numpy as np
-import pylab as pl
-from scikits.learn import svm
-
-xx, yy = np.meshgrid(np.linspace(-5, 5, 500), np.linspace(-5, 5, 500))
-X = np.random.randn(100, 2)
-Y = [0]*100
-
-# fit the model
-clf = svm.OneClassSVM(nu=0.5)
-clf.fit(X, Y)
-
-# plot the line, the points, and the nearest vectors to the plane
-Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
-Z = Z.reshape(xx.shape)
-
-pl.set_cmap(pl.cm.Paired)
-pl.pcolormesh(xx, yy, Z)
-pl.scatter(X[:,0], X[:,1], c=Y)
-pl.scatter(clf.support_[:,0], clf.support_[:,1], c='black')
-pl.axis('tight')
-pl.show()
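
A sketch of the one-class example under the current API, assuming OneClassSVM is fit without labels and that predict marks inliers as +1 and outliers as -1:

    import numpy as np
    import matplotlib.pyplot as plt
    from sklearn import svm

    xx, yy = np.meshgrid(np.linspace(-5, 5, 500), np.linspace(-5, 5, 500))
    np.random.seed(0)
    X = np.random.randn(100, 2)

    # unsupervised fit: no target vector is passed
    clf = svm.OneClassSVM(nu=0.5).fit(X)

    # +1 inside the learned region, -1 outside
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.Paired)
    plt.scatter(X[:, 0], X[:, 1], c='white', edgecolors='k')
    plt.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], c='black', marker='+')
    plt.axis('tight')
    plt.show()
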