From 96ed447d9e1445abbc9a42a4785cf23fbf69c098 Mon Sep 17 00:00:00 2001
From: Fabian Pedregosa <fabian.pedregosa@inria.fr>
Date: Wed, 17 Mar 2010 13:28:18 +0000
Subject: [PATCH] Add neighbors example

From: Fabian Pedregosa <fabian.pedregosa@inria.fr>

git-svn-id: https://scikit-learn.svn.sourceforge.net/svnroot/scikit-learn/trunk@554 22fbfee3-77ab-4535-9bad-27d1bd3bc7d8
---
 examples/plot_neighbors.py | 39 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 39 insertions(+)
 create mode 100644 examples/plot_neighbors.py

diff --git a/examples/plot_neighbors.py b/examples/plot_neighbors.py
new file mode 100644
index 0000000000..55dc92d3d1
--- /dev/null
+++ b/examples/plot_neighbors.py
@@ -0,0 +1,39 @@
+"""
+Sample usage of Support Vector Machines to classify a sample.
+It will plot the decision surface and the support vectors.
+"""
+import numpy as np
+import pylab as pl
+from scikits.learn import neighbors, datasets
+
+# import some data to play with
+iris = datasets.load('iris')
+X = iris.data[:, :2] # we only take the first two features. We could
+                     # avoid this ugly slicing by using a two-dim dataset
+Y = iris.target
+
+h = .02  # step size in the mesh
+
+# we create an instance of the Neighbors classifier and fit our data.
+# We do not scale the data since we want to plot the training points.
+clf = neighbors.Neighbors()
+clf.fit(X, Y)
+
+# Plot the decision boundary. For that, we will assign a color to each
+# point in the mesh [x_min, x_max]x[y_min, y_max].
+x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
+y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
+xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
+Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
+
+# Put the result into a color plot
+Z = Z.reshape(xx.shape)
+pl.set_cmap(pl.cm.Paired)
+pl.pcolormesh(xx, yy, Z)
+
+# Plot also the training points, colored by their class
+pl.scatter(X[:, 0], X[:, 1], c=Y)
+
+pl.title('3-Class classification using Nearest Neighbors')
+pl.axis('tight')
+pl.show()
-- 
GitLab