From 80673ed07d9aba3bf9cea8253cd3bb1c20b14b66 Mon Sep 17 00:00:00 2001
From: Fabian Pedregosa <fabian.pedregosa@inria.fr>
Date: Thu, 24 Feb 2011 19:58:14 +0100
Subject: [PATCH] Rename strategy --> algorithm in Neighbors*.
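
NeighborsClassifier and NeighborsRegressor now take the keyword
'algorithm' instead of 'strategy'; the accepted values ('auto',
'ball_tree', 'brute', 'brute_inplace') and the search behaviour are
unchanged. A minimal usage sketch with the renamed keyword (names and
values taken from the docstrings updated below):

    >>> from scikits.learn.neighbors import NeighborsClassifier
    >>> neigh = NeighborsClassifier(n_neighbors=1, algorithm='ball_tree')

Callers still passing strategy= will need updating, since the
constructors now read NeighborsClassifier(n_neighbors=5,
algorithm='auto', window_size=1) and NeighborsRegressor(n_neighbors=5,
mode='mean', algorithm='auto', window_size=1).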

---
 doc/modules/neighbors.rst             |  4 ++--
 scikits/learn/neighbors.py            | 31 ++++++++++++++-------------
 scikits/learn/tests/test_neighbors.py | 12 +++++------
 3 files changed, 24 insertions(+), 23 deletions(-)

diff --git a/doc/modules/neighbors.rst b/doc/modules/neighbors.rst
index d4eb4e2bc0..dc9df8c36a 100644
--- a/doc/modules/neighbors.rst
+++ b/doc/modules/neighbors.rst
@@ -20,8 +20,8 @@ The :class:`NeighborsClassifier` implements the nearest-neighbors
 classification method using a vote heuristic: the class most present
 in the k nearest neighbors of a point is assigned to this point.
 
-It is possible to use different nearest neighbor search strategies by
-using the keyword ``strategy``. Possible values are ``'auto'``,
+It is possible to use different nearest neighbor search algorithms by
+using the keyword ``algorithm``. Possible values are ``'auto'``,
 ``'ball_tree'``, ``'brute'`` and ``'brute_inplace'``. ``'ball_tree'``
 will create an instance of :class:`BallTree` to conduct the search,
 which is usually very efficient in low-dimensional spaces. In higher
diff --git a/scikits/learn/neighbors.py b/scikits/learn/neighbors.py
index 2fab1ffd32..07fca967db 100644
--- a/scikits/learn/neighbors.py
+++ b/scikits/learn/neighbors.py
@@ -22,8 +22,8 @@ class NeighborsClassifier(BaseEstimator, ClassifierMixin):
     window_size : int, optional
         Window size passed to BallTree
 
-    strategy : {'auto', 'ball_tree', 'brute', 'brute_inplace'}, optional
-        Strategy used to compute the nearest neighbors. 'ball_tree'
+    algorithm : {'auto', 'ball_tree', 'brute', 'brute_inplace'}, optional
+        Algorithm used to compute the nearest neighbors. 'ball_tree'
         will construct a BallTree, 'brute' and 'brute_inplace' will
         perform brute-force search. 'auto' will guess the most
         appropriate based on current dataset.
@@ -35,7 +35,7 @@ class NeighborsClassifier(BaseEstimator, ClassifierMixin):
     >>> from scikits.learn.neighbors import NeighborsClassifier
     >>> neigh = NeighborsClassifier(n_neighbors=1)
     >>> neigh.fit(samples, labels)
-    NeighborsClassifier(n_neighbors=1, window_size=1, strategy='auto')
+    NeighborsClassifier(n_neighbors=1, window_size=1, algorithm='auto')
     >>> print neigh.predict([[0,0,0]])
     [1]
 
@@ -48,10 +48,10 @@ class NeighborsClassifier(BaseEstimator, ClassifierMixin):
     http://en.wikipedia.org/wiki/K-nearest_neighbor_algorithm
     """
 
-    def __init__(self, n_neighbors=5, strategy='auto', window_size=1):
+    def __init__(self, n_neighbors=5, algorithm='auto', window_size=1):
         self.n_neighbors = n_neighbors
         self.window_size = window_size
-        self.strategy = strategy
+        self.algorithm = algorithm
 
         
     def fit(self, X, Y, **params):
@@ -73,8 +73,8 @@ class NeighborsClassifier(BaseEstimator, ClassifierMixin):
         self._y = np.asanyarray(Y)
         self._set_params(**params)
 
-        if self.strategy == 'ball_tree' or \
-           (self.strategy == 'auto' and X.shape[1] < 20):
+        if self.algorithm == 'ball_tree' or \
+           (self.algorithm == 'auto' and X.shape[1] < 20):
             self.ball_tree = BallTree(X, self.window_size)
         else:
             self.ball_tree = None
@@ -119,7 +119,7 @@ class NeighborsClassifier(BaseEstimator, ClassifierMixin):
         >>> from scikits.learn.neighbors import NeighborsClassifier
         >>> neigh = NeighborsClassifier(n_neighbors=1)
         >>> neigh.fit(samples, labels)
-        NeighborsClassifier(n_neighbors=1, window_size=1, strategy='auto')
+        NeighborsClassifier(n_neighbors=1, window_size=1, algorithm='auto')
         >>> print neigh.kneighbors([1., 1., 1.])
         (array([ 0.5]), array([2]))
 
@@ -160,7 +160,7 @@ class NeighborsClassifier(BaseEstimator, ClassifierMixin):
 
         # .. get neighbors ..
         if self.ball_tree is None:
-            if self.strategy == 'brute_inplace':
+            if self.algorithm == 'brute_inplace':
                 neigh_ind = knn_brute(self._fit_X, X, self.n_neighbors)
             else:
                 from .metrics import euclidean_distances
@@ -203,8 +203,8 @@ class NeighborsRegressor(NeighborsClassifier, RegressorMixin):
     mode : {'mean', 'barycenter'}, optional
         Weights to apply to labels.
 
-    strategy : {'auto', 'ball_tree', 'brute', 'brute_inplace'}, optional
-        Strategy used to compute the nearest neighbors. 'ball_tree'
+    algorithm : {'auto', 'ball_tree', 'brute', 'brute_inplace'}, optional
+        Algorithm used to compute the nearest neighbors. 'ball_tree'
         will construct a BallTree, 'brute' and 'brute_inplace' will
         perform brute-force search. 'auto' will guess the most
         appropriate based on current dataset.
@@ -216,7 +216,8 @@ class NeighborsRegressor(NeighborsClassifier, RegressorMixin):
     >>> from scikits.learn.neighbors import NeighborsRegressor
     >>> neigh = NeighborsRegressor(n_neighbors=2)
     >>> neigh.fit(X, y)
-    NeighborsRegressor(n_neighbors=2, window_size=1, mode='mean', strategy='auto')
+    NeighborsRegressor(n_neighbors=2, window_size=1, mode='mean',
+              algorithm='auto')
     >>> print neigh.predict([[1.5]])
     [ 0.5]
 
@@ -226,12 +227,12 @@ class NeighborsRegressor(NeighborsClassifier, RegressorMixin):
     """
 
 
-    def __init__(self, n_neighbors=5, mode='mean', strategy='auto',
+    def __init__(self, n_neighbors=5, mode='mean', algorithm='auto',
                  window_size=1):
         self.n_neighbors = n_neighbors
         self.window_size = window_size
         self.mode = mode
-        self.strategy = strategy
+        self.algorithm = algorithm
 
 
     def predict(self, X, **params):
@@ -256,7 +257,7 @@ class NeighborsRegressor(NeighborsClassifier, RegressorMixin):
 
         # .. get neighbors ..
         if self.ball_tree is None:
-            if self.strategy == 'brute_inplace':
+            if self.algorithm == 'brute_inplace':
                 neigh_ind = knn_brute(self._fit_X, X, self.n_neighbors)
             else:
                 from .metrics.pairwise import euclidean_distances
diff --git a/scikits/learn/tests/test_neighbors.py b/scikits/learn/tests/test_neighbors.py
index 0f5f5a3206..3921fadf6b 100644
--- a/scikits/learn/tests/test_neighbors.py
+++ b/scikits/learn/tests/test_neighbors.py
@@ -24,19 +24,19 @@ def test_neighbors_1D():
 
     for s in ('auto', 'ball_tree', 'brute', 'inplace'):
         # n_neighbors = 1
-        knn = neighbors.NeighborsClassifier(n_neighbors=1, strategy=s)
+        knn = neighbors.NeighborsClassifier(n_neighbors=1, algorithm=s)
         knn.fit(X, Y)
         test = [[i + 0.01] for i in range(0, n/2)] + \
                [[i - 0.01] for i in range(n/2, n)]
         assert_array_equal(knn.predict(test), [0]*3 + [1]*3)
 
         # n_neighbors = 2
-        knn = neighbors.NeighborsClassifier(n_neighbors=2, strategy=s)
+        knn = neighbors.NeighborsClassifier(n_neighbors=2, algorithm=s)
         knn.fit(X, Y)
         assert_array_equal(knn.predict(test), [0]*4 + [1]*2)
 
         # n_neighbors = 3
-        knn = neighbors.NeighborsClassifier(n_neighbors=3, strategy=s)
+        knn = neighbors.NeighborsClassifier(n_neighbors=3, algorithm=s)
         knn.fit(X, Y)
         assert_array_equal(knn.predict([[i +0.01] for i in range(0, n/2)]),
                             [0 for i in range(n/2)])
@@ -54,15 +54,15 @@ def test_neighbors_iris():
 
     for s in ('auto', 'ball_tree', 'brute', 'inplace'):
         clf = neighbors.NeighborsClassifier()
-        clf.fit(iris.data, iris.target, n_neighbors=1, strategy=s)
+        clf.fit(iris.data, iris.target, n_neighbors=1, algorithm=s)
         assert_array_equal(clf.predict(iris.data), iris.target)
 
-        clf.fit(iris.data, iris.target, n_neighbors=9, strategy=s)
+        clf.fit(iris.data, iris.target, n_neighbors=9, algorithm=s)
         assert_(np.mean(clf.predict(iris.data)== iris.target) > 0.95)
 
         for m in ('barycenter', 'mean'):
             rgs = neighbors.NeighborsRegressor()
-            rgs.fit(iris.data, iris.target, mode=m, strategy=s)
+            rgs.fit(iris.data, iris.target, mode=m, algorithm=s)
             assert_(np.mean(
                 rgs.predict(iris.data).round() == iris.target) > 0.95)
 
-- 
GitLab