From d1e34dda5b5c9df05569a0feed4eb14c87a5de0b Mon Sep 17 00:00:00 2001
From: Gael Varoquaux <gael.varoquaux@normalesup.org>
Date: Sun, 6 May 2012 22:42:38 +0200
Subject: [PATCH] CLEAN sparse_encode: remove unused arguments

And use the ones that should be used.
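
For example, after this patch a caller passes max_iter through and
drops copy_gram entirely (a minimal sketch against the post-patch
module; the random data, shapes and alpha value are illustrative
only):

    import numpy as np
    from sklearn.decomposition.dict_learning import sparse_encode

    rng = np.random.RandomState(0)
    X = rng.randn(10, 8)  # 10 samples, 8 features
    D = rng.randn(5, 8)   # dictionary of 5 atoms over the same features

    # max_iter is now forwarded to the Lasso solver when
    # algorithm='lasso_cd'; passing a non-None copy_gram only triggers
    # a DeprecationWarning instead of being used.
    code = sparse_encode(X, D, algorithm='lasso_cd', alpha=0.1,
                         max_iter=500)
    print(code.shape)  # (10, 5): one row of coefficients per sample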
---
 sklearn/decomposition/dict_learning.py | 33 ++++++++++++--------------
 1 file changed, 15 insertions(+), 18 deletions(-)

diff --git a/sklearn/decomposition/dict_learning.py b/sklearn/decomposition/dict_learning.py
index 9b79571fab..0186fcc44d 100644
--- a/sklearn/decomposition/dict_learning.py
+++ b/sklearn/decomposition/dict_learning.py
@@ -22,8 +22,8 @@ from ..linear_model import Lasso, orthogonal_mp_gram, lars_path
 
 
 def _sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
-                  n_nonzero_coefs=None, alpha=None, copy_gram=True,
-                  copy_cov=True, init=None, max_iter=1000):
+                  n_nonzero_coefs=None, alpha=None, copy_cov=True,
+                  init=None, max_iter=1000):
     """Generic sparse coding
 
     Each column of the result is the solution to a Lasso problem.
@@ -74,10 +74,6 @@ def _sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
     max_iter: int, 1000 by default
         Maximum number of iterations to perform if `algorithm='lasso_cd'`.
 
-    copy_gram: boolean, optional
-        Whether to copy the precomputed Gram matrix; if False, it may be
-        overwritten.
-
     copy_cov: boolean, optional
         Whether to copy the precomputed covariance matrix; if False, it may be
         overwritten.
@@ -135,7 +131,7 @@ def _sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
         alpha /= n_features  # account for scaling
         new_code = np.empty((n_samples, n_atoms))
         clf = Lasso(alpha=alpha, fit_intercept=False, precompute=gram,
-                    max_iter=1000)
+                    max_iter=max_iter)
         for k in xrange(n_samples):
             # A huge amount of time is spent in this loop. It needs to be
             # tight
@@ -180,7 +176,7 @@ def _sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
 
 
 def sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
-                  n_nonzero_coefs=None, alpha=None, copy_gram=True,
+                  n_nonzero_coefs=None, alpha=None, copy_gram=None,
                   copy_cov=True, init=None, max_iter=1000, n_jobs=1):
     """Sparse coding
 
@@ -236,10 +232,6 @@ def sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
     max_iter: int, 1000 by default
         Maximum number of iterations to perform if `algorithm='lasso_cd'`.
 
-    copy_gram: boolean, optional
-        Whether to copy the precomputed Gram matrix; if False, it may be
-        overwritten.
-
     copy_cov: boolean, optional
         Whether to copy the precomputed covariance matrix; if False, it may be
         overwritten.
@@ -262,29 +254,34 @@ def sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
     warnings.warn("Please note: the interface of sparse_encode has changed: "
                   "It now follows the dictionary learning API and it also "
                   "handles parallelization. Please read the docstring for "
-                  "more information.")
+                  "more information.", stacklevel=2)
+    if copy_gram is not None:
+        warnings.warn("copy_gram in sparse_encode is deprecated: it"
+            "lead to errors.", DeprecationWarning, stacklevel=2)
     dictionary = np.asarray(dictionary)
     X = np.asarray(X)
     n_samples, n_features = X.shape
     n_atoms = dictionary.shape[0]
     if gram is None:
-        copy_gram = False
         gram = np.dot(dictionary, dictionary.T)
     if cov is None and algorithm != 'lasso_cd':
         copy_cov = False
         cov = np.dot(dictionary, X.T)
     if n_jobs == 1 or algorithm == 'threshold':
-        return _sparse_encode(X, dictionary, gram, cov, algorithm,
-                             n_nonzero_coefs, alpha, copy_gram, copy_cov, init)
+        return _sparse_encode(X, dictionary, gram=gram, cov=cov,
+                              algorithm=algorithm,
+                              n_nonzero_coefs=n_nonzero_coefs, alpha=alpha,
+                              copy_cov=copy_cov, init=init, max_iter=max_iter)
     code = np.empty((n_samples, n_atoms))
     slices = list(gen_even_slices(n_samples, n_jobs))
     code_views = Parallel(n_jobs=n_jobs)(
                 delayed(sparse_encode)(X[this_slice], dictionary, gram,
                                        cov[:, this_slice], algorithm,
                                        n_nonzero_coefs, alpha,
-                                       copy_gram, copy_cov,
+                                       copy_cov=copy_cov,
                                        init=init[this_slice] if init is not
-                                       None else None)
+                                       None else None,
+                                       max_iter=max_iter)
                 for this_slice in slices)
     for this_slice, this_view in zip(slices, code_views):
         code[this_slice] = this_view
-- 
GitLab