diff --git a/doc/modules/linear_model.rst b/doc/modules/linear_model.rst
index d93ab4e37efb3633afb248029920a4fffb396026..3e7de9e29d72e690ca630a597a2a8f4b8a059402 100644
--- a/doc/modules/linear_model.rst
+++ b/doc/modules/linear_model.rst
@@ -161,10 +161,10 @@ The objective function to minimize is in this case
   * :ref:`example_linear_model_plot_lasso_coordinate_descent_path.py`
 
 
-.. _lars_algorithm:
+.. _least_angle_regression:
 
-LARS algorithm and its variants
-===============================
+Least Angle Regression
+======================
 
 Least-angle regression (LARS) is a regression algorithm for
 high-dimensional data, developed by Bradley Efron, Trevor Hastie, Iain
diff --git a/scikits/learn/linear_model/least_angle.py b/scikits/learn/linear_model/least_angle.py
index d71d9e0c10239db065c1a33257f5f46e57da7206..28c59759e80e6c85b0c7955b3d5550c170b72082 100644
--- a/scikits/learn/linear_model/least_angle.py
+++ b/scikits/learn/linear_model/least_angle.py
@@ -61,7 +61,6 @@ def lars_path(X, y, Xy=None, Gram=None, max_features=None,
 
         * http://en.wikipedia.org/wiki/Lasso_(statistics)#LASSO_method
     """
-    # : make sure it works with non-normalized columns of X
 
     n_features = X.shape[1]
     n_samples = y.size
@@ -163,10 +162,10 @@ def lars_path(X, y, Xy=None, Gram=None, max_features=None,
                                                             n_active, C)
 
         # least squares solution
-
         least_squares, info = potrs(L[:n_active, :n_active],
                                sign_active[:n_active], lower=True)
 
+        # TODO: check whether this rescaling of the least-squares direction by AA is really needed
         AA = 1. / np.sqrt(np.sum(least_squares * sign_active[:n_active]))
         least_squares *= AA