Commit 54ebafce authored by Gael Varoquaux

COSMIT we no longer support Py 2.5

Also, avoid 'from foo import bar' or 'import as' as much as possible
parent 36983177
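To make the import convention concrete, here is a minimal standalone sketch (a generic example, not scikit-learn code) of the style the commit moves away from and the style it adopts:

    # Style the diff removes: the name is pulled into the local namespace,
    # so call sites no longer show which module `warn` belongs to.
    from warnings import warn
    warn("something went wrong", RuntimeWarning, stacklevel=2)

    # Style the diff adopts: import the module and qualify the call.
    import warnings
    warnings.warn("something went wrong", RuntimeWarning, stacklevel=2)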
@@ -5,7 +5,7 @@
 #
 # License: BSD Style.
-from warnings import warn
+import warnings
 import numpy as np
 from scipy import linalg
@@ -75,7 +75,7 @@ def _cholesky_omp(X, y, n_nonzero_coefs, tol=None, copy_X=True):
         lam = np.argmax(np.abs(np.dot(X.T, residual)))
         if lam < n_active or alpha[lam] ** 2 < min_float:
             # atom already selected or inner product too small
-            warn(premature, RuntimeWarning, stacklevel=2)
+            warnings.warn(premature, RuntimeWarning, stacklevel=2)
             break
         if n_active > 0:
             # Updates the Cholesky decomposition of X' X
@@ -83,7 +83,7 @@ def _cholesky_omp(X, y, n_nonzero_coefs, tol=None, copy_X=True):
             solve_triangular(L[:n_active, :n_active], L[n_active, :n_active])
             v = nrm2(L[n_active, :n_active]) ** 2
             if 1 - v <= min_float:  # selected atoms are dependent
-                warn(premature, RuntimeWarning, stacklevel=2)
+                warnings.warn(premature, RuntimeWarning, stacklevel=2)
                 break
             L[n_active, n_active] = np.sqrt(1 - v)
         X.T[n_active], X.T[lam] = swap(X.T[n_active], X.T[lam])
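The "Updates the Cholesky decomposition of X' X" branch above appends one row to the triangular factor L when a new atom joins the active set. A minimal NumPy sketch of that update, as a standalone illustration (not scikit-learn code), assuming unit-normalized atoms so the Gram diagonal is 1:

    import numpy as np

    rng = np.random.RandomState(0)
    X_active = rng.randn(20, 3)
    X_active /= np.sqrt((X_active ** 2).sum(axis=0))   # unit-norm atoms
    x_new = rng.randn(20)
    x_new /= np.sqrt((x_new ** 2).sum())

    G = np.dot(X_active.T, X_active)
    L = np.linalg.cholesky(G)                          # G == L L'

    # Solve L w = X_active' x_new (the solve_triangular step in the diff);
    # the new diagonal entry is sqrt(1 - ||w||^2), and 1 - ||w||^2 <= 0
    # would mean the new atom is (nearly) dependent on the active set.
    w = np.linalg.solve(L, np.dot(X_active.T, x_new))
    v = np.dot(w, w)                                   # nrm2(...) ** 2 in the diff
    L_new = np.zeros((4, 4))
    L_new[:3, :3] = L
    L_new[3, :3] = w
    L_new[3, 3] = np.sqrt(1 - v)

    # The extended factor reproduces the Gram matrix of the enlarged active set.
    X_all = np.column_stack([X_active, x_new])
    assert np.allclose(np.dot(L_new, L_new.T), np.dot(X_all.T, X_all))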
@@ -169,14 +169,14 @@ def _gram_omp(Gram, Xy, n_nonzero_coefs, tol_0=None, tol=None,
         lam = np.argmax(np.abs(alpha))
         if lam < n_active or alpha[lam] ** 2 < min_float:
             # selected same atom twice, or inner product too small
-            warn(premature, RuntimeWarning, stacklevel=2)
+            warnings.warn(premature, RuntimeWarning, stacklevel=2)
             break
         if n_active > 0:
             L[n_active, :n_active] = Gram[lam, :n_active]
             solve_triangular(L[:n_active, :n_active], L[n_active, :n_active])
             v = nrm2(L[n_active, :n_active]) ** 2
             if 1 - v <= min_float:  # selected atoms are dependent
-                warn(premature, RuntimeWarning, stacklevel=2)
+                warnings.warn(premature, RuntimeWarning, stacklevel=2)
                 break
             L[n_active, n_active] = np.sqrt(1 - v)
         Gram[n_active], Gram[lam] = swap(Gram[n_active], Gram[lam])
@@ -520,11 +520,12 @@ class OrthogonalMatchingPursuit(LinearModel):
             self.n_nonzero_coefs = int(0.1 * n_features)
         if (Gram is not None or Xy is not None) and (self.fit_intercept is True
                                                      or self.normalize is True):
-            warn('Mean subtraction (fit_intercept) and normalization cannot '
-                 'be applied on precomputed Gram and Xy matrices. Your '
-                 'precomputed values are ignored and recomputed. To avoid '
-                 'this, do the scaling yourself and call with fit_intercept '
-                 'and normalize set to False.', RuntimeWarning, stacklevel=2)
+            warnings.warn('Mean subtraction (fit_intercept) and '
+                          'normalization cannot be applied on precomputed Gram '
+                          'and Xy matrices. Your precomputed values are ignored '
+                          'and recomputed. To avoid this, do the scaling yourself '
+                          'and call with fit_intercept and normalize set to False.',
+                          RuntimeWarning, stacklevel=2)
             Gram, Xy = None, None
         if Gram is not None:
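The workflow this warning asks for is to scale the data once on the caller's side and then pass quantities computed from the already-scaled data. A minimal sketch of that, using the module's Gram-based solver orthogonal_mp_gram under the modern sklearn import path (the estimator's exact signature for passing Gram and Xy may differ between versions, so this is illustrative only):

    import numpy as np
    from sklearn.linear_model import orthogonal_mp_gram

    rng = np.random.RandomState(42)
    X = rng.randn(50, 10)
    y = rng.randn(50)

    # Do the mean subtraction and column normalization yourself ...
    Xc = X - X.mean(axis=0)
    yc = y - y.mean()
    Xc /= np.sqrt((Xc ** 2).sum(axis=0))

    # ... precompute Gram = X'X and Xy = X'y from the scaled data ...
    Gram = np.dot(Xc.T, Xc)
    Xy = np.dot(Xc.T, yc)

    # ... and solve without any further preprocessing.
    coef = orthogonal_mp_gram(Gram, Xy, n_nonzero_coefs=3)
    print(coef.shape)   # (10,)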
@@ -2,11 +2,9 @@
 # License: BSD style
 import warnings
-from sys import version_info
 import numpy as np
-from nose import SkipTest
 from nose.tools import assert_raises, assert_true
 from numpy.testing import assert_equal, assert_array_almost_equal
@@ -23,12 +21,6 @@ G, Xy = np.dot(X.T, X), np.dot(X.T, y)
 # and y (n_samples, 3)
-def check_warnings():
-    if version_info < (2, 6):
-        raise SkipTest("Testing for warnings is not supported in versions \
-older than Python 2.6")
 def test_correct_shapes():
     assert_equal(orthogonal_mp(X, y[:, 0], n_nonzero_coefs=5).shape,
                  (n_features,))
@@ -71,7 +63,6 @@ def test_with_without_gram_tol():
 def test_unreachable_accuracy():
-    check_warnings()  # Skip if unsupported Python version
     with warnings.catch_warnings(record=True) as w:
         warnings.simplefilter('always')
         assert_array_almost_equal(
@@ -133,7 +124,6 @@ def test_estimator():
 def test_scaling_with_gram():
-    check_warnings()  # Skip if unsupported Python version
     with warnings.catch_warnings(record=True) as w:
         warnings.simplefilter('always')
         # Use only 1 nonzero coef to be faster and to avoid warnings
@@ -157,7 +147,6 @@ def test_scaling_with_gram():
 def test_identical_regressors():
-    check_warnings()  # Skip if unsupported Python version
     newX = X.copy()
     newX[:, 1] = newX[:, 0]
     gamma = np.zeros(n_features)
@@ -185,7 +174,6 @@ def test_swapped_regressors():
 def test_no_atoms():
-    check_warnings()  # Skip if unsupported Python version
     y_empty = np.zeros_like(y)
     Xy_empty = np.dot(X.T, y_empty)
     with warnings.catch_warnings():
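With Python 2.5 out of the support window, the tests call warnings.catch_warnings(record=True) directly (available since Python 2.6) instead of going through the removed check_warnings() helper. A minimal, self-contained sketch of that capture pattern:

    import warnings

    def noisy():
        warnings.warn("premature stop", RuntimeWarning, stacklevel=2)

    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')   # record every warning, even repeats
        noisy()

    assert len(w) == 1
    assert issubclass(w[0].category, RuntimeWarning)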