Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

TST: remove ignore_warnings and introduce specific filterwarnings in SAG #11606

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Jul 19, 2018
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 9 additions & 12 deletions sklearn/linear_model/tests/test_sag.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
# License: BSD 3 clause

import math
import pytest
import numpy as np
import scipy.sparse as sp

Expand All @@ -20,7 +21,6 @@
from sklearn.utils.testing import assert_allclose
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import ignore_warnings
from sklearn.utils import compute_class_weight
from sklearn.utils import check_random_state
from sklearn.preprocessing import LabelEncoder, LabelBinarizer
Expand Down Expand Up @@ -231,7 +231,6 @@ def get_step_size(X, alpha, fit_intercept, classification=True):
return 1.0 / (np.max(np.sum(X * X, axis=1)) + fit_intercept + alpha)


@ignore_warnings
def test_classifier_matching():
n_samples = 20
X, y = make_blobs(n_samples=n_samples, centers=2, random_state=0,
Expand Down Expand Up @@ -301,7 +300,7 @@ def test_regressor_matching():
assert_allclose(intercept2, clf.intercept_)


@ignore_warnings
@pytest.mark.filterwarnings('ignore:The max_iter was reached')
def test_sag_pobj_matches_logistic_regression():
"""tests if the sag pobj matches log reg"""
n_samples = 100
Expand Down Expand Up @@ -331,7 +330,7 @@ def test_sag_pobj_matches_logistic_regression():
assert_array_almost_equal(pobj3, pobj1, decimal=4)


@ignore_warnings
@pytest.mark.filterwarnings('ignore:The max_iter was reached')
def test_sag_pobj_matches_ridge_regression():
"""tests if the sag pobj matches ridge reg"""
n_samples = 100
Expand Down Expand Up @@ -363,7 +362,7 @@ def test_sag_pobj_matches_ridge_regression():
assert_array_almost_equal(pobj3, pobj2, decimal=4)


@ignore_warnings
@pytest.mark.filterwarnings('ignore:The max_iter was reached')
def test_sag_regressor_computed_correctly():
"""tests if the sag regressor is computed correctly"""
alpha = .1
Expand Down Expand Up @@ -407,7 +406,6 @@ def test_sag_regressor_computed_correctly():
# assert_almost_equal(clf2.intercept_, spintercept2, decimal=1)'''


@ignore_warnings
def test_get_auto_step_size():
X = np.array([[1, 2, 3], [2, 3, 4], [2, 3, 2]], dtype=np.float64)
alpha = 1.2
Expand Down Expand Up @@ -452,7 +450,7 @@ def test_get_auto_step_size():
max_squared_sum_, alpha, "wrong", fit_intercept)


@ignore_warnings
@pytest.mark.filterwarnings('ignore:The max_iter was reached')
def test_sag_regressor():
"""tests if the sag regressor performs well"""
xmin, xmax = -5, 5
Expand Down Expand Up @@ -491,7 +489,7 @@ def test_sag_regressor():
assert_greater(score2, 0.5)


@ignore_warnings
@pytest.mark.filterwarnings('ignore:The max_iter was reached')
def test_sag_classifier_computed_correctly():
"""tests if the binary classifier is computed correctly"""
alpha = .1
Expand Down Expand Up @@ -534,7 +532,7 @@ def test_sag_classifier_computed_correctly():
assert_almost_equal(clf2.intercept_, spintercept2, decimal=1)


@ignore_warnings
@pytest.mark.filterwarnings('ignore:The max_iter was reached')
def test_sag_multiclass_computed_correctly():
"""tests if the multiclass classifier is computed correctly"""
alpha = .1
Expand Down Expand Up @@ -593,7 +591,6 @@ def test_sag_multiclass_computed_correctly():
assert_almost_equal(clf2.intercept_[i], intercept2[i], decimal=1)


@ignore_warnings
def test_classifier_results():
"""tests if classifier results match target"""
alpha = .1
Expand All @@ -618,7 +615,7 @@ def test_classifier_results():
assert_almost_equal(pred2, y, decimal=12)


@ignore_warnings
@pytest.mark.filterwarnings('ignore:The max_iter was reached')
def test_binary_classifier_class_weight():
"""tests binary classifier with classweights for each class"""
alpha = .1
Expand Down Expand Up @@ -668,7 +665,7 @@ def test_binary_classifier_class_weight():
assert_almost_equal(clf2.intercept_, spintercept2, decimal=1)


@ignore_warnings
@pytest.mark.filterwarnings('ignore:The max_iter was reached')
def test_multiclass_classifier_class_weight():
"""tests multiclass with classweights for each class"""
alpha = .1
Expand Down