Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

[MRG+2] Remove nose-specific _named_check #10160

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit on
Nov 17, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 17 additions & 23 deletions sklearn/metrics/tests/test_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.testing import _named_check

from sklearn.metrics import accuracy_score
from sklearn.metrics import balanced_accuracy_score
Expand Down Expand Up @@ -895,8 +894,8 @@ def test_averaging_multiclass(n_samples=50, n_classes=3):
y_pred_binarize = lb.transform(y_pred)

for name in METRICS_WITH_AVERAGING:
yield (_named_check(check_averaging, name), name, y_true,
y_true_binarize, y_pred, y_pred_binarize, y_score)
yield (check_averaging, name, y_true, y_true_binarize,
y_pred, y_pred_binarize, y_score)


def test_averaging_multilabel(n_classes=5, n_samples=40):
Expand All @@ -910,8 +909,8 @@ def test_averaging_multilabel(n_classes=5, n_samples=40):
y_pred_binarize = y_pred

for name in METRICS_WITH_AVERAGING + THRESHOLDED_METRICS_WITH_AVERAGING:
yield (_named_check(check_averaging, name), name, y_true,
y_true_binarize, y_pred, y_pred_binarize, y_score)
yield (check_averaging, name, y_true, y_true_binarize,
y_pred, y_pred_binarize, y_score)


def test_averaging_multilabel_all_zeroes():
Expand All @@ -922,8 +921,8 @@ def test_averaging_multilabel_all_zeroes():
y_pred_binarize = y_pred

for name in METRICS_WITH_AVERAGING:
yield (_named_check(check_averaging, name), name, y_true,
y_true_binarize, y_pred, y_pred_binarize, y_score)
yield (check_averaging, name, y_true, y_true_binarize,
y_pred, y_pred_binarize, y_score)

# Test _average_binary_score for weight.sum() == 0
binary_metric = (lambda y_true, y_score, average="macro":
Expand All @@ -941,8 +940,8 @@ def test_averaging_multilabel_all_ones():
y_pred_binarize = y_pred

for name in METRICS_WITH_AVERAGING:
yield (_named_check(check_averaging, name), name, y_true,
y_true_binarize, y_pred, y_pred_binarize, y_score)
yield (check_averaging, name, y_true, y_true_binarize,
y_pred, y_pred_binarize, y_score)


@ignore_warnings
Expand Down Expand Up @@ -1031,8 +1030,7 @@ def test_sample_weight_invariance(n_samples=50):
if name in METRICS_WITHOUT_SAMPLE_WEIGHT:
continue
metric = ALL_METRICS[name]
yield _named_check(check_sample_weight_invariance, name), name,\
metric, y_true, y_pred
yield check_sample_weight_invariance, name, metric, y_true, y_pred

# binary
random_state = check_random_state(0)
Expand All @@ -1047,11 +1045,9 @@ def test_sample_weight_invariance(n_samples=50):
continue
metric = ALL_METRICS[name]
if name in THRESHOLDED_METRICS:
yield _named_check(check_sample_weight_invariance, name), name,\
metric, y_true, y_score
yield check_sample_weight_invariance, name, metric, y_true, y_score
else:
yield _named_check(check_sample_weight_invariance, name), name,\
metric, y_true, y_pred
yield check_sample_weight_invariance, name, metric, y_true, y_pred

# multiclass
random_state = check_random_state(0)
Expand All @@ -1066,11 +1062,9 @@ def test_sample_weight_invariance(n_samples=50):
continue
metric = ALL_METRICS[name]
if name in THRESHOLDED_METRICS:
yield _named_check(check_sample_weight_invariance, name), name,\
metric, y_true, y_score
yield check_sample_weight_invariance, name, metric, y_true, y_score
else:
yield _named_check(check_sample_weight_invariance, name), name,\
metric, y_true, y_pred
yield check_sample_weight_invariance, name, metric, y_true, y_pred

# multilabel indicator
_, ya = make_multilabel_classification(n_features=1, n_classes=20,
Expand All @@ -1090,11 +1084,11 @@ def test_sample_weight_invariance(n_samples=50):

metric = ALL_METRICS[name]
if name in THRESHOLDED_METRICS:
yield (_named_check(check_sample_weight_invariance, name), name,
metric, y_true, y_score)
yield (check_sample_weight_invariance, name, metric,
y_true, y_score)
else:
yield (_named_check(check_sample_weight_invariance, name), name,
metric, y_true, y_pred)
yield (check_sample_weight_invariance, name, metric,
y_true, y_pred)


@ignore_warnings
Expand Down
12 changes: 4 additions & 8 deletions sklearn/tests/test_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_in
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.testing import _named_check

import sklearn
from sklearn.cluster.bicluster import BiclusterMixin
Expand Down Expand Up @@ -53,8 +52,7 @@ def test_all_estimators():

for name, Estimator in estimators:
# some can just not be sensibly default constructed
yield (_named_check(check_parameters_default_constructible, name),
name, Estimator)
yield check_parameters_default_constructible, name, Estimator


def test_non_meta_estimators():
Expand All @@ -67,12 +65,11 @@ def test_non_meta_estimators():
continue
estimator = Estimator()
# check this on class
yield _named_check(
check_no_fit_attributes_set_in_init, name), name, Estimator
yield check_no_fit_attributes_set_in_init, name, Estimator

for check in _yield_all_checks(name, estimator):
set_checking_parameters(estimator)
yield _named_check(check, name), name, estimator
yield check, name, estimator


def test_configure():
Expand Down Expand Up @@ -114,8 +111,7 @@ def test_class_weight_balanced_linear_classifiers():
issubclass(clazz, LinearClassifierMixin))]

for name, Classifier in linear_classifiers:
yield _named_check(check_class_weight_balanced_linear_classifier,
name), name, Classifier
yield check_class_weight_balanced_linear_classifier, name, Classifier


@ignore_warnings
Expand Down
20 changes: 0 additions & 20 deletions sklearn/utils/testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -753,26 +753,6 @@ def __exit__(self, exc_type, exc_val, exc_tb):
_delete_folder(self.temp_folder)


class _named_check(object):
"""Wraps a check to show a useful description

Parameters
----------
check : function
Must have ``__name__`` and ``__call__``
arg_text : str
A summary of arguments to the check
"""
# Setting the description on the function itself can give incorrect results
# in failing tests
def __init__(self, check, arg_text):
self.check = check
self.description = ("{0[1]}.{0[3]}:{1.__name__}({2})".format(
inspect.stack()[1], check, arg_text))

def __call__(self, *args, **kwargs):
return self.check(*args, **kwargs)

# Utils to test docstrings


Expand Down