23 changes: 13 additions & 10 deletions sklearn/linear_model/_logistic.py
@@ -10,7 +10,6 @@
from numbers import Integral, Real

import numpy as np
from joblib import effective_n_jobs
from scipy import optimize

from sklearn._loss.loss import HalfBinomialLoss, HalfMultinomialLoss
@@ -894,7 +893,10 @@ class of problems.
*warm_start* to support *lbfgs*, *newton-cg*, *sag*, *saga* solvers.

n_jobs : int, default=None
Not used at the moment.
Does not have any effect.

.. deprecated:: 1.8
`n_jobs` is deprecated in version 1.8 and will be removed in 1.10.

l1_ratio : float, default=None
The Elastic-Net mixing parameter, with ``0 <= l1_ratio <= 1``. Only
@@ -1090,6 +1092,13 @@ def fit(self, X, y, sample_weight=None):
"(penalty={})".format(self.penalty)
)

msg = (
"'n_jobs' has no effect since 1.8 and will be removed in 1.10. "
f"You provided 'n_jobs={self.n_jobs}', please leave it unspecified."
)
if self.n_jobs is not None:
warnings.warn(msg, category=FutureWarning)

if self.penalty == "elasticnet" and self.l1_ratio is None:
raise ValueError("l1_ratio must be specified when penalty is elasticnet.")

@@ -1139,12 +1148,6 @@ def fit(self, X, y, sample_weight=None):
"value > 1e30 results in a frozen fit. Please choose another "
"solver or rescale the input X."
)
if effective_n_jobs(self.n_jobs) != 1:
warnings.warn(
"'n_jobs' > 1 does not have any effect when"
" 'solver' is set to 'liblinear'. Got 'n_jobs'"
" = {}.".format(effective_n_jobs(self.n_jobs))
)
self.coef_, self.intercept_, self.n_iter_ = _fit_liblinear(
X,
y,
@@ -1183,8 +1186,8 @@ def fit(self, X, y, sample_weight=None):
warm_start_coef, self.intercept_[:, np.newaxis], axis=1
)

# TODO: deprecate n_jobs since it's not used anymore and enable multi-threading
# if benchmarks show a positive effect.
# TODO: enable multi-threading if benchmarks show a positive effect,
# see https://github.com/scikit-learn/scikit-learn/issues/32162
n_threads = 1

coefs, _, n_iter = _logistic_regression_path(
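
A minimal usage sketch of the behaviour the _logistic.py changes above introduce (an illustration, not part of the diff; the toy X/y data are assumptions and it presumes a scikit-learn build that includes this change): any explicit n_jobs passed to LogisticRegression now emits the FutureWarning added in fit, while the default None stays silent.

import warnings

import numpy as np
from sklearn.linear_model import LogisticRegression

X = np.array([[0.0], [1.0], [2.0], [3.0]])
y = np.array([0, 0, 1, 1])

# The new check in fit() fires whenever self.n_jobs is not None,
# regardless of the chosen solver.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    LogisticRegression(n_jobs=2).fit(X, y)
assert any(issubclass(w.category, FutureWarning) for w in caught)

# The default (n_jobs=None) is unaffected and emits no deprecation warning.
LogisticRegression().fit(X, y)
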
25 changes: 10 additions & 15 deletions sklearn/linear_model/tests/test_logistic.py
@@ -35,7 +35,7 @@
from sklearn.preprocessing import LabelEncoder, StandardScaler, scale
from sklearn.svm import l1_min_c
from sklearn.utils import compute_class_weight, shuffle
from sklearn.utils._testing import ignore_warnings, skip_if_no_parallel
from sklearn.utils._testing import ignore_warnings
from sklearn.utils.fixes import _IS_32BIT, COO_CONTAINERS, CSR_CONTAINERS

pytestmark = pytest.mark.filterwarnings(
@@ -119,20 +119,6 @@ def __call__(self, model, X, y, sample_weight=None):
assert mock_scorer.calls == 1


@skip_if_no_parallel
def test_lr_liblinear_warning():
X, y = make_classification(random_state=0)

lr = LogisticRegression(solver="liblinear", n_jobs=2)
warning_message = (
"'n_jobs' > 1 does not have any effect when"
" 'solver' is set to 'liblinear'. Got 'n_jobs'"
" = 2."
)
with pytest.warns(UserWarning, match=warning_message):
lr.fit(X, y)


@pytest.mark.parametrize("csr_container", CSR_CONTAINERS)
def test_predict_3_classes(csr_container):
check_predictions(LogisticRegression(C=10), X, Y2)
@@ -2619,3 +2605,12 @@ def test_logisticregressioncv_warns_with_use_legacy_attributes():
msg = "The default value of use_legacy_attributes will change from True"
with pytest.warns(FutureWarning, match=msg):
lr.fit(X, y)


# TODO(1.10): remove this test when n_jobs gets removed
def test_logisticregression_warns_with_n_jobs():
X, y = make_classification(n_classes=3, n_samples=50, n_informative=6)
lr = LogisticRegression(n_jobs=1)
msg = "'n_jobs' has no effect"
with pytest.warns(FutureWarning, match=msg):
lr.fit(X, y)
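
For downstream projects the migration is simply to stop passing n_jobs, since it has no effect for any solver. A test suite that still needs to exercise the argument during the deprecation window can silence the warning explicitly; a hedged sketch, assuming pytest and a scikit-learn build with this change (the test name is hypothetical):

import pytest

from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression


# Hypothetical downstream test: keep passing n_jobs until its removal in 1.10,
# but filter the deprecation warning so the suite stays warning-clean.
@pytest.mark.filterwarnings("ignore:'n_jobs' has no effect:FutureWarning")
def test_fit_with_legacy_n_jobs():
    X, y = make_classification(random_state=0)
    LogisticRegression(n_jobs=2).fit(X, y)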