Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit 4953ec3

Browse files
NicolasHug and adrinjalali
authored and committed
MNT remove more deprecations for 0.23 (#15860)
* removed warn_on_dtype * removed parameters to check_is_fitted * all_estimators parameters * deprecated n_components attribute in AgglomerativeClustering * change default of base.score for multioutput * removed lots of useless decorators? * changed default of copy in quantil_transform * removed six.py * nmf default value of init param * raise error instead of warning in LinearDiscriminantAnalysis * removed label param in hamming_loss * updated method parameter of power_transform * pep8 * changed default value of min_impurity_split * removed assert_false and assert_true * added and fixed versionchanged directives * reset min_impurity_split default to None * fixed LDA issue * fixed some test * more docstrings updates * set min_impurity_decrease for test to pass * upate docstring example * fixed doctest * removed multiouput.score since it's now consistent with the default * deprecate least_angle parameter combination * remove support for l1 or l2 loss in svm * removed linear_assignment.py * add test
1 parent 9355b3c commit 4953ec3

File tree

7 files changed

+12
-460
lines changed

7 files changed

+12
-460
lines changed

sklearn/linear_model/_least_angle.py

Lines changed: 4 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -47,12 +47,6 @@ def lars_path(X, y, Xy=None, Gram=None, max_iter=500, alpha_min=0,
4747
Input data. Note that if X is None then the Gram matrix must be
4848
specified, i.e., cannot be None or False.
4949
50-
.. deprecated:: 0.21
51-
52-
The use of ``X`` is ``None`` in combination with ``Gram`` is not
53-
``None`` will be removed in v0.23. Use :func:`lars_path_gram`
54-
instead.
55-
5650
y : None or array-like of shape (n_samples,)
5751
Input targets.
5852
@@ -67,11 +61,6 @@ def lars_path(X, y, Xy=None, Gram=None, max_iter=500, alpha_min=0,
6761
matrix is precomputed from the given X, if there are more samples
6862
than features.
6963
70-
.. deprecated:: 0.21
71-
72-
The use of ``X`` is ``None`` in combination with ``Gram`` is not
73-
None will be removed in v0.23. Use :func:`lars_path_gram` instead.
74-
7564
max_iter : int, default=500
7665
Maximum number of iterations to perform, set to infinity for no limit.
7766
@@ -155,9 +144,10 @@ def lars_path(X, y, Xy=None, Gram=None, max_iter=500, alpha_min=0,
155144
156145
"""
157146
if X is None and Gram is not None:
158-
warnings.warn('Use lars_path_gram to avoid passing X and y. '
159-
'The current option will be removed in v0.23.',
160-
FutureWarning)
147+
raise ValueError(
148+
'X cannot be None if Gram is not None. '
149+
'Use lars_path_gram to avoid passing X and y.'
150+
)
161151
return _lars_path_solver(
162152
X=X, y=y, Xy=Xy, Gram=Gram, n_samples=None, max_iter=max_iter,
163153
alpha_min=alpha_min, method=method, copy_X=copy_X,

sklearn/linear_model/tests/test_least_angle.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,8 @@
1515
from sklearn.utils._testing import TempMemmap
1616
from sklearn.exceptions import ConvergenceWarning
1717
from sklearn import linear_model, datasets
18-
from sklearn.linear_model._least_angle import _lars_path_residues, LassoLarsIC
18+
from sklearn.linear_model._least_angle import _lars_path_residues
19+
from sklearn.linear_model import LassoLarsIC, lars_path
1920

2021
# TODO: use another dataset that has multiple drops
2122
diabetes = datasets.load_diabetes()
@@ -730,3 +731,9 @@ def test_lasso_lars_fit_copyX_behaviour(copy_X):
730731
y = X[:, 2]
731732
lasso_lars.fit(X, y, copy_X=copy_X)
732733
assert copy_X == np.array_equal(X, X_copy)
734+
735+
736+
def test_X_none_gram_not_none():
737+
with pytest.raises(ValueError,
738+
match="X cannot be None if Gram is not None"):
739+
lars_path(X=None, y=[1], Gram='not None')

sklearn/multioutput.py

Lines changed: 0 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -265,44 +265,6 @@ def partial_fit(self, X, y, sample_weight=None):
265265
super().partial_fit(
266266
X, y, sample_weight=sample_weight)
267267

268-
# XXX Remove this method in 0.23
269-
def score(self, X, y, sample_weight=None):
270-
"""Returns the coefficient of determination R^2 of the prediction.
271-
272-
The coefficient R^2 is defined as (1 - u/v), where u is the residual
273-
sum of squares ((y_true - y_pred) ** 2).sum() and v is the regression
274-
sum of squares ((y_true - y_true.mean()) ** 2).sum().
275-
Best possible score is 1.0 and it can be negative (because the
276-
model can be arbitrarily worse). A constant model that always
277-
predicts the expected value of y, disregarding the input features,
278-
would get a R^2 score of 0.0.
279-
280-
Notes
281-
-----
282-
R^2 is calculated by weighting all the targets equally using
283-
`multioutput='uniform_average'`.
284-
285-
Parameters
286-
----------
287-
X : array-like, shape (n_samples, n_features)
288-
Test samples.
289-
290-
y : array-like, shape (n_samples) or (n_samples, n_outputs)
291-
True values for X.
292-
293-
sample_weight : array-like, shape [n_samples], optional
294-
Sample weights.
295-
296-
Returns
297-
-------
298-
score : float
299-
R^2 of self.predict(X) wrt. y.
300-
"""
301-
# XXX remove in 0.19 when r2_score default for multioutput changes
302-
from .metrics import r2_score
303-
return r2_score(y, self.predict(X), sample_weight=sample_weight,
304-
multioutput='uniform_average')
305-
306268

307269
class MultiOutputClassifier(ClassifierMixin, _MultiOutputEstimator):
308270
"""Multi target classification

sklearn/svm/_classes.py

Lines changed: 0 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -214,18 +214,6 @@ def fit(self, X, y, sample_weight=None):
214214
self : object
215215
An instance of the estimator.
216216
"""
217-
# FIXME Remove l1/l2 support in 0.23 ----------------------------------
218-
msg = ("loss='%s' has been deprecated in favor of "
219-
"loss='%s' as of 0.16. Backward compatibility"
220-
" for the loss='%s' will be removed in %s")
221-
222-
if self.loss in ('l1', 'l2'):
223-
old_loss = self.loss
224-
self.loss = {'l1': 'hinge', 'l2': 'squared_hinge'}.get(self.loss)
225-
warnings.warn(msg % (old_loss, self.loss, old_loss, '0.23'),
226-
FutureWarning)
227-
# ---------------------------------------------------------------------
228-
229217
if self.C < 0:
230218
raise ValueError("Penalty term must be positive; got (C=%r)"
231219
% self.C)
@@ -406,20 +394,6 @@ def fit(self, X, y, sample_weight=None):
406394
self : object
407395
An instance of the estimator.
408396
"""
409-
# FIXME Remove l1/l2 support in 0.23 ----------------------------------
410-
msg = ("loss='%s' has been deprecated in favor of "
411-
"loss='%s' as of 0.16. Backward compatibility"
412-
" for the loss='%s' will be removed in %s")
413-
414-
if self.loss in ('l1', 'l2'):
415-
old_loss = self.loss
416-
self.loss = {'l1': 'epsilon_insensitive',
417-
'l2': 'squared_epsilon_insensitive'
418-
}.get(self.loss)
419-
warnings.warn(msg % (old_loss, self.loss, old_loss, '0.23'),
420-
FutureWarning)
421-
# ---------------------------------------------------------------------
422-
423397
if self.C < 0:
424398
raise ValueError("Penalty term must be positive; got (C=%r)"
425399
% self.C)

sklearn/svm/tests/test_svm.py

Lines changed: 0 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -736,39 +736,6 @@ def test_linearsvc_parameters():
736736
svm.LinearSVC(loss="l3").fit(X, y)
737737

738738

739-
# FIXME remove in 0.23
740-
def test_linearsvx_loss_penalty_deprecations():
741-
X, y = [[0.0], [1.0]], [0, 1]
742-
743-
msg = ("loss='%s' has been deprecated in favor of "
744-
"loss='%s' as of 0.16. Backward compatibility"
745-
" for the %s will be removed in %s")
746-
747-
# LinearSVC
748-
# loss l1 --> hinge
749-
assert_warns_message(FutureWarning,
750-
msg % ("l1", "hinge", "loss='l1'", "0.23"),
751-
svm.LinearSVC(loss="l1").fit, X, y)
752-
753-
# loss l2 --> squared_hinge
754-
assert_warns_message(FutureWarning,
755-
msg % ("l2", "squared_hinge", "loss='l2'", "0.23"),
756-
svm.LinearSVC(loss="l2").fit, X, y)
757-
758-
# LinearSVR
759-
# loss l1 --> epsilon_insensitive
760-
assert_warns_message(FutureWarning,
761-
msg % ("l1", "epsilon_insensitive", "loss='l1'",
762-
"0.23"),
763-
svm.LinearSVR(loss="l1").fit, X, y)
764-
765-
# loss l2 --> squared_epsilon_insensitive
766-
assert_warns_message(FutureWarning,
767-
msg % ("l2", "squared_epsilon_insensitive",
768-
"loss='l2'", "0.23"),
769-
svm.LinearSVR(loss="l2").fit, X, y)
770-
771-
772739
def test_linear_svx_uppercase_loss_penality_raises_error():
773740
# Check if Upper case notation raises error at _fit_liblinear
774741
# which is called by fit

0 commit comments

Comments (0)