Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit d5c4e03

Browse files
committed
add parameter validation to SVDD and update dunder-docs (similar to ocSVM #24001)
finish v1.2 deprecation of params kwargs in `.fit` of SVDD (similar to ocSVM #20843) removed SVDD param-validation exception from test_common.py since #23462 is go (#22722)
1 parent 30e90da commit d5c4e03

File tree

2 files changed

+10
-14
lines changed

2 files changed

+10
-14
lines changed

sklearn/svm/_classes.py

+10-13
Original file line numberDiff line numberDiff line change
@@ -1848,14 +1848,15 @@ class SVDD(OutlierMixin, BaseLibSVM):
18481848
18491849
degree : int, default=3
18501850
Degree of the polynomial kernel function ('poly').
1851-
Ignored by all other kernels.
1851+
Must be non-negative. Ignored by all other kernels.
18521852
18531853
gamma : {'scale', 'auto'} or float, default='scale'
18541854
Kernel coefficient for 'rbf', 'poly' and 'sigmoid'.
18551855
18561856
- if ``gamma='scale'`` (default) is passed then it uses
18571857
1 / (n_features * X.var()) as value of gamma,
18581858
- if 'auto', uses 1 / n_features.
1859+
- if float, must be non-negative.
18591860
18601861
coef0 : float, default=0.0
18611862
Independent term in kernel function.
@@ -1933,9 +1934,9 @@ class SVDD(OutlierMixin, BaseLibSVM):
19331934
19341935
See Also
19351936
--------
1936-
OneClassSVM : Support vector method for outlier detection via a separating
1937-
soft-margin hyperplane implemented with libsvm with a parameter to
1938-
control the number of support vectors.
1937+
sklearn.svm.OneClassSVM : Support vector method for outlier detection via
1938+
a separating soft-margin hyperplane implemented with libsvm with
1939+
a parameter to control the number of support vectors.
19391940
19401941
References
19411942
----------
@@ -1961,6 +1962,10 @@ class SVDD(OutlierMixin, BaseLibSVM):
19611962

19621963
_impl = "svdd_l1"
19631964

1965+
_parameter_constraints = {**BaseLibSVM._parameter_constraints} # type: ignore
1966+
for unused_param in ["C", "class_weight", "epsilon", "probability", "random_state"]:
1967+
_parameter_constraints.pop(unused_param)
1968+
19641969
def __init__(
19651970
self,
19661971
*,
@@ -1994,7 +1999,7 @@ def __init__(
19941999
random_state=None,
19952000
)
19962001

1997-
def fit(self, X, y=None, sample_weight=None, **params):
2002+
def fit(self, X, y=None, sample_weight=None):
19982003
"""Learn a soft minimum-volume hypersphere around the sample X.
19992004
20002005
Parameters
@@ -2010,14 +2015,6 @@ def fit(self, X, y=None, sample_weight=None, **params):
20102015
Per-sample weights. Rescale C per sample. Higher weights
20112016
force the classifier to put more emphasis on these points.
20122017
2013-
**params : dict
2014-
Additional fit parameters.
2015-
2016-
.. deprecated:: 1.0
2017-
The `fit` method will no longer accept extra keyword
2018-
parameters in 1.2. These keyword parameters were
2019-
already discarded.
2020-
20212018
Returns
20222019
-------
20232020
self : object

sklearn/tests/test_common.py

-1
Original file line numberDiff line numberDiff line change
@@ -502,7 +502,6 @@ def test_estimators_do_not_raise_errors_in_init_or_set_params(Estimator):
502502
"RegressorChain",
503503
"RidgeCV",
504504
"RidgeClassifierCV",
505-
"SVDD", # TODO remove when the status of #23462 becomes clearer (#22722)
506505
"SelectFdr",
507506
"SelectFpr",
508507
"SelectFromModel",

0 commit comments

Comments
 (0)