Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 3312bc2

Browse files
kasmith11, jjerphan, and jeremiedbb
authored
MAINT validate parameter in KernelPCA (#24020)
Co-authored-by: Julien Jerphanion <[email protected]> Co-authored-by: jeremiedbb <[email protected]>
1 parent 6e99407 commit 3312bc2

File tree

3 files changed

+40
-36
lines changed

3 files changed

+40
-36
lines changed

sklearn/decomposition/_kernel_pca.py

Lines changed: 40 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
# License: BSD 3 clause
66

77
import numpy as np
8-
import numbers
8+
from numbers import Integral, Real
99
from scipy import linalg
1010
from scipy.sparse.linalg import eigsh
1111

@@ -14,8 +14,8 @@
1414
from ..utils.validation import (
1515
check_is_fitted,
1616
_check_psd_eigenvalues,
17-
check_scalar,
1817
)
18+
from ..utils._param_validation import Interval, StrOptions
1919
from ..utils.deprecation import deprecated
2020
from ..exceptions import NotFittedError
2121
from ..base import BaseEstimator, TransformerMixin, _ClassNamePrefixFeaturesOutMixin
@@ -42,8 +42,8 @@ class KernelPCA(_ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimato
4242
n_components : int, default=None
4343
Number of components. If None, all non-zero components are kept.
4444
45-
kernel : {'linear', 'poly', \
46-
'rbf', 'sigmoid', 'cosine', 'precomputed'}, default='linear'
45+
kernel : {'linear', 'poly', 'rbf', 'sigmoid', 'cosine', 'precomputed'} \
46+
or callable, default='linear'
4747
Kernel used for PCA.
4848
4949
gamma : float, default=None
@@ -239,6 +239,40 @@ class KernelPCA(_ClassNamePrefixFeaturesOutMixin, TransformerMixin, BaseEstimato
239239
(1797, 7)
240240
"""
241241

242+
_parameter_constraints = {
243+
"n_components": [
244+
Interval(Integral, 1, None, closed="left"),
245+
None,
246+
],
247+
"kernel": [
248+
StrOptions({"linear", "poly", "rbf", "sigmoid", "cosine", "precomputed"}),
249+
callable,
250+
],
251+
"gamma": [
252+
Interval(Real, 0, None, closed="left"),
253+
None,
254+
],
255+
"degree": [Interval(Integral, 0, None, closed="left")],
256+
"coef0": [Interval(Real, None, None, closed="neither")],
257+
"kernel_params": [dict, None],
258+
"alpha": [Interval(Real, 0, None, closed="left")],
259+
"fit_inverse_transform": ["boolean"],
260+
"eigen_solver": [StrOptions({"auto", "dense", "arpack", "randomized"})],
261+
"tol": [Interval(Real, 0, None, closed="left")],
262+
"max_iter": [
263+
Interval(Integral, 1, None, closed="left"),
264+
None,
265+
],
266+
"iterated_power": [
267+
Interval(Integral, 0, None, closed="left"),
268+
StrOptions({"auto"}),
269+
],
270+
"remove_zero_eig": ["boolean"],
271+
"random_state": ["random_state"],
272+
"copy_X": ["boolean"],
273+
"n_jobs": [None, Integral],
274+
}
275+
242276
def __init__(
243277
self,
244278
n_components=None,
@@ -313,7 +347,6 @@ def _fit_transform(self, K):
313347
if self.n_components is None:
314348
n_components = K.shape[0] # use all dimensions
315349
else:
316-
check_scalar(self.n_components, "n_components", numbers.Integral, min_val=1)
317350
n_components = min(K.shape[0], self.n_components)
318351

319352
# compute eigenvectors
@@ -343,8 +376,6 @@ def _fit_transform(self, K):
343376
random_state=self.random_state,
344377
selection="module",
345378
)
346-
else:
347-
raise ValueError("Unsupported value for `eigen_solver`: %r" % eigen_solver)
348379

349380
# make sure that the eigenvalues are ok and fix numerical issues
350381
self.eigenvalues_ = _check_psd_eigenvalues(
@@ -416,6 +447,8 @@ def fit(self, X, y=None):
416447
self : object
417448
Returns the instance itself.
418449
"""
450+
self._validate_params()
451+
419452
if self.fit_inverse_transform and self.kernel == "precomputed":
420453
raise ValueError("Cannot fit_inverse_transform with a precomputed kernel.")
421454
X = self._validate_data(X, accept_sparse="csr", copy=self.copy_X)

sklearn/decomposition/tests/test_kernel_pca.py

Lines changed: 0 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -67,12 +67,6 @@ def histogram(x, y, **kwargs):
6767
assert X_pred2.shape == X_pred.shape
6868

6969

70-
def test_kernel_pca_invalid_solver():
71-
"""Check that kPCA raises an error if the solver parameter is invalid"""
72-
with pytest.raises(ValueError):
73-
KernelPCA(eigen_solver="unknown").fit(np.random.randn(10, 10))
74-
75-
7670
def test_kernel_pca_invalid_parameters():
7771
"""Check that kPCA raises an error if the parameters are invalid
7872
@@ -204,16 +198,6 @@ def test_kernel_pca_n_components():
204198
assert shape == (2, c)
205199

206200

207-
@pytest.mark.parametrize("n_components", [-1, 0])
208-
def test_kernal_pca_too_few_components(n_components):
209-
rng = np.random.RandomState(0)
210-
X_fit = rng.random_sample((5, 4))
211-
kpca = KernelPCA(n_components=n_components)
212-
msg = "n_components.* must be >= 1"
213-
with pytest.raises(ValueError, match=msg):
214-
kpca.fit(X_fit)
215-
216-
217201
def test_remove_zero_eig():
218202
"""Check that the ``remove_zero_eig`` parameter works correctly.
219203
@@ -326,18 +310,6 @@ def test_kernel_pca_precomputed_non_symmetric(solver):
326310
assert_array_equal(kpca.eigenvalues_, kpca_c.eigenvalues_)
327311

328312

329-
def test_kernel_pca_invalid_kernel():
330-
"""Tests that using an invalid kernel name raises a ValueError
331-
332-
An invalid kernel name should raise a ValueError at fit time.
333-
"""
334-
rng = np.random.RandomState(0)
335-
X_fit = rng.random_sample((2, 4))
336-
kpca = KernelPCA(kernel="tototiti")
337-
with pytest.raises(ValueError):
338-
kpca.fit(X_fit)
339-
340-
341313
def test_gridsearch_pipeline():
342314
"""Check that kPCA works as expected in a grid search pipeline
343315

sklearn/tests/test_common.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -459,7 +459,6 @@ def test_estimators_do_not_raise_errors_in_init_or_set_params(Estimator):
459459
"HashingVectorizer",
460460
"Isomap",
461461
"IterativeImputer",
462-
"KernelPCA",
463462
"LabelPropagation",
464463
"LabelSpreading",
465464
"Lars",

0 commit comments

Comments (0)