Commit 59b0e62

FIX Raise a FutureWarning
1 parent 451ba77 commit 59b0e62

File tree

2 files changed: +39 -12 lines changed

sklearn/preprocessing/data.py

Lines changed: 23 additions & 12 deletions
@@ -2544,8 +2544,8 @@ class PowerTransformer(BaseEstimator, TransformerMixin):
 
     Notes
     -----
-    NaNs are treated as missing values: disregarded in fit, and maintained in
-    transform.
+    NaNs are treated as missing values: disregarded in ``fit``, and maintained
+    in ``transform``.
 
     For a comparison of the different scalers, transformers, and normalizers,
     see :ref:`examples/preprocessing/plot_all_scaling.py
@@ -2844,7 +2844,7 @@ def _check_input(self, X, check_positive=False, check_shape=False,
         return X
 
 
-def power_transform(X, method='yeo-johnson', standardize=True, copy=True):
+def power_transform(X, method='warn', standardize=True, copy=True):
     """
     Power transforms are a family of parametric, monotonic transformations
     that are applied to make data more Gaussian-like. This is useful for
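An aside on the signature change above: the new default, method='warn', is not a real transform. It is a sentinel meaning "the caller did not pass method at all", which lets the function tell an explicit choice apart from reliance on the old default. A minimal, self-contained sketch of this deprecation pattern (the function and option names here are illustrative, not part of scikit-learn):

    import warnings

    def frobnicate(mode='warn'):
        # 'warn' is a sentinel: it only appears when the caller omitted 'mode'.
        if mode == 'warn':
            warnings.warn("The default value of 'mode' will change from 'old' "
                          "to 'new' in a future release. Set 'mode' explicitly "
                          "to silence this warning.", FutureWarning)
            mode = 'old'  # preserve today's behavior until the default flips
        return mode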
@@ -2866,9 +2866,9 @@ def power_transform(X, method='yeo-johnson', standardize=True, copy=True):
     Parameters
     ----------
     X : array-like, shape (n_samples, n_features)
-        The data used to estimate the optimal transformation parameters.
+        The data to be transformed using a power transformation.
 
-    method : str, (default='yeo-johnson')
+    method : str, (default='warn')
         The power transform method. Available methods are:
 
         - 'yeo-johnson' [1]_, works with positive and negative values
@@ -2881,15 +2881,20 @@ def power_transform(X, method='yeo-johnson', standardize=True, copy=True):
     copy : boolean, optional, default=True
         Set to False to perform inplace computation during transformation.
 
+    Returns
+    -------
+    X_trans : array-like, shape (n_samples, n_features)
+        The transformed data.
+
     Examples
     --------
     >>> import numpy as np
     >>> from sklearn.preprocessing import power_transform
     >>> data = [[1, 2], [3, 2], [4, 5]]
-    >>> print(power_transform(data))
-    [[-1.31616039 -0.70710678]
-     [ 0.20998268 -0.70710678]
-     [ 1.1061777   1.41421356]]
+    >>> print(power_transform(data, method='box-cox'))  # doctest: +ELLIPSIS
+    [[-1.332... -0.707...]
+     [ 0.256... -0.707...]
+     [ 1.076...  1.414...]]
 
     See also
     --------
@@ -2902,8 +2907,8 @@ def power_transform(X, method='yeo-johnson', standardize=True, copy=True):
 
     Notes
     -----
-    NaNs are treated as missing values: disregarded in fit, and maintained in
-    transform.
+    NaNs are treated as missing values: disregarded in ``fit``, and maintained
+    in ``transform``.
 
     For a comparison of the different scalers, transformers, and normalizers,
     see :ref:`examples/preprocessing/plot_all_scaling.py
@@ -2919,7 +2924,13 @@ def power_transform(X, method='yeo-johnson', standardize=True, copy=True):
     .. [2] G.E.P. Box and D.R. Cox, "An Analysis of Transformations", Journal
            of the Royal Statistical Society B, 26, 211-252 (1964).
     """
-
+    if method == 'warn':
+        warnings.warn("The default value of 'method' will change from "
+                      "'box-cox' to 'yeo-johnson' in version 0.21. Set "
+                      "the 'method' argument explicitly to silence this "
+                      "warning in the meantime.",
+                      FutureWarning)
+        method = 'box-cox'
     pt = PowerTransformer(method=method, standardize=standardize, copy=copy)
     return pt.fit_transform(X)
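Taken together, the hunks above change what a plain power_transform(X) call does: it now emits a FutureWarning and keeps applying 'box-cox' until the default flips to 'yeo-johnson'. A quick caller-side check, assuming a scikit-learn build that includes this commit:

    import warnings
    import numpy as np
    from sklearn.preprocessing import power_transform

    data = np.array([[1.0, 2.0], [3.0, 2.0], [4.0, 5.0]])  # strictly positive, so box-cox is valid

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        implicit = power_transform(data)  # no 'method': takes the deprecation path

    assert any(issubclass(w.category, FutureWarning) for w in caught)

    # Passing method explicitly silences the warning and matches the implicit result.
    explicit = power_transform(data, method='box-cox')
    np.testing.assert_array_equal(implicit, explicit)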

sklearn/preprocessing/tests/test_data.py

Lines changed: 16 additions & 0 deletions
@@ -2295,3 +2295,19 @@ def test_power_transformer_copy_False(method, standardize):
 
     X_inv_trans = pt.inverse_transform(X_trans)
     assert X_trans is X_inv_trans
+
+
+def test_power_transform_default_method():
+    future_warning_message = (
+        "The default value of 'method' "
+        "will change from 'box-cox'"
+    )
+    X = np.abs(X_2d)
+    assert_warns_message(FutureWarning, future_warning_message,
+                         power_transform, X)
+
+    with warnings.catch_warnings():
+        warnings.simplefilter('ignore')
+        X_trans_default = power_transform(X)
+        X_trans_boxcox = power_transform(X, method='box-cox')
+        assert_array_equal(X_trans_boxcox, X_trans_default)
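The new test leans on two module-level helpers from the sklearn test suite, assert_warns_message and the X_2d data array. For readers outside that suite, the same behavior can be checked with pytest's built-in warning context manager; this standalone variant is a sketch (the test name and random data are illustrative):

    import warnings
    import numpy as np
    import pytest
    from numpy.testing import assert_array_equal
    from sklearn.preprocessing import power_transform

    def test_power_transform_default_method_standalone():
        X = np.abs(np.random.RandomState(0).randn(10, 2))  # positive values, valid for box-cox
        with pytest.warns(FutureWarning, match="default value of 'method'"):
            power_transform(X)

        with warnings.catch_warnings():
            warnings.simplefilter('ignore')
            assert_array_equal(power_transform(X),
                               power_transform(X, method='box-cox'))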
