Commit 6da6718

FIX remove additional fit method

1 parent f82a1e2 · commit 6da6718

sklearn/ensemble/gradient_boosting.py

Lines changed: 29 additions & 4 deletions
@@ -929,6 +929,13 @@ def fit(self, X, y, sample_weight=None, monitor=None):
             computing held-out estimates, early stopping, model introspect, and
             snapshoting.
 
+        presort : bool, optional (default=False)
+            Whether to presort the data to speed up the finding of best splits in
+            fitting. By default gradient boosting uses presorting, but this may
+            slow down the training process on large datasets, or with deep trees.
+            This option is not available for sparse data.
+
         Returns
         -------
         self : object
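
For readers skimming the diff, a minimal usage sketch of the parameter documented above. This is illustrative only, assuming a scikit-learn build that includes this change; the toy dataset is not part of the commit:

    from sklearn.datasets import make_classification
    from sklearn.ensemble import GradientBoostingClassifier

    X, y = make_classification(n_samples=1000, n_features=20, random_state=0)

    # Presorting is on by default; turning it off may help on very large
    # datasets or with deep trees. Presorting is not available for sparse X.
    clf_sorted = GradientBoostingClassifier(n_estimators=50, presort=True).fit(X, y)
    clf_unsorted = GradientBoostingClassifier(n_estimators=50, presort=False).fit(X, y)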
@@ -1285,6 +1292,14 @@ class GradientBoostingClassifier(BaseGradientBoosting, ClassifierMixin):
         If None, the random number generator is the RandomState instance used
         by `np.random`.
 
+    presort : bool, optional (default=False)
+        Whether to presort the data to speed up the finding of best splits in
+        fitting. By default this is turned on for gradient boosting. However,
+        if the dataset is very large, or the trees being built are deep, this
+        may have performance costs. This option is not available for sparse
+        data.
+
     Attributes
     ----------
     feature_importances_ : array, shape = [n_features]
@@ -1336,7 +1351,8 @@ def __init__(self, loss='deviance', learning_rate=0.1, n_estimators=100,
                  min_samples_leaf=1, min_weight_fraction_leaf=0.,
                  max_depth=3, init=None, random_state=None,
                  max_features=None, verbose=0,
-                 max_leaf_nodes=None, warm_start=False):
+                 max_leaf_nodes=None, warm_start=False,
+                 presort=True):
 
         super(GradientBoostingClassifier, self).__init__(
             loss=loss, learning_rate=learning_rate, n_estimators=n_estimators,
@@ -1346,7 +1362,8 @@ def __init__(self, loss='deviance', learning_rate=0.1, n_estimators=100,
             max_depth=max_depth, init=init, subsample=subsample,
             max_features=max_features,
             random_state=random_state, verbose=verbose,
-            max_leaf_nodes=max_leaf_nodes, warm_start=warm_start)
+            max_leaf_nodes=max_leaf_nodes, warm_start=warm_start,
+            presort=presort)
 
     def _validate_y(self, y):
         self.classes_, y = np.unique(y, return_inverse=True)
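
Because the classifier now forwards presort through BaseGradientBoosting.__init__, the value participates in the usual estimator parameter machinery. A quick illustrative check, assuming the base class stores the argument as self.presort (which the forwarding above implies):

    from sklearn.ensemble import GradientBoostingClassifier

    clf = GradientBoostingClassifier(presort=False)
    assert clf.get_params()['presort'] is False
    # set_params round-trips the value like any other constructor argument
    assert clf.set_params(presort=True).presort is True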
@@ -1611,6 +1628,13 @@ class GradientBoostingRegressor(BaseGradientBoosting, RegressorMixin):
         If None, the random number generator is the RandomState instance used
         by `np.random`.
 
+    presort : bool, optional (default=False)
+        Whether to presort the data to speed up the finding of best splits in
+        fitting. By default this is turned on for gradient boosting. However,
+        if the dataset is very large, or the trees being built are deep, this
+        may have performance costs. This option is not available for sparse
+        data.
 
     Attributes
     ----------
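
The same knob on the regressor side; again an illustrative sketch rather than code from the commit:

    from sklearn.datasets import make_regression
    from sklearn.ensemble import GradientBoostingRegressor

    X, y = make_regression(n_samples=1000, n_features=20, random_state=0)
    reg = GradientBoostingRegressor(n_estimators=50, presort=True).fit(X, y)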
@@ -1660,7 +1684,7 @@ def __init__(self, loss='ls', learning_rate=0.1, n_estimators=100,
                  min_samples_leaf=1, min_weight_fraction_leaf=0.,
                  max_depth=3, init=None, random_state=None,
                  max_features=None, alpha=0.9, verbose=0, max_leaf_nodes=None,
-                 warm_start=False):
+                 warm_start=False, presort=True):
 
         super(GradientBoostingRegressor, self).__init__(
             loss=loss, learning_rate=learning_rate, n_estimators=n_estimators,
@@ -1670,7 +1694,8 @@ def __init__(self, loss='ls', learning_rate=0.1, n_estimators=100,
             max_depth=max_depth, init=init, subsample=subsample,
             max_features=max_features,
             random_state=random_state, alpha=alpha, verbose=verbose,
-            max_leaf_nodes=max_leaf_nodes, warm_start=warm_start)
+            max_leaf_nodes=max_leaf_nodes, warm_start=warm_start,
+            presort=True)
 
     def predict(self, X):
         """Predict regression target for X.
