Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit f82a1e2

Browse files
committed
ENH presorter merged completely
1 parent 73c2cf6 commit f82a1e2

File tree

9 files changed

+1917
-3390
lines changed

9 files changed

+1917
-3390
lines changed

sklearn/ensemble/_gradient_boosting.c

Lines changed: 18 additions & 3 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

sklearn/ensemble/gradient_boosting.py

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,6 @@
4545

4646
from ..tree.tree import DecisionTreeRegressor
4747
from ..tree._tree import DTYPE, TREE_LEAF
48-
from ..tree._splitter import PresortBestSplitter
4948
from ..tree._criterion import FriedmanMSE
5049

5150
from ._gradient_boosting import predict_stages
@@ -1018,11 +1017,7 @@ def _fit_stages(self, X, y, y_pred, sample_weight, random_state,
10181017

10191018
# init criterion and splitter
10201019
criterion = FriedmanMSE(1)
1021-
splitter = PresortBestSplitter(criterion,
1022-
self.max_features_,
1023-
self.min_samples_leaf,
1024-
min_weight_leaf,
1025-
random_state)
1020+
splitter = 'best'
10261021

10271022
if self.verbose:
10281023
verbose_reporter = VerboseReporter(self.verbose)

sklearn/ensemble/tests/test_gradient_boosting.py

Lines changed: 0 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -984,22 +984,6 @@ def test_non_uniform_weights_toy_edge_case_reg():
984984
assert_greater(gb.predict([[1, 0]])[0], 0.5)
985985

986986

987-
def test_non_uniform_weights_toy_min_weight_leaf():
988-
# Regression test for issue #4447
989-
X = [[1, 0],
990-
[1, 0],
991-
[1, 0],
992-
[0, 1],
993-
]
994-
y = [0, 0, 1, 0]
995-
# ignore the first 2 training samples by setting their weight to 0
996-
sample_weight = [0, 0, 1, 1]
997-
gb = GradientBoostingRegressor(n_estimators=5, min_weight_fraction_leaf=0.1)
998-
gb.fit(X, y, sample_weight=sample_weight)
999-
assert_true(gb.predict([[1, 0]])[0] > 0.5)
1000-
assert_almost_equal(gb.estimators_[0, 0].splitter.min_weight_leaf, 0.2)
1001-
1002-
1003987
def test_non_uniform_weights_toy_edge_case_clf():
1004988
X = [[1, 0],
1005989
[1, 0],

0 commit comments

Comments (0)