Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 6290fcf

Browse files
committed
TEST: use sklearn.fixes.bincount
1 parent 25dbb15 commit 6290fcf

File tree

1 file changed

+5
-4
lines changed

1 file changed

+5
-4
lines changed

sklearn/ensemble/tests/test_forest.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,7 @@
3939
from sklearn.ensemble import RandomTreesEmbedding
4040
from sklearn.grid_search import GridSearchCV
4141
from sklearn.svm import LinearSVC
42+
from sklearn.utils.fixes import bincount
4243
from sklearn.utils.validation import check_random_state
4344

4445
from sklearn.tree.tree import SPARSE_SPLITTERS
@@ -250,7 +251,7 @@ def entropy(samples):
250251
e = 0.
251252
n_samples = len(samples)
252253

253-
for count in np.bincount(samples):
254+
for count in bincount(samples):
254255
p = 1. * count / n_samples
255256
if p > 0:
256257
e -= p * np.log2(p)
@@ -260,7 +261,7 @@ def entropy(samples):
260261
def mdi_importance(X_m, X, y):
261262
n_samples, p = X.shape
262263

263-
variables = range(p)
264+
variables = list(range(p))
264265
variables.pop(X_m)
265266
imp = 0.
266267

@@ -691,7 +692,7 @@ def check_min_samples_leaf(name, X, y):
691692
random_state=0)
692693
est.fit(X, y)
693694
out = est.estimators_[0].tree_.apply(X)
694-
node_counts = np.bincount(out)
695+
node_counts = bincount(out)
695696
# drop inner nodes
696697
leaf_count = node_counts[node_counts != 0]
697698
assert_greater(np.min(leaf_count), 4,
@@ -725,7 +726,7 @@ def check_min_weight_fraction_leaf(name, X, y):
725726
est.bootstrap = False
726727
est.fit(X, y, sample_weight=weights)
727728
out = est.estimators_[0].tree_.apply(X)
728-
node_weights = np.bincount(out, weights=weights)
729+
node_weights = bincount(out, weights=weights)
729730
# drop inner nodes
730731
leaf_weights = node_weights[node_weights != 0]
731732
assert_greater_equal(

0 comments on commit

Comments (0)