
Commit 16a8863

Mark missing values as unsupported
1 parent 87bae8b commit 16a8863

File tree

2 files changed (+34, -11 lines)

sklearn/tree/_classes.py

Lines changed: 5 additions & 1 deletion
@@ -183,7 +183,11 @@ def get_n_leaves(self):
         return self.tree_.n_leaves
 
     def _support_missing_values(self, X):
-        return not issparse(X) and self._get_tags()["allow_nan"]
+        return (
+            not issparse(X)
+            and self._get_tags()["allow_nan"]
+            and self.monotonic_cst is None
+        )
 
     def _compute_missing_values_in_feature_mask(self, X):
         """Return boolean mask denoting if there are missing values for each feature.

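For context, here is a minimal sketch (not part of this commit) of the behavior the _classes.py change produces at the estimator level: once monotonic_cst is set, _support_missing_values returns False, so fit-time input validation rejects NaN instead of routing it through the missing-value splitter. The error message below is the one matched by the new test; its full wording comes from scikit-learn's input validation.

import numpy as np
from sklearn.tree import DecisionTreeClassifier

X = np.array([[0.0, 1.0], [np.nan, 0.0], [2.0, 3.0], [3.0, 2.0]])
y = np.array([0, 0, 1, 1])

# Without constraints, dense NaN input is accepted (the allow_nan tag applies).
DecisionTreeClassifier(random_state=0).fit(X, y)

# With a monotonic constraint on feature 0, missing values are now unsupported,
# so validation raises at fit time.
try:
    DecisionTreeClassifier(monotonic_cst=np.array([1, 0]), random_state=0).fit(X, y)
except ValueError as exc:
    print(exc)  # "Input X contains NaN. ..."
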
sklearn/tree/tests/test_monotonic_tree.py

Lines changed: 29 additions & 10 deletions
@@ -3,17 +3,17 @@
 import scipy.sparse
 
 from sklearn.datasets import make_classification, make_regression
+from sklearn.ensemble import (
+    ExtraTreesClassifier,
+    ExtraTreesRegressor,
+    RandomForestClassifier,
+    RandomForestRegressor,
+)
 from sklearn.tree import (
-    DecisionTreeRegressor,
     DecisionTreeClassifier,
-    ExtraTreeRegressor,
+    DecisionTreeRegressor,
     ExtraTreeClassifier,
-)
-from sklearn.ensemble import (
-    RandomForestRegressor,
-    RandomForestClassifier,
-    ExtraTreesRegressor,
-    ExtraTreesClassifier,
+    ExtraTreeRegressor,
 )
 
 TREE_CLASSIFIER_CLASSES = [DecisionTreeClassifier, ExtraTreeClassifier]
@@ -31,7 +31,7 @@
 @pytest.mark.parametrize("TreeClassifier", TREE_BASED_CLASSIFIER_CLASSES)
 @pytest.mark.parametrize("depth_first_builder", (True, False))
 @pytest.mark.parametrize("sparse_splitter", (True, False))
-def test_montonic_constraints_classifications(
+def test_monotonic_constraints_classifications(
     TreeClassifier, depth_first_builder, sparse_splitter, global_random_seed
 ):
     n_samples = 1000
@@ -87,7 +87,7 @@ def test_montonic_constraints_classifications(
 @pytest.mark.parametrize("depth_first_builder", (True, False))
 @pytest.mark.parametrize("sparse_splitter", (True, False))
 @pytest.mark.parametrize("criterion", ("absolute_error", "squared_error"))
-def test_montonic_constraints_regressions(
+def test_monotonic_constraints_regressions(
     TreeRegressor, depth_first_builder, sparse_splitter, criterion, global_random_seed
 ):
     n_samples = 1000
@@ -173,6 +173,25 @@ def test_multiple_output_raises(TreeClassifier):
     est.fit(X, y)
 
 
+@pytest.mark.parametrize(
+    "DecisionTreeEstimator", [DecisionTreeClassifier, DecisionTreeRegressor]
+)
+def test_missing_values_raises(DecisionTreeEstimator):
+    X, y = make_classification(
+        n_samples=100, n_features=5, n_classes=2, n_informative=3, random_state=0
+    )
+    X[0, 0] = np.nan
+    monotonic_cst = np.zeros(X.shape[1])
+    monotonic_cst[0] = 1
+    est = DecisionTreeEstimator(
+        max_depth=None, monotonic_cst=monotonic_cst, random_state=0
+    )
+
+    msg = "Input X contains NaN"
+    with pytest.raises(ValueError, match=msg):
+        est.fit(X, y)
+
+
 @pytest.mark.parametrize("TreeClassifier", TREE_BASED_CLASSIFIER_CLASSES)
 def test_bad_monotonic_cst_raises(TreeClassifier):
     X = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
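As a usage note, not covered by this commit: if a dataset has missing values and monotonic constraints are needed at the same time, the NaNs must be handled before they reach the tree, for example by imputing upstream in a pipeline. A minimal sketch, assuming SimpleImputer's median strategy is an acceptable fill:

import numpy as np
from sklearn.impute import SimpleImputer
from sklearn.pipeline import make_pipeline
from sklearn.tree import DecisionTreeClassifier

X = np.array([[0.0, 1.0], [np.nan, 0.0], [2.0, 3.0], [3.0, 2.0]])
y = np.array([0, 0, 1, 1])

# Impute NaNs before the constrained tree so it only ever sees finite inputs.
model = make_pipeline(
    SimpleImputer(strategy="median"),
    DecisionTreeClassifier(monotonic_cst=np.array([1, 0]), random_state=0),
)
model.fit(X, y)  # no "Input X contains NaN" error: NaNs were imputed upstream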

0 commit comments
