 3 |  3 | import scipy.sparse
 4 |  4 |
 5 |  5 | from sklearn.datasets import make_classification, make_regression
   |  6 | +from sklearn.ensemble import (
   |  7 | +    ExtraTreesClassifier,
   |  8 | +    ExtraTreesRegressor,
   |  9 | +    RandomForestClassifier,
   | 10 | +    RandomForestRegressor,
   | 11 | +)
 6 | 12 | from sklearn.tree import (
 7 |    | -    DecisionTreeRegressor,
 8 | 13 |     DecisionTreeClassifier,
 9 |    | -    ExtraTreeRegressor,
   | 14 | +    DecisionTreeRegressor,
10 | 15 |     ExtraTreeClassifier,
11 |    | -)
12 |    | -from sklearn.ensemble import (
13 |    | -    RandomForestRegressor,
14 |    | -    RandomForestClassifier,
15 |    | -    ExtraTreesRegressor,
16 |    | -    ExtraTreesClassifier,
   | 16 | +    ExtraTreeRegressor,
17 | 17 | )
18 | 18 |
19 | 19 | TREE_CLASSIFIER_CLASSES = [DecisionTreeClassifier, ExtraTreeClassifier]

@@ -31,7 +31,7 @@
31 | 31 | @pytest.mark.parametrize("TreeClassifier", TREE_BASED_CLASSIFIER_CLASSES)
32 | 32 | @pytest.mark.parametrize("depth_first_builder", (True, False))
33 | 33 | @pytest.mark.parametrize("sparse_splitter", (True, False))
34 |    | -def test_montonic_constraints_classifications(
   | 34 | +def test_monotonic_constraints_classifications(
35 | 35 |     TreeClassifier, depth_first_builder, sparse_splitter, global_random_seed
36 | 36 | ):
37 | 37 |     n_samples = 1000

@@ -87,7 +87,7 @@ def test_montonic_constraints_classifications(
87 | 87 | @pytest.mark.parametrize("depth_first_builder", (True, False))
88 | 88 | @pytest.mark.parametrize("sparse_splitter", (True, False))
89 | 89 | @pytest.mark.parametrize("criterion", ("absolute_error", "squared_error"))
90 |    | -def test_montonic_constraints_regressions(
   | 90 | +def test_monotonic_constraints_regressions(
91 | 91 |     TreeRegressor, depth_first_builder, sparse_splitter, criterion, global_random_seed
92 | 92 | ):
93 | 93 |     n_samples = 1000

@@ -173,6 +173,25 @@ def test_multiple_output_raises(TreeClassifier):
173 | 173 |         est.fit(X, y)
174 | 174 |
175 | 175 |
    | 176 | +@pytest.mark.parametrize(
    | 177 | +    "DecisionTreeEstimator", [DecisionTreeClassifier, DecisionTreeRegressor]
    | 178 | +)
    | 179 | +def test_missing_values_raises(DecisionTreeEstimator):
    | 180 | +    X, y = make_classification(
    | 181 | +        n_samples=100, n_features=5, n_classes=2, n_informative=3, random_state=0
    | 182 | +    )
    | 183 | +    X[0, 0] = np.nan
    | 184 | +    monotonic_cst = np.zeros(X.shape[1])
    | 185 | +    monotonic_cst[0] = 1
    | 186 | +    est = DecisionTreeEstimator(
    | 187 | +        max_depth=None, monotonic_cst=monotonic_cst, random_state=0
    | 188 | +    )
    | 189 | +
    | 190 | +    msg = "Input X contains NaN"
    | 191 | +    with pytest.raises(ValueError, match=msg):
    | 192 | +        est.fit(X, y)
    | 193 | +
    | 194 | +
176 | 195 | @pytest.mark.parametrize("TreeClassifier", TREE_BASED_CLASSIFIER_CLASSES)
177 | 196 | def test_bad_monotonic_cst_raises(TreeClassifier):
178 | 197 |     X = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]]
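
For reference, a minimal sketch of the behavior this diff exercises. It is not taken from the test file itself, and it assumes a scikit-learn release in which the tree estimators accept `monotonic_cst` (1.4 or later): a `+1` constraint forces predictions to be non-decreasing in that feature, and passing `monotonic_cst` together with missing values in `X` is rejected at fit time, which is what the new `test_missing_values_raises` asserts.

import numpy as np
import pytest

from sklearn.datasets import make_classification
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor

# A +1 constraint on feature 0: predictions must be non-decreasing in it.
rng = np.random.RandomState(0)
X = rng.uniform(size=(200, 1))
y = 5 * X[:, 0] + rng.normal(scale=0.1, size=200)
reg = DecisionTreeRegressor(monotonic_cst=[1], random_state=0).fit(X, y)
grid = np.linspace(0, 1, 100).reshape(-1, 1)
assert np.all(np.diff(reg.predict(grid)) >= 0)

# Monotonic constraints do not support missing values, so fit() raises.
X, y = make_classification(
    n_samples=100, n_features=5, n_classes=2, n_informative=3, random_state=0
)
X[0, 0] = np.nan
clf = DecisionTreeClassifier(monotonic_cst=[1, 0, 0, 0, 0], random_state=0)
with pytest.raises(ValueError, match="Input X contains NaN"):
    clf.fit(X, y)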