Thanks to visit codestin.com
Credit goes to github.com

Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion doc/metadata_routing.rst
Original file line number Diff line number Diff line change
Expand Up @@ -252,6 +252,7 @@ Meta-estimators and functions supporting metadata routing:
- :class:`sklearn.calibration.CalibratedClassifierCV`
- :class:`sklearn.compose.ColumnTransformer`
- :class:`sklearn.feature_selection.SelectFromModel`
- :class:`sklearn.impute.IterativeImputer`
- :class:`sklearn.linear_model.ElasticNetCV`
- :class:`sklearn.linear_model.LarsCV`
- :class:`sklearn.linear_model.LassoCV`
Expand Down Expand Up @@ -291,7 +292,6 @@ Meta-estimators and tools not supporting metadata routing yet:
- :class:`sklearn.feature_selection.RFE`
- :class:`sklearn.feature_selection.RFECV`
- :class:`sklearn.feature_selection.SequentialFeatureSelector`
- :class:`sklearn.impute.IterativeImputer`
- :class:`sklearn.linear_model.RANSACRegressor`
- :class:`sklearn.linear_model.RidgeClassifierCV`
- :class:`sklearn.linear_model.RidgeCV`
Expand Down
11 changes: 11 additions & 0 deletions doc/whats_new/v1.5.rst
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,17 @@ Meson is now supported as a build backend, see :ref:`Building with Meson

TODO Fill more details before the 1.5 release, when the Meson story has settled down.

Metadata Routing
----------------

The following models now support metadata routing in one or more of their
methods. Refer to the :ref:`Metadata Routing User Guide <metadata_routing>` for
more details.

- |Feature| :class:`impute.IterativeImputer` now supports metadata routing in
its `fit` method. :pr:`28187` by :user:`Stefanie Senger <StefanieSenger>`.


Changelog
---------

Expand Down
4 changes: 2 additions & 2 deletions sklearn/feature_selection/_from_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -343,12 +343,12 @@ def fit(self, X, y=None, **fit_params):
**fit_params : dict
- If `enable_metadata_routing=False` (default):

Parameters directly passed to the `partial_fit` method of the
Parameters directly passed to the `fit` method of the
sub-estimator. They are ignored if `prefit=True`.

- If `enable_metadata_routing=True`:

Parameters safely routed to the `partial_fit` method of the
Parameters safely routed to the `fit` method of the
sub-estimator. They are ignored if `prefit=True`.

.. versionchanged:: 1.4
Expand Down
72 changes: 65 additions & 7 deletions sklearn/impute/_iterative.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,12 @@
)
from ..utils._mask import _get_mask
from ..utils._param_validation import HasMethods, Interval, StrOptions
from ..utils.metadata_routing import _RoutingNotSupportedMixin
from ..utils.metadata_routing import (
MetadataRouter,
MethodMapping,
_raise_for_params,
process_routing,
)
from ..utils.validation import FLOAT_DTYPES, _check_feature_names_in, check_is_fitted
from ._base import SimpleImputer, _BaseImputer, _check_inputs_dtype

Expand Down Expand Up @@ -47,7 +52,7 @@ def _assign_where(X1, X2, cond):
X1[cond] = X2[cond]


class IterativeImputer(_RoutingNotSupportedMixin, _BaseImputer):
class IterativeImputer(_BaseImputer):
"""Multivariate imputer that estimates each feature from all the others.

A strategy for imputing missing values by modeling each feature with
Expand Down Expand Up @@ -349,6 +354,7 @@ def _impute_one_feature(
neighbor_feat_idx,
estimator=None,
fit_mode=True,
params=None,
):
"""Impute a single feature from the others provided.

Expand Down Expand Up @@ -380,6 +386,9 @@ def _impute_one_feature(
fit_mode : boolean, default=True
Whether to fit and predict with the estimator or just predict.

params : dict
Additional params routed to the individual estimator.

Returns
-------
X_filled : ndarray
Expand Down Expand Up @@ -410,7 +419,7 @@ def _impute_one_feature(
~missing_row_mask,
axis=0,
)
estimator.fit(X_train, y_train)
estimator.fit(X_train, y_train, **params)

# if no missing values, don't predict
if np.sum(missing_row_mask) == 0:
Expand Down Expand Up @@ -685,7 +694,7 @@ def _validate_limit(limit, limit_type, n_features):
# IterativeImputer.estimator is not validated yet
prefer_skip_nested_validation=False
)
def fit_transform(self, X, y=None):
def fit_transform(self, X, y=None, **params):
"""Fit the imputer on `X` and return the transformed `X`.

Parameters
Expand All @@ -697,11 +706,29 @@ def fit_transform(self, X, y=None):
y : Ignored
Not used, present for API consistency by convention.

**params : dict
Parameters routed to the `fit` method of the sub-estimator via the
metadata routing API.

.. versionadded:: 1.5
Only available if
`sklearn.set_config(enable_metadata_routing=True)` is set. See
:ref:`Metadata Routing User Guide <metadata_routing>` for more
details.

Returns
-------
Xt : array-like, shape (n_samples, n_features)
The imputed input data.
"""
_raise_for_params(params, self, "fit")

routed_params = process_routing(
self,
"fit",
**params,
)

self.random_state_ = getattr(
self, "random_state_", check_random_state(self.random_state)
)
Expand All @@ -728,7 +755,7 @@ def fit_transform(self, X, y=None):
self.n_iter_ = 0
return super()._concatenate_indicator(Xt, X_indicator)

# Edge case: a single feature. We return the initial ...
# Edge case: a single feature, we return the initial imputation.
if Xt.shape[1] == 1:
self.n_iter_ = 0
return super()._concatenate_indicator(Xt, X_indicator)
Expand Down Expand Up @@ -770,6 +797,7 @@ def fit_transform(self, X, y=None):
neighbor_feat_idx,
estimator=None,
fit_mode=True,
params=routed_params.estimator.fit,
)
estimator_triplet = _ImputerTriplet(
feat_idx, neighbor_feat_idx, estimator
Expand Down Expand Up @@ -860,7 +888,7 @@ def transform(self, X):

return super()._concatenate_indicator(Xt, X_indicator)

def fit(self, X, y=None):
def fit(self, X, y=None, **fit_params):
"""Fit the imputer on `X` and return self.

Parameters
Expand All @@ -872,12 +900,22 @@ def fit(self, X, y=None):
y : Ignored
Not used, present for API consistency by convention.

**fit_params : dict
Parameters routed to the `fit` method of the sub-estimator via the
metadata routing API.

.. versionadded:: 1.5
Only available if
`sklearn.set_config(enable_metadata_routing=True)` is set. See
:ref:`Metadata Routing User Guide <metadata_routing>` for more
details.

Returns
-------
self : object
Fitted estimator.
"""
self.fit_transform(X)
self.fit_transform(X, **fit_params)
return self

def get_feature_names_out(self, input_features=None):
Expand All @@ -904,3 +942,23 @@ def get_feature_names_out(self, input_features=None):
input_features = _check_feature_names_in(self, input_features)
names = self.initial_imputer_.get_feature_names_out(input_features)
return self._concatenate_indicator_feature_names_out(names, input_features)

def get_metadata_routing(self):
"""Get metadata routing of this object.

Please check :ref:`User Guide <metadata_routing>` on how the routing
mechanism works.

.. versionadded:: 1.5

Returns
-------
routing : MetadataRouter
A :class:`~sklearn.utils.metadata_routing.MetadataRouter` encapsulating
routing information.
"""
router = MetadataRouter(owner=self.__class__.__name__).add(
estimator=self.estimator,
method_mapping=MethodMapping().add(callee="fit", caller="fit"),
)
return router
10 changes: 9 additions & 1 deletion sklearn/tests/test_metaestimators_metadata_routing.py
Original file line number Diff line number Diff line change
Expand Up @@ -289,6 +289,15 @@ def enable_slep006():
"cv_name": "cv",
"cv_routing_methods": ["fit"],
},
{
"metaestimator": IterativeImputer,
"estimator_name": "estimator",
"estimator": ConsumingRegressor,
"init_args": {"skip_complete": False},
"X": X,
"y": y,
"estimator_routing_methods": ["fit"],
},
]
"""List containing all metaestimators to be tested and their settings

Expand Down Expand Up @@ -331,7 +340,6 @@ def enable_slep006():
BaggingRegressor(),
FeatureUnion([]),
GraphicalLassoCV(),
IterativeImputer(),
RANSACRegressor(),
RFE(ConsumingClassifier()),
RFECV(ConsumingClassifier()),
Expand Down