examples/compose/plot_column_transformer.py (4 changes: 2 additions & 2 deletions)

@@ -42,7 +42,7 @@
 from sklearn.svm import LinearSVC


-class TextStats(BaseEstimator, TransformerMixin):
+class TextStats(TransformerMixin, BaseEstimator):
     """Extract features from each document for DictVectorizer"""

     def fit(self, x, y=None):
@@ -54,7 +54,7 @@ def transform(self, posts):
                 for text in posts]


-class SubjectBodyExtractor(BaseEstimator, TransformerMixin):
+class SubjectBodyExtractor(TransformerMixin, BaseEstimator):
     """Extract the subject & body from a usenet post in a single pass.

     Takes a sequence of strings and produces a dict of sequences. Keys are
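The flipped base-class order above (repeated throughout this diff) changes Python's method resolution order: with the mixin listed before BaseEstimator, anything the mixin defines shadows the BaseEstimator defaults instead of the other way around. A minimal sketch of that effect, assuming toy classes (Base, Mixin) and an illustrative tag name rather than the real scikit-learn hierarchy:

import inspect


class Base:
    def _more_tags(self):
        return {'requires_fit': True}


class Mixin:
    def _more_tags(self):
        return {'requires_fit': False}


class OldStyle(Base, Mixin):   # base class listed first, as before this change
    pass


class NewStyle(Mixin, Base):   # mixin listed first, as after this change
    pass


# The MRO decides whose _more_tags is found when both classes define it.
print([c.__name__ for c in inspect.getmro(OldStyle)])  # ['OldStyle', 'Base', 'Mixin', 'object']
print([c.__name__ for c in inspect.getmro(NewStyle)])  # ['NewStyle', 'Mixin', 'Base', 'object']
print(OldStyle()._more_tags())  # {'requires_fit': True}  -> Base hides the mixin
print(NewStyle()._more_tags())  # {'requires_fit': False} -> mixin overrides Base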
sklearn/base.py (32 changes: 10 additions & 22 deletions)

@@ -129,17 +129,6 @@ def _pprint(params, offset=0, printer=repr):
     return lines


-def _update_if_consistent(dict1, dict2):
-    common_keys = set(dict1.keys()).intersection(dict2.keys())
-    for key in common_keys:
-        if dict1[key] != dict2[key]:
-            raise TypeError("Inconsistent values for tag {}: {} != {}".format(
-                key, dict1[key], dict2[key]
-            ))
-    dict1.update(dict2)
-    return dict1
-
-
 class BaseEstimator:
     """Base class for all estimators in scikit-learn

@@ -320,20 +309,19 @@ def __setstate__(self, state):
         except AttributeError:
             self.__dict__.update(state)

+    def _more_tags(self):
+        return _DEFAULT_TAGS
+
     def _get_tags(self):
         collected_tags = {}
-        for base_class in inspect.getmro(self.__class__):
-            if (hasattr(base_class, '_more_tags')
-                    and base_class != self.__class__):
+        for base_class in reversed(inspect.getmro(self.__class__)):
+            if hasattr(base_class, '_more_tags'):
+                # need the if because mixins might not have _more_tags
+                # but might do redundant work in estimators
+                # (i.e. calling more tags on BaseEstimator multiple times)
                 more_tags = base_class._more_tags(self)
-                collected_tags = _update_if_consistent(collected_tags,
-                                                       more_tags)
-        if hasattr(self, '_more_tags'):
-            more_tags = self._more_tags()
-            collected_tags = _update_if_consistent(collected_tags, more_tags)
-        tags = _DEFAULT_TAGS.copy()
-        tags.update(collected_tags)
-        return tags
+                collected_tags.update(more_tags)
+        return collected_tags


 class ClassifierMixin:
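The simplified _get_tags above leans on the new base-class order: it walks the MRO from the most generic class down to the most specific one and lets each _more_tags() overwrite whatever was collected so far, which is why the _update_if_consistent consistency check could be deleted and BaseEstimator can now supply the defaults through its own _more_tags. A standalone sketch of that collection pattern, assuming toy classes and tag names rather than the real scikit-learn hierarchy:

import inspect

_DEFAULT_TAGS = {'requires_y': False, 'allow_nan': False}


class Base:
    def _more_tags(self):
        return _DEFAULT_TAGS

    def _get_tags(self):
        collected_tags = {}
        # reversed() walks from `object` down to the concrete class, so tags
        # declared by more-derived classes overwrite those of their bases.
        for base_class in reversed(inspect.getmro(self.__class__)):
            if hasattr(base_class, '_more_tags'):
                collected_tags.update(base_class._more_tags(self))
        return collected_tags


class RequiresYMixin:
    def _more_tags(self):
        return {'requires_y': True}


class MyEstimator(RequiresYMixin, Base):
    def _more_tags(self):
        return {'allow_nan': True}


print(MyEstimator()._get_tags())
# {'requires_y': True, 'allow_nan': True}

If the base class were listed first (class MyEstimator(Base, RequiresYMixin)), reversed(getmro(...)) would visit the mixin before Base, and the defaults would clobber the mixin's tag; listing mixins first, as this diff does everywhere, avoids that.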
sklearn/calibration.py (2 changes: 1 addition & 1 deletion)

@@ -465,7 +465,7 @@ def grad(AB):
     return AB_[0], AB_[1]


-class _SigmoidCalibration(BaseEstimator, RegressorMixin):
+class _SigmoidCalibration(RegressorMixin, BaseEstimator):
     """Sigmoid regression model.

     Attributes
sklearn/cluster/affinity_propagation_.py (2 changes: 1 addition & 1 deletion)

@@ -233,7 +233,7 @@ def affinity_propagation(S, preference=None, convergence_iter=15, max_iter=200,

 ###############################################################################

-class AffinityPropagation(BaseEstimator, ClusterMixin):
+class AffinityPropagation(ClusterMixin, BaseEstimator):
     """Perform Affinity Propagation Clustering of data.

     Read more in the :ref:`User Guide <affinity_propagation>`.
sklearn/cluster/bicluster.py (2 changes: 1 addition & 1 deletion)

@@ -84,7 +84,7 @@ def _log_normalize(X):
     return L - row_avg - col_avg + avg


-class BaseSpectral(BaseEstimator, BiclusterMixin, metaclass=ABCMeta):
+class BaseSpectral(BiclusterMixin, BaseEstimator, metaclass=ABCMeta):
     """Base class for spectral biclustering."""

     @abstractmethod
sklearn/cluster/birch.py (2 changes: 1 addition & 1 deletion)

@@ -319,7 +319,7 @@ def radius(self):
                     self.sq_norm_)


-class Birch(BaseEstimator, TransformerMixin, ClusterMixin):
+class Birch(ClusterMixin, TransformerMixin, BaseEstimator):
     """Implements the Birch clustering algorithm.

     It is a memory-efficient, online-learning algorithm provided as an
sklearn/cluster/dbscan_.py (2 changes: 1 addition & 1 deletion)

@@ -190,7 +190,7 @@ def dbscan(X, eps=0.5, min_samples=5, metric='minkowski', metric_params=None,
     return np.where(core_samples)[0], labels


-class DBSCAN(BaseEstimator, ClusterMixin):
+class DBSCAN(ClusterMixin, BaseEstimator):
     """Perform DBSCAN clustering from vector array or distance matrix.

     DBSCAN - Density-Based Spatial Clustering of Applications with Noise.
sklearn/cluster/hierarchical.py (2 changes: 1 addition & 1 deletion)

@@ -652,7 +652,7 @@ def _hc_cut(n_clusters, children, n_leaves):

 ###############################################################################

-class AgglomerativeClustering(BaseEstimator, ClusterMixin):
+class AgglomerativeClustering(ClusterMixin, BaseEstimator):
     """
     Agglomerative Clustering

sklearn/cluster/k_means_.py (2 changes: 1 addition & 1 deletion)

@@ -761,7 +761,7 @@ def _init_centroids(X, k, init, random_state=None, x_squared_norms=None,
     return centers


-class KMeans(BaseEstimator, ClusterMixin, TransformerMixin):
+class KMeans(TransformerMixin, ClusterMixin, BaseEstimator):
     """K-Means clustering

     Read more in the :ref:`User Guide <k_means>`.
sklearn/cluster/mean_shift_.py (2 changes: 1 addition & 1 deletion)

@@ -293,7 +293,7 @@ def get_bin_seeds(X, bin_size, min_bin_freq=1):
     return bin_seeds


-class MeanShift(BaseEstimator, ClusterMixin):
+class MeanShift(ClusterMixin, BaseEstimator):
     """Mean shift clustering using a flat kernel.

     Mean shift clustering aims to discover "blobs" in a smooth density of
sklearn/cluster/optics_.py (2 changes: 1 addition & 1 deletion)

@@ -21,7 +21,7 @@
 from ..metrics import pairwise_distances


-class OPTICS(BaseEstimator, ClusterMixin):
+class OPTICS(ClusterMixin, BaseEstimator):
     """Estimate clustering structure from vector array

     OPTICS (Ordering Points To Identify the Clustering Structure), closely
sklearn/cluster/spectral.py (2 changes: 1 addition & 1 deletion)

@@ -272,7 +272,7 @@ def spectral_clustering(affinity, n_clusters=8, n_components=None,
     return labels


-class SpectralClustering(BaseEstimator, ClusterMixin):
+class SpectralClustering(ClusterMixin, BaseEstimator):
     """Apply clustering to a projection of the normalized Laplacian.

     In practice Spectral Clustering is very useful when the structure of
sklearn/cluster/tests/test_bicluster.py (2 changes: 1 addition & 1 deletion)

@@ -24,7 +24,7 @@
 from sklearn.datasets import make_biclusters, make_checkerboard


-class MockBiclustering(BaseEstimator, BiclusterMixin):
+class MockBiclustering(BiclusterMixin, BaseEstimator):
     # Mock object for testing get_submatrix.
     def __init__(self):
         pass
sklearn/compose/_column_transformer.py (2 changes: 1 addition & 1 deletion)

@@ -33,7 +33,7 @@
                     "item instead of a scalar.")


-class ColumnTransformer(_BaseComposition, TransformerMixin):
+class ColumnTransformer(TransformerMixin, _BaseComposition):
     """Applies transformers to columns of an array or pandas DataFrame.

     This estimator allows different columns or column subsets of the input
sklearn/compose/_target.py (2 changes: 1 addition & 1 deletion)

@@ -14,7 +14,7 @@
 __all__ = ['TransformedTargetRegressor']


-class TransformedTargetRegressor(BaseEstimator, RegressorMixin):
+class TransformedTargetRegressor(RegressorMixin, BaseEstimator):
     """Meta-estimator to regress on a transformed target.

     Useful for applying a non-linear transformation in regression
sklearn/compose/tests/test_target.py (4 changes: 2 additions & 2 deletions)

@@ -226,7 +226,7 @@ def func(y):
     assert_allclose(y_pred_1d_func, y_pred_2d_func)


-class DummyCheckerArrayTransformer(BaseEstimator, TransformerMixin):
+class DummyCheckerArrayTransformer(TransformerMixin, BaseEstimator):

     def fit(self, X, y=None):
         assert isinstance(X, np.ndarray)
@@ -268,7 +268,7 @@ def test_transform_target_regressor_ensure_y_array():
     tt.predict(X)


-class DummyTransformer(BaseEstimator, TransformerMixin):
+class DummyTransformer(TransformerMixin, BaseEstimator):
     """Dummy transformer which count how many time fit was called."""
     def __init__(self, fit_counter=0):
         self.fit_counter = fit_counter
sklearn/covariance/elliptic_envelope.py (2 changes: 1 addition & 1 deletion)

@@ -9,7 +9,7 @@
 from ..base import OutlierMixin


-class EllipticEnvelope(MinCovDet, OutlierMixin):
+class EllipticEnvelope(OutlierMixin, MinCovDet):
     """An object for detecting outliers in a Gaussian distributed dataset.

     Read more in the :ref:`User Guide <outlier_detection>`.
sklearn/cross_decomposition/cca_.py (2 changes: 1 addition & 1 deletion)

@@ -4,7 +4,7 @@
 __all__ = ['CCA']


-class CCA(_PLS, _UnstableArchMixin):
+class CCA(_UnstableArchMixin, _PLS):
     """CCA Canonical Correlation Analysis.

     CCA inherits from PLS with mode="B" and deflation_mode="canonical".
sklearn/cross_decomposition/pls_.py (4 changes: 2 additions & 2 deletions)

@@ -121,7 +121,7 @@ def _center_scale_xy(X, Y, scale=True):
     return X, Y, x_mean, y_mean, x_std, y_std


-class _PLS(BaseEstimator, TransformerMixin, RegressorMixin, MultiOutputMixin,
+class _PLS(TransformerMixin, RegressorMixin, MultiOutputMixin, BaseEstimator,
            metaclass=ABCMeta):
     """Partial Least Squares (PLS)

@@ -750,7 +750,7 @@ def __init__(self, n_components=2, scale=True, algorithm="nipals",
                          max_iter=max_iter, tol=tol, copy=copy)


-class PLSSVD(BaseEstimator, TransformerMixin):
+class PLSSVD(TransformerMixin, BaseEstimator):
     """Partial Least Square SVD

     Simply perform a svd on the crosscovariance matrix: X'Y
sklearn/decomposition/base.py (2 changes: 1 addition & 1 deletion)

@@ -17,7 +17,7 @@
 from abc import ABCMeta, abstractmethod


-class _BasePCA(BaseEstimator, TransformerMixin, metaclass=ABCMeta):
+class _BasePCA(TransformerMixin, BaseEstimator, metaclass=ABCMeta):
     """Base class for PCA methods.

     Warning: This class should not be used directly.
sklearn/decomposition/dict_learning.py (6 changes: 3 additions & 3 deletions)

@@ -932,7 +932,7 @@ def transform(self, X):
         return code


-class SparseCoder(BaseEstimator, SparseCodingMixin):
+class SparseCoder(SparseCodingMixin, BaseEstimator):
     """Sparse coding

     Finds a sparse representation of data against a fixed, precomputed
@@ -1045,7 +1045,7 @@ def fit(self, X, y=None):
         return self


-class DictionaryLearning(BaseEstimator, SparseCodingMixin):
+class DictionaryLearning(SparseCodingMixin, BaseEstimator):
     """Dictionary learning

     Finds a dictionary (a set of atoms) that can best be used to represent data
@@ -1241,7 +1241,7 @@ def fit(self, X, y=None):
         return self


-class MiniBatchDictionaryLearning(BaseEstimator, SparseCodingMixin):
+class MiniBatchDictionaryLearning(SparseCodingMixin, BaseEstimator):
     """Mini-batch dictionary learning

     Finds a dictionary (a set of atoms) that can best be used to represent data
sklearn/decomposition/factor_analysis.py (2 changes: 1 addition & 1 deletion)

@@ -32,7 +32,7 @@
 from ..exceptions import ConvergenceWarning


-class FactorAnalysis(BaseEstimator, TransformerMixin):
+class FactorAnalysis(TransformerMixin, BaseEstimator):
     """Factor Analysis (FA)

     A simple linear generative model with Gaussian latent variables.
sklearn/decomposition/fastica_.py (2 changes: 1 addition & 1 deletion)

@@ -380,7 +380,7 @@ def g(x, fun_args):
         return None, W, S


-class FastICA(BaseEstimator, TransformerMixin):
+class FastICA(TransformerMixin, BaseEstimator):
     """FastICA: a fast algorithm for Independent Component Analysis.

     Read more in the :ref:`User Guide <ICA>`.
sklearn/decomposition/kernel_pca.py (2 changes: 1 addition & 1 deletion)

@@ -16,7 +16,7 @@
 from ..metrics.pairwise import pairwise_kernels


-class KernelPCA(BaseEstimator, TransformerMixin):
+class KernelPCA(TransformerMixin, BaseEstimator):
     """Kernel Principal component analysis (KPCA)

     Non-linear dimensionality reduction through the use of kernels (see
sklearn/decomposition/nmf.py (2 changes: 1 addition & 1 deletion)

@@ -1068,7 +1068,7 @@ def non_negative_factorization(X, W=None, H=None, n_components=None,
     return W, H, n_iter


-class NMF(BaseEstimator, TransformerMixin):
+class NMF(TransformerMixin, BaseEstimator):
     r"""Non-Negative Matrix Factorization (NMF)

     Find two non-negative matrices (W, H) whose product approximates the non-
sklearn/decomposition/online_lda.py (2 changes: 1 addition & 1 deletion)

@@ -132,7 +132,7 @@ def _update_doc_distribution(X, exp_topic_word_distr, doc_topic_prior,
     return (doc_topic_distr, suff_stats)


-class LatentDirichletAllocation(BaseEstimator, TransformerMixin):
+class LatentDirichletAllocation(TransformerMixin, BaseEstimator):
     """Latent Dirichlet Allocation with online variational Bayes algorithm

     .. versionadded:: 0.17
sklearn/decomposition/sparse_pca.py (2 changes: 1 addition & 1 deletion)

@@ -29,7 +29,7 @@ def _check_normalize_components(normalize_components, estimator_name):
         )


-class SparsePCA(BaseEstimator, TransformerMixin):
+class SparsePCA(TransformerMixin, BaseEstimator):
     """Sparse Principal Components Analysis (SparsePCA)

     Finds the set of sparse components that can optimally reconstruct
sklearn/decomposition/truncated_svd.py (2 changes: 1 addition & 1 deletion)

@@ -18,7 +18,7 @@
 __all__ = ["TruncatedSVD"]


-class TruncatedSVD(BaseEstimator, TransformerMixin):
+class TruncatedSVD(TransformerMixin, BaseEstimator):
     """Dimensionality reduction using truncated SVD (aka LSA).

     This transformer performs linear dimensionality reduction by means of
sklearn/discriminant_analysis.py (2 changes: 1 addition & 1 deletion)

@@ -553,7 +553,7 @@ def predict_log_proba(self, X):
         return np.log(self.predict_proba(X))


-class QuadraticDiscriminantAnalysis(BaseEstimator, ClassifierMixin):
+class QuadraticDiscriminantAnalysis(ClassifierMixin, BaseEstimator):
     """Quadratic Discriminant Analysis

     A classifier with a quadratic decision boundary, generated
sklearn/dummy.py (4 changes: 2 additions & 2 deletions)

@@ -19,7 +19,7 @@
 from .utils.multiclass import class_distribution


-class DummyClassifier(BaseEstimator, ClassifierMixin, MultiOutputMixin):
+class DummyClassifier(MultiOutputMixin, ClassifierMixin, BaseEstimator):
     """
     DummyClassifier is a classifier that makes predictions using simple rules.

@@ -353,7 +353,7 @@ def score(self, X, y, sample_weight=None):
         return super().score(X, y, sample_weight)


-class DummyRegressor(BaseEstimator, RegressorMixin, MultiOutputMixin):
+class DummyRegressor(MultiOutputMixin, RegressorMixin, BaseEstimator):
     """
     DummyRegressor is a regressor that makes predictions using
     simple rules.
sklearn/ensemble/_hist_gradient_boosting/binning.py (2 changes: 1 addition & 1 deletion)

@@ -83,7 +83,7 @@ def _find_binning_thresholds(data, max_bins, subsample, random_state):
     return binning_thresholds


-class _BinMapper(BaseEstimator, TransformerMixin):
+class _BinMapper(TransformerMixin, BaseEstimator):
     """Transformer that maps a dataset into integer-valued bins.

     The bins are created in a feature-wise fashion, using quantiles so that
@@ -639,7 +639,7 @@ def n_iter_(self):
         return len(self._predictors)


-class HistGradientBoostingRegressor(BaseHistGradientBoosting, RegressorMixin):
+class HistGradientBoostingRegressor(RegressorMixin, BaseHistGradientBoosting):
     """Histogram-based Gradient Boosting Regression Tree.

     This estimator is much faster than
sklearn/ensemble/bagging.py (4 changes: 2 additions & 2 deletions)

@@ -429,7 +429,7 @@ def estimators_samples_(self):
                 for _, sample_indices in self._get_estimators_indices()]


-class BaggingClassifier(BaseBagging, ClassifierMixin):
+class BaggingClassifier(ClassifierMixin, BaseBagging):
     """A Bagging classifier.

     A Bagging classifier is an ensemble meta-estimator that fits base
@@ -816,7 +816,7 @@ def decision_function(self, X):
         return decisions


-class BaggingRegressor(BaseBagging, RegressorMixin):
+class BaggingRegressor(RegressorMixin, BaseBagging):
     """A Bagging regressor.

     A Bagging regressor is an ensemble meta-estimator that fits base