diff --git a/maint_tools/test_docstrings.py b/maint_tools/test_docstrings.py index 9b23b1789aeb4..d4ea5e0d30b3c 100644 --- a/maint_tools/test_docstrings.py +++ b/maint_tools/test_docstrings.py @@ -80,7 +80,6 @@ "LassoCV", "LassoLars", "LassoLarsCV", - "LassoLarsIC", "LatentDirichletAllocation", "LedoitWolf", "LinearSVC", diff --git a/sklearn/linear_model/_least_angle.py b/sklearn/linear_model/_least_angle.py index deec81a29c190..a239d9ef14609 100644 --- a/sklearn/linear_model/_least_angle.py +++ b/sklearn/linear_model/_least_angle.py @@ -1903,7 +1903,7 @@ def __init__( class LassoLarsIC(LassoLars): - """Lasso model fit with Lars using BIC or AIC for model selection + """Lasso model fit with Lars using BIC or AIC for model selection. The optimization objective for Lasso is:: @@ -1923,7 +1923,7 @@ class LassoLarsIC(LassoLars): The type of criterion to use. fit_intercept : bool, default=True - whether to calculate the intercept for this model. If set + Whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (i.e. data is expected to be centered). @@ -2005,14 +2005,18 @@ class LassoLarsIC(LassoLars): .. versionadded:: 0.24 - Examples + See Also -------- - >>> from sklearn import linear_model - >>> reg = linear_model.LassoLarsIC(criterion='bic', normalize=False) - >>> reg.fit([[-1, 1], [0, 0], [1, 1]], [-1.1111, 0, -1.1111]) - LassoLarsIC(criterion='bic', normalize=False) - >>> print(reg.coef_) - [ 0. -1.11...] + lars_path : Compute Least Angle Regression or Lasso + path using LARS algorithm. + lasso_path : Compute Lasso path with coordinate descent. + Lasso : Linear Model trained with L1 prior as + regularizer (aka the Lasso). + LassoCV : Lasso linear model with iterative fitting + along a regularization path. + LassoLars : Lasso model fit with Least Angle Regression a.k.a. Lars. + LassoLarsCV : Cross-validated Lasso, using the LARS algorithm. + sklearn.decomposition.sparse_encode : Sparse coding.
Notes ----- @@ -2025,9 +2029,14 @@ class LassoLarsIC(LassoLars): https://en.wikipedia.org/wiki/Akaike_information_criterion https://en.wikipedia.org/wiki/Bayesian_information_criterion - See Also + Examples -------- - lars_path, LassoLars, LassoLarsCV + >>> from sklearn import linear_model + >>> reg = linear_model.LassoLarsIC(criterion='bic', normalize=False) + >>> reg.fit([[-1, 1], [0, 0], [1, 1]], [-1.1111, 0, -1.1111]) + LassoLarsIC(criterion='bic', normalize=False) + >>> print(reg.coef_) + [ 0. -1.11...] """ def __init__( @@ -2063,10 +2072,10 @@ def fit(self, X, y, copy_X=None): Parameters ---------- X : array-like of shape (n_samples, n_features) - training data. + Training data. y : array-like of shape (n_samples,) - target values. Will be cast to X's dtype if necessary + Target values. Will be cast to X's dtype if necessary. copy_X : bool, default=None If provided, this parameter will override the choice @@ -2076,7 +2085,7 @@ def fit(self, X, y, copy_X=None): Returns ------- self : object - returns an instance of self. + Returns an instance of self. """ _normalize = _deprecate_normalize( self.normalize, default=True, estimator_name=self.__class__.__name__