Thanks for visiting codestin.com
Credit goes to github.com

Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
- Added an attribute `lower_bounds_` to the :class:`mixture.BaseMixture`
class to save the list of lower bounds for each iteration thereby providing
insights into the convergence behavior of mixture models like
:class:`mixture.GaussianMixture`.
By :user:`Manideep Yenugula <myenugula>`.
5 changes: 5 additions & 0 deletions sklearn/mixture/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,6 +224,7 @@ def fit_predict(self, X, y=None):
n_init = self.n_init if do_init else 1

max_lower_bound = -np.inf
best_lower_bounds = []
self.converged_ = False

random_state = check_random_state(self.random_state)
Expand All @@ -236,6 +237,7 @@ def fit_predict(self, X, y=None):
self._initialize_parameters(X, random_state)

lower_bound = -np.inf if do_init else self.lower_bound_
current_lower_bounds = []

if self.max_iter == 0:
best_params = self._get_parameters()
Expand All @@ -248,6 +250,7 @@ def fit_predict(self, X, y=None):
log_prob_norm, log_resp = self._e_step(X)
self._m_step(X, log_resp)
lower_bound = self._compute_lower_bound(log_resp, log_prob_norm)
current_lower_bounds.append(lower_bound)

change = lower_bound - prev_lower_bound
self._print_verbose_msg_iter_end(n_iter, change)
Expand All @@ -262,6 +265,7 @@ def fit_predict(self, X, y=None):
max_lower_bound = lower_bound
best_params = self._get_parameters()
best_n_iter = n_iter
best_lower_bounds = current_lower_bounds
self.converged_ = converged

# Should only warn about convergence if max_iter > 0, otherwise
Expand All @@ -280,6 +284,7 @@ def fit_predict(self, X, y=None):
self._set_parameters(best_params)
self.n_iter_ = best_n_iter
self.lower_bound_ = max_lower_bound
self.lower_bounds_ = best_lower_bounds

# Always do a final e-step to guarantee that the labels returned by
# fit_predict(X) are always consistent with fit(X).predict(X)
Expand Down
4 changes: 4 additions & 0 deletions sklearn/mixture/_bayesian_mixture.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,6 +254,10 @@ class BayesianGaussianMixture(BaseMixture):
Lower bound value on the model evidence (of the training data) of the
best fit of inference.

lower_bounds_ : array-like of shape (`n_iter_`,)
The list of lower bound values on the model evidence from each iteration
of the best fit of inference.

weight_concentration_prior_ : tuple or float
The dirichlet concentration of each component on the weight
distribution (Dirichlet). The type depends on
Expand Down
4 changes: 4 additions & 0 deletions sklearn/mixture/_gaussian_mixture.py
Original file line number Diff line number Diff line change
Expand Up @@ -669,6 +669,10 @@ class GaussianMixture(BaseMixture):
Lower bound value on the log-likelihood (of the training data with
respect to the model) of the best fit of EM.

lower_bounds_ : array-like of shape (`n_iter_`,)
The list of lower bound values on the log-likelihood from each
iteration of the best fit of EM.

n_features_in_ : int
Number of features seen during :term:`fit`.

Expand Down
1 change: 1 addition & 0 deletions sklearn/mixture/tests/test_gaussian_mixture.py
Original file line number Diff line number Diff line change
Expand Up @@ -1236,6 +1236,7 @@ def test_gaussian_mixture_setting_best_params():
"precisions_cholesky_",
"n_iter_",
"lower_bound_",
"lower_bounds_",
]:
assert hasattr(gmm, attr)

Expand Down