Commit d1e34dd

CLEAN sparse_encode: remove unused arguments
And use the ones that should be used.
1 parent 34c2904 commit d1e34dd
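
The change is threefold: the private `_sparse_encode` drops the never-used `copy_gram` argument and starts honoring `max_iter`; the public `sparse_encode` keeps `copy_gram` but deprecates it; and the internal calls switch to keyword arguments so nothing is passed by position by accident. A minimal sketch of the deprecation idiom (the `encode` name is a hypothetical stand-in, not scikit-learn's API): switching the default from True to None makes "the caller actually passed it" detectable.

import warnings

def encode(X, copy_gram=None):
    # Hypothetical stand-in for sparse_encode, for illustration only.
    # With a None default, any explicitly passed value can be detected
    # and flagged, while old call sites keep working.
    if copy_gram is not None:
        warnings.warn("copy_gram is deprecated and has no effect.",
                      DeprecationWarning, stacklevel=2)
    return X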

1 file changed, 15 insertions(+), 18 deletions(-)

sklearn/decomposition/dict_learning.py (+15 −18)
@@ -22,8 +22,8 @@
 
 
 def _sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
-                   n_nonzero_coefs=None, alpha=None, copy_gram=True,
-                   copy_cov=True, init=None, max_iter=1000):
+                   n_nonzero_coefs=None, alpha=None, copy_cov=True,
+                   init=None, max_iter=1000):
     """Generic sparse coding
 
     Each column of the result is the solution to a Lasso problem.
@@ -74,10 +74,6 @@ def _sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
     max_iter: int, 1000 by default
         Maximum number of iterations to perform if `algorithm='lasso_cd'`.
 
-    copy_gram: boolean, optional
-        Whether to copy the precomputed Gram matrix; if False, it may be
-        overwritten.
-
     copy_cov: boolean, optional
         Whether to copy the precomputed covariance matrix; if False, it may be
         overwritten.
@@ -135,7 +131,7 @@ def _sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
         alpha /= n_features  # account for scaling
         new_code = np.empty((n_samples, n_atoms))
         clf = Lasso(alpha=alpha, fit_intercept=False, precompute=gram,
-                    max_iter=1000)
+                    max_iter=max_iter)
         for k in xrange(n_samples):
             # A huge amount of time is spent in this loop. It needs to be
             # tight
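
The hunk above is the functional bug fix: the lasso_cd branch built its Lasso with a hard-coded max_iter=1000, so the caller's `max_iter` never reached the solver. A small runnable illustration of the corrected forwarding, assuming scikit-learn's `Lasso` API (the `encode` helper here is a hypothetical simplification):

import numpy as np
from sklearn.linear_model import Lasso

def encode(x, D, max_iter=1000):
    # After the fix, the caller's max_iter is forwarded to the solver
    # instead of being silently replaced by the literal 1000.
    clf = Lasso(alpha=0.1, fit_intercept=False, max_iter=max_iter)
    clf.fit(D, x)
    return clf.coef_

rng = np.random.RandomState(0)
D = rng.randn(50, 8)             # design matrix standing in for dictionary.T
x = rng.randn(50)                # one signal to encode
code = encode(x, D, max_iter=5)  # the solver now really stops after 5 iterations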
@@ -180,7 +176,7 @@ def _sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
 
 
 def sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
-                  n_nonzero_coefs=None, alpha=None, copy_gram=True,
+                  n_nonzero_coefs=None, alpha=None, copy_gram=None,
                   copy_cov=True, init=None, max_iter=1000, n_jobs=1):
     """Sparse coding
 
@@ -236,10 +232,6 @@ def sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
     max_iter: int, 1000 by default
         Maximum number of iterations to perform if `algorithm='lasso_cd'`.
 
-    copy_gram: boolean, optional
-        Whether to copy the precomputed Gram matrix; if False, it may be
-        overwritten.
-
     copy_cov: boolean, optional
         Whether to copy the precomputed covariance matrix; if False, it may be
         overwritten.
@@ -262,29 +254,34 @@ def sparse_encode(X, dictionary, gram=None, cov=None, algorithm='lasso_lars',
     warnings.warn("Please note: the interface of sparse_encode has changed: "
                   "It now follows the dictionary learning API and it also "
                   "handles parallelization. Please read the docstring for "
-                  "more information.")
+                  "more information.", stacklevel=2)
+    if copy_gram is not None:
+        warnings.warn("copy_gram in sparse_encode is deprecated: it "
+                      "leads to errors.", DeprecationWarning, stacklevel=2)
     dictionary = np.asarray(dictionary)
     X = np.asarray(X)
     n_samples, n_features = X.shape
     n_atoms = dictionary.shape[0]
     if gram is None:
-        copy_gram = False
         gram = np.dot(dictionary, dictionary.T)
     if cov is None and algorithm != 'lasso_cd':
         copy_cov = False
         cov = np.dot(dictionary, X.T)
     if n_jobs == 1 or algorithm == 'threshold':
-        return _sparse_encode(X, dictionary, gram, cov, algorithm,
-                              n_nonzero_coefs, alpha, copy_gram, copy_cov, init)
+        return _sparse_encode(X, dictionary, gram=gram, cov=cov,
+                              algorithm=algorithm, n_nonzero_coefs=n_nonzero_coefs,
+                              alpha=alpha, copy_cov=copy_cov,
+                              init=init, max_iter=max_iter)
     code = np.empty((n_samples, n_atoms))
     slices = list(gen_even_slices(n_samples, n_jobs))
     code_views = Parallel(n_jobs=n_jobs)(
         delayed(sparse_encode)(X[this_slice], dictionary, gram,
                                cov[:, this_slice], algorithm,
                                n_nonzero_coefs, alpha,
-                               copy_gram, copy_cov,
+                               copy_cov=copy_cov,
                                init=init[this_slice] if init is not
-                               None else None)
+                               None else None,
+                               max_iter=max_iter)
         for this_slice in slices)
     for this_slice, this_view in zip(slices, code_views):
         code[this_slice] = this_view
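
For reference, the fan-out at the end of this hunk follows a common joblib pattern: split the samples into even slices, encode each slice in a worker, and stitch the views back together. A self-contained sketch of that pattern, assuming current import locations (sklearn.utils.gen_even_slices and a standalone joblib; encode_slice is a trivial stand-in for the per-slice encoder):

import numpy as np
from joblib import Parallel, delayed
from sklearn.utils import gen_even_slices

def encode_slice(block):
    return block * 2  # stand-in for the per-slice _sparse_encode call

X = np.arange(12.0).reshape(6, 2)
slices = list(gen_even_slices(X.shape[0], 3))  # 3 roughly equal row ranges
views = Parallel(n_jobs=3)(delayed(encode_slice)(X[s]) for s in slices)

out = np.empty_like(X)
for s, view in zip(slices, views):
    out[s] = view  # reassemble results in their original positions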
