Commit 67ff4ef

Merge pull request scikit-learn#164 from yarikoptic/0.8.X
0.8.x (to be cp to master as well) PLS -- use string comparisons (instead of identity checking) and few spell fixes
2 parents f7d5317 + bbbe225 commit 67ff4ef
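
For context (not part of the commit itself): Python's `is` tests object identity, not value equality, so a check like `self.algorithm is "nipals"` only works when both strings happen to be the same interned object, a CPython implementation detail rather than a guarantee. A minimal illustration of the pitfall this diff removes, using hypothetical variable names:

    # Hypothetical snippet, not from pls.py: identity vs. equality for strings.
    suffix = "als"
    algorithm = "nip" + suffix    # value "nipals", but a string built at run time
    print(algorithm == "nipals")  # True: compares values -- the reliable test
    print(algorithm is "nipals")  # typically False: compares object identity (interning-dependent)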

File tree: 1 file changed (+16, -13 lines)


scikits/learn/pls.py

Lines changed: 16 additions & 13 deletions
@@ -25,7 +25,7 @@ def _nipals_twoblocks_inner_loop(X, Y, mode="A", max_iter=500, tol=1e-06):
     # Inner loop of the Wold algo.
     while True:
         # 1.1 Update u: the X weights
-        if mode is "B":
+        if mode == "B":
             if X_pinv is None:
                 X_pinv = linalg.pinv(X)  # compute once pinv(X)
             u = np.dot(X_pinv, y_score)
@@ -38,7 +38,7 @@ def _nipals_twoblocks_inner_loop(X, Y, mode="A", max_iter=500, tol=1e-06):
         x_score = np.dot(X, u)

         # 2.1 Update v: the Y weights
-        if mode is "B":
+        if mode == "B":
             if Y_pinv is None:
                 Y_pinv = linalg.pinv(Y)  # compute once pinv(Y)
             v = np.dot(Y_pinv, x_score)
@@ -95,16 +95,16 @@ def _center_scale_xy(X, Y, scale=True):
 class _PLS(BaseEstimator):
     """Partial Least Square (PLS)

-    We use the therminology defined by [Wegelin et al. 2000].
+    We use the terminology defined by [Wegelin et al. 2000].
     This implementation uses the PLS Wold 2 blocks algorithm or NIPALS which is
     based on two nested loops:
-        (i) The outer loop iterate over compoments.
+        (i) The outer loop iterate over components.
         (ii) The inner loop estimates the loading vectors. This can be done
         with two algo. (a) the inner loop of the original NIPALS algo or (b) a
         SVD on residuals cross-covariance matrices.

     This implementation provides:
-    - PLS regression, ie., PLS 2 blocks, mode A, with asymetric deflation.
+    - PLS regression, ie., PLS 2 blocks, mode A, with asymmetric deflation.
       A.k.a. PLS2, with multivariate response or PLS1 with univariate response.
     - PLS canonical, ie., PLS 2 blocks, mode A, with symetric deflation.
     - CCA, ie., PLS 2 blocks, mode B, with symetric deflation.
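
Option (b) in the docstring above, an SVD on the residual cross-covariance matrices, can be sketched in a few lines of plain NumPy. This is a hedged illustration, not the module's private `_svd_cross_product` helper, whose exact body is not shown in this diff:

    import numpy as np

    def svd_cross_product(Xk, Yk):
        # leading singular vectors of the residual cross-covariance Xk' Yk
        C = np.dot(Xk.T, Yk)                      # shape (p, q)
        U, s, Vt = np.linalg.svd(C, full_matrices=False)
        u = U[:, [0]]                             # X weights: first left singular vector
        v = Vt.T[:, [0]]                          # Y weights: first right singular vector
        return u, v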
@@ -167,7 +167,7 @@ class _PLS(BaseEstimator):
         Y block to latents rotations.

     coefs: array, [p, q]
-        The coeficients of the linear model: Y = X coefs + Err
+        The coefficients of the linear model: Y = X coefs + Err

     References
     ----------
@@ -227,7 +227,7 @@ def fit(self, X, Y, **params):
                             'has %s' % (X.shape[0], Y.shape[0]))
         if self.n_components < 1 or self.n_components > p:
             raise ValueError('invalid number of components')
-        if self.algorithm is "svd" and self.mode is "B":
+        if self.algorithm == "svd" and self.mode == "B":
             raise ValueError('Incompatible configuration: mode B is not '
                              'implemented with svd algorithm')
         if not self.deflation_mode in ["canonical", "regression"]:
@@ -250,12 +250,15 @@ def fit(self, X, Y, **params):
         for k in xrange(self.n_components):
             #1) weights estimation (inner loop)
             # -----------------------------------
-            if self.algorithm is "nipals":
+            if self.algorithm == "nipals":
                 u, v = _nipals_twoblocks_inner_loop(
                     X=Xk, Y=Yk, mode=self.mode,
                     max_iter=self.max_iter, tol=self.tol)
-            if self.algorithm is "svd":
+            elif self.algorithm == "svd":
                 u, v = _svd_cross_product(X=Xk, Y=Yk)
+            else:
+                raise ValueError("Got algorithm %s when only 'svd' "
+                                 "and 'nipals' are known" % self.algorithm)
             # compute scores
             x_score = np.dot(Xk, u)
             y_score = np.dot(Yk, v)
@@ -273,11 +276,11 @@ def fit(self, X, Y, **params):
             x_loadings = np.dot(Xk.T, x_score) / np.dot(x_score.T, x_score)
             # - substract rank-one approximations to obtain remainder matrix
             Xk -= np.dot(x_score, x_loadings.T)
-            if self.deflation_mode is "canonical":
+            if self.deflation_mode == "canonical":
                 # - regress Yk's on y_score, then substract rank-one approx.
                 y_loadings = np.dot(Yk.T, y_score) / np.dot(y_score.T, y_score)
                 Yk -= np.dot(y_score, y_loadings.T)
-            if self.deflation_mode is "regression":
+            if self.deflation_mode == "regression":
                 # - regress Yk's on x_score, then substract rank-one approx.
                 y_loadings = np.dot(Yk.T, x_score) / np.dot(x_score.T, x_score)
                 Yk -= np.dot(x_score, y_loadings.T)
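
Aside (not part of the diff): the deflation shown above is a plain rank-one update, and the X-block half can be written as a standalone NumPy function. This sketch assumes x_score is an (n, 1) column vector; the name deflate_x is hypothetical:

    import numpy as np

    def deflate_x(Xk, x_score):
        # least-squares loadings of Xk's columns on the current score vector
        x_loadings = np.dot(Xk.T, x_score) / np.dot(x_score.T, x_score)
        # subtract the rank-one approximation x_score * x_loadings'
        return Xk - np.dot(x_score, x_loadings.T)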
@@ -301,8 +304,8 @@ def fit(self, X, Y, **params):
         else:
             self.y_rotations_ = np.ones(1)

-        if True or self.deflation_mode is "regression":
-            # Estimate regression coeficient
+        if True or self.deflation_mode == "regression":
+            # Estimate regression coefficient
             # Regress Y on T
             # Y = TQ' + Err,
             # Then express in function of X
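
To unpack the comments above (this derivation is not in the diff, and the array names below are assumptions, not the estimator's attributes): if the latent scores satisfy T = X W*, with W* the X-block rotations, then Y = T Q' + Err becomes Y = X (W* Q') + Err, so the coefficients referred to are coefs = W* Q', up to the centering/scaling bookkeeping the code performs. A small NumPy check of that algebra:

    import numpy as np

    rng = np.random.RandomState(0)
    X = rng.randn(20, 5)                      # n=20 samples, p=5 predictors
    x_rotations = rng.randn(5, 2)             # W*: X block to latents rotations, (p, k)
    y_loadings = rng.randn(3, 2)              # Q: Y loadings, (q, k)

    T = np.dot(X, x_rotations)                # latent scores
    coefs = np.dot(x_rotations, y_loadings.T)  # (p, q)
    # T Q' equals X (W* Q') by associativity, so both paths give the same prediction
    assert np.allclose(np.dot(T, y_loadings.T), np.dot(X, coefs))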
