 print('\nPC 1 without scaling:\n', pca.components_[0])
 print('\nPC 1 with scaling:\n', pca_std.components_[0])
 
-# Scale and use PCA on X_train data for visualization.
+# Use PCA without and with scale on X_train data for visualization.
+X_train_transformed = pca.transform(X_train)
 scaler = std_clf.named_steps['standardscaler']
-X_train_std = pca_std.transform(scaler.transform(X_train))
+X_train_std_transformed = pca_std.transform(scaler.transform(X_train))
 
 # visualize standardized vs. untouched dataset with PCA performed
 fig, (ax1, ax2) = plt.subplots(ncols=2, figsize=FIG_SIZE)
 
 
 for l, c, m in zip(range(0, 3), ('blue', 'red', 'green'), ('^', 's', 'o')):
-    ax1.scatter(X_train[y_train == l, 0], X_train[y_train == l, 1],
+    ax1.scatter(X_train_transformed[y_train == l, 0],
+                X_train_transformed[y_train == l, 1],
                 color=c,
                 label='class %s' % l,
                 alpha=0.5,
                 marker=m
                 )
 
 for l, c, m in zip(range(0, 3), ('blue', 'red', 'green'), ('^', 's', 'o')):
-    ax2.scatter(X_train_std[y_train == l, 0], X_train_std[y_train == l, 1],
+    ax2.scatter(X_train_std_transformed[y_train == l, 0],
+                X_train_std_transformed[y_train == l, 1],
                 color=c,
                 label='class %s' % l,
                 alpha=0.5,
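For readers viewing this hunk in isolation, the sketch below shows how pca, std_clf, scaler, and the two renamed arrays fit together before the plotting loops above. Only the pipeline/transform pattern is taken from the diff; the wine dataset, the GaussianNB classifier, and the split parameters are assumptions added to make the snippet self-contained.

# Minimal sketch; dataset, classifier and split settings are assumptions,
# not taken from this diff.
from sklearn.datasets import load_wine
from sklearn.decomposition import PCA
from sklearn.model_selection import train_test_split
from sklearn.naive_bayes import GaussianNB
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

X, y = load_wine(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.3, random_state=42)

# PCA fit directly on the raw (unscaled) training data.
pca = PCA(n_components=2).fit(X_train)
X_train_transformed = pca.transform(X_train)

# The same steps with standardization, wrapped in a pipeline; make_pipeline
# names each step after its lowercased class name ('standardscaler', 'pca').
std_clf = make_pipeline(StandardScaler(), PCA(n_components=2), GaussianNB())
std_clf.fit(X_train, y_train)
scaler = std_clf.named_steps['standardscaler']
pca_std = std_clf.named_steps['pca']
X_train_std_transformed = pca_std.transform(scaler.transform(X_train))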