Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit d8d9358

Browse files
committed
DOC Updates to SVM examples
* Fixing flake8 issues
* Altered make_blobs to move clusters to corners and be more compact
* Reverted changes converting Y to y
1 parent c62b4ea commit d8d9358

File tree

6 files changed

+21
-32
lines changed

6 files changed

+21
-32
lines changed

examples/svm/plot_custom_kernel.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -17,26 +17,26 @@
1717
iris = datasets.load_iris()
1818
X = iris.data[:, :2] # we only take the first two features. We could
1919
# avoid this ugly slicing by using a two-dim dataset
20-
y = iris.target
20+
Y = iris.target
2121

2222

23-
def my_kernel(X, y):
23+
def my_kernel(X, Y):
2424
"""
2525
We create a custom kernel:
2626
2727
(2 0)
28-
k(X, y) = X ( ) y.T
28+
k(X, Y) = X ( ) Y.T
2929
(0 1)
3030
"""
3131
M = np.array([[2, 0], [0, 1.0]])
32-
return np.dot(np.dot(X, M), y.T)
32+
return np.dot(np.dot(X, M), Y.T)
3333

3434

3535
h = .02 # step size in the mesh
3636

3737
# we create an instance of SVM and fit out data.
3838
clf = svm.SVC(kernel=my_kernel)
39-
clf.fit(X, y)
39+
clf.fit(X, Y)
4040

4141
# Plot the decision boundary. For that, we will assign a color to each
4242
# point in the mesh [x_min, x_max]x[y_min, y_max].
@@ -50,7 +50,7 @@ def my_kernel(X, y):
5050
plt.pcolormesh(xx, yy, Z, cmap=plt.cm.Paired)
5151

5252
# Plot also the training points
53-
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='k')
53+
plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired, edgecolors='k')
5454
plt.title('3-Class classification using Support Vector Machine with custom'
5555
' kernel')
5656
plt.axis('tight')

examples/svm/plot_separating_hyperplane.py

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616

1717

1818
# we create 40 separable points
19-
X, y = make_blobs(n_samples=40, centers=2, random_state=7)
19+
X, y = make_blobs(n_samples=40, centers=2, random_state=12, cluster_std=0.35)
2020

2121
# fit the model
2222
clf = svm.SVC(kernel='linear')
@@ -37,10 +37,6 @@
3737
Z = clf.decision_function(xy).reshape(XX.shape)
3838

3939
# plot decision boundary and margins
40-
ax.contour(XX, YY, Z, colors='k',
41-
levels=[-1, 0, 1], alpha=0.5,
42-
linestyles=['--', '-', '--'])
40+
ax.contour(XX, YY, Z, colors='k', levels=[-1, 0, 1], alpha=0.5, linestyles=['--', '-', '--'])
4341
# plot support vectors
44-
ax.scatter(clf.support_vectors_[:, 0],
45-
clf.support_vectors_[:, 1],
46-
s=100, linewidth=1, facecolors='none');
42+
ax.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], s=100, linewidth=1, facecolors='none')

examples/svm/plot_separating_hyperplane_unbalanced.py

Lines changed: 3 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,6 @@
3030
import matplotlib.pyplot as plt
3131
from sklearn import svm
3232

33-
3433
# we create 40 separable points
3534
rng = np.random.RandomState(0)
3635
n_samples_1 = 1000
@@ -66,18 +65,12 @@
6665
Z = clf.decision_function(xy).reshape(XX.shape)
6766

6867
# plot decision boundary and margins
69-
a = ax.contour(XX, YY, Z, colors='k',
70-
levels=[0], alpha=0.5,
71-
linestyles=['-'])
68+
a = ax.contour(XX, YY, Z, colors='k', levels=[0], alpha=0.5, linestyles=['-'])
7269

7370
# get the separating hyperplane for weighted classes
7471
Z = wclf.decision_function(xy).reshape(XX.shape)
7572

7673
# plot decision boundary and margins for weighted classes
77-
b = ax.contour(XX, YY, Z, colors='r',
78-
levels=[0], alpha=0.5,
79-
linestyles=['-'])
74+
b = ax.contour(XX, YY, Z, colors='r', levels=[0], alpha=0.5, linestyles=['-'])
8075

81-
plt.legend([a.collections[0], b.collections[0]],
82-
["non weighted", "weighted"],
83-
loc="upper right");
76+
plt.legend([a.collections[0], b.collections[0]], ["non weighted", "weighted"], loc="upper right")

examples/svm/plot_svm_kernels.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -41,23 +41,23 @@
4141
(.2, -2.3),
4242
(0, -2.7),
4343
(1.3, 2.1)].T
44-
y = [0] * 8 + [1] * 8
44+
Y = [0] * 8 + [1] * 8
4545

4646
# figure number
4747
fignum = 1
4848

4949
# fit the model
5050
for kernel in ('linear', 'poly', 'rbf'):
5151
clf = svm.SVC(kernel=kernel, gamma=2)
52-
clf.fit(X, y)
52+
clf.fit(X, Y)
5353

5454
# plot the line, the points, and the nearest vectors to the plane
5555
plt.figure(fignum, figsize=(4, 3))
5656
plt.clf()
5757

5858
plt.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], s=80,
5959
facecolors='none', zorder=10, edgecolors='k')
60-
plt.scatter(X[:, 0], X[:, 1], c=y, zorder=10, cmap=plt.cm.Paired,
60+
plt.scatter(X[:, 0], X[:, 1], c=Y, zorder=10, cmap=plt.cm.Paired,
6161
edgecolors='k')
6262

6363
plt.axis('tight')

examples/svm/plot_svm_margin.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929
# we create 40 separable points
3030
np.random.seed(0)
3131
X = np.r_[np.random.randn(20, 2) - [2, 2], np.random.randn(20, 2) + [2, 2]]
32-
y = [0] * 20 + [1] * 20
32+
Y = [0] * 20 + [1] * 20
3333

3434
# figure number
3535
fignum = 1
@@ -38,7 +38,7 @@
3838
for name, penalty in (('unreg', 1), ('reg', 0.05)):
3939

4040
clf = svm.SVC(kernel='linear', C=penalty)
41-
clf.fit(X, y)
41+
clf.fit(X, Y)
4242

4343
# get the separating hyperplane
4444
w = clf.coef_[0]
@@ -63,7 +63,7 @@
6363

6464
plt.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], s=80,
6565
facecolors='none', zorder=10, edgecolors='k')
66-
plt.scatter(X[:, 0], X[:, 1], c=y, zorder=10, cmap=plt.cm.Paired,
66+
plt.scatter(X[:, 0], X[:, 1], c=Y, zorder=10, cmap=plt.cm.Paired,
6767
edgecolors='k')
6868

6969
plt.axis('tight')

examples/svm/plot_svm_nonlinear.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,11 +19,11 @@
1919
np.linspace(-3, 3, 500))
2020
np.random.seed(0)
2121
X = np.random.randn(300, 2)
22-
y = np.logical_xor(X[:, 0] > 0, X[:, 1] > 0)
22+
Y = np.logical_xor(X[:, 0] > 0, X[:, 1] > 0)
2323

2424
# fit the model
2525
clf = svm.NuSVC()
26-
clf.fit(X, y)
26+
clf.fit(X, Y)
2727

2828
# plot the decision function for each datapoint on the grid
2929
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
@@ -34,7 +34,7 @@
3434
origin='lower', cmap=plt.cm.PuOr_r)
3535
contours = plt.contour(xx, yy, Z, levels=[0], linewidths=2,
3636
linetypes='--')
37-
plt.scatter(X[:, 0], X[:, 1], s=30, c=y, cmap=plt.cm.Paired,
37+
plt.scatter(X[:, 0], X[:, 1], s=30, c=Y, cmap=plt.cm.Paired,
3838
edgecolors='k')
3939
plt.xticks(())
4040
plt.yticks(())

0 commit comments

Comments
 (0)