SVM

import numpy as np 
import matplotlib.pyplot as plt
from sklearn import svm
from sklearn.datasets import make_blobs
X, y = make_blobs(n_samples=50, centers=2, random_state=6)
clf = svm.SVC(kernel='linear', C=1000)
clf.fit(X, y)
plt.scatter(X[:,0], X[:,1], c=y, s=30, cmap=plt.cm.Paired)

ax = plt.gca()
xlim = ax.get_xlim()
ylim = ax.get_ylim()
xx = np.linspace(xlim[0], xlim[1], 30)
yy = np.linspace(ylim[0], ylim[1], 30)
YY, XX = np.meshgrid(yy, xx)
xy = np.vstack([XX.ravel(), YY.ravel()]).T
Z = clf.decision_function(xy).reshape(XX.shape)

ax.contour(XX, YY, Z, colors='k', levels=[-1, 0, 1], alpha=0.5, linestyles=['--', '-', '--'])
# edgecolors='k' is needed so the hollow support-vector markers are actually visible
ax.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], s=100, linewidth=1, facecolors='none', edgecolors='k')
plt.show()
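
Because C is set very high, the fit is close to a hard margin, so the decision function should evaluate to roughly ±1 at the circled support vectors, which is exactly what the dashed contour levels [-1, 1] represent. A quick check, added here for illustration and not part of the original listing:

print(clf.support_vectors_)                         # the points circled in the plot
print(clf.decision_function(clf.support_vectors_))  # values close to +1 or -1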


clf_rbf = svm.SVC(kernel='rbf', C=1000)
clf_rbf.fit(X, y)
plt.scatter(X[:,0], X[:,1], c=y, s=30, cmap=plt.cm.Paired)

ax = plt.gca()
xlim = ax.get_xlim()
ylim = ax.get_ylim()
xx = np.linspace(xlim[0], xlim[1], 30)
yy = np.linspace(ylim[0], ylim[1], 30)
YY, XX = np.meshgrid(yy, xx)
xy = np.vstack([XX.ravel(), YY.ravel()]).T
Z = clf_rbf.decision_function(xy).reshape(XX.shape)

ax.contour(XX, YY, Z, colors='k', levels=[-1, 0, 1], alpha=0.5, linestyles=['--', '-', '--'])
# use the RBF model's own support vectors here (the original reused clf by mistake)
ax.scatter(clf_rbf.support_vectors_[:, 0], clf_rbf.support_vectors_[:, 1], s=100, linewidth=1, facecolors='none', edgecolors='k')
plt.show()
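
For the RBF kernel the decision function is a weighted sum of Gaussian bumps centred on the support vectors: dual_coef_ @ K(support_vectors_, x) + intercept_. A small sanity check, added for illustration; it assumes the default gamma='scale', which resolves to 1 / (n_features * X.var()):

gamma_value = 1.0 / (X.shape[1] * X.var())       # what gamma='scale' resolves to
sv = clf_rbf.support_vectors_
point = X[:1]                                    # any single sample
k = np.exp(-gamma_value * ((sv - point) ** 2).sum(axis=1))
manual = clf_rbf.dual_coef_ @ k + clf_rbf.intercept_
print(manual, clf_rbf.decision_function(point))  # the two values should agree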

from sklearn.datasets import load_wine
def make_meshgrid(x, y, h=.02):
    # Build a grid of points covering the feature range, with step h
    x_min, x_max = x.min()-1, x.max()+1
    y_min, y_max = y.min()-1, y.max()+1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    return xx, yy

def plot_contours(ax, clf, xx, yy, **params):
    # Predict every grid point and draw the resulting class regions as filled contours
    z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    z = z.reshape(xx.shape)
    out = ax.contourf(xx, yy, z, **params)
    return out

wine = load_wine()
X = wine.data[:, :2]
y = wine.target

C = 1.0
models = (svm.SVC(kernel='linear', C=C),
          svm.LinearSVC(C=C),
          svm.SVC(kernel='rbf', gamma=0.7, C=C),
          svm.SVC(kernel='poly', degree=3, C=C))
models = (clf.fit(X, y) for clf in models)

titles = ('SVC with linear kernel', 'LinearSVC (linear kernel)', 'SVC with RBF kernel', 'SVC with polynomial (degree 3) kernel')

fig, sub = plt.subplots(2,2)
plt.subplots_adjust(wspace=0.4, hspace=0.4)

X0,X1 = X[:,0], X[:,1]
xx,yy = make_meshgrid(X0, X1)

for clf,title,ax in zip(models, titles, sub.flatten()):
    plot_contours(ax, clf, xx, yy, cmap=plt.cm.plasma, alpha=0.8)
    ax.scatter(X0, X1, c=y, cmap=plt.cm.plasma, s=20, edgecolors='k')
    ax.set_xlim(xx.min(), xx.max())
    ax.set_ylim(yy.min(), yy.max())
    ax.set_xlabel('Feature 0')
    ax.set_ylabel('Feature 1')
    ax.set_xticks(())
    ax.set_yticks(())
    ax.set_title(title)

plt.show()
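
The four boundaries are easier to compare with a number attached. As a small addition (not in the original post), the models are rebuilt and their training accuracy on the two wine features printed; the `models` generator above has already been consumed by the plotting loop, hence the refit:

for model, title in zip((svm.SVC(kernel='linear', C=C),
                         svm.LinearSVC(C=C),
                         svm.SVC(kernel='rbf', gamma=0.7, C=C),
                         svm.SVC(kernel='poly', degree=3, C=C)), titles):
    model.fit(X, y)
    print('{}: training accuracy {:.3f}'.format(title, model.score(X, y)))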


C = 1.0
models = (svm.SVC(kernel='rbf', gamma=0.1, C=C),
          svm.SVC(kernel='rbf', gamma=1, C=C),
          svm.SVC(kernel='rbf', gamma=10, C=C))
models = (clf.fit(X, y) for clf in models)

titles = ('gamma=0.1', 'gamma=1', 'gamma=10')

fig, sub = plt.subplots(1,3, figsize=(10,3))

X0,X1 = X[:,0], X[:,1]
xx,yy = make_meshgrid(X0, X1)

for clf,title,ax in zip(models, titles, sub.flatten()):
    plot_contours(ax, clf, xx, yy, cmap=plt.cm.plasma, alpha=0.8)
    ax.scatter(X0, X1, c=y, cmap=plt.cm.plasma, s=20, edgecolors='k')
    ax.set_xlim(xx.min(), xx.max())
    ax.set_ylim(yy.min(), yy.max())
    ax.set_xlabel('Feature 0')
    ax.set_ylabel('Feature 1')
    ax.set_xticks(())
    ax.set_yticks(())
    ax.set_title(title)

plt.show()
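
Larger gamma makes the RBF boundary hug individual training points, which tends to raise the training score and hurt generalisation. One way to see that, using an arbitrary train/test split of the two wine features (the names Xw_train etc. are introduced here for illustration and are not in the original post):

from sklearn.model_selection import train_test_split
Xw_train, Xw_test, yw_train, yw_test = train_test_split(X, y, random_state=0)
for gamma in [0.1, 1, 10]:
    m = svm.SVC(kernel='rbf', gamma=gamma, C=C).fit(Xw_train, yw_train)
    print('gamma={}: train {:.3f}, test {:.3f}'.format(
        gamma, m.score(Xw_train, yw_train), m.score(Xw_test, yw_test)))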

from sklearn.datasets import load_boston
# Note: load_boston was removed in scikit-learn 1.2; on newer versions substitute
# another regression dataset (e.g. fetch_california_housing).
boston = load_boston()
print(boston.keys())

# Use the Boston features and targets from here on (the original omitted this step)
X, y = boston.data, boston.target

plt.plot(X.min(axis=0), 'v', label='min')
plt.plot(X.max(axis=0), '^', label='max')

plt.yscale('log')
plt.legend(loc='best')

plt.xlabel('features')
plt.ylabel('feature magnitude')

plt.show()


from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

# The original post does not show the split; any random_state works here
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=8)

scaler = StandardScaler()
scaler.fit(X_train)
X_train_scaled = scaler.transform(X_train)
X_test_scaled = scaler.transform(X_test)

plt.plot(X_train_scaled.min(axis=0), 'v', label='train set min')
plt.plot(X_train_scaled.max(axis=0), '^', label='train set max')
plt.plot(X_test_scaled.min(axis=0), 'v', label='test set min')
plt.plot(X_test_scaled.max(axis=0), '^', label='test set max')
plt.yscale('log')

plt.legend(loc='best')

plt.xlabel('features')
plt.ylabel('feature magnitude')

plt.show()
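
StandardScaler subtracts the per-feature mean and divides by the per-feature standard deviation, so on the training set every column should end up with mean close to 0 and standard deviation close to 1. A quick check, added for illustration:

print(X_train_scaled.mean(axis=0).round(2))  # roughly 0 for every feature
print(X_train_scaled.std(axis=0).round(2))   # roughly 1 for every feature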

from sklearn.svm import SVR

for kernel in ['linear', 'rbf']:
    svr = SVR(kernel=kernel)
    svr.fit(X_train_scaled, y_train)
    print(kernel, 'kernel SVR training set score: {:.3f}'.format(svr.score(X_train_scaled, y_train)))
    print(kernel, 'kernel SVR test set score: {:.3f}'.format(svr.score(X_test_scaled, y_test)))
    


svr = SVR(C=100, gamma=0.1)
svr.fit(X_train_scaled, y_train)

print('Tuned SVR training set score: {:.3f}'.format(svr.score(X_train_scaled, y_train)))
print('Tuned SVR test set score: {:.3f}'.format(svr.score(X_test_scaled, y_test)))
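
C=100 and gamma=0.1 above are picked by hand. A sketch of how one might justify them (not part of the original post) is a small loop over candidate values; in real work the selection should be done with cross-validation on the training data rather than by scoring the test set:

for C_value in [1, 10, 100, 1000]:
    for gamma_value in [0.01, 0.1, 1]:
        model = SVR(C=C_value, gamma=gamma_value).fit(X_train_scaled, y_train)
        print('C={}, gamma={}: test score {:.3f}'.format(
            C_value, gamma_value, model.score(X_test_scaled, y_test)))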
