# 支持向量机

## 一、控制正则项的重要程度的C值

SVM尝试找到中间那条最优的决策边界，这个决策边界距离两个类别最近的样本最远。

### 未经标准化的原始数据点分布

import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets
from sklearn.preprocessing import StandardScaler
from sklearn.svm import LinearSVC

# Fix: the original used `iris` without ever defining it (NameError).
iris = datasets.load_iris()
X = iris.data
y = iris.target
# Keep only classes 0 and 1 (y < 2) and only the first two features,
# so the problem is binary and can be visualized in 2-D.
X = X[y < 2, :2]
y = y[y < 2]
# Plot class 0 (red) and class 1 (blue).
plt.scatter(X[y == 0, 0], X[y == 0, 1], color='red')
plt.scatter(X[y == 1, 0], X[y == 1, 1], color='blue')
plt.show()
# Standardize: fit() learns mean/std of X, transform() applies them.
standardScaler = StandardScaler()
standardScaler.fit(X)
X_standard = standardScaler.transform(X)
# Linear SVM with a very large C: regularization is effectively off,
# i.e. (near) hard-margin behaviour.
svc = LinearSVC(C=1e9)
svc.fit(X_standard, y)


LinearSVC(C=1000000000.0, class_weight=None, dual=True, fit_intercept=True,
intercept_scaling=1, loss='squared_hinge', max_iter=1000,
multi_class='ovr', penalty='l2', random_state=None, tol=0.0001,
verbose=0)


### 绘制决策边界：

def plot_decision_boundary(model, axis):
    """Plot a classifier's decision regions over axis=[xmin, xmax, ymin, ymax].

    Builds a dense grid (100 points per unit along each axis), predicts the
    class of every grid point with `model.predict`, and renders the regions
    with plt.contourf. Overlay data points and call plt.show() afterwards.
    """
    x0, x1 = np.meshgrid(
        np.linspace(axis[0], axis[1], int((axis[1] - axis[0]) * 100)).reshape(-1, 1),
        np.linspace(axis[2], axis[3], int((axis[3] - axis[2]) * 100)).reshape(-1, 1),
    )
    X_new = np.c_[x0.ravel(), x1.ravel()]
    y_predict = model.predict(X_new)
    z = y_predict.reshape(x0.shape)
    from matplotlib.colors import ListedColormap
    custom_cmap = ListedColormap(['#EF9A9A', '#FFF59D', '#90CAF9'])
    # Fix: the keyword is `cmap`, not `cmp`, and contourf takes no
    # `linewidth` kwarg — both were silently ignored, which is exactly
    # what the "kwargs were not used by contour" UserWarning reported.
    plt.contourf(x0, x1, z, cmap=custom_cmap)


plot_decision_boundary(svc, axis=[-3, 3, -3, 3])
plt.scatter(X_standard[y == 0, 0], X_standard[y == 0, 1], color='red')
plt.scatter(X_standard[y == 1, 0], X_standard[y == 1, 1], color='blue')
plt.show()

C:\Users\Administrator\Anaconda3\lib\site-packages\ipykernel_launcher.py:8: UserWarning: The following kwargs were not used by contour: 'linewidth', 'cmp'


# Small C => strong regularization: a softer margin that tolerates some
# misclassified points (contrast with the hard-margin C=1e9 model above).
svc2=LinearSVC(C=0.01)
svc2.fit(X_standard,y)
plot_decision_boundary(svc2,axis=[-3,3,-3,3])
plt.scatter(X_standard[y==0,0],X_standard[y==0,1],color='red')
plt.scatter(X_standard[y==1,0],X_standard[y==1,1],color='blue')
plt.show()

C:\Users\Administrator\Anaconda3\lib\site-packages\ipykernel_launcher.py:8: UserWarning: The following kwargs were not used by contour: 'linewidth', 'cmp'


## 二、使用多项式特征和核函数

# Use polynomial features and kernel functions.
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
# Generate data with the "moons" toy dataset (two interleaving half-circles).
X,y=datasets.make_moons()
print(X.shape)
print(y.shape)

(100, 2)
(100,)

# Plot the generated data.
plt.scatter(X[y==0,0],X[y==0,1])
plt.scatter(X[y==1,0],X[y==1,1])
plt.show()



# Add noise: `noise` is the standard deviation of the Gaussian noise added
# to the points; `random_state` seeds the generator for reproducibility.
X,y=datasets.make_moons(noise=0.15,random_state=777)
plt.scatter(X[y==0,0],X[y==0,1])
plt.scatter(X[y==1,0],X[y==1,1])
plt.show()



# Classify with a linear SVM on explicit polynomial features.
from sklearn.preprocessing import PolynomialFeatures,StandardScaler
from sklearn.svm import LinearSVC
from sklearn.pipeline import Pipeline


def PolynomialSVC(degree, C=1.0):
    """Pipeline: polynomial feature expansion -> standardization -> LinearSVC(C)."""
    steps = [
        ("poly", PolynomialFeatures(degree=degree)),
        ("std_scaler", StandardScaler()),
        ("linearSVC", LinearSVC(C=C)),
    ]
    return Pipeline(steps)


poly_svc = PolynomialSVC(degree=3)
poly_svc.fit(X, y)
plot_decision_boundary(poly_svc, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(X[y == 0, 0], X[y == 0, 1])
plt.scatter(X[y == 1, 0], X[y == 1, 1])
plt.show()

C:\Users\Administrator\Anaconda3\lib\site-packages\ipykernel_launcher.py:8: UserWarning: The following kwargs were not used by contour: 'linewidth', 'cmp'


from sklearn.svm import SVC


def PolynomialKernelSVC(degree, C=1.0):
    """Standardize, then SVC with a polynomial kernel of the given degree.

    Fix: the original accepted `degree` and `C` but never forwarded them to
    SVC, so SVC silently used its defaults; pass them through so the
    parameters actually take effect.
    """
    return Pipeline([
        ("std_scaler", StandardScaler()),
        ("kernelSVC", SVC(kernel="poly", degree=degree, C=C)),
    ])


poly_kernel_svc = PolynomialKernelSVC(degree=3)
poly_kernel_svc.fit(X, y)
plot_decision_boundary(poly_kernel_svc, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(X[y == 0, 0], X[y == 0, 1])
plt.scatter(X[y == 1, 0], X[y == 1, 1])
plt.show()

C:\Users\Administrator\Anaconda3\lib\site-packages\ipykernel_launcher.py:8: UserWarning: The following kwargs were not used by contour: 'linewidth', 'cmp'


## 三、核函数

SVM的本质是求解这样一个最优化问题：

$K(x, y) = (x \cdot y + 1)^2$

y 也根据这个规则变成了 $y'$，这两个新的向量相乘再相加，结果就是

### RBF核函数：

import numpy as np
import matplotlib.pyplot as plt

# 1-D demo data: integers -4..4.
x = np.arange(-4, 5, 1)
# Fix: the original condition (x>=2)&(x<=2) labels only the single point
# x==2. The intended (linearly inseparable) demo labels the middle band
# -2 <= x <= 2 as class 1 and the outer points as class 0.
y = np.array((x >= -2) & (x <= 2), dtype='int')
# Draw all points on the horizontal axis (y-coordinate 0), colored by class.
plt.scatter(x[y == 0], [0] * len(x[y == 0]))
plt.scatter(x[y == 1], [0] * len(x[y == 1]))
plt.show()


# Map the 1-D data into 2-D using Gaussian (RBF) features, one per landmark.
def gaussian(x, l):
    """RBF feature value exp(-gamma * (x - l)^2) with gamma fixed at 1.0."""
    gamma = 1.0
    return np.exp(-gamma * (x - l) ** 2)


# Two landmarks; each sample becomes (gaussian(x, l1), gaussian(x, l2)).
l1, l2 = -1, 1
X_new = np.empty((len(x), 2))
for idx, sample in enumerate(x):
    X_new[idx, 0] = gaussian(sample, l1)
    X_new[idx, 1] = gaussian(sample, l2)
# In the mapped space the two classes become separable.
plt.scatter(X_new[y == 0, 0], X_new[y == 0, 1])
plt.scatter(X_new[y == 1, 0], X_new[y == 1, 1])
plt.show()


# Regenerate the moons dataset (same noise level and seed as before).
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
X,y=datasets.make_moons(noise=0.15,random_state=777)
plt.scatter(X[y==0,0],X[y==0,1])
plt.scatter(X[y==1,0],X[y==1,1])
plt.show()


# Define an SVM classifier using the RBF kernel.
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
from sklearn.pipeline import Pipeline


def RBFKernelSVC(gamma=1.0):
    """Pipeline: StandardScaler followed by SVC(kernel='rbf', gamma=gamma)."""
    return Pipeline([
        ('std_scaler', StandardScaler()),
        ('svc', SVC(kernel='rbf', gamma=gamma)),
    ])


svc = RBFKernelSVC()  # default gamma=1.0
svc.fit(X, y)
plot_decision_boundary(svc, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(X[y == 0, 0], X[y == 0, 1])
plt.scatter(X[y == 1, 0], X[y == 1, 1])
plt.show()


C:\Users\Administrator\Anaconda3\lib\site-packages\ipykernel_launcher.py:8: UserWarning: The following kwargs were not used by contour: 'linewidth', 'cmp'


# Same RBF pipeline, now fitted with a very large gamma.
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
from sklearn.pipeline import Pipeline


def RBFKernelSVC(gamma=1.0):
    """StandardScaler + RBF-kernel SVC with the given gamma."""
    steps = [
        ('std_scaler', StandardScaler()),
        ('svc', SVC(kernel='rbf', gamma=gamma)),
    ]
    return Pipeline(steps)


svc = RBFKernelSVC(100)  # gamma=100
svc.fit(X, y)
plot_decision_boundary(svc, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(X[y == 0, 0], X[y == 0, 1])
plt.scatter(X[y == 1, 0], X[y == 1, 1])
plt.show()


C:\Users\Administrator\Anaconda3\lib\site-packages\ipykernel_launcher.py:8: UserWarning: The following kwargs were not used by contour: 'linewidth', 'cmp'


$\gamma$ 取值越大，高斯分布的钟形图就越窄，这里相当于每个样本点都形成了一个钟形图。很明显这样是过拟合的。

# Same RBF pipeline, fitted with gamma=10.
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
from sklearn.pipeline import Pipeline


def RBFKernelSVC(gamma=1.0):
    """Build a StandardScaler + SVC(kernel='rbf') pipeline."""
    scaler = ('std_scaler', StandardScaler())
    clf = ('svc', SVC(kernel='rbf', gamma=gamma))
    return Pipeline([scaler, clf])


svc = RBFKernelSVC(10)  # gamma=10
svc.fit(X, y)
plot_decision_boundary(svc, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(X[y == 0, 0], X[y == 0, 1])
plt.scatter(X[y == 1, 0], X[y == 1, 1])
plt.show()


C:\Users\Administrator\Anaconda3\lib\site-packages\ipykernel_launcher.py:8: UserWarning: The following kwargs were not used by contour: 'linewidth', 'cmp'


# Same RBF pipeline, fitted with a small gamma (wider Gaussians).
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
from sklearn.pipeline import Pipeline


def RBFKernelSVC(gamma=1.0):
    """Return a pipeline standardizing the data before an RBF-kernel SVC."""
    return Pipeline([
        ('std_scaler', StandardScaler()),
        ('svc', SVC(kernel='rbf', gamma=gamma)),
    ])


svc = RBFKernelSVC(0.1)  # gamma=0.1
svc.fit(X, y)
plot_decision_boundary(svc, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(X[y == 0, 0], X[y == 0, 1])
plt.scatter(X[y == 1, 0], X[y == 1, 1])
plt.show()


C:\Users\Administrator\Anaconda3\lib\site-packages\ipykernel_launcher.py:8: UserWarning: The following kwargs were not used by contour: 'linewidth', 'cmp'


SVM解决回归问题：

# SVM for regression, on the Boston housing dataset.
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets

# Fix: the original used `boston` without ever defining it (NameError).
# NOTE(review): load_boston is deprecated/removed in recent scikit-learn;
# kept here to match the tutorial's environment — confirm the installed version.
boston = datasets.load_boston()
X = boston.data
y = boston.target

from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=777)

from sklearn.svm import LinearSVR
from sklearn.svm import SVR
from sklearn.preprocessing import StandardScaler


def StandardLinearSVR(epsilon=0.1):
    """Pipeline: standardize, then LinearSVR with epsilon-insensitive loss."""
    return Pipeline([
        ('sta_scaler', StandardScaler()),
        ('linearSVR', LinearSVR(epsilon=epsilon)),
    ])


svr = StandardLinearSVR()
svr.fit(X_train, y_train)
# R^2 score on the held-out test split.
svr.score(X_test, y_test)

0.6984783816165197


08-17 3万+

02-25 2990

03-28 10万+

08-23 14万+

08-23 1万+

07-05 4万+

07-02 3024

02-25 1507

10-18 1453

05-06 78

04-21 809

09-27 1234

04-03 2万+

04-23 2019

01-11 430

02-22 989

12-20 1903

03-15 397

02-08 3366

04-18 1052

03-03 1340

10-19 1390

11-16 1万+

04-14 59万+

03-13 15万+

03-01 13万+

03-04 14万+

03-08 5万+

03-08 7万+

04-25 6万+

03-10 13万+

03-12 11万+

03-13 11万+

03-14 1万+

03-18 9764

03-19 3万+

03-19 8万+

03-19 3万+

03-22 4万+

03-24 3万+

03-25 3万+

05-08 5万+

03-25 9万+

03-27 1万+

03-29 21万+

03-29 10万+

03-30 16万+

05-28 6100

05-28 1万+

04-02 5116

04-02 4万+

04-06 7万+

04-09 8万+

04-09 2万+

05-17 8445

04-11 3万+

04-15 6万+

04-18 4万+

04-20 4万+

04-24 3万+

04-29 7024

04-30 9961

04-30 9520

05-01 6392

05-03 1万+

05-16 5万+

05-06 1万+

05-08 4万+

05-10 3253

05-14 6432

05-14 1952

05-16 3552

05-16 1万+

05-20 961

05-18 8364

05-27 1367

05-18 3548

05-19 1万+

05-21 7353

#### 石锤！今年Python要过苦日子了？ 程序员：我疯了！

©️2019 CSDN 皮肤主题: 游动-白 设计师: 上身试试