博主是初学者,学得很浅显,大家可以参考大佬的文章: https://blog.csdn.net/b285795298/article/details/81977271
逻辑回归的改进
支持向量机的决策边界:线性可分的例子
向量内积性质的复习
SVM决策边界
SVM的核函数:用来使SVM能够处理非线性分类
核函数和相似度
例子:
代码实现
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.preprocessing import PolynomialFeatures, StandardScaler
from sklearn.svm import LinearSVC
from sklearn.pipeline import Pipeline
from sklearn.svm import SVC
# make_moons produces two interleaving half-circle point clusters;
# noise adds Gaussian jitter and random_state fixes the sample for
# reproducibility.
X, y = datasets.make_moons(noise=0.15, random_state=666)
# Scatter-plot each class separately so matplotlib assigns each its own
# color from the default cycle (class 0, then class 1).
for label in (0, 1):
    plt.scatter(X[y == label, 0], X[y == label, 1])
plt.show()
def plot_decision_boundary(model, axis):
    """Draw the decision regions of a fitted classifier as filled contours.

    Parameters
    ----------
    model : fitted estimator exposing a ``predict`` method.
    axis : sequence [x_min, x_max, y_min, y_max] delimiting the plot area.
    """
    from matplotlib.colors import ListedColormap

    # Build a dense evaluation grid over the requested area, roughly
    # 100 points per unit of axis length.  np.meshgrid flattens its
    # inputs itself, so the original .reshape(-1, 1) was unnecessary.
    x0, x1 = np.meshgrid(
        np.linspace(axis[0], axis[1], int((axis[1] - axis[0]) * 100)),
        np.linspace(axis[2], axis[3], int((axis[3] - axis[2]) * 100)),
    )
    # ravel flattens each coordinate grid so every row of X_new is one
    # (x0, x1) sample point.
    X_new = np.c_[x0.ravel(), x1.ravel()]
    y_predict = model.predict(X_new)
    # Reshape the flat predictions back to the grid shape for contourf.
    zz = y_predict.reshape(x0.shape)
    custom_cmap = ListedColormap(['#EF9A9A', '#FFF59D', '#90CAF9'])
    # NOTE: the original passed linewidth=5, but filled contours have no
    # lines, so contourf does not use that keyword; it has been dropped.
    plt.contourf(x0, x1, zz, cmap=custom_cmap)
def PolynomialSVC(degree, C=1.0):
    """Pipeline: polynomial feature expansion -> standardization -> linear SVM.

    Bundling preprocessing and the classifier in a Pipeline lets a single
    fit/predict call drive all three stages.
    """
    steps = [
        ('poly', PolynomialFeatures(degree=degree)),  # expand features up to the given degree
        ('std_scaler', StandardScaler()),             # zero-mean / unit-variance scaling
        ('linearSVC', LinearSVC(C=C)),                # linear soft-margin SVM
    ]
    return Pipeline(steps)
# Fit a degree-3 polynomial linear SVM and visualize its decision boundary
# on top of the training points.
poly_svc = PolynomialSVC(degree=3)
poly_svc.fit(X, y)
plot_decision_boundary(poly_svc, axis=[-1.5, 2.5, -1.0, 1.5])
for label in (0, 1):
    plt.scatter(X[y == label, 0], X[y == label, 1])
plt.show()
# SVC(kernel='poly') achieves the same polynomial-feature effect directly
# via the kernel trick; the data should still be standardized before SVC.
def PolynomialKernelSVC(degree, C=1.0):
    """Pipeline: standardization -> SVC with a polynomial kernel.

    Bug fix: the original passed kernel='rbf', under which SVC silently
    ignores the ``degree`` argument; the function name and the surrounding
    text both intend a polynomial kernel, i.e. kernel='poly'.

    Parameters
    ----------
    degree : degree of the polynomial kernel.
    C : regularization strength (smaller = stronger regularization).
    """
    return Pipeline([
        ('std_scaler', StandardScaler()),  # SVC is scale-sensitive
        ('kernelSVC', SVC(kernel='poly', degree=degree, C=C))
    ])
# Fit the kernelized SVC and plot its decision regions over the data.
poly_kernel_svc = PolynomialKernelSVC(degree=3)
poly_kernel_svc.fit(X, y)
plot_decision_boundary(poly_kernel_svc, axis=[-1.5, 2.5, -1.0, 1.5])
for cls in (0, 1):
    plt.scatter(X[y == cls, 0], X[y == cls, 1])
plt.show()