# 1 什么是支撑向量机
# 2 如何最大化hard margin
# 3 soft margin 和SVM的正则化
# 4 scikit-learn中的SVM
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
# Load iris and keep only classes 0/1 and the first two features,
# so the problem is linearly separable and plottable in 2-D.
iris = datasets.load_iris()
x, y = iris.data, iris.target
binary_mask = y < 2
x = x[binary_mask, :2]
y = y[binary_mask]

# Visualize the two classes (class 0 in red, class 1 in blue)
for label, color in ((0, 'r'), (1, 'b')):
    plt.scatter(x[y == label, 0], x[y == label, 1], color=color)
plt.show()
# 输出: (scatter plot of the two classes)
# Standardize features to zero mean / unit variance — SVM margins are
# scale-sensitive, so this step matters before fitting.
from sklearn.preprocessing import StandardScaler
standardscaler = StandardScaler()
x_standard = standardscaler.fit(x).transform(x)

# Linear SVM from scikit-learn; a very large C leaves almost no slack,
# approximating a hard-margin classifier.
from sklearn.svm import LinearSVC
svc = LinearSVC(C=1e9)
svc.fit(x_standard, y)
# >>> LinearSVC(C=1000000000.0, class_weight=None, dual=True, fit_intercept=True,
#     intercept_scaling=1, loss='squared_hinge', max_iter=1000,
#     multi_class='ovr', penalty='l2', random_state=None, tol=0.0001,
#     verbose=0)
# 决策边界绘制函数
def plot_decision_boundary(model, axis):
x0, x1 = np.meshgrid(
np.linspace(axis[0], axis[1], int((axis[1]-axis[0])*100)).reshape(-1, 1),
np.linspace(axis[2], axis[3], int((axis[3]-axis[2])*100)).reshape(-1, 1)
)
x_new = np.c_[x0.ravel(), x1.ravel()]
y_predict = model.predict(x_new)
zz = y_predict.reshape