python的pipeline机制
参考博客
可以先从1.3.1开始看
邹博课程P9开始有涉及
实用小技巧（奇技淫巧）
LogisticRegressionCV的使用
参考博客
绘图技巧
plt.scatter()
参考博客
plt.rcParams
参考博客
回归分析
岭回归
参考博客
SVR
侧重于实例
侧重于参数
侧重于核函数
SVR实例
#!/usr/bin/python
# -*- coding:utf-8 -*-
import numpy as np
from sklearn import svm
import matplotlib.pyplot as plt
if __name__ == "__main__":
    # Demo: compare SVR with RBF, linear, and polynomial kernels on a noisy sine.
    N = 50
    np.random.seed(0)  # fixed seed -> reproducible data and fitted models

    # Training data: y = 2*sin(x) + Gaussian noise on [0, 6].
    # sklearn estimators expect X shaped (n_samples, n_features).
    x = np.sort(np.random.uniform(0, 6, N), axis=0)
    y = 2*np.sin(x) + 0.1*np.random.randn(N)
    x = x.reshape(-1, 1)
    print('x =\n', x)
    print('y =\n', y)

    print('SVR - RBF')
    svr_rbf = svm.SVR(kernel='rbf', gamma=0.2, C=100)
    svr_rbf.fit(x, y)
    print('SVR - Linear')
    svr_linear = svm.SVR(kernel='linear', C=100)
    svr_linear.fit(x, y)
    print('SVR - Polynomial')
    svr_poly = svm.SVR(kernel='poly', degree=3, C=100)
    svr_poly.fit(x, y)
    print('Fit OK.')

    # Test grid extends 10% beyond the training range to expose each kernel's
    # extrapolation behavior.
    # Exercise: change the factor 1.1 to 1.5 and observe the difference.
    x_test = np.linspace(x.min(), 1.1*x.max(), 100).reshape(-1, 1)
    y_rbf = svr_rbf.predict(x_test)
    y_linear = svr_linear.predict(x_test)
    y_poly = svr_poly.predict(x_test)

    name = 'RBF Kernel', 'Linear Kernel', 'Polynomial Kernel'
    reg = svr_rbf, svr_linear, svr_poly
    plt.figure(figsize=(7, 6), facecolor='w')
    # Each curve is labeled with its in-sample R^2 score.
    plt.plot(x_test, y_rbf, 'r-', linewidth=2,
             label='%s, $R^2$=%.3f' % (name[0], reg[0].score(x, y)))
    plt.plot(x_test, y_linear, 'g-', linewidth=2,
             label='%s, $R^2$=%.3f' % (name[1], reg[1].score(x, y)))
    plt.plot(x_test, y_poly, 'b-', linewidth=2,
             label='%s, $R^2$=%.3f' % (name[2], reg[2].score(x, y)))
    plt.plot(x, y, 'mo', markersize=6, markeredgecolor='k')
    # Highlight the RBF model's support vectors among the training points.
    plt.scatter(x[svr_rbf.support_], y[svr_rbf.support_], s=200, c='r',
                marker='*', edgecolors='k', label='RBF Support Vectors', zorder=10)
    plt.legend(loc='lower left', fontsize=12)
    plt.title('SVR', fontsize=15)
    plt.xlabel('X')
    plt.ylabel('Y')
    # Matplotlib >= 3.6 removed grid()'s `b` parameter; use `visible` instead.
    plt.grid(visible=True, ls=':')
    # `pad` must be passed by keyword in modern Matplotlib.
    plt.tight_layout(pad=2)
    plt.show()