本文介绍 Python 支持向量机非线性回归 SVR 模型的用法,具体示例代码如下:
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets, linear_model,svm
from sklearn.model_selection import train_test_split
def load_data_regression():
    """Load the diabetes dataset and split it for regression experiments.

    Returns
    -------
    tuple
        ``(X_train, X_test, y_train, y_test)`` as produced by
        ``train_test_split`` with a 25% test split and ``random_state=0``
        for reproducibility.
    """
    # scikit-learn's bundled diabetes-patient dataset (10 features, real target).
    diabetes = datasets.load_diabetes()
    # Hold out one quarter of the samples as the test set; fixed seed so
    # repeated runs produce the same split.
    return train_test_split(diabetes.data, diabetes.target,
                            test_size=0.25, random_state=0)
# Support-vector regression (SVR) demo with a linear kernel.
def test_SVR_linear(*data):
    """Fit a linear-kernel SVR and print its coefficients and test score.

    Parameters
    ----------
    *data : tuple
        ``(X_train, X_test, y_train, y_test)`` arrays.
    """
    X_train, X_test, y_train, y_test = data
    model = svm.SVR(kernel='linear')
    model.fit(X_train, y_train)
    # Report the learned linear model and its R^2 score on held-out data.
    print('Coefficients:%s, intercept %s' % (model.coef_, model.intercept_))
    print('Score: %.2f' % model.score(X_test, y_test))
# Build the train/test splits for the regression task, then run the
# linear-kernel SVR demo.
# NOTE(review): the original comment here said "test_LinearSVR", but the
# function actually invoked is test_SVR_linear.
X_train, X_test, y_train, y_test = load_data_regression()
test_SVR_linear(X_train, X_test, y_train, y_test)
def
test_SVR_poly(*data):
'''
测试 多项式核的 SVR 的预测性能随 degree、gamma、coef0 的影响.
'''
X_train,X_test,y_train,y_test=data
fig=plt.figure()
### 测试 degree ####
degrees=range(1,20)
train_scores=[]
test_scores=[]
for degree in degrees:
regr=svm.SVR(kernel='poly',degree=degree,coef0=1)
regr.fit(X_train,y_train)
train_scores.append(regr.score(X_train,y_train))
test_scores.append(regr.score(X_test, y_test))
ax=fig.add_subplot(1,3,1)
ax.plot(degrees,train_scores,label="Training score ",marker='+' )
ax.plot(degrees,test_scores,label