# SVM for machine learning in Python (scikit-learn examples)

# -*- coding: utf-8 -*-
"""
Created on Thu Aug 10 16:42:38 2017


@author: 飘的心
"""


import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets,linear_model,cross_validation,svm


def load_data_regression():
    """Load the diabetes regression dataset and split it into train/test sets.

    Returns:
        tuple: (x_train, x_test, y_train, y_test) with 25% of the samples
        held out for testing; random_state is fixed for reproducibility.
    """
    # sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
    # train_test_split now lives in sklearn.model_selection.
    from sklearn.model_selection import train_test_split
    diabetes = datasets.load_diabetes()
    return train_test_split(diabetes.data, diabetes.target,
                            test_size=0.25, random_state=0)
    
def load_data_classification():
    """Load the iris classification dataset and split it into train/test sets.

    Returns:
        tuple: (x_train, x_test, y_train, y_test) with 25% of the samples
        held out for testing; random_state is fixed for reproducibility.
    """
    # sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
    # train_test_split now lives in sklearn.model_selection.
    from sklearn.model_selection import train_test_split
    iris = datasets.load_iris()
    return train_test_split(iris.data, iris.target,
                            test_size=0.25, random_state=0)
    
def test_LinearSVC(*data):
    """Fit a default LinearSVC classifier and print its test-set accuracy.

    Args:
        *data: (x_train, x_test, y_train, y_test) split arrays.
    """
    train_x, test_x, train_y, test_y = data
    classifier = svm.LinearSVC()
    classifier.fit(train_x, train_y)
    print(classifier.score(test_x, test_y))
    
# Obtain the iris train/test split and evaluate the default LinearSVC.
x_train,x_test,y_train,y_test=load_data_classification()
test_LinearSVC(x_train,x_test,y_train,y_test)


# Compare different loss functions
def test_LinearSVC_loss(*data):
    """Compare LinearSVC test accuracy under its two supported loss functions.

    Args:
        *data: (x_train, x_test, y_train, y_test) split arrays.
    """
    train_x, test_x, train_y, test_y = data
    for loss_name in ('hinge', 'squared_hinge'):
        model = svm.LinearSVC(loss=loss_name)
        model.fit(train_x, train_y)
        print('loss :{},accuracy:{}'.format(loss_name, model.score(test_x, test_y)))
        
# Compare hinge vs squared_hinge losses on the iris split.
test_LinearSVC_loss(x_train,x_test,y_train,y_test)




# Compare different penalty terms (L1 vs L2)


def test_LinearSVC_l(*data):
    """Compare LinearSVC test accuracy under the L1 and L2 penalties.

    Args:
        *data: (x_train, x_test, y_train, y_test) split arrays.
    """
    train_x, test_x, train_y, test_y = data
    for penalty in ('l1', 'l2'):
        # dual=False: the L1 penalty is only supported in the primal problem.
        model = svm.LinearSVC(penalty=penalty, dual=False)
        model.fit(train_x, train_y)
        print('l:{},accuracy:{}'.format(penalty, model.score(test_x, test_y)))
        
# Compare L1 vs L2 penalties on the iris split.
test_LinearSVC_l(x_train,x_test,y_train,y_test)






def test_LinearSVC_C(*data):
    """Plot train/test accuracy of LinearSVC as the penalty strength C varies.

    C is swept over np.logspace(-2, 1) and the x-axis is log-scaled.

    Args:
        *data: (x_train, x_test, y_train, y_test) split arrays.
    """
    train_x, test_x, train_y, test_y = data
    c_values = np.logspace(-2, 1)
    train_acc = []
    test_acc = []
    for c_val in c_values:
        model = svm.LinearSVC(C=c_val)
        model.fit(train_x, train_y)
        train_acc.append(model.score(train_x, train_y))
        test_acc.append(model.score(test_x, test_y))

    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(c_values, train_acc, label='Train score')
    ax.plot(c_values, test_acc, label='test score')
    ax.set_xlabel('c')
    ax.set_ylabel('score')
    ax.legend(loc='best')
    ax.set_xscale('log')
    plt.show()
    
# Sweep the penalty coefficient C and plot train/test accuracy.
test_LinearSVC_C(x_train,x_test,y_train,y_test)




# Linear support vector regression (LinearSVR)
def test_LinearSVR(*data):
    """Fit a default LinearSVR regressor and print its test-set R^2 score.

    Args:
        *data: (x_train, x_test, y_train, y_test) split arrays.
    """
    train_x, test_x, train_y, test_y = data
    model = svm.LinearSVR()
    model.fit(train_x, train_y)
    print(model.score(test_x, test_y))
    
# Obtain the diabetes train/test split and evaluate the default LinearSVR.
x_train,x_test,y_train,y_test=load_data_regression()
test_LinearSVR(x_train,x_test,y_train,y_test)  # a negative score here means very poor predictive performance


def test_LinearSVR_loss(*data):
    """Compare LinearSVR test R^2 under its two supported loss functions.

    Args:
        *data: (x_train, x_test, y_train, y_test) split arrays.
    """
    train_x, test_x, train_y, test_y = data
    for loss_name in ('epsilon_insensitive', 'squared_epsilon_insensitive'):
        model = svm.LinearSVR(loss=loss_name)
        model.fit(train_x, train_y)
        print('loss :{},score:{}'.format(loss_name, model.score(test_x, test_y)))
        
# Compare the two SVR loss functions on the diabetes split.
test_LinearSVR_loss(x_train,x_test,y_train,y_test)




def test_LinearSVR_epsilon(*data):
    """Plot train/test R^2 of LinearSVR across a log-spaced range of epsilon.

    epsilon is swept over np.logspace(-2, 2) with the
    squared_epsilon_insensitive loss; the x-axis is log-scaled.

    Args:
        *data: (x_train, x_test, y_train, y_test) split arrays.
    """
    train_x, test_x, train_y, test_y = data
    eps_values = np.logspace(-2, 2)
    train_r2 = []
    test_r2 = []
    for eps in eps_values:
        model = svm.LinearSVR(epsilon=eps, loss='squared_epsilon_insensitive')
        model.fit(train_x, train_y)
        train_r2.append(model.score(train_x, train_y))
        test_r2.append(model.score(test_x, test_y))
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(eps_values, train_r2, label='train')
    ax.plot(eps_values, test_r2, label='test')
    ax.set_title('linear svr')
    ax.set_xscale('log')
    ax.set_ylim(-1, 1.05)
    ax.legend(loc='best')
    plt.show()
    
    
# Sweep epsilon and plot train/test R^2 for LinearSVR.
test_LinearSVR_epsilon(x_train,x_test,y_train,y_test)




# Examine the effect of the regularization coefficient C


def test_LinearSVR_C(*data):
    """Plot train/test R^2 of LinearSVR as the regularization strength C varies.

    C is swept over np.logspace(-2, 1) with epsilon fixed at 0.1 and the
    squared_epsilon_insensitive loss; the x-axis is log-scaled.

    Args:
        *data: (x_train, x_test, y_train, y_test) split arrays.
    """
    x_train, x_test, y_train, y_test = data
    cs = np.logspace(-2, 1)
    train_scores = []
    test_scores = []
    for c in cs:
        regr = svm.LinearSVR(C=c, epsilon=0.1, loss='squared_epsilon_insensitive')
        regr.fit(x_train, y_train)
        train_scores.append(regr.score(x_train, y_train))
        test_scores.append(regr.score(x_test, y_test))

    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(cs, train_scores, label='train')
    ax.plot(cs, test_scores, label='test')
    ax.set_xscale('log')
    ax.set_title('linear svr')
    # BUG FIX: was ax.set_xlable('c'), which raises AttributeError at runtime;
    # the Matplotlib Axes method is set_xlabel.
    ax.set_xlabel('c')
    ax.set_ylabel('score')
    ax.set_ylim(-1, 1.05)
    ax.legend(loc='best', framealpha=0.5)
    plt.show()
    
# Sweep C and plot train/test R^2 for LinearSVR.
test_LinearSVR_C(x_train,x_test,y_train,y_test)

  • 1
    点赞
  • 3
    收藏
    觉得还不错? 一键收藏
  • 1
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值