机器学习——线性模型——logistic回归

# -*- coding: utf-8 -*-
"""
Created on Mon Jul  9 13:36:25 2018

@author: Wei_Zhan
"""
import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets, linear_model, discriminant_analysis, cross_validation
def load_data():
    """Load the iris dataset and split it into stratified train/test sets.

    Returns:
        (X_train, X_test, y_train, y_test): a 75/25 split, stratified on the
        class labels, with random_state fixed for reproducibility.
    """
    # sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
    # train_test_split now lives in sklearn.model_selection. Import locally
    # so this function works regardless of the stale top-of-file import.
    from sklearn.model_selection import train_test_split
    iris = datasets.load_iris()
    X_train = iris.data
    y_train = iris.target
    return train_test_split(X_train, y_train, test_size=0.25,
                            random_state=0, stratify=y_train)

def test_logisticRegression(*data):
    """Fit a default (one-vs-rest) LogisticRegression and print its
    coefficients, intercept, and test-set accuracy.

    :param data: (X_train, X_test, y_train, y_test) split tuple.
    """
    X_train, X_test, y_train, y_test = data
    model = linear_model.LogisticRegression()
    model.fit(X_train, y_train)
    print('Coefficients :%s, intercept %s ' % (model.coef_, model.intercept_))
    print('Score: %.2f ' % model.score(X_test, y_test))
    
# Driver: run the default logistic regression on the iris split.
X_train,X_test,y_train,y_test = load_data()
test_logisticRegression(X_train,X_test,y_train,y_test)
# Recorded output from a previous run (kept for reference):
'''
Coefficients :[[ 0.39310895  1.35470406 -2.12308303 -0.96477916]
 [ 0.22462128 -1.34888898  0.60067997 -1.24122398]
 [-1.50918214 -1.29436177  2.14150484  2.2961458 ]], 
intercept [ 0.24122458  1.13775782 -1.09418724] 
Score: 0.97 
'''
def test_LogisticRegression_multinomial(*data):
    """Fit a softmax (multinomial) logistic regression with the lbfgs solver
    and print its coefficients, intercept, and test-set accuracy.

    :param data: (X_train, X_test, y_train, y_test) split tuple.
    """
    X_train, X_test, y_train, y_test = data
    clf = linear_model.LogisticRegression(multi_class='multinomial',
                                          solver='lbfgs')
    clf.fit(X_train, y_train)
    print('Coefficients :%s, intercept %s ' % (clf.coef_, clf.intercept_))
    print('Score: %.2f ' % clf.score(X_test, y_test))
# Driver: compare the multinomial formulation against the one-vs-rest default.
test_LogisticRegression_multinomial(X_train,X_test,y_train,y_test)
'''
Coefficients :[[-0.38353546  0.8549476  -2.27261596 -0.98449405]
 [ 0.34336695 -0.37378185 -0.03022227 -0.86134865]
 [ 0.04016852 -0.48116574  2.30283824  1.84584271]], intercept [  8.79999897   2.46942062 -11.26941959] 
Score: 1.00  using multi_class the result is better
'''
def test_LogisticRegression_C(*data):
    """Plot test-set accuracy as a function of the regularization strength C.

    Fits one LogisticRegression per value of C over a log-spaced grid, then
    saves the accuracy curve to 'LogisticRegression.png' and shows it.

    :param data: (X_train, X_test, y_train, y_test) split tuple.
    """
    X_train, X_test, y_train, y_test = data
    Cs = np.logspace(-2, 4, num=100)  # C from 1e-2 to 1e4, log-spaced
    scores = []
    # The original enumerate() index was unused — iterate values directly.
    for C in Cs:
        regr = linear_model.LogisticRegression(C=C)
        regr.fit(X_train, y_train)
        scores.append(regr.score(X_test, y_test))
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.plot(Cs, scores)
    ax.set_xlabel(r"C")
    ax.set_ylabel(r"Score")
    ax.set_xscale('log')
    ax.set_title("LogisticRegression")
    # Save before show(): show() releases the figure, so saving afterwards
    # would write an all-white image.
    plt.savefig('LogisticRegression.png')
    plt.show()
test_LogisticRegression_C(X_train,X_test,y_train,y_test)

def test_LinearDiscriminantAnalysis(*data):
    """Fit a linear discriminant analysis classifier and print its
    coefficients, intercept, and test-set accuracy.

    :param data: (X_train, X_test, y_train, y_test) split tuple.
    """
    X_train, X_test, y_train, y_test = data
    clf = discriminant_analysis.LinearDiscriminantAnalysis()
    clf.fit(X_train, y_train)
    print('Coefficients :%s, intercept %s ' % (clf.coef_, clf.intercept_))
    print('Score: %.2f ' % clf.score(X_test, y_test))
test_LinearDiscriminantAnalysis(X_train,X_test,y_train,y_test)
'''
Coefficients :[[  6.575853     9.75807593 -14.34026669 -21.39076537]
 [ -1.98385061  -3.49791089   4.21495042   2.60304299]
 [ -4.47116022  -6.09542385   9.85886057  18.29330864]], intercept [-15.33097142   0.46730077 -30.53297367] 
Score: 1.00 
'''

只需注意 logistic 回归用于分类而不是回归:它相当于在多元线性回归的输出上套了一个 Sigmoid 函数,将结果映射到 (0, 1) 区间以进行 0-1 分类。其中正则化参数 C 对模型准确率的影响见图:
  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值