关闭

逻辑回归 python 实现

标签: 逻辑回归 python 实现, numpy, logistic regression
325人阅读 评论(0) 收藏 举报
分类:
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 13 20:56:43 2015


@author: brian gong
"""


import numpy as np
import scipy.optimize as op
import matplotlib.pyplot as plt


def plotfunc(x, y):
    """Scatter-plot the two classes of the training data.

    Positive examples (y == 1) are drawn as red circles, negative
    examples (y == 0) as red crosses, on the current pyplot axes.

    Parameters
    ----------
    x : array of shape (m, >=2) — the first two columns are plotted.
    y : array of labels in {0, 1}, broadcastable against x's rows.
    """
    pos = np.where(y == 1)
    neg = np.where(y == 0)
    plt.plot(x[pos, 0], x[pos, 1], 'ro')
    plt.plot(x[neg, 0], x[neg, 1], 'rx')
    #plt.axis([0, 200, 0, 200])

    # Bug fix: plt.hold() was deprecated in matplotlib 2.0 and removed in
    # 3.0, so plt.hold(True) raises AttributeError on current matplotlib.
    # Hold-on behaviour is the default now; later plt.plot calls already
    # draw onto the same axes, so no replacement call is needed.


def plotDiscion(theta):
    """Overlay the fitted decision boundary on the current plot.

    The boundary of a logistic model with intercept is the line where
    theta[0] + theta[1]*x + theta[2]*y == 0, i.e.
    y = -(theta[0] + theta[1]*x) / theta[2].

    Parameters
    ----------
    theta : length-3 array of fitted coefficients [bias, w1, w2].
    """
    boundary_x = np.linspace(30, 100, 100)
    boundary_y = -(theta[1] * boundary_x + theta[0]) / theta[2]
    plt.plot(boundary_x, boundary_y, 'b-')


    return
    
    
def Sigmoid(z):
    """Return the elementwise logistic function 1 / (1 + e**-z)."""
    denominator = 1.0 + np.exp(-z)
    return 1.0 / denominator






def Gradient(theta, x, y):
    """Gradient of the logistic cross-entropy cost w.r.t. theta.

    Parameters
    ----------
    theta : flat array of n coefficients (scipy.optimize passes it flat).
    x : (m, n) design matrix.
    y : array of m labels in {0, 1} (any shape reshapeable to (m, 1)).

    Returns
    -------
    Flat length-n array: (x^T (sigmoid(x theta) - y)) / m.
    """
    m, n = x.shape
    theta = np.reshape(theta, (n, 1))
    y = np.reshape(y, (m, 1))
    predictions = Sigmoid(np.dot(x, theta))
    grad = np.dot(x.T, predictions - y) / m
    return grad.ravel()


def CostFunc(theta, x, y):
    """Mean logistic cross-entropy cost.

    J = -(1/m) * sum( y*log(h) + (1-y)*log(1-h) ),  h = sigmoid(x theta).

    Parameters
    ----------
    theta : flat array of n coefficients.
    x : (m, n) design matrix.
    y : array of m labels in {0, 1}.

    Returns
    -------
    Scalar cost J.
    """
    m, n = x.shape
    theta = np.reshape(theta, (n, 1))
    y = np.reshape(y, (m, 1))
    # Hypothesis computed once and reused for both log terms.
    h = Sigmoid(np.dot(x, theta))
    log_pos = np.log(h).reshape((m, 1))
    log_neg = np.log(1 - h).reshape((m, 1))
    per_sample = y * log_pos + (1 - y) * log_neg
    return -(np.sum(per_sample) / m)
if __name__ == "__main__":
    print("__main__")
    # Each row of ex2data1.txt: feature1, feature2, label (0 or 1),
    # comma separated (the classic Coursera ML exercise-2 data set).
    data = np.loadtxt('ex2data1.txt', delimiter=',')


    # m training samples; n columns = (n-1) features + 1 label column.
    m, n = data.shape
    X = data[:, 0:n - 1]
    y = data[:, n - 1:]
    plotfunc(X, y)
    # Prepend the intercept column of ones; after this X has n columns,
    # so the pre-existing zeros((n, 1)) initial theta matches its width.
    X = np.concatenate((np.ones((m, 1)), X), axis=1)
    initial_theta = np.zeros((n, 1))
    m, n = X.shape


    # Minimize the cross-entropy cost with its analytic gradient using
    # the truncated-Newton (TNC) solver.
    Result = op.minimize(fun=CostFunc,
                         x0=initial_theta,
                         args=(X, y),
                         method='TNC',
                         jac=Gradient)
    theta = Result.x
    print(Result)
    plotDiscion(theta)
    # Bug fix: without plt.show() nothing is ever displayed when the
    # script is run non-interactively — the figure was built and dropped.
    plt.show()
0
0

查看评论
* 以上用户言论只代表其个人观点,不代表CSDN网站的观点或立场
    个人资料
    • 访问:5574次
    • 积分:188
    • 等级:
    • 排名:千里之外
    • 原创:13篇
    • 转载:0篇
    • 译文:1篇
    • 评论:1条
    文章分类
    最新评论
  • 决策树原理

    brian_gong: 好吧, 开始是想整体翻译呢, 这么多字实在是懒得打了 , 所以挑重点的翻了两句。