logistic回归把自变量对应的有限分类做了平滑映射,术语是对数比是线性的。
简要测试一下,不考虑数据合理性。
import numpy as np
import matplotlib.pyplot as plt
def sigmoid(intX):
return 1./(1+np.exp(-intX))
dataX = [(2,5,9,5),(3,4,10,1),(34,22,11,0)]
dataY = [1,0,1]
def gradiantDecend(dataX,dataY):
#the data set is dataX=[(x1,x2,x3)...] dataY=[y1,...]
step_size = 0.1
para_size = len(dataX[0])
thetaList = range(0,para_size)
data_size = len(dataX)
for loopindex in range(0,data_size):
for i in range(0,len(thetaList)):
tmp = dataY[loopindex] - sigmoid(dataX[loopindex][i])
thetaList[i] = thetaList[i] + step_size * tmp * dataX[loopindex][i]
return thetaList
print gradiantDecend(dataX,dataY)
run:
[-0.26193165364230042, 0.61054091006066225, 1.0001748245511226, 2.9302405675991419]