http://sbp810050504.blog.51cto.com/2799422/1608064/ — this post explains the sigmoid function in higher-dimensional space and the theory behind the gradient-ascent algorithm; it is worth reading for background.
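In brief: the sigmoid function sigma(z) = 1 / (1 + e^(-z)) squashes any real input into the interval (0, 1), so its output can be read as the probability that a sample belongs to class 1. Gradient ascent maximizes the log-likelihood by repeatedly stepping in the direction of the gradient, w := w + alpha * grad(w), where alpha is the step size. For logistic regression this gradient works out to X^T (y - sigmoid(Xw)), which is exactly the vectorized update implemented in gradAscent below.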
from numpy import *
def loadDataSet():  # read the data set
    dataMat = []
    labelMat = []
    fr = open('testSet.txt')
    for line in fr.readlines():
        lineArr = line.strip().split()
        # prepend the constant 1.0 so the intercept w0 is learned along with w1 and w2
        dataMat.append([1.0, float(lineArr[0]), float(lineArr[1])])
        labelMat.append(int(lineArr[2]))
    return dataMat, labelMat
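A minimal smoke test, assuming testSet.txt sits in the working directory with whitespace-separated columns x1, x2, label (the format the parsing above expects):

dataMat, labelMat = loadDataSet()
print(len(dataMat), len(labelMat))  # same number of samples and labels
print(dataMat[0])                   # [1.0, x1, x2] for the first sample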
def sigmoid(inX):  # sigmoid function: maps any real input into (0, 1)
    return 1.0 / (1 + exp(-inX))
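Because exp comes from NumPy, sigmoid works element-wise on scalars, arrays, and matrices alike. A few spot checks:

print(sigmoid(0))    # 0.5 exactly
print(sigmoid(5))    # about 0.9933, approaching 1
print(sigmoid(-5))   # about 0.0067, approaching 0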
def gradAscent(dataMatIn, classLabels):  # gradient-ascent optimizer for logistic regression
    dataMatrix = mat(dataMatIn)               # m x n data matrix
    labelMat = mat(classLabels).transpose()   # m x 1 column vector of labels
    m, n = shape(dataMatrix)
    alpha = 0.001      # step size
    maxCycles = 500    # number of iterations
    weights = ones((n, 1))
    for k in range(maxCycles):
        h = sigmoid(dataMatrix * weights)     # m x 1 vector of predicted probabilities
        error = labelMat - h                  # prediction error y - h
        weights = weights + alpha * dataMatrix.transpose() * error  # step along the gradient
    return weights
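Putting the pieces together (the result comes back as a 3 x 1 NumPy matrix [w0; w1; w2], so the learned decision boundary is 0 = w0 + w1*x1 + w2*x2):

dataArr, labelMat = loadDataSet()
weights = gradAscent(dataArr, labelMat)
print(weights)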