from numpy import *
def loadDataSet():  # load the dataset
    dataMat = []; labelMat = []
    fr = open('testSet.txt')  # open the data file
    for line in fr.readlines():
        lineArr = line.strip().split()
        dataMat.append([1.0, float(lineArr[0]), float(lineArr[1])])  # prepend 1.0 as the intercept term
        labelMat.append(int(lineArr[2]))  # the third column is the class label
    return dataMat, labelMat  # return features and labels
def sigmoid(inX):  # sigmoid function, a smooth approximation of the unit step
    return 1.0/(1 + exp(-inX))
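# Note: exp(-inX) overflows for inputs that are large in magnitude and
# negative. A clipped variant (a sketch of ours, not part of the book's
# code) avoids NumPy's overflow warning; the bound 500 is an arbitrary
# safe choice, since exp(500) is already far beyond float64 range.
def sigmoidStable(inX):
    return 1.0/(1 + exp(-clip(inX, -500, 500)))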
def gradAscent(dataMatIn, classLabels):  # batch gradient ascent
    dataMatrix = mat(dataMatIn)  # convert to a NumPy matrix
    labelMat = mat(classLabels).transpose()  # convert labels to a column vector
    m, n = shape(dataMatrix)  # m = number of samples, n = number of features
    alpha = 0.001  # step size (learning rate)
    maxCycles = 500  # number of iterations
    weights = ones((n, 1))  # n x 1 weight vector, initialized to ones
    for k in range(maxCycles):  # heavy on matrix operations
        h = sigmoid(dataMatrix*weights)  # predicted probabilities for all samples
        error = (labelMat - h)  # gap between true labels and predictions
        weights = weights + alpha * dataMatrix.transpose() * error  # ascend along the gradient
    return weights  # return the fitted coefficients
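# The update above is one step of gradient ascent on the log-likelihood
# l(w) = sum_i [ y_i*log(h_i) + (1 - y_i)*log(1 - h_i) ], whose gradient
# works out to X^T (y - h); hence w <- w + alpha * X^T (y - h).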
def plotBestFit(weights):  # plot the data points and the fitted decision boundary
    import matplotlib.pyplot as plt
    dataMat, labelMat = loadDataSet()
    dataArr = array(dataMat)
    n = shape(dataArr)[0]
    xcord1 = []; ycord1 = []
    xcord2 = []; ycord2 = []
    for i in range(n):
        if int(labelMat[i]) == 1:
            xcord1.append(dataArr[i,1]); ycord1.append(dataArr[i,2])
        else:
            xcord2.append(dataArr[i,1]); ycord2.append(dataArr[i,2])
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(xcord1, ycord1, s=30, c='red', marker='s')
    ax.scatter(xcord2, ycord2, s=30, c='green')
    x = arange(-3.0, 3.0, 0.1)
    y = (-weights[0] - weights[1]*x)/weights[2]  # boundary: w0 + w1*x1 + w2*x2 = 0
    ax.plot(x, y)
    plt.xlabel('X1'); plt.ylabel('X2')
    plt.show()
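# Example usage (a sketch; assumes the book's testSet.txt sits in the
# working directory):
#   dataMat, labelMat = loadDataSet()
#   weights = gradAscent(dataMat, labelMat)
#   plotBestFit(weights.getA())  # getA() unwraps the matrix into a plain
#                                # array so weights[0] indexes a scalar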
def stocGradAscent0(dataMatrix, classLabels):  # stochastic gradient ascent, version 1: one sample per update
    m, n = shape(dataMatrix)
    alpha = 0.01
    weights = ones(n)  # 1-D weight array of length n, initialized to all ones
    for i in range(m):
        h = sigmoid(sum(dataMatrix[i]*weights))  # prediction for a single sample
        error = classLabels[i] - h
        weights = weights + alpha * error * dataMatrix[i]  # update using that one sample
    return weights  # return the fitted coefficients
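# Unlike gradAscent, which touches every sample on every iteration, this
# version makes a single pass over the data and updates after each sample,
# so it can process records incrementally instead of holding one big
# matrix multiply in memory.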
def stocGradAscent1(dataMatrix, classLabels, numIter=150):  # stochastic gradient ascent, version 2
    m, n = shape(dataMatrix)
    weights = ones(n)  # initialize to all ones
    for j in range(numIter):
        dataIndex = list(range(m))
        for i in range(m):
            alpha = 4/(1.0 + j + i) + 0.0001  # alpha decreases with iteration but never reaches 0, thanks to the constant term
            randIndex = int(random.uniform(0, len(dataIndex)))  # pick a random remaining sample to update weights
            sampleIndex = dataIndex[randIndex]  # map into the pool of not-yet-used samples
            h = sigmoid(sum(dataMatrix[sampleIndex]*weights))
            error = classLabels[sampleIndex] - h
            weights = weights + alpha * error * dataMatrix[sampleIndex]
            del dataIndex[randIndex]  # remove the sample just used from this pass
    return weights
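# With the schedule alpha = 4/(1.0 + j + i) + 0.0001, early updates take
# large steps and later ones small steps, which damps the oscillation of
# plain stochastic gradient ascent; the 0.0001 floor keeps later samples
# from being ignored entirely once j grows large.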
def classifyVector(inX, weights):
    prob = sigmoid(sum(inX*weights))  # predicted probability of class 1
    if prob > 0.5: return 1.0  # above 0.5: classify as 1
    else: return 0.0  # otherwise: classify as 0
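# Since sigmoid(z) > 0.5 exactly when z > 0, this threshold is equivalent
# to checking the sign of the linear score dot(inX, weights).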
def colicTest():  # train on the horse-colic training set and report the test error
    frTrain = open('horseColicTraining.txt'); frTest = open('horseColicTest.txt')
    trainingSet = []; trainingLabels = []
    for line in frTrain.readlines():
        currLine = line.strip().split('\t')
        lineArr = []
        for i in range(21):  # the first 21 columns are features
            lineArr.append(float(currLine[i]))
        trainingSet.append(lineArr)
        trainingLabels.append(float(currLine[21]))  # the 22nd column is the label
    trainWeights = stocGradAscent1(array(trainingSet), trainingLabels, 1000)
    errorCount = 0; numTestVec = 0.0
    for line in frTest.readlines():
        numTestVec += 1.0
        currLine = line.strip().split('\t')
        lineArr = []
        for i in range(21):
            lineArr.append(float(currLine[i]))
        if int(classifyVector(array(lineArr), trainWeights)) != int(currLine[21]):
            errorCount += 1
    errorRate = float(errorCount)/numTestVec
    print("the error rate of this test is: %f" % errorRate)
    return errorRate
def multiTest():  # run colicTest ten times and average the error rates
    numTests = 10; errorSum = 0.0
    for k in range(numTests):
        errorSum += colicTest()
    print("after %d iterations the average error rate is: %f" % (numTests, errorSum/float(numTests)))
# main entry point
if __name__ == "__main__":
    dataMat, labelMat = loadDataSet()
    # weights = stocGradAscent1(array(dataMat), labelMat, 500)
    # plotBestFit(weights)
    multiTest()
This is an implementation of logistic regression trained with gradient ascent. At its heart is the sigmoid function; remember this function, since it behaves like a smooth unit step.
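To see the step-like behavior concretely, here is a small check (a sketch of ours, not from the book) that evaluates sigmoid at a few points:

from numpy import exp, array

def sigmoid(inX):
    return 1.0/(1 + exp(-inX))

print(sigmoid(array([-10.0, -1.0, 0.0, 1.0, 10.0])))
# prints approximately [0.0000454 0.2689 0.5 0.7311 0.99995]:
# the output saturates toward 0 or 1 away from the origin and crosses
# 0.5 at 0, which is exactly the shape of a softened unit step.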