The official code for Peter Harrington's Machine Learning in Action was written for Python 2 and contains a few errata, so readers working in Python 3 run into plenty of problems. Below is the Python 3 version I fixed up while studying the book, shared here so we can learn from each other and improve together!
Chapter 5 - Logistic Regression
File: logRegres.py
from numpy import *
from importlib import reload   # in Python 3, reload() lives in importlib (handy for reloading the module in an interactive session)
# Gradient ascent optimization algorithm for Logistic regression
def loadDataSet():
    dataMat = []; labelMat = []
    fr = open('testSet.txt')
    for line in fr.readlines():
        lineArr = line.strip().split()
        dataMat.append([1.0, float(lineArr[0]), float(lineArr[1])])  # prepend the constant 1.0 for the intercept term
        labelMat.append(int(lineArr[2]))
    return dataMat, labelMat
def sigmoid(inX):
    return 1.0/(1 + exp(-inX))
def gradAscent(dataMatIn, classLabels):
    dataMatrix = mat(dataMatIn)              # convert to a NumPy matrix, shape (m, n)
    labelMat = mat(classLabels).transpose()  # column vector of labels, shape (m, 1)
    m, n = shape(dataMatrix)
    alpha = 0.001       # step size
    maxCycles = 500     # number of iterations
    weights = ones((n, 1))
    for k in range(maxCycles):
        h = sigmoid(dataMatrix * weights)    # vector of predictions, shape (m, 1)
        error = (labelMat - h)
        weights = weights + alpha * dataMatrix.transpose() * error
    return weights
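A minimal interactive sketch of exercising the functions above (assuming the code is saved as logRegres.py and testSet.txt sits in the working directory):

>>> import logRegres
>>> dataArr, labelMat = logRegres.loadDataSet()
>>> weights = logRegres.gradAscent(dataArr, labelMat)
>>> weights    # a 3x1 NumPy matrix: the intercept and the two feature weights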
# Plot the data set and the Logistic regression best-fit line
def plotBestFit(weights):
    import matplotlib.pyplot as plt
    dataMat, labelMat = loadDataSet()
    dataArr = array(dataMat)
    n = shape(dataArr)[0]
    xcord1 = []; ycord1 = []
    xcord2 = []; ycord2 = []
    for i in range(n):
        if int(labelMat[i]) == 1:
            xcord1.append(dataArr[i, 1]); ycord1.append(dataArr[i, 2])
        else:
            xcord2.append(dataArr[i, 1]); ycord2.append(dataArr[i, 2])
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(xcord1, ycord1, s=30, c='red', marker='s')
    ax.scatter(xcord2, ycord2, s=30, c='green')
    x = arange(-3.0, 3.0, 0.1)
    y = (-weights[0] - weights[1]*x)/weights[2]   # points on the decision boundary, where 0 = w0 + w1*x1 + w2*x2
    ax.plot(x, y)
    plt.xlabel('X1'); plt.ylabel('X2')
    plt.show()
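Usage sketch: gradAscent returns a NumPy matrix, so convert it with getA() before handing it to plotBestFit (the stochastic versions below already return plain arrays):

>>> weights = logRegres.gradAscent(dataArr, labelMat)
>>> logRegres.plotBestFit(weights.getA())   # getA() turns the 3x1 matrix into an array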
# Stochastic gradient ascent
def stocGradAscent0(dataMatrix, classLabels):
    m, n = shape(dataMatrix)
    alpha = 0.01
    weights = ones(n)
    for i in range(m):                            # one pass over the data, one sample at a time
        h = sigmoid(sum(dataMatrix[i]*weights))   # h and error are scalars here, not vectors
        error = classLabels[i] - h
        weights = weights + alpha * error * dataMatrix[i]
    return weights
# Improved stochastic gradient ascent
def stocGradAscent1(dataMatrix, classLabels, numIter=150):
    m, n = shape(dataMatrix)
    weights = ones(n)
    for j in range(numIter):
        dataIndex = list(range(m))  # in Python 3, range() returns a range object rather than a list, so wrap it in list() before deleting from it
        for i in range(m):
            alpha = 4/(1.0+j+i) + 0.01  # alpha shrinks as iterations proceed, damping the high-frequency oscillations
            randIndex = int(random.uniform(0, len(dataIndex)))   # pick a remaining sample at random to avoid periodic fluctuations
            sampleIndex = dataIndex[randIndex]                   # map back to a row of dataMatrix so each sample is used once per pass
            h = sigmoid(sum(dataMatrix[sampleIndex]*weights))
            error = classLabels[sampleIndex] - h
            weights = weights + alpha * error * dataMatrix[sampleIndex]
            del(dataIndex[randIndex])
    return weights
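Both stochastic versions expect a NumPy array rather than a plain list, and their weights can be passed to plotBestFit directly:

>>> from numpy import *
>>> weights = logRegres.stocGradAscent0(array(dataArr), labelMat)
>>> logRegres.plotBestFit(weights)
>>> weights = logRegres.stocGradAscent1(array(dataArr), labelMat, 500)
>>> logRegres.plotBestFit(weights)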
# Logistic regression classification function
def classifyVector(inX, weights):
    prob = sigmoid(sum(inX*weights))
    if prob > 0.5: return 1.0
    else: return 0.0
def colicTest():
    frTrain = open('horseColicTraining.txt')
    frTest = open('horseColicTest.txt')
    trainingSet = []; trainingLabels = []
    for line in frTrain.readlines():
        currLine = line.strip().split('\t')
        lineArr = []
        for i in range(21):                 # the first 21 columns are features, the 22nd is the label
            lineArr.append(float(currLine[i]))
        trainingSet.append(lineArr)
        trainingLabels.append(float(currLine[21]))
    trainWeights = stocGradAscent1(array(trainingSet), trainingLabels, 500)
    errorCount = 0; numTestVec = 0.0
    for line in frTest.readlines():
        numTestVec += 1.0
        currLine = line.strip().split('\t')
        lineArr = []
        for i in range(21):
            lineArr.append(float(currLine[i]))
        if int(classifyVector(array(lineArr), trainWeights)) != int(currLine[21]):
            errorCount += 1
    errorRate = (float(errorCount)/numTestVec)
    print("the error rate of this test is: %f" % errorRate)
    return errorRate
def multiTest():
    numTests = 10; errorSum = 0.0
    for k in range(numTests):
        errorSum += colicTest()
    print("after %d iterations the average error rate is: %f" % (numTests, errorSum/float(numTests)))