A Brief Look at Logistic Regression
Advantages: low computational cost; easy to understand and implement
Disadvantages: prone to underfitting; classification accuracy may not be high
Applicable data types: numeric and nominal data
We all know that logistic regression goes hand in hand with the Sigmoid function. To build a logistic regression classifier, we multiply each feature by a regression coefficient, add up all the products, and feed the sum into the Sigmoid function, which yields a value between 0 and 1. Any value greater than 0.5 is assigned to class 1, and any value less than 0.5 to class 0.
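As a minimal sketch of this decision rule (the function and variable names here are illustrative, not taken from the script below):

import numpy as np

def classify(features, weights):
    # Weighted sum of the inputs, squashed into (0, 1) by the sigmoid
    z = np.dot(features, weights)
    prob = 1.0 / (1.0 + np.exp(-z))
    # Values above 0.5 are assigned to class 1, the rest to class 0
    return 1 if prob > 0.5 else 0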
Now that we have the classifier's function, how do we find the best regression coefficients mentioned above? This is where determining the optimal regression coefficients with an optimization method comes in.
Gradient ascent: to find the maximum of a function, the best way is to search along the direction of that function's gradient. Gradient ascent is used to find a function's maximum; gradient descent is used to find a function's minimum.
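For logistic regression the quantity being maximized is the log-likelihood of the training labels, and its gradient with respect to the weights works out to X^T (y - sigmoid(Xw)). Each gradient-ascent step therefore nudges the weights a small step alpha in that direction. As a sketch of the update that the gradAscent function below performs (X is the m-by-n data matrix as a numpy matrix, y the m-by-1 column of 0/1 labels, alpha the step size; names chosen for illustration):

    weights = weights + alpha * X.T * (y - sigmoid(X * weights))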
Code based on Machine Learning in Action
# -*- coding: utf-8 -*-
from numpy import *
import matplotlib.pyplot as plt
# Load the data set: each line of testSet.txt holds two feature values and a class label
def loadDataSet():
    dataMat = []
    labelMat = []
    fr = open(r"C:\Users\QAQ\Desktop\testSet.txt")
    for line in fr.readlines():
        lineArr = line.strip().split()
        # Prepend a constant 1.0 so that weights[0] acts as the intercept
        dataMat.append([1.0, float(lineArr[0]), float(lineArr[1])])
        labelMat.append(int(lineArr[2]))
    return dataMat, labelMat
# Sigmoid function
def sigmoid(inX):
    return 1.0 / (1 + exp(-inX))
# Gradient ascent optimization
def gradAscent(dataMatIn, classLabels):
    dataMatrix = mat(dataMatIn)                  # m x n data matrix
    labelMat = mat(classLabels).transpose()      # m x 1 column vector of labels
    m, n = shape(dataMatrix)
    alpha = 0.001        # step size
    maxCycles = 500      # number of iterations
    weights = ones((n, 1))
    for k in range(maxCycles):
        h = sigmoid(dataMatrix * weights)        # predicted probabilities, m x 1
        error = (labelMat - h)                   # difference between labels and predictions
        weights = weights + alpha * dataMatrix.transpose() * error
    return weights
# Plot the data set and the best-fit Logistic regression decision boundary
def plotBestFit(weights):
    dataMat, labelMat = loadDataSet()
    dataArr = array(dataMat)
    n = shape(dataArr)[0]
    xrecord1 = []; yrecord1 = []
    xrecord2 = []; yrecord2 = []
    for i in range(n):
        if int(labelMat[i]) == 1:
            xrecord1.append(dataArr[i, 1]); yrecord1.append(dataArr[i, 2])
        else:
            xrecord2.append(dataArr[i, 1]); yrecord2.append(dataArr[i, 2])
    plt.scatter(xrecord1, yrecord1, color="red", marker='s', s=30)
    plt.scatter(xrecord2, yrecord2, color="blue")
    # The boundary is where w0 + w1*x1 + w2*x2 = 0, i.e. x2 = (-w0 - w1*x1) / w2
    x = arange(-3, 3, 1)
    y = (-float(weights[0]) - float(weights[1]) * x) / float(weights[2])
    plt.plot(x, y)
    plt.show()
def main():
    dataArr, labelMat = loadDataSet()
    plotBestFit(gradAscent(dataArr, labelMat))

main()
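If you also want a quick numerical sanity check of the fitted coefficients, a small helper along these lines (hypothetical, not part of the original script; it would need to be defined before the call to main or run separately) reports the training accuracy under the 0.5 threshold described above:

def printTrainingAccuracy():
    # Refit on the same data and count how many points the 0.5 rule classifies correctly
    dataArr, labelMat = loadDataSet()
    weights = gradAscent(dataArr, labelMat)
    probs = sigmoid(mat(dataArr) * weights)      # m x 1 column of predicted probabilities
    preds = [1 if p > 0.5 else 0 for p in probs]
    correct = sum(1 for p, y in zip(preds, labelMat) if p == y)
    print("training accuracy: %.3f" % (correct / float(len(labelMat))))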