机器学习经典算法7-线性回归

1.简单介绍

        分类处理的是离散值的预测,而对于连续的值类型则可以利用回归进行预测。本文对几种主要的线性回归方法进行初步介绍;与分类问题类似,回归同样划分训练集和测试集。

2.单变量线性回归的参数求解



3.多变量线性回归


4.利用矩阵进行参数求解


5.局部加权线性回归


6.岭回归


7.编程实现

     standMaReg实现的是利用矩阵求逆进行最基本的线性回归参数求解。standBaGradReg利用梯度下降的batch方式进行参数求解:iter_num设定循环求解的次数,每次都基于全部m个训练样本进行参数更新;注意这里对参数weights更新时没有除以样本数m,学习率alpha也是固定值而非变速。lwlr函数实现的是局部加权线性回归,求解采用矩阵方式。ridgeRegres实现的是岭回归,其中参数lam默认设定为0.2。
from numpy import *
import matplotlib.pyplot as plt
def loadDataSet(filename):
    """Load a tab-separated data file.

    Each line holds feature columns followed by one target column in the
    last position.

    Args:
        filename: path to the tab-separated text file.

    Returns:
        (dataMat, labelMat): list of feature rows (lists of floats) and
        list of target values (floats).
    """
    dataMat = []
    labelMat = []
    # Use a context manager so the handle is always closed (the original
    # opened the file twice and never closed either handle).
    with open(filename) as fr:
        for line in fr:
            curLine = line.strip('\n').split('\t')
            if curLine == ['']:
                continue  # skip blank lines defensively
            # every column but the last is a feature; the last is the label
            dataMat.append([float(v) for v in curLine[:-1]])
            labelMat.append(float(curLine[-1]))
    return dataMat, labelMat
def standMaReg(xArr, yArr):
    """Ordinary least squares via the normal equation.

    Solves ws = (X^T X)^-1 X^T y.

    Args:
        xArr: list of feature rows (each typically including a bias column).
        yArr: list of target values.

    Returns:
        (n, 1) weight matrix, or None when X^T X is singular.
    """
    xMat = mat(xArr)
    yMat = mat(yArr).T
    xTx = xMat.T * xMat
    # A singular normal matrix cannot be inverted; bail out explicitly.
    # (Fixes the "connot" typo and the Python 2 print statement.)
    if linalg.det(xTx) == 0.0:
        print('This matrix is singular, cannot do inverse')
        return None
    ws = xTx.I * (xMat.T * yMat)
    return ws
def standBaGradReg(xArr, yArr, alpha=0.001, iter_num=15):
    """Linear regression via full-batch gradient descent.

    Each iteration updates all weights from the whole training set. Note:
    the update is deliberately not scaled by 1/m and alpha stays fixed,
    matching the behaviour described in the accompanying notes.

    Args:
        xArr: list of feature rows.
        yArr: list of target values.
        alpha: fixed learning rate.
        iter_num: number of full-batch iterations.

    Returns:
        (n, 1) weight matrix.
    """
    xMat = mat(xArr)
    yMat = mat(yArr).T
    m, n = shape(xMat)
    weights = mat(ones((n, 1)))
    for _ in range(iter_num):
        # residuals over the whole batch
        error = yMat - xMat * weights
        # Vectorised gradient step: X^T * error replaces the original
        # per-feature Python loop (same math, far less interpreter work).
        weights = weights + alpha * (xMat.T * error)
    return weights
def lwlr(testPoint, xArr, yArr, k=1.0):
    """Locally weighted linear regression prediction for one point.

    Weights each training sample with a Gaussian kernel centred on
    testPoint, then solves the weighted normal equation.

    Args:
        testPoint: feature row to predict for.
        xArr: training feature rows.
        yArr: training target values.
        k: kernel bandwidth; smaller k gives a more local fit.

    Returns:
        The predicted value (1x1 matrix), or None when the weighted
        normal matrix is singular.
    """
    xMat = mat(xArr)
    yMat = mat(yArr).T
    m = shape(xMat)[0]
    weights = mat(eye((m)))
    for j in range(m):
        # Gaussian kernel weight for training sample j
        diffMat = testPoint - xMat[j, :]
        weights[j, j] = exp(diffMat * diffMat.T / (-2.0 * k ** 2))
    xTx = xMat.T * (weights * xMat)
    if linalg.det(xTx) == 0.0:
        print("This matrix is singular, cannot do inverse")
        return None
    ws = xTx.I * (xMat.T * (weights * yMat))
    return testPoint * ws
def lwlrTest(testArr, xArr, yArr, k=1.0):
    """Predict every row of testArr with locally weighted regression.

    Applies lwlr (bandwidth k) to each test point against the training
    data and returns the predictions as a 1-D array.
    """
    numPoints = shape(testArr)[0]
    predictions = zeros(numPoints)
    for idx in range(numPoints):
        predictions[idx] = lwlr(testArr[idx], xArr, yArr, k)
    return predictions
def ridgeRegres(xMat, yMat, lam=0.2):
    """Ridge regression: ws = (X^T X + lam*I)^-1 X^T y.

    Args:
        xMat: (m, n) feature matrix (numpy matrix).
        yMat: (m, 1) target matrix.
        lam: ridge penalty; lam == 0 degenerates to ordinary least squares.

    Returns:
        (n, 1) weight matrix, or None when the regularised matrix is
        singular (only possible when lam == 0).
    """
    xTx = xMat.T * xMat
    denom = xTx + eye(shape(xMat)[1]) * lam
    if linalg.det(denom) == 0.0:
        print("This matrix is singular, cannot do inverse")
        # Bug fix: the original fell through and inverted anyway,
        # raising a LinAlgError instead of failing gracefully.
        return None
    ws = denom.I * (xMat.T * yMat)
    return ws
def ridgeTest(xArr, yArr, numIter=30):
    """Sweep ridge regression across numIter lambda values.

    The data is standardised first (y is centred; X is centred and scaled
    by its per-column variance), then lambda runs over exp(i - 10) for
    i in [0, numIter).

    Returns:
        (wMat, lamList): one weight row per lambda, and the lambdas used.
    """
    xMat = mat(xArr)
    yMat = mat(yArr).T
    # centre the targets
    yMat = yMat - mean(yMat, 0)
    # standardise the features
    xMat = (xMat - mean(xMat, 0)) / var(xMat, 0)
    wMat = zeros((numIter, shape(xMat)[1]))
    lamList = []
    for i in range(numIter):
        lam = exp(i - 10)
        lamList.append(lam)
        wMat[i, :] = ridgeRegres(xMat, yMat, lam).T
    return wMat, lamList
def plotReg(weights, xArr, yArr, xIndex=0):
    """Scatter the data and overlay the fitted regression line.

    Args:
        weights: (n, 1) weight matrix from a regression solver.
        xArr: feature rows.
        yArr: target values.
        xIndex: which feature column to put on the x axis.
    """
    xMat = mat(xArr)
    yMat = mat(yArr)
    figure = plt.figure()
    axes = figure.add_subplot(111)
    # raw data points
    axes.scatter(xMat[:, xIndex].flatten().A[0], yMat.T[:, 0].flatten().A[0])
    # model predictions drawn as the fitted line
    axes.plot(xMat[:, xIndex], xMat * weights)
    plt.show()
# --- Script section --------------------------------------------------------
# Loads the sample data set; the triple-quoted block below is disabled demo
# code for the OLS / gradient-descent / locally-weighted solvers, while the
# active tail plots the ridge-regression coefficient paths.
# NOTE(review): requires "ex0.txt" and "abalone.txt" in the working directory.
xArr, yArr = loadDataSet("ex0.txt")
'''
ws1 = standMaReg(xArr, yArr)
print "ws1", ws1
plotReg(ws1, xArr, yArr, 1)
ws2 = standBaGradReg(xArr, yArr, 0.001, 1000)
print "ws2", ws2

yPre = lwlrTest(xArr, xArr, yArr, 0.01)
xMat = mat(xArr)
srtInde = xMat[:,1].argsort(0)
xSort = xMat[srtInde][:,0,:]
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(xSort[:,1], yPre[srtInde])
ax.scatter(xMat[:,1].flatten().A[0], mat(yArr).T.flatten().A[0], s=2, c='red')
plt.show()
'''
# Ridge trace: one curve per coefficient as lambda sweeps exp(-10)..exp(19).
abX, abY = loadDataSet('abalone.txt')
weights, lam = ridgeTest(abX, abY)
plt.plot(weights)
plt.show()


评论 4
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

大胖5566

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值