Implementing Stanford Machine Learning ex1.1 in Python

2017-06-12
A survey of gradient descent algorithms:
http://blog.csdn.net/google19890102/article/details/69942970

A few numpy points worth noting:
1. np.linspace(start, stop, num) generates num evenly spaced values between start and stop (inclusive); they are deterministic, not random samples from a uniform distribution.
2. In Python, plain assignment only binds a second name to the same object, so use acopy = a.copy() when you need an independent copy (see the snippet after this list).
3. When working with matrices, get into the habit of initializing them explicitly, and don't confuse numpy matrices with Python tuples.
4. In batch gradient descent, all components of theta must be updated simultaneously.
5. For 3D plots with matplotlib, the key function is np.meshgrid.
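
A minimal sketch of points 1 and 2 (the variable names here are hypothetical, not from the exercise itself):

import numpy as np

a = np.zeros((3, 1))
alias = a            # assignment: alias and a are the same object
acopy = a.copy()     # copy: an independent array
a[0] = 5.0
print(alias[0])      # [5.] -- the alias sees the change
print(acopy[0])      # [0.] -- the copy does not

print(np.linspace(0, 1, 5))   # [0.   0.25 0.5  0.75 1.  ] -- evenly spaced, not random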

import numpy as np
import matplotlib.pyplot as plt

from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
# ==================== Part 1: Basic Function ====================

def warmUpExercise():

    print("Running warmUpExercise ... \n")
    print("5x5 Identity Matrix: \n")
    a = np.eye(5)
    print(a)

# ======================= Part 2: Plotting =======================
def Plotting(x, y, theta):
    plt.figure(2)
    plt.scatter(x, y, marker='x', color='r', label='Training Data', s=30)

    # See note 1 above: 30 evenly spaced x values for drawing the fitted line.
    x1 = np.linspace(0, 25, 30)
    y1 = theta[0] + theta[1] * x1

    plt.plot(x1, y1, label='Linear regression', color='b')

    plt.legend(loc='upper right')
    plt.show()

# ======================= Part 3: Gradient descent =======================

def computeCost(X, y, theta):
    # J(theta) = 1/(2m) * sum((X*theta - y)^2)
    m = X.shape[0]
    XMatrix = np.mat(X)
    yMatrix = np.mat(y)
    thetaMatrix = np.mat(theta)

    J = 1 / (2 * m) * np.sum(np.array(XMatrix * thetaMatrix - yMatrix) ** 2)
    return J
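
# For reference, an equivalent vectorized cost using plain ndarrays and the
# @ operator instead of np.mat (a sketch; compute_cost_vec is not part of the
# original exercise code).
def compute_cost_vec(X, y, theta):
    # Same cost as computeCost, with X (m, 2), y (m, 1), theta (2, 1) ndarrays.
    residual = X @ theta - y           # (m, 1) prediction errors
    return np.sum(residual ** 2) / (2 * X.shape[0])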



def gradientDescent(X, y, theta, alpha, iterations):
    m = len(y)
    J_history = np.zeros((iterations, 1))
    # theta_s = theta would only bind a second name to the same array
    # (see note 2 above), so take an explicit copy instead.
    theta_s = theta.copy()

    for i in range(iterations):
        # Both components are computed from theta_s, the values of the
        # previous iteration, so the update is simultaneous (note 4).
        theta[0] = theta[0] - (alpha / m) * np.sum(np.mat(X) * np.mat(theta_s) - np.mat(y))

        p1 = np.mat(X) * np.mat(theta_s) - np.mat(y)   # (m, 1) errors
        p2 = X[:, 1] * p1                              # dot product with the second feature column
        theta[1] = theta[1] - (alpha / m) * p2

        theta_s = theta.copy()
        J_history[i, :] = computeCost(X, y, theta)
    return theta
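
# The loop above can also be written in fully vectorized form, where every
# component of theta is updated at once, so the simultaneous update of note 4
# comes for free (a sketch with the same shapes as above: X (m, 2), y (m, 1),
# theta (2, 1) as plain ndarrays; gradient_descent_vec is not original code).
def gradient_descent_vec(X, y, theta, alpha, iterations):
    # Batch gradient descent: theta := theta - alpha/m * X^T (X theta - y).
    m = len(y)
    theta = theta.copy()                       # do not mutate the caller's array
    for _ in range(iterations):
        gradient = X.T @ (X @ theta - y) / m   # (2, 1) gradient of J
        theta = theta - alpha * gradient       # all components updated together
    return theta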

def drawJ_vals(theta0_vals, theta1_vals, J_vals):
    X, Y = np.meshgrid(theta0_vals, theta1_vals)
    fig = plt.figure(figsize=(8, 6))
    # fig.gca(projection='3d') was removed in newer Matplotlib versions;
    # add_subplot(projection='3d') does the same thing.
    ax = fig.add_subplot(projection='3d')
    ax.plot_surface(X, Y, J_vals, cmap=cm.coolwarm)
    plt.show()

def drawJ_valsContour(theta0_vals, theta1_vals, J_vals):
    X, Y = np.meshgrid(theta0_vals, theta1_vals)
    fig = plt.figure()
    plt.contour(X, Y, J_vals, np.logspace(-2, 3, 20))
    plt.show()






if __name__ == '__main__':

# ==================== Part 1: Basic Function ====================

    warmUpExercise()
# ======================= Part 2: Plotting =======================

    print('Plotting Data ...\n')
    with open('ex1data1.txt') as fr:
        arrayLines = fr.readlines()
    numberOfLines = len(arrayLines)
    x = np.zeros((numberOfLines, 1))
    y = np.zeros((numberOfLines, 1))
    index = 0
    for line in arrayLines:
        listFormLine = line.strip().split(",")
        x[index, :] = listFormLine[:1]    # first column: city population (in 10,000s)
        y[index] = listFormLine[-1]       # second column: food-truck profit (in $10,000s)
        index += 1
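
    # The manual parsing above could also be done with a single np.loadtxt call
    # (a sketch, assuming the two-column comma-separated format of ex1data1.txt):
    # data = np.loadtxt('ex1data1.txt', delimiter=',')
    # x = data[:, :1]   # (m, 1) population column
    # y = data[:, 1:]   # (m, 1) profit column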

    # Plotting(x, y, theta) is deferred to the end of the script, once theta has been fitted.

# =================== Part 3: Gradient descent ===================
    print('Running Gradient Descent ...\n')
    # Add a column of ones to x
    columnOne = np.ones((numberOfLines, 1))
    X = np.column_stack((columnOne, x))
    theta = np.zeros((2, 1))
    print(theta.shape)
    #Some gradient descent settings
    iterations = 1500
    alpha = 0.01
    JInitialization = computeCost(X, y, theta)
    print('Initial cost with theta = zeros:', JInitialization)   # approx. 32.07 for ex1data1.txt
    theta = gradientDescent(X, y, theta, alpha, iterations)

    print('Theta found by gradient descent: ')
    print(theta)
    # Predict profits for populations of 35,000 and 70,000
    # (population is in units of 10,000 people, profit in units of $10,000).
    predict1 = np.mat([1, 3.5]) * np.mat(theta)
    print('For population = 35,000, we predict a profit of ', predict1*10000)
    predict2 = np.mat([1, 7]) * np.mat(theta)
    print('For population = 70,000, we predict a profit of ', predict2*10000)
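
    # Sanity check (a sketch, not in the original post): the closed-form
    # least-squares solution should closely match the gradient-descent theta.
    # theta_exact, *_ = np.linalg.lstsq(X, y, rcond=None)
    # print('Theta from np.linalg.lstsq:', theta_exact.ravel())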

# ============= Part 4: Visualizing J(theta_0, theta_1) =============
    print('Visualizing J(theta_0, theta_1) ...')
    # Grid over which we will calculate J
    theta0_vals = np.linspace(-10, 10, 100)
    theta1_vals = np.linspace(-1, 4, 100)
    # initialize J_vals to a matrix of 0's
    J_vals = np.zeros((len(theta0_vals), len(theta1_vals)))

    for i in range(len(theta0_vals)):
        for j in range(len(theta1_vals)):
            t = np.array([theta0_vals[i], theta1_vals[j]]).reshape(2, 1)
            J_vals[i, j] = computeCost(X, y, t)

    # In the original Octave script J_vals is transposed before calling surf;
    # the same transpose is needed here. The 3D surface uses mpl_toolkits.mplot3d,
    # and the contour plot uses 20 levels spaced logarithmically between
    # 0.01 and 1000 (np.logspace(-2, 3, 20)).
    drawJ_vals(theta0_vals, theta1_vals, J_vals.T)
    drawJ_valsContour(theta0_vals, theta1_vals, J_vals.T)
    # Overlay the fitted line from the learned theta on the training-data scatter plot.
    Plotting(x, y, theta)

