Coursera Andrew Ng Machine Learning ML - Week 2 ex1: Multivariate Linear Regression - Python Implementation

import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
def compute_loss(X, y, theta):
    # Squared-error cost J(theta); m (the number of examples) is a global here
    loss = (X.dot(theta.T) - y) ** 2
    return loss.sum() / (2 * m)
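# For reference, the cost computed above is the standard squared-error cost
# from the course:
#   J(theta) = (1/(2m)) * sum_{i=1..m} (h_theta(x^(i)) - y^(i))^2
# with hypothesis h_theta(x) = theta_0 + theta_1*x_1 + theta_2*x_2.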
def gradient_descent(X, y, theta, alpha, epochs):
    # Batch gradient descent. All parameters are updated simultaneously from
    # the same residuals; the original per-parameter updates reused the
    # partially updated theta, which is not the update rule from the course.
    # Nothing is hard-coded to 47 rows anymore.
    for _ in range(epochs):
        error = X.dot(theta.T) - y            # (m, 1) residuals
        gradient = (1 / m) * error.T.dot(X)   # (1, 3) gradient of J
        theta = theta - alpha * gradient
    return theta
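# Each epoch applies the batch update rule, simultaneously for j = 0, 1, 2:
#   theta_j := theta_j - alpha * (1/m) * sum_{i=1..m} (h_theta(x^(i)) - y^(i)) * x_j^(i)
# The vectorized expression error.T.dot(X) computes all three sums at once.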
data = pd.read_csv("ex1data2.txt",header = None)
#特征归一化,因为数据太大,会超出运算
data = (data - data.mean()) / data.std()

data = data.values
x = data[:, 0:2]
y = data[:, 2].reshape(-1, 1)   # column vector of targets

m = len(x)                                       # number of training examples
X = np.insert(x, 0, values=np.ones(m), axis=1)   # prepend the bias column x_0 = 1
theta = np.zeros((1, 3))                         # initial parameters

alpha = 0.01
epochs = 1500
print("Starting theta_0 = {0}, theta_1 = {1},theta_2={2} error = {3}".format(theta[0][0], theta[0][1], theta[0][2], compute_loss(X,y,theta)))
print("Running...")
theta = gradient_descent(X, y, theta, alpha, epochs)
print("After {0} iterations theta_0 = {1}, theta_1 = {2},theta_2={3} error = {4}".format(epochs, theta[0][0], theta[0][1], theta[0][2] ,compute_loss(X,y,theta)))

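Because this dataset is small, the closed-form normal equation gives an exact answer to compare against; with enough iterations the gradient descent result should match it closely on the normalized data. A minimal check (theta_ne is an illustrative name):

# Normal equation: theta = (X^T X)^(-1) X^T y, no learning rate or iterations
theta_ne = np.linalg.inv(X.T.dot(X)).dot(X.T).dot(y)
print("Normal equation theta:", theta_ne.ravel())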

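To actually use the learned parameters, a new input has to be normalized with the training mean and standard deviation, and since the target column was normalized too, the prediction has to be scaled back. A sketch using the course's example house of 1650 square feet and 3 bedrooms (rereading the file to recover the raw statistics, because data was overwritten above; raw, mu, sigma, and x_new are illustrative names):

raw = pd.read_csv("ex1data2.txt", header=None)
mu, sigma = raw.mean(), raw.std()
x_new = np.array([[1.0,
                   (1650 - mu[0]) / sigma[0],   # normalized size
                   (3 - mu[1]) / sigma[1]]])    # normalized bedroom count
price = x_new.dot(theta.T)[0][0] * sigma[2] + mu[2]  # undo the y normalization
print("Predicted price for a 1650 sq ft, 3 bedroom house:", price)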
If anything is unclear, you can join the QQ group to discuss (yes, I'm the only one in it): 484266833
