# Linear regression with gradient descent (TensorFlow course example, implemented in plain NumPy)
# Dataset (data.csv): https://pan.baidu.com/s/1uyet8NFjYl8Tk1LabRV_Jg  access code: s2bx
import numpy as np
np.__version__
# Compute the loss (mean squared error).
def compute_error_from_line_given_points(b, w, points):
    """Return the mean squared error of the line y = w*x + b over the points.

    Args:
        b: intercept of the line.
        w: slope of the line.
        points: array-like of shape (N, 2); column 0 is x, column 1 is y.

    Returns:
        float: mean of (y - (w*x + b))**2 over all N points.
    """
    points = np.asarray(points)
    x = points[:, 0]
    y = points[:, 1]
    # Vectorized: one array expression replaces the per-row Python loop,
    # computing the same sum-of-squares / N.
    return float(np.mean((y - (w * x + b)) ** 2))
# One gradient-descent step on the MSE loss.
def step_gradient(b_current, w_current, points, learning_rate):
    """Perform a single gradient-descent update of (b, w).

    Args:
        b_current: current intercept.
        w_current: current slope.
        points: array-like of shape (N, 2); column 0 is x, column 1 is y.
        learning_rate: step size for the update.

    Returns:
        list: [new_b, new_w] after one update.
    """
    points = np.asarray(points)
    x = points[:, 0]
    y = points[:, 1]
    n = float(len(points))
    # Residual of the current line at every point, computed in one shot.
    error = (w_current * x + b_current) - y
    # d(MSE)/db = (2/N) * sum(error); d(MSE)/dw = (2/N) * sum(x * error).
    # Vectorized sums replace the original per-row accumulation loop.
    b_gradient = (2.0 / n) * np.sum(error)
    w_gradient = (2.0 / n) * np.sum(x * error)
    # Move against the gradient.
    new_b = b_current - learning_rate * b_gradient
    new_w = w_current - learning_rate * w_gradient
    return [new_b, new_w]
# Run gradient descent for a fixed number of iterations.
def gradient_descent_runner(points, starting_b, starting_w, learning_rate, num_iterations):
    """Run num_iterations gradient-descent steps from (starting_b, starting_w).

    Args:
        points: array-like of shape (N, 2); column 0 is x, column 1 is y.
        starting_b: initial intercept.
        starting_w: initial slope.
        learning_rate: step size passed to step_gradient.
        num_iterations: number of update steps to run.

    Returns:
        list: [b, w] after the final iteration.
    """
    # Convert once up front; the original rebuilt np.array(points) on
    # every single iteration inside the loop.
    points = np.array(points)
    b = starting_b
    w = starting_w
    for _ in range(num_iterations):
        b, w = step_gradient(b, w, points, learning_rate)
    return [b, w]
def run():
    """Load data.csv, fit y = w*x + b by gradient descent, and report the loss."""
    # 1. Load the data: expected shape (N, 2), column 0 = x, column 1 = y.
    points = np.genfromtxt("data.csv", delimiter=",")
    print(points.shape)
    print(points[:5])
    # 2. Hyper-parameters and initial parameters.
    learning_rate = 0.0001
    initial_b = 0
    initial_w = 0
    num_iterations = 1000
    # 3. Report the starting parameters and loss.
    start_loss = compute_error_from_line_given_points(initial_b, initial_w, points)
    print("初始化: b = %f, w = %f loss= %f" % (initial_b, initial_w, start_loss))
    # 4. Optimize by gradient descent.
    print("开始执行....")
    [b, w] = gradient_descent_runner(points, initial_b, initial_w, learning_rate, num_iterations)
    # 5. Report the trained parameters and loss.
    end_loss = compute_error_from_line_given_points(b, w, points)
    # Bug fix: the original reused the "初始化" (initial) label here, which
    # mislabels the post-training result; "训练后" = "after training".
    print("训练后: b = %f, w = %f loss= %f" % (b, w, end_loss))
if __name__ == "__main__":
run()