# 通过使用numpy库编写简单的Gradient Descent，数据位于附件之中
import torch
from torch import autograd
import numpy as np
import matplotlib.pyplot as plt
'''torch关于求导的简单运用'''
# x = torch.tensor(1.)
# a = torch.tensor(1.,requires_grad=True)
# b = torch.tensor(2.,requires_grad=True)
# c = torch.tensor(3.,requires_grad=True)
#
# y = a**2*x + b*x + c
#
# print('before',a.grad,b.grad,c.grad)
# grads = autograd.grad(y,[a,b,c])
# print('after',grads[0],grads[1],grads[2])
"""函数名:Loss_Function(损失函数) """ #首先明确此次通过偏导想求拟合的直线方程为y = w*x + b
"""参数名:b:拟合曲线的biase(偏移量)"""
""" w:拟合曲线x的系数 """
""" points:待拟合数据 """
"""返回值:返回值为平方损失函数的均值 """
def Loss_Function(b,w,points):
Totle_value = 0