# Gradient descent on y = x**2 - 1 with a hand-computed derivative.
# The minimum is at x = 0; each update multiplies x by (1 - 2*lr) = 0.8,
# so after `epochs` steps x == 0.8**epochs.
x = 1
learning_rate = 0.1
epochs = 50

# Objective; its analytic derivative is dy/dx = 2*x.
y = lambda x: x ** 2 - 1

for epoch in range(epochs):
    print(epoch, x)
    dx = 2 * x                      # analytic gradient at the current x
    x = x - learning_rate * dx      # gradient-descent step

# Final objective value after the last update.
print(y(x))
# PyTorch implementation of the same gradient descent, letting autograd
# compute dy/dx instead of hand-coding the derivative.
import torch
from torch.autograd import Variable  # NOTE: Variable is deprecated; kept only for compatibility

# A leaf tensor with requires_grad=True replaces the deprecated
# Variable(x, requires_grad=True) wrapper: tensor([1.], requires_grad=True)
x = torch.tensor([1.0], requires_grad=True)
print('grad', x.grad, 'data', x.data)

learning_rate = 0.1
epochs = 20

for epoch in range(epochs):
    y = x ** 2 - 1
    y.backward()                    # fills x.grad with dy/dx = 2*x
    print('grad', x.grad.data)
    # Update through .data so the step itself is not tracked by autograd.
    x.data = x.data - learning_rate * x.grad.data
    x.grad.data.zero_()             # backward() accumulates; clear before next step

print(x.data)
print(y)