import torch
from torch import nn, optim
import numpy as np
import matplotlib.pyplot as plt
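# Hyperparameters: feature dimension, output dimension, number of epochs, and learning rate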
input_size = 3
output_size = 1
num_epochs = 2000
learning_rate = 0.001
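# Synthetic training data: 10 samples of random features and independently drawn random targets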
x_train = torch.randn(10, input_size)
y_train = 2 * torch.randn(10, output_size)
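# Single linear layer as the regression model, trained with MSE loss and the Adam optimizer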
model = nn.Linear(input_size, output_size)
criterion = nn.MSELoss()
optimizer = optim.Adam(model.parameters(), lr=learning_rate)
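# Generator yielding the integers s_num, s_num + 1, ..., e_num - 1 (equivalent to iterating over range(s_num, e_num))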
def myenumerate(s_num, e_num):
    while s_num < e_num:
        yield s_num
        s_num += 1
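# Training loop: forward pass, MSE loss, backpropagation, and a parameter update each epoch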
for epoch in myenumerate(0, num_epochs):
    pred = model(x_train)
    loss = criterion(pred, y_train)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
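    # Every 50 epochs, redraw the original targets and current predictions, annotated with the loss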
    if (epoch + 1) % 50 == 0:
        plt.cla()
        plt.plot(np.arange(y_train.shape[0]), y_train.numpy(), 'ro', label='original labels')
        plt.plot(np.arange(y_train.shape[0]), pred.detach().numpy(), 'go', label='predicted labels')
        plt.text(y_train.shape[0] / 2, y_train.min().item() + 0.1,
                 'epoch: {}/{}, loss: {}'.format(epoch + 1, num_epochs, round(loss.item(), 2)))
        plt.legend()
        plt.pause(0.2)
plt.show()
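# Optionally inspect the learned weight and bias of the linear layer after training
print('weight:', model.weight.data, 'bias:', model.bias.data)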
