训练模型,学习从"hello"到"ohlol"
![](https://img-blog.csdnimg.cn/20210409090212447.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L1pIVVlBTjEyMDk=,size_16,color_FFFFFF,t_70)
代码
import torch
# --- Hyperparameters ---------------------------------------------------
input_size = 4    # one-hot width == vocabulary size
hidden_size = 4   # hidden state width (same as vocab so logits decode directly)
batch_size = 1

# Vocabulary: index -> character.
idx2char = ['e', 'h', 'l', 'o']
x_data = [1, 0, 2, 2, 3]  # input sequence "hello" as vocab indices
y_data = [3, 1, 2, 3, 2]  # target sequence "ohlol" as vocab indices

# One-hot table: row i is the i-th row of the 4x4 identity matrix.
one_hot_lookup = [[1 if col == row else 0 for col in range(4)]
                  for row in range(4)]
# Encode the input indices as one-hot vectors.
x_one_hot = [one_hot_lookup[x] for x in x_data]

# Inputs reshaped to (seqLen, batchSize, inputSize) so iterating the
# first dimension yields one time step at a time.
inputs = torch.Tensor(x_one_hot).view(-1, batch_size, input_size)
# Labels reshaped to (seqLen, 1): one target index per time step.
labels = torch.LongTensor(y_data).view(-1, 1)
class Model(torch.nn.Module):
    """Character-level RNN wrapped around a single ``RNNCell``.

    Each call to ``forward`` advances the hidden state by exactly one
    time step; the caller drives the loop over the sequence.
    """

    def __init__(self, input_size, hidden_size, batch_size):
        super(Model, self).__init__()
        # Sizes are stored so init_hidden() can build the zero state.
        self.batch_size = batch_size
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.rnncell = torch.nn.RNNCell(input_size=self.input_size,
                                        hidden_size=self.hidden_size)

    def forward(self, input, hidden):
        # One step: input (batchSize, inputSize) plus previous hidden
        # (batchSize, hiddenSize) -> new hidden (batchSize, hiddenSize).
        return self.rnncell(input, hidden)

    def init_hidden(self):
        # All-zero initial hidden state of shape (batchSize, hiddenSize).
        return torch.zeros(self.batch_size, self.hidden_size)
# Build the network, the loss, and the optimizer.
net = Model(input_size, hidden_size, batch_size)
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(net.parameters(), lr=0.1)

# Training: each epoch walks the sequence one character at a time,
# accumulating the per-step cross-entropy into a single graph so one
# backward pass updates the cell from the whole sequence.
for epoch in range(15):
    optimizer.zero_grad()
    hidden = net.init_hidden()
    loss = 0
    print('Predicted string:', end='')
    # inputs: (seqLen, batchSize, inputSize) -> per step x: (batchSize, inputSize)
    # labels: (seqLen, 1)                    -> per step y: (1,)
    for x, y in zip(inputs, labels):
        hidden = net(x, hidden)
        loss += criterion(hidden, y)
        # Greedy decode: print the most probable character this step.
        _, idx = hidden.max(dim=1)
        print(idx2char[idx.item()], end='')
    loss.backward()
    optimizer.step()
    print(',Epoch [%d/15] loss=%.4f' % (epoch + 1, loss.item()))
运行结果
![](https://img-blog.csdnimg.cn/20210409095234527.png?x-oss-process=image/watermark,type_ZmFuZ3poZW5naGVpdGk,shadow_10,text_aHR0cHM6Ly9ibG9nLmNzZG4ubmV0L1pIVVlBTjEyMDk=,size_16,color_FFFFFF,t_70)