案例简介
模型输入:
seq:[batch_size,len_seq,num_tags]
seq_tag:[batch_size,len_seq]
模型输出:
likelihood: tensor() (标量对数似然, scalar log-likelihood)
案例代码
import torch
from TorchCRF import CRF
from torch.utils.data import TensorDataset,DataLoader
from torch.optim import Adam
import matplotlib.pyplot as plt
# --- Hyperparameters / data shapes ---
batch_size = 10
seq_len = 19
num_tags = 9

# Synthetic data: random emission scores, every tag label fixed to 1.
# X: (1000, seq_len, num_tags) emission scores; tags: (1000, seq_len) gold labels.
X = torch.randn(1000, seq_len, num_tags)
tags = torch.ones([1000, seq_len]).long()

# Build the dataset and dataloader (all labels are 1).
tensor_data = TensorDataset(X, tags)
dataloader = DataLoader(tensor_data, shuffle=True, batch_size=batch_size)

# Model definition: CRF layer expecting (batch, seq, num_tags) inputs.
model = CRF(num_tags, batch_first=True)

# Training: model(seq, seq_tag) returns the log-likelihood, so the loss
# is its negation (NLL) and we minimize it with Adam.
optimizer = Adam(model.parameters(), lr=0.05, betas=(0.9, 0.99))
losses = []
for seq, seq_tag in dataloader:
    # Clear gradients from the previous step — without this, gradients
    # accumulate across iterations and every step uses stale gradients.
    optimizer.zero_grad()
    loss = -model(seq, seq_tag)
    loss.backward()
    optimizer.step()
    losses.append(loss.tolist())

# Plot the training curve; label the line so legend() has an artist to show.
plt.xlabel('number of iter')
plt.ylabel('loss')
plt.title('a simple of torchcrf')
plt.plot(losses, label='loss')
plt.legend()
plt.show()

# Viterbi decode on the first 4 sequences — should predict all-1 tags
# since that is the only label pattern seen during training.
print('解码结果:', model.decode(X[:4]))
输出:
解码结果: [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]