Using an LSTM

import torch
import torch.nn as nn
import torch.nn.functional as F  # needed for F.log_softmax in the model below
import torch.autograd as autograd
from torch import optim

def prepare_sequence(seq, to_ix):
    # Map each token to its index and wrap the LongTensor in a Variable
    # (old-style PyTorch; on 0.4+ a plain tensor works the same way)
    idxs = [to_ix[w] for w in seq]
    tensor = torch.LongTensor(idxs)
    return autograd.Variable(tensor)
training_data = [
    ('the dog ate the apple'.split(), ['DET', 'NN', 'V', 'DET', 'NN']),
    ('Everybody read that book'.split(), ['NN', 'V', 'DET', 'NN']),
]
word_to_ix = {}
# Assign each distinct word a unique index into the embedding table
for sent, tags in training_data:
    for word in sent:
        if word not in word_to_ix:
            word_to_ix[word] = len(word_to_ix)
print(word_to_ix)
{'the': 0, 'dog': 1, 'ate': 2, 'apple': 3, 'Everybody': 4, 'read': 5, 'that': 6, 'book': 7}
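
As a quick sanity check, prepare_sequence turns the first training sentence into the index Variable the embedding layer expects; the printed indices follow directly from the word_to_ix mapping above:

inputs = prepare_sequence(training_data[0][0], word_to_ix)
print(inputs)  # Variable wrapping LongTensor([0, 1, 2, 0, 3]) for 'the dog ate the apple'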
EMBEDDING_DIM = 6
HIDDEN_DIM = 6
class LSTMTagger(nn.Module):
    def __init__(self, embedding_dim, hidden_dim, vocab_size, tagset_size):
        super(LSTMTagger, self).__init__()
        self.hidden_dim = hidden_dim

        # Maps each word index to a dense embedding vector
        self.word_embeddings = nn.Embedding(vocab_size, embedding_dim)

        # The LSTM takes word embeddings as inputs and outputs hidden states
        # of dimensionality hidden_dim
        self.lstm = nn.LSTM(embedding_dim, hidden_dim)

        # Linear layer mapping from hidden state space to tag space
        self.hidden2tag = nn.Linear(hidden_dim, tagset_size)
        self.hidden = self.init_hidden()

    def init_hidden(self):
        # (h_0, c_0), each of shape (num_layers, batch, hidden_dim) = (1, 1, hidden_dim)
        return (autograd.Variable(torch.zeros(1, 1, self.hidden_dim)),
                autograd.Variable(torch.zeros(1, 1, self.hidden_dim)))

    def forward(self, sentence):
        embeds = self.word_embeddings(sentence)
        # The LSTM expects input of shape (seq_len, batch, input_size);
        # the hidden state is a separate argument, not part of view()
        lstm_out, self.hidden = self.lstm(embeds.view(len(sentence), 1, -1), self.hidden)
        tag_space = self.hidden2tag(lstm_out.view(len(sentence), -1))
        tag_scores = F.log_softmax(tag_space, dim=1)
        return tag_scores
tag_to_ix = {'DET': 0, 'NN': 1, 'V': 2}

model = LSTMTagger(EMBEDDING_DIM, HIDDEN_DIM, len(word_to_ix), len(tag_to_ix))
loss_function = nn.NLLLoss()
optimizer = optim.SGD(model.parameters(), lr=0.1)
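
The snippet stops at the optimizer, so here is a minimal training loop in the same Variable-era style, followed by a look at the predictions. The epoch count of 300 is an assumption: the toy dataset has only two sentences, so a few hundred passes suffice.

for epoch in range(300):  # assumption: 300 epochs is enough for this two-sentence dataset
    for sentence, tags in training_data:
        # Clear accumulated gradients and reset the LSTM hidden state
        # so each sentence starts from a clean history
        model.zero_grad()
        model.hidden = model.init_hidden()

        # Convert the words and gold tags into index Variables
        sentence_in = prepare_sequence(sentence, word_to_ix)
        targets = prepare_sequence(tags, tag_to_ix)

        # Forward pass, NLL loss on the log-softmax scores, backprop, update
        tag_scores = model(sentence_in)
        loss = loss_function(tag_scores, targets)
        loss.backward()
        optimizer.step()

# After training, each row of the output holds log-probabilities over
# {DET, NN, V}; the argmax of each row is the predicted tag for that word
model.hidden = model.init_hidden()
inputs = prepare_sequence(training_data[0][0], word_to_ix)
print(model(inputs))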