A Simple TextCNN Example

import gc
import torch
from torch import nn 
import torch.optim as optim
from torch.utils.data import DataLoader,Dataset,TensorDataset
import torch.nn.functional as F
import numpy as np
dtype = torch.FloatTensor
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
device
device(type='cuda')

Define the dataset

sentences = ["i love you", "he loves me", "she likes play", 
             "i hate you", "sorry for that", "this is awful"]
labels = [1, 1, 1, 0, 0, 0]  # 1: positive, 0: negative
len(sentences[0])
10
embedding_size = 2
# Note: len(sentences[0]) above counts characters (10), not words;
# sequence_length = len(sentences[0].split())  # the word-level length, 3 for every sentence here
num_class = len(set(labels))
batch_size = 3

word_list = " ".join(sentences).split()
print("word_list:",word_list,end = " ")
print("len(word_list):",len(word_list))
vocab = list(set(word_list))
print("*"*80)
print("vocab:",vocab,end = " ")
print("len(vocab):",len(vocab))
word2idx = {w:i for i,w in enumerate(vocab)}
print("*"*80)
print("word2idx:",word2idx)
vocab_size = len(vocab)
word_list: ['i', 'love', 'you', 'he', 'loves', 'me', 'she', 'likes', 'play', 'i', 'hate', 'you', 'sorry', 'for', 'that', 'this', 'is', 'awful'] len(word_list): 18
********************************************************************************
vocab: ['me', 'likes', 'sorry', 'hate', 'play', 'he', 'you', 'loves', 'that', 'is', 'awful', 'she', 'for', 'love', 'i', 'this'] len(vocab): 16
********************************************************************************
word2idx: {'me': 0, 'likes': 1, 'sorry': 2, 'hate': 3, 'play': 4, 'he': 5, 'you': 6, 'loves': 7, 'that': 8, 'is': 9, 'awful': 10, 'she': 11, 'for': 12, 'love': 13, 'i': 14, 'this': 15}
for sen in sentences:
    print(sen)
    print(sen.split())
    break
i love you
['i', 'love', 'you']
# Data preprocessing: map each word to its index in the vocabulary
def make_data(sentences, labels):
    inputs = [[word2idx[w] for w in sen.split()] for sen in sentences]
    targets = list(labels)
    return inputs, targets

input_datas,target_datas = make_data(sentences,labels)
# Convert to LongTensors (nn.Embedding expects integer indices)
input_datas, target_datas = torch.LongTensor(input_datas), torch.LongTensor(target_datas)
dataset = TensorDataset(input_datas,target_datas)
train_loader = DataLoader(dataset,batch_size,shuffle = True)
for x,y in train_loader:
    print(x)
    print(x.shape,y.shape)
tensor([[11,  1,  4],
        [14, 13,  6],
        [15,  9, 10]])
torch.Size([3, 3]) torch.Size([3])
tensor([[ 2, 12,  8],
        [ 5,  7,  0],
        [14,  3,  6]])
torch.Size([3, 3]) torch.Size([3])
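A note on shapes: all six sentences happen to be exactly three words long, which is why the index lists stack directly into one [6, 3] LongTensor. With variable-length sentences you would first need to pad to a common length, and usually reserve an `<unk>` index for words missing from the vocabulary. A minimal sketch of that, built on the `vocab` above; the `<pad>`/`<unk>` convention here is my own addition, not part of the original code:

# Hypothetical extension: prepend special tokens so index 0 is <pad> and 1 is <unk>.
specials = ["<pad>", "<unk>"]
word2idx_ext = {w: i for i, w in enumerate(specials + vocab)}

def encode(sen, max_len):
    ids = [word2idx_ext.get(w, word2idx_ext["<unk>"]) for w in sen.split()]
    ids = ids[:max_len]                                           # truncate if too long
    return ids + [word2idx_ext["<pad>"]] * (max_len - len(ids))  # pad if too short

print(encode("i really hate this", 5))  # "really" is out-of-vocab -> <unk>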

Build the TextCNN

vocab_size
16
embedding_size
2
class textcnn_(nn.Module):
    def __init__(self):
        super(textcnn_,self).__init__()
        self.embed = nn.Embedding(vocab_size,embedding_size)
        self.conv = nn.Sequential(
            nn.Conv2d(in_channels=1,out_channels=3,kernel_size=(2,embedding_size)),
            nn.ReLU(),nn.MaxPool2d(kernel_size=(2,1))
        )
        
        self.fc = nn.Linear(3,num_class)
    
    def forward(self,x):
        #x[batch_size,sequence_length]
        batch_size = x.shape[0]
        x = self.embed(x)
        #x [batch_size, sequence_length, embedding_size]
        x = x.unsqueeze(1)
        #x:[batch_size,1 ,sequence_length, embedding_size]
        x = self.conv(x)
        #x:[batch_size,output_channel,1,1]
        x= x.view(batch_size,-1)
        out = self.fc(x)
        return out
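To see where the `nn.Linear(3, num_class)` input size comes from, trace the shapes: a [batch, 3] index batch embeds to [batch, 3, 2] and unsqueezes to [batch, 1, 3, 2]; the (2, embedding_size) convolution yields [batch, 3, 2, 1], and the (2, 1) max-pool collapses that to [batch, 3, 1, 1], i.e. 3 features after flattening. A quick sanity check with a dummy CPU batch:

# Shape sanity check with random word indices (CPU is fine for this).
m = textcnn_()
dummy = torch.randint(0, vocab_size, (batch_size, 3))
emb = m.embed(dummy).unsqueeze(1)
print(emb.shape)          # torch.Size([3, 1, 3, 2])
print(m.conv(emb).shape)  # torch.Size([3, 3, 1, 1])
print(m(dummy).shape)     # torch.Size([3, 2])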
        
model = textcnn_().to(device)
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(),lr=1e-3)
num_epochs = 500
loss_all = []
for epoch in range(num_epochs):
    train_loss = 0
    train_num = 0
    for step, (x, y) in enumerate(train_loader):
        x = x.to(device)
        y = y.to(device)
        z_hat = model(x)
        loss = criterion(z_hat, y)
        loss.backward()
        optimizer.zero_grad()   # BUG: this wipes the gradients that backward() just computed,
        optimizer.step()        # so step() applies all-zero gradients and nothing is learned
        train_loss += loss.item() * len(y)
        train_num += len(y)
    loss_all.append(train_loss / train_num)
    print(f"Epoch:{epoch+1} Loss:{loss_all[-1]:0.8f}")
    del x, y, loss, train_loss, train_num
    gc.collect()
    torch.cuda.empty_cache()
    

Ha, the loss barely changes at all. The cause is the call order flagged above: `optimizer.zero_grad()` runs between `loss.backward()` and `optimizer.step()`, erasing the freshly computed gradients, so every step applies all-zero gradients and the weights never move. The correct order is zero_grad, then backward, then step; a corrected loop follows the log below.
Epoch:1 Loss:0.73505631
Epoch:2 Loss:0.73505628
Epoch:3 Loss:0.73505628
Epoch:4 Loss:0.73505628
Epoch:5 Loss:0.73505631
Epoch:6 Loss:0.73505631
Epoch:7 Loss:0.73505628
Epoch:8 Loss:0.73505628
Epoch:9 Loss:0.73505634
Epoch:10 Loss:0.73505628
...(epochs 11-499 omitted: the loss stays pinned at ~0.73505628 throughout)...
Epoch:500 Loss:0.73505631
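For reference, here is a corrected loop; the only substantive change is moving `optimizer.zero_grad()` to the top of the iteration, before `loss.backward()`, so the gradients survive until `optimizer.step()`:

for epoch in range(num_epochs):
    train_loss, train_num = 0, 0
    for x, y in train_loader:
        x, y = x.to(device), y.to(device)
        optimizer.zero_grad()          # clear stale gradients first
        loss = criterion(model(x), y)
        loss.backward()                # compute fresh gradients...
        optimizer.step()               # ...then apply them
        train_loss += loss.item() * len(y)
        train_num += len(y)
    loss_all.append(train_loss / train_num)
    print(f"Epoch:{epoch+1} Loss:{loss_all[-1]:0.8f}")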

test_text = 'i hate he'
tests = [[word2idx[n] for n in test_text.split()]]
test_data= torch.LongTensor(tests).to(device)

model.eval()
with torch.no_grad():
    predict = model(test_data).max(1, keepdim=True)[1]
if predict[0][0] == 0:
    print(test_text, "is negative...")
else:
    print(test_text, "is positive!")
i hate he is negative...
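A side note on the so-far-unused `F` import: the model outputs raw logits because `nn.CrossEntropyLoss` applies log-softmax internally, which is why `forward` has no softmax. To get actual class probabilities at inference time, apply `F.softmax` yourself; argmax over either logits or probabilities picks the same class as the `max(1, keepdim=True)[1]` call above:

with torch.no_grad():
    logits = model(test_data)          # raw scores, shape [1, num_class]
    probs = F.softmax(logits, dim=1)   # normalized probabilities
    print(probs, probs.argmax(dim=1))  # argmax matches the predicted class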


