import torch
import numpy as np
from torch.nn.functional import one_hot
from torch import nn
# Demo: compute mean cross-entropy loss by hand (one-hot * log-softmax)
# and confirm it matches torch.nn.CrossEntropyLoss on the same logits.
predicts = np.array([
    [-1.0606, 1.5613, 1.2007, -0.2481],
    [-1.9652, -0.4367, -0.0645, -0.5104],
    [0.1011, -0.5904, 0.0243, 0.1002],
])
print("pred_shape:", predicts.shape)

# Class indices for each of the 3 samples, expanded to one-hot over 4 classes.
label = torch.tensor([0, 2, 1])
label = one_hot(label, num_classes=4)
print("label:", label)

# Convert the logits to a tensor once instead of on every use.
logits = torch.tensor(predicts)
print("softmax:", torch.softmax(logits, 1))

# Manual cross-entropy: mean over the batch of -sum(onehot * log p).
# log_softmax is numerically stable, unlike log(softmax(...)) which can
# produce -inf when a softmax entry underflows to 0.
loss = -1 / predicts.shape[0] * torch.sum(label * torch.log_softmax(logits, 1))
print("loss:", loss)

# Built-in CrossEntropyLoss takes raw logits + class indices and should
# agree with the manual computation above.
crit = nn.CrossEntropyLoss()
loss = crit(logits, torch.tensor([0, 2, 1]))
print("CrossEntropyLoss:", loss)
# Expected output:
# pred_shape: (3, 4)
# label: tensor([[1, 0, 0, 0],
#         [0, 0, 1, 0],
#         [0, 1, 0, 0]])
# softmax: tensor([[0.0376, 0.5172, 0.3606, 0.0847],
#         [0.0603, 0.2780, 0.4034, 0.2583],
#         [0.2919, 0.1462, 0.2703, 0.2916]], dtype=torch.float64)
# loss: tensor(2.0373, dtype=torch.float64)
# CrossEntropyLoss: tensor(2.0373, dtype=torch.float64)