# Conclusion: CrossEntropyLoss = NLLLoss + LogSoftmax(dim=1)
# Verification:
# -*- coding: utf-8 -*-
#
# @Version: python 3.7
# @File: test.py
# @Author: ty
# @E-mail: nwu_ty@163.com
# @Time: 2020/12/2
# @Description: Verify that CrossEntropyLoss equals NLLLoss applied to LogSoftmax(dim=1) output.
# @Input:
# @Output:
#
import torch
# Demonstration that CrossEntropyLoss(x, t) == NLLLoss(LogSoftmax(x, dim=1), t).
torch.manual_seed(0)  # make the demo output reproducible across runs

# 3 samples, 4 classes. Renamed from `input` to avoid shadowing the builtin.
logits = torch.randn(3, 4)
# One ground-truth class index per sample, each in range [0, 4).
target = torch.tensor([1, 2, 0])

log_softmax = torch.nn.LogSoftmax(dim=1)  # normalize over the class dimension
nll_loss = torch.nn.NLLLoss()
cross_entropy = torch.nn.CrossEntropyLoss()

nll_value = nll_loss(log_softmax(logits), target)
ce_value = cross_entropy(logits, target)

print(nll_value)
print(ce_value)
# The actual verification the script's title promises: the two losses agree
# up to floating-point tolerance.
assert torch.allclose(nll_value, ce_value), "CrossEntropyLoss != NLLLoss + LogSoftmax"