Definition:
def cross_entropy(input, target, weight=None, size_average=None, ignore_index=-100, reduce=None, reduction='mean'):
    # type: (Tensor, Tensor, Optional[Tensor], Optional[bool], int, Optional[bool], str) -> Tensor
    r"""This criterion combines `log_softmax` and `nll_loss` in a single function."""
# Demo: cross_entropy takes raw (unnormalized) logits of shape (N, C) and a
# target of class indices with shape (N,); it applies log_softmax + nll_loss.
import torch
from torch.nn import functional

x = torch.randn(1, 85)   # one sample, 85 features (unseeded: values vary per run)
w = torch.randn(5, 85)   # projection to 5 classes
b = torch.tensor([3])    # target class index for the single sample
logits = x @ w.t()       # (1, 85) @ (85, 5) -> (1, 5) raw class scores
print(logits.shape)
# Reuse `b` instead of constructing torch.tensor([3]) a second time.
print(functional.cross_entropy(logits, b))
print(b.shape)
Result (the loss value varies per run because x and w are unseeded random tensors):
torch.Size([1, 5])
tensor(18.9133)
torch.Size([1])