# NOTE(review): the five lines below were stray text / duplicated code that made
# the file fail to parse and run (bare prose, and torch used before import).
# They are preserved here as comments:
# target = torch.empty(3, dtype=torch.long).random_(5)  -- duplicate of the line after the imports
# Randomly draw 3 integers in [0, 5) to use as class labels.
# Binary cross-entropy vs. multi-class cross-entropy.
# first[] and second[] are used to compute the softmax terms.
import torch
import torch.nn as nn
import numpy as np
import math
# Compute the cross-entropy loss for three to-be-predicted pixels.
criterion = nn.CrossEntropyLoss()
# Raw (unnormalized) scores: 3 samples x 5 classes.
# NOTE: the name `input` shadows the builtin; kept because the manual
# recomputation further down reads it.
input = torch.randn(3, 5, requires_grad=True)
# Random integer class labels in [0, 5), one per sample.
target = torch.empty(3, dtype=torch.long).random_(5)
output = criterion(input, target)
print('input is ', input)
print('target is ', target)
print('loss=', output)
# Recompute the loss by hand from the cross-entropy formula:
#   l_i = -x[i, y_i] + log( sum_j exp(x[i, j]) )
# then take the mean over the 3 samples (CrossEntropyLoss default reduction).

# Logit of the correct class for each sample.
first = [input[i][target[i]] for i in range(3)]
# print(first)

# Per-sample softmax denominator: sum_j exp(x[i, j]).
# (The original clobbered the builtin `sum` with an accumulator; comprehensions
# with the builtin `sum` avoid that and the manual reset-to-zero bookkeeping.)
second = [sum(math.exp(input[i][j]) for j in range(5)) for i in range(3)]
# print(second)

# Per-sample loss values.
L = [-first[i] + math.log(second[i]) for i in range(3)]
# print(L)

# Mean reduction over the 3 samples — must divide by 3 to match nn.CrossEntropyLoss.
Loss = sum(L) / 3
print('自己计算的loss', Loss)