二分类交叉熵损失函数
PyTorch提供了两个类来计算二分类交叉熵(Binary Cross Entropy),分别是 BCELoss() 和 BCEWithLogitsLoss();后者在内部自带 Sigmoid,数值上更稳定。
该损失函数用于二分类任务,也常用于多标签分类任务(每个标签独立地视为一次二分类)。
import numpy as np
import math
import torch
import torch.nn as nn
def sigmoid(x):
    """Element-wise logistic sigmoid: 1 / (1 + e^(-x))."""
    return 1.0 / (1.0 + np.exp(-x))
def bce_loss1():
    """Compute binary cross-entropy by hand with NumPy and print the mean loss.

    Demonstrates the formula -(t*log(p) + (1-t)*log(1-p)) averaged over
    all elements, on a fixed 3-element example.
    """
    logits = np.array([1.1568, -0.6375, 0.5318])
    labels = np.array([0, 0, 1])
    # logistic sigmoid turns raw logits into probabilities in (0, 1)
    probs = 1.0 / (1.0 + np.exp(-logits))
    per_elem = -(labels * np.log(probs) + (1 - labels) * np.log(1 - probs))
    print('bce_loss1:', np.mean(per_elem))
def bce_loss2():
    """Compute the same binary cross-entropy with PyTorch and print it.

    nn.BCELoss expects probabilities, so the logits are passed through
    nn.Sigmoid first (nn.BCEWithLogitsLoss would fuse the two steps).
    """
    logits = torch.tensor([[1.1568, -0.6375, 0.5318]])
    labels = torch.FloatTensor([[0, 0, 1]])
    probs = nn.Sigmoid()(logits)
    criterion = nn.BCELoss()
    print('bce_loss2:', criterion(probs, labels))
多分类交叉熵损失函数
PyTorch中提供的计算多分类交叉熵的损失函数为 nn.CrossEntropyLoss(),其输入应为未经 softmax 的原始 logits。
import numpy as np
import random
import torch
import torch.nn as nn
def softmax(z):
    """Row-wise softmax of a 2-D array of logits.

    Args:
        z: array-like of shape (n_rows, n_classes).

    Returns:
        ndarray of the same shape; each row is non-negative and sums to 1.

    Fixes over the original: np.exp(z) was evaluated twice (wasted work),
    and large logits overflowed to inf. Exponentials are now computed once,
    shifted by each row's max — softmax is invariant to a per-row constant
    shift, so the result is mathematically identical but numerically stable.
    """
    z = np.asarray(z)
    t = np.exp(z - np.max(z, axis=1, keepdims=True))
    return t / np.sum(t, axis=1, keepdims=True)
def softmax_loss1():
    """Compute multi-class cross-entropy by hand with NumPy and print it.

    Applies softmax to a single row of logits, takes -log of the
    probabilities, and selects the entry at the target class index.
    """
    logits = np.array([[1.1568, -0.6375, 0.5318]])
    labels = np.array([2])
    # row-wise softmax: exponentiate, then normalize each row
    exp_z = np.exp(logits)
    probs = exp_z / np.sum(exp_z, axis=1).reshape(-1, 1)
    neg_log_probs = -np.log(probs)
    # cross-entropy is the negative log-probability of the target class
    print('softmax_loss1:', neg_log_probs[:, labels[0]])
def softmax_loss2():
    """Compute the same multi-class cross-entropy with PyTorch and print it.

    nn.CrossEntropyLoss takes raw (unnormalized) logits directly — no
    explicit softmax is applied beforehand.
    """
    logits = torch.tensor([[1.1568, -0.6375, 0.5318]])
    labels = torch.tensor([2])
    criterion = nn.CrossEntropyLoss()
    print('softmax_loss2:', criterion(logits, labels))