Several different versions of the cross-entropy function

import tensorflow as tf 
from tensorflow import keras
import numpy as np 

a = np.random.randint(0, 5, (5, 5)).astype(np.float32)  # logits, shape (5, 5)
b = np.random.randint(0, 5, (5,))                       # integer class labels; tf.one_hot requires integer indices
b_onehot = tf.one_hot(b, 5).numpy()

def softmax(x):
    # row-wise softmax: exponentiate, then normalize each row to sum to 1
    x_exp = np.exp(x)
    return x_exp / np.sum(x_exp, axis=1, keepdims=True)

a_softmax = softmax(a)
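
The naive exponentiation above can overflow for large logits. A common numerically stable variant (my addition, not part of the original post) first subtracts the row maximum, which leaves the result mathematically unchanged:

def softmax_stable(x):
    # exp(x - m) / sum(exp(x - m)) equals exp(x) / sum(exp(x)),
    # but subtracting the row max keeps np.exp from overflowing
    x_exp = np.exp(x - np.max(x, axis=1, keepdims=True))
    return x_exp / np.sum(x_exp, axis=1, keepdims=True)

# np.allclose(softmax_stable(a), a_softmax) should hold here.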

def softmax_cross_entropy(label, pred):
    # manual cross-entropy: -sum(y_true * log(y_pred)) along the class axis
    return -np.sum(label * np.log(pred), axis=-1)

print(softmax_cross_entropy(b_onehot, a_softmax))
print(tf.nn.softmax_cross_entropy_with_logits(b_onehot, a))
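
TensorFlow also ships a sparse variant that takes the integer labels directly and skips the one-hot step; under the setup above it should print the same values (a sketch I am adding, not in the original post):

print(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=b, logits=a))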

def keras_categorical_cross_entropy(label, pred):
    # Keras treats non-logit inputs as unnormalized probabilities:
    # add a small epsilon, renormalize each row, then take the usual
    # -sum(y_true * log(y_pred)). Avoid += and /= here so the caller's
    # array is not modified in place.
    pred = pred + 1e-7
    pred = pred / np.sum(pred, axis=-1, keepdims=True)
    print(np.log(pred))  # debug: log-probabilities (see output below)
    print(label)         # debug: one-hot labels (see output below)
    return -np.sum(label * np.log(pred), axis=-1)

print(keras_categorical_cross_entropy(b_onehot, a))
print(keras.losses.categorical_crossentropy(b_onehot,a))
print(keras.losses.sparse_categorical_crossentropy(b,a))
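
The two Keras calls above treat a as unnormalized probabilities, which is why the ε correction is needed. Passing the documented from_logits=True flag makes them apply softmax internally instead, so they should match tf.nn.softmax_cross_entropy_with_logits (again a sketch added here, not in the original post):

print(keras.losses.categorical_crossentropy(b_onehot, a, from_logits=True))
print(keras.losses.sparse_categorical_crossentropy(b, a, from_logits=True))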

The output of one run (values differ between runs because the inputs are random):

[1.4519143  2.93727    3.1849952  1.3179511  0.79001886]
tf.Tensor([1.4519144 2.93727   3.184995  1.317951  0.7900189], shape=(5,), dtype=float32)
[[ -1.2039728  -2.302585   -1.609438   -0.9162907 -18.420681 ]
 [-18.315321   -1.0986123  -2.1972244  -1.0986123  -1.5040774]
 [ -1.2992829  -1.2992829  -1.2992829  -2.397895   -2.397895 ]
 [ -1.8718021  -1.4663371  -1.4663371  -1.8718021  -1.4663371]
 [-18.515991   -2.397895   -1.0116009  -1.0116009  -1.704748 ]]
[[1. 0. 0. 0. 0.]
 [0. 0. 1. 0. 0.]
 [0. 0. 0. 1. 0.]
 [0. 1. 0. 0. 0.]
 [0. 0. 0. 1. 0.]]
[1.2039728 2.1972244 2.397895  1.4663371 1.0116009]
tf.Tensor([1.2039728 2.1972244 2.3978953 1.4663371 1.0116009], shape=(5,), dtype=float32)
tf.Tensor([1.2039729 2.1972246 2.3978953 1.4663371 1.011601 ], shape=(5,), dtype=float32)

The small discrepancies between these results come from the choice of the numerical-stability constant ε.
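
The ε that Keras uses is exposed through the backend; checking it (an addition of mine, assuming tf.keras.backend) confirms it is the same 1e-7 constant used in the manual version above:

print(keras.backend.epsilon())  # prints 1e-07 by default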
