import tensorflow as tf
labels = [[0, 0, 1], [0, 1, 0]]               # one-hot labels
labels2 = [[0.4, 0.1, 0.5], [0.3, 0.3, 0.4]]  # non-one-hot (soft) labels
labels3 = [2, 1]                              # sparse labels (class indices)
logits = [[2, 0.5, 6], [0.1, 0, 3]]
logits_scaled = tf.nn.softmax(logits)
logits_scaled2 = tf.nn.softmax(logits_scaled)  # softmax applied a second time, for comparison
'''
After softmax, each row sums to 1. Applying softmax a second time
(logits_scaled2) still produces rows that sum to 1, but the distribution
is flattened toward uniform and no longer reflects the original logits.
'''
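# For reference, the math these ops implement:
#   softmax(x)_i = exp(x_i) / sum_j exp(x_j)
#   cross_entropy(labels, x) = -sum_i labels_i * log(softmax(x)_i)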
result1 = tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits)         # correct: pass raw logits
result2 = tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits_scaled)  # wrong: these logits were already softmaxed
result3 = -tf.reduce_sum(labels * tf.log(logits_scaled), 1)   # manual cross-entropy from raw logits
result4 = -tf.reduce_sum(labels * tf.log(logits_scaled2), 1)  # manual cross-entropy from double-softmaxed values
result5 = tf.nn.softmax_cross_entropy_with_logits(labels=labels2, logits=logits)  # soft (non-one-hot) labels
result6 = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels3, logits=logits)  # sparse version: takes class indices instead of one-hot labels
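# Note: result3 reproduces result1 by hand, and result4 reproduces result2.
# result2/result4 are distorted because softmax ends up applied twice:
# the op always runs softmax on its logits argument internally.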
with tf.Session() as sess:
    print("scaled=", sess.run(logits_scaled))
    print("scaled2=", sess.run(logits_scaled2))
    print("result1=", sess.run(result1))
    print("result2=", sess.run(result2))
    print("result3=", sess.run(result3))
    print("result4=", sess.run(result4))
    print("result5=", sess.run(result5))
    print("result6=", sess.run(result6))
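The listing above uses the TensorFlow 1.x graph/Session API. As a minimal sketch of the same comparison under TensorFlow 2.x (an assumption: your environment runs TF 2.x with eager execution; there tf.log is renamed tf.math.log, while both cross-entropy ops keep their names under tf.nn):

import tensorflow as tf  # assuming TensorFlow 2.x (eager execution, no Session)

labels = tf.constant([[0., 0., 1.], [0., 1., 0.]])
labels3 = tf.constant([2, 1])
logits = tf.constant([[2.0, 0.5, 6.0], [0.1, 0.0, 3.0]])

logits_scaled = tf.nn.softmax(logits)

# The same ops return EagerTensors directly, so the values print immediately.
print("result1=", tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits).numpy())
print("result3=", -tf.reduce_sum(labels * tf.math.log(logits_scaled), axis=1).numpy())
print("result6=", tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels3, logits=logits).numpy())

Since the underlying math is unchanged, the printed values match the corresponding results from the TF 1.x run above.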