import tensorflow as tf

labels = [[0.2, 0.3, 0.5],
          [0.1, 0.6, 0.3]]
logits = [[2, 0.5, 1],
          [0.1, 1, 3]]
logits_scaled = tf.nn.softmax(logits)
result1 = tf.nn.softmax_cross_entropy_with_logits_v2(labels=labels, logits=logits)  # correct result
result2 = -tf.reduce_sum(labels * tf.log(logits_scaled), 1)  # computed directly from the definition of cross-entropy; also correct
result3 = tf.nn.softmax_cross_entropy_with_logits_v2(labels=labels, logits=logits_scaled)  # not as expected: wrong usage, the input should not be the already-softmaxed probabilities
with tf.Session() as sess:
    print(sess.run(result1))
    print(sess.run(result2))
    print(sess.run(result3))
'''
Output:
[1.4143689 1.6642545]
[1.4143689 1.6642545]
[1.1718578 1.1757141]
'''
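Why result3 is off: softmax_cross_entropy_with_logits_v2 applies softmax to its logits argument internally, so feeding it logits_scaled means the softmax gets applied twice. The sketch below is an illustrative check, not part of the original snippet (the name result3_check is made up here); it should reproduce result3 up to floating-point noise.

# Sketch: reproduce result3 by applying softmax a second time before the
# cross-entropy formula, which is effectively what the op does when it is
# (incorrectly) fed probabilities instead of raw logits.
result3_check = -tf.reduce_sum(labels * tf.log(tf.nn.softmax(logits_scaled)), 1)
with tf.Session() as sess:
    print(sess.run(result3_check))  # expected to match result3: [1.1718578 1.1757141]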
print(tf.get_collection(tf.GraphKeys.LOSSES))
loss1 = tf.losses.hinge_loss(labels, logits)  # tf.losses.* functions register their result in the GraphKeys.LOSSES collection
print(tf.get_collection(tf.GraphKeys.LOSSES))
'''
Output:
[]
[<tf.Tensor 'hinge_loss/value:0' shape=() dtype=float32>]
'''
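Once a loss is registered in the LOSSES collection, it can be retrieved without calling tf.get_collection directly. A minimal sketch, assuming the TF 1.x tf.losses API: tf.losses.get_losses() returns the collected loss tensors, and tf.losses.get_total_loss() sums them (including regularization losses by default).

collected = tf.losses.get_losses()       # same tensors as tf.get_collection(tf.GraphKeys.LOSSES)
total_loss = tf.losses.get_total_loss()  # sum of collected losses plus regularization losses by default
with tf.Session() as sess:
    print(sess.run(collected))
    print(sess.run(total_loss))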