1. softmax_cross_entropy_with_logits
This op fuses the softmax and the cross-entropy into one call; the manual computation below should produce the same value.
import tensorflow as tf

logits = tf.constant([[1.0, 2.0, 3.0], [1.0, 2.0, 3.0], [1.0, 2.0, 3.0]])
y = tf.nn.softmax(logits)
y_ = tf.constant([[0.0, 0.0, 1.0], [0.0, 0.0, 1.0], [0.0, 0.0, 1.0]])
# Manual version: softmax first, then cross-entropy summed over all elements.
cross_entropy = -tf.reduce_sum(y_ * tf.log(y))
# Fused op: takes raw logits and one-hot labels, returns per-row losses.
cross_entropy2 = tf.reduce_sum(tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=y_))

with tf.Session() as sess:
    a = sess.run(cross_entropy)
    b = sess.run(cross_entropy2)
    print(a)  # 1.222818
    print(b)  # 1.2228179
# y = [[0.09003057, 0.24472848, 0.66524094],
#      [0.09003057, 0.24472848, 0.66524094],
#      [0.09003057, 0.24472848, 0.66524094]]
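As a sanity check, the same 1.2228 value can be reproduced without TensorFlow. The following is a minimal NumPy sketch (not part of the original snippet) that computes the softmax by hand and then the summed cross-entropy:

import numpy as np

logits = np.array([[1.0, 2.0, 3.0]] * 3)
labels = np.array([[0.0, 0.0, 1.0]] * 3)

# Softmax per row (subtracting the row max for numerical stability).
shifted = logits - logits.max(axis=1, keepdims=True)
probs = np.exp(shifted) / np.exp(shifted).sum(axis=1, keepdims=True)

# Cross-entropy summed over all elements, matching the TF code above.
ce = -(labels * np.log(probs)).sum()
print(ce)  # ~1.2228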
2. sigmoid_cross_entropy_with_logits
Unlike the softmax version, this op treats each logit as an independent binary classification (multi-label setting), so a row of labels does not have to sum to 1, and the result is element-wise rather than per row.
import tensorflow as tf

logits = tf.constant([[4.0, 2.0, 3.0], [1.0, 2.0, 3.0], [1.0, 5.0, 3.0]])
y = tf.nn.sigmoid(logits)
y_ = tf.constant([[0.0, 1.0, 1.0], [0.0, 0.0, 1.0], [0.0, 1.0, 1.0]])
# Manual version: element-wise binary cross-entropy on the sigmoid outputs.
cross_entropy = -(y_ * tf.log(y) + (1 - y_) * tf.log(1 - y))
# Fused op: takes raw logits, returns the same element-wise losses.
cross_entropy2 = tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=y_)

with tf.Session() as sess:
    a = sess.run(cross_entropy)
    b = sess.run(cross_entropy2)
    print(a)
    print(b)
# Both prints show the element-wise cross-entropy (not y, the sigmoid output):
# [[4.0181484  0.12692808 0.04858734]
#  [1.3132617  2.1269276  0.04858734]
#  [1.3132617  0.00671532 0.04858734]]
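This matrix can also be verified directly from the logits. TensorFlow's documentation gives the numerically stable form max(x, 0) - x*z + log(1 + exp(-|x|)) for this op; the following is a small NumPy sketch (not part of the original snippet) using that formula:

import numpy as np

x = np.array([[4.0, 2.0, 3.0], [1.0, 2.0, 3.0], [1.0, 5.0, 3.0]])  # logits
z = np.array([[0.0, 1.0, 1.0], [0.0, 0.0, 1.0], [0.0, 1.0, 1.0]])  # labels

# Stable element-wise binary cross-entropy: max(x, 0) - x*z + log(1 + exp(-|x|)).
ce = np.maximum(x, 0) - x * z + np.log1p(np.exp(-np.abs(x)))
print(ce)  # matches the matrix printed above

For example, the top-left entry is max(4, 0) - 4*0 + log(1 + e^-4) = 4.0181, agreeing with the TensorFlow output.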