tf.nn.softmax_cross_entropy_with_logits

1. softmax_cross_entropy_with_logits

import tensorflow as tf

logits = tf.constant([[1.0, 2.0, 3.0], [1.0, 2.0, 3.0], [1.0, 2.0, 3.0]])
y = tf.nn.softmax(logits)
y_ = tf.constant([[0.0, 0.0, 1.0], [0.0, 0.0, 1.0], [0.0, 0.0, 1.0]])

# Manual cross entropy: apply softmax first, then -sum(y_ * log(y)) over the whole batch.
cross_entropy = -tf.reduce_sum(y_ * tf.log(y))
# Fused op: softmax and cross entropy computed together per example, then summed over the batch.
cross_entropy2 = tf.reduce_sum(tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=y_))

with tf.Session() as sess:
    a = sess.run(cross_entropy)
    b = sess.run(cross_entropy2)
    print(a)  # 1.222818
    print(b)  # 1.2228179

    # y = [[0.09003057, 0.24472848, 0.66524094],
    #      [0.09003057, 0.24472848, 0.66524094],
    #      [0.09003057, 0.24472848, 0.66524094]]
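
Why the two numbers match: softmax_cross_entropy_with_logits fuses the softmax and the cross entropy into a single numerically stable op, so summing its per-example output equals the manual -reduce_sum(y_ * log(softmax(logits))). As a sanity check, here is a minimal NumPy sketch (NumPy is an addition for illustration, not part of the original example) that reproduces the same value:

import numpy as np

logits = np.array([[1.0, 2.0, 3.0]] * 3)
labels = np.array([[0.0, 0.0, 1.0]] * 3)

# Numerically stable softmax: subtract the row max before exponentiating.
shifted = logits - logits.max(axis=1, keepdims=True)
softmax = np.exp(shifted) / np.exp(shifted).sum(axis=1, keepdims=True)

# Cross entropy summed over the batch, matching tf.reduce_sum above.
print(-np.sum(labels * np.log(softmax)))  # ~1.2228179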

2. sigmoid_cross_entropy_with_logits

import tensorflow as tf

logits = tf.constant([[4.0, 2.0, 3.0], [1.0, 2.0, 3.0], [1.0, 5.0, 3.0]])
y = tf.nn.sigmoid(logits)
y_ = tf.constant([[0.0, 1.0, 1.0], [0.0, 0.0, 1.0], [0.0, 1.0, 1.0]])

# Manual element-wise cross entropy for independent (multi-label) classes.
cross_entropy = -(y_ * tf.log(y) + (1 - y_) * tf.log(1 - y))
# Fused op: sigmoid and cross entropy computed together, element-wise.
cross_entropy2 = tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=y_)

with tf.Session() as sess:
    a = sess.run(cross_entropy)
    b = sess.run(cross_entropy2)
    print(a)
    print(b)

    # Both print the element-wise cross entropy (not y):
    # [[4.0181484  0.12692808 0.04858734]
    #  [1.3132617  2.1269276  0.04858734]
    #  [1.3132617  0.00671532 0.04858734]]
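
Note that sigmoid_cross_entropy_with_logits treats each class independently (multi-label), so its output keeps the same shape as the logits instead of producing one value per example. TensorFlow evaluates it in the numerically stable form max(x, 0) - x*z + log(1 + exp(-|x|)); the NumPy sketch below (again an addition for illustration) reproduces the printed matrix:

import numpy as np

logits = np.array([[4.0, 2.0, 3.0], [1.0, 2.0, 3.0], [1.0, 5.0, 3.0]])
labels = np.array([[0.0, 1.0, 1.0], [0.0, 0.0, 1.0], [0.0, 1.0, 1.0]])

# Stable element-wise sigmoid cross entropy: max(x, 0) - x*z + log(1 + exp(-|x|)).
loss = np.maximum(logits, 0) - logits * labels + np.log1p(np.exp(-np.abs(logits)))
print(loss)  # matches the matrix printed by the TensorFlow example above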