# -*- coding: utf-8 -*-
"""
Created on Mon Dec 24 21:32:37 2018
@author: ZZL
"""
import tensorflow as tf  # TF 1.x-style API (tf.Session, tf.log)
# our network's raw output (unnormalized logits)
logits = tf.constant([[1.0, 2.0, 3.0],
                      [1.0, 2.0, 3.0]])
# step 1: apply softmax to turn the logits into probabilities
y = tf.nn.softmax(logits)
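# softmax of a row x is exp(x_i) / sum_j exp(x_j), so each row of y sums to 1;
# for logits [1, 2, 3] this gives roughly [0.0900, 0.2447, 0.6652]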
# "true" label distribution; note each row sums to 1.4 rather than 1.0,
# so this is a demo target, not a valid probability distribution
y_ = tf.constant([[0.0, 0.4, 1.0],
                  [0.0, 0.4, 1.0]])
# step 2: cross-entropy by hand: H(y_, y) = -sum_i y_[i] * log(y[i])
a = tf.log(y)   # elementwise log of the softmax probabilities
b = y_ * a      # elementwise terms y_ * log(y)
cross_entropy = -tf.reduce_sum(y_ * tf.log(y), axis=1)  # sum over classes, one value per example
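# with the logits and labels above, each row's cross-entropy comes out to about 0.9707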
# the same cross-entropy in one fused op; it applies softmax internally and
# already sums over the classes, so no extra tf.reduce_sum is needed here
cross_entropy2 = tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=logits)
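# the fused op effectively computes -sum_i y_[i] * (logits[i] - logsumexp(logits)),
# which is more numerically stable than log(softmax) when a probability underflows toward 0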
with tf.Session() as sess:
    softmax = sess.run(y)
    b_val = sess.run(b)
    c_e = sess.run(cross_entropy)
    c_e2 = sess.run(cross_entropy2)
print("step1:softmax result=")
print(softmax)
print('交叉熵b=y_*tf.log(y)')
print(b)
print("step2:cross_entropy result=")
print(c_e)
print("Function(softmax_cross_entropy_with_logits) result=")
print(c_e2)
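
# Sanity check: the same numbers can be reproduced with plain NumPy.
# A minimal sketch, assuming numpy is installed; the names below (np_logits,
# np_labels, np_softmax, np_ce) are illustrative and not part of the original script.
import numpy as np

np_logits = np.array([[1.0, 2.0, 3.0],
                      [1.0, 2.0, 3.0]])
np_labels = np.array([[0.0, 0.4, 1.0],
                      [0.0, 0.4, 1.0]])
# subtract the row max before exponentiating for numerical stability
shifted = np_logits - np_logits.max(axis=1, keepdims=True)
np_softmax = np.exp(shifted) / np.exp(shifted).sum(axis=1, keepdims=True)
np_ce = -(np_labels * np.log(np_softmax)).sum(axis=1)
print("NumPy cross-entropy check =")  # should print approximately [0.9707 0.9707]
print(np_ce)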