y=tf.constant([1,2,3,0,2])   # integer class labels for 5 samples
y=tf.one_hot(y,depth=4)      # one-hot encode into 4 classes
y
<tf.Tensor: shape=(5, 4), dtype=float32, numpy=
array([[0., 1., 0., 0.],
       [0., 0., 1., 0.],
       [0., 0., 0., 1.],
       [1., 0., 0., 0.],
       [0., 0., 1., 0.]], dtype=float32)>
y=tf.cast(y,dtype=tf.float32)  # tf.one_hot already returns float32, so this cast is a no-op here
out=tf.random.normal([5,4])                 # random "predictions" for demonstration
loss1=tf.reduce_mean(tf.square(y-out))      # mean of element-wise squared errors
loss2=tf.square(tf.norm(y-out))/(5*4)       # squared L2 norm divided by the element count
loss3=tf.reduce_mean(tf.losses.MSE(y,out))  # per-sample MSE averaged over the batch
loss1
<tf.Tensor: shape=(), dtype=float32, numpy=1.5393263>
loss2
<tf.Tensor: shape=(), dtype=float32, numpy=1.5393263>
loss3
<tf.Tensor: shape=(), dtype=float32, numpy=1.5393263>
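The three results agree because each expression reduces to the same sum of squared errors divided by the total element count; a quick sketch of that equivalence, reusing the y and out from above:
sse=tf.reduce_sum(tf.square(y-out))  # total squared error over all 5*4 entries
sse/(5*4)                            # equals loss1, loss2 and loss3 for any draw of out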
Entropy
p=tf.fill([4],0.25)                                        # uniform distribution over 4 classes
entropy=-tf.reduce_sum(p*tf.math.log(p)/tf.math.log(2.))  # -Σ p·log2(p); tf.math.log is the natural log
entropy
<tf.Tensor: shape=(), dtype=float32, numpy=2.0>
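As a sanity check (a sketch reusing the same bits formula), a more peaked distribution carries less entropy than the uniform one:
p=tf.constant([0.1,0.1,0.1,0.7])
entropy=-tf.reduce_sum(p*tf.math.log(p)/tf.math.log(2.))
entropy  # ≈ 1.357 bits, below the 2-bit maximum for 4 classes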
The smaller the cross entropy, the better:
Solving classification problems:
tf.losses.categorical_crossentropy([0,1,0,0],[0.25,0.25,0.25,0.25],from_logits=True)
<tf.Tensor: shape=(), dtype=float32, numpy=1.3862944>
tf.losses.categorical_crossentropy([0,1,0,0],[0.1,0.7,0.1,0.1],from_logits=True)
<tf.Tensor: shape=(), dtype=float32, numpy=0.97321343>
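Note that from_logits=True tells the function to treat the second argument as unnormalized logits and apply softmax internally, which is why the second call prints ≈0.973 instead of -ln(0.7)≈0.357. When the predictions are already probabilities, the flag should be omitted; a sketch:
tf.losses.categorical_crossentropy([0.,1.,0.,0.],[0.1,0.7,0.1,0.1])
# ≈ 0.3567 = -ln(0.7)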
The sigmoid function