import numpy as np
import paddle.fluid as fluid
import paddle
# Demo: paddle's softmax_with_cross_entropy with (a) hard labels (class
# indices) and (b) soft labels (per-class probability distributions), plus a
# manual check that the loss equals -log(softmax probability of true class).

# Logits for a batch of 4 samples over 4 classes.
logit_y = np.array([[1.23, 2.33, 3.33, 2.11],
                    [5.23, 2.33, 3.33, 2.11],
                    [1.23, 8.33, 3.33, 2.11],
                    [1.23, 2.33, 3.33, 2.11]]).astype(np.float32)
# Hard labels: one class index per sample, shape (4, 1); the op requires int64.
output_y1 = np.array([[3], [0], [1], [1]]).astype(np.int64)
# Soft labels: one-hot rows, shape (4, 4), float32 (same dtype as the logits).
output_y2 = np.array([[0, 0, 0, 1], [1, 0, 0, 0], [0, 1, 0, 0], [0, 1, 0, 0]]).astype(np.float32)
logit_y = paddle.to_tensor(logit_y)
output_y1 = paddle.to_tensor(output_y1)
output_y2 = paddle.to_tensor(output_y2)
#print(logit_y.shape, output_y1.shape)
#print(logit_y.numpy(), output_y1.numpy())
# Hard-label form: softmax is applied to the logits internally.
loss1 = fluid.layers.softmax_with_cross_entropy(logit_y, output_y1)
print(loss1)
print(loss1.numpy()[0])
# Soft-label form: labels are probability distributions over classes.
loss2 = fluid.layers.softmax_with_cross_entropy(logit_y, output_y2, soft_label=True)
print(loss2)
print(loss2.numpy()[0])
# -------------
# loss_i = -1.0 * y_i * np.log(p_i), where y_i = 1 !!!
# Fix: the original discarded this result, so the reference output quoted
# below was never actually printed — assign and print it.
probs = fluid.layers.softmax(logit_y)
print(probs)
"""
Tensor(shape=[4, 4], dtype=float32, place=CPUPlace, stop_gradient=True,
[[0.06858130, 0.20602959, 0.56004649, 0.16534261],
[0.80080152, 0.04406267, 0.11977477, 0.03536112],
[0.00081730, 0.99053812, 0.00667419, 0.00197042],
[0.06858130, 0.20602959, 0.56004649, 0.16534261]])
"""
# Manual check for sample 0 (true class 3, p = 0.16534261):
print(-1.0 * np.log(0.16534261)) # 1.79973555
# Usage of paddle softmax_with_cross_entropy
# (blog post first published 2022-03-01 16:38:33)