from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
# import sys
# import math
import mxnet as mx
import mxnet.gluon.loss as gloss
import numpy as np
# Two samples, three classes: all-zero logits, with both labels pointing at class 0.
a = np.zeros((2, 3))
b = np.zeros((2, 1))
pred = mx.nd.array(a)   # unnormalized logits, shape (2, 3)
label = mx.nd.array(b)  # sparse class indices, shape (2, 1)

# Gluon's fused softmax + cross-entropy; by default it takes logits and sparse labels.
loss = gloss.SoftmaxCrossEntropyLoss()
print("loss", loss(pred, label).asnumpy())
def _softmax(x):
    """Row-wise softmax; subtracting the row max keeps exp() from overflowing."""
    x = x - x.max(axis=1, keepdims=True)
    step1 = x.exp()
    step2 = step1.sum(axis=1, keepdims=True)
    return step1 / step2
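# Quick usage check for the helper (my addition): each row of a softmax output
# is a probability distribution, so the row sums should all be 1.
print("row sums:", _softmax(pred).sum(axis=1).asnumpy())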
# Cross-check with the CrossEntropy metric, which expects probabilities rather
# than logits, so pred is pushed through an explicit softmax first.
ce = mx.metric.CrossEntropy()
ce.update(label, _softmax(pred))
name, value = ce.get()  # get() returns a (metric_name, value) tuple
print(name, value)
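# A hedged variant (my assumption: one-hot labels are acceptable here): the same
# Gluon loss can consume dense labels when built with sparse_label=False.
# mx.nd.one_hot expects a flat vector of indices, so the (2, 1) label is reshaped.
onehot = mx.nd.one_hot(label.reshape((-1,)), depth=3)
dense_loss = gloss.SoftmaxCrossEntropyLoss(sparse_label=False)
print("dense loss", dense_loss(pred, onehot).asnumpy())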
# Notes on common functions
def softmax(o):
    """The softmax function.