Softmax implemented for both 1-D vectors and 2-D matrices in NumPy. The maximum is subtracted before exponentiation to avoid overflow; this does not change the result, since softmax(x) = softmax(x + c) for any constant c.
import numpy as np

def softmax(x):
    orig_shape = x.shape
    print("orig_shape", orig_shape)
    x = x.astype(float)  # work on a float copy so the caller's array is not modified in place
    if len(x.shape) > 1:
        # matrix: subtract each row's max, exponentiate, then normalize each row
        tmp = np.max(x, axis=1)
        x -= tmp.reshape((x.shape[0], 1))
        x = np.exp(x)
        tmp = np.sum(x, axis=1)
        x /= tmp.reshape((x.shape[0], 1))
        print("matrix")
    else:
        # vector: subtract the max, exponentiate, then normalize
        tmp = np.max(x)
        x -= tmp
        x = np.exp(x)
        tmp = np.sum(x)
        x /= tmp
        print("vector")
    return x
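The same stabilized softmax can also be written without the matrix/vector branch by reducing along the last axis with keepdims=True, so the subtraction and division broadcast correctly for both 1-D and 2-D inputs. This is a minimal sketch of that variant (the name softmax_keepdims is mine, not from the original code):

import numpy as np

def softmax_keepdims(x):
    # One code path for 1-D and 2-D inputs: reduce along the last axis and
    # keep that axis so the result broadcasts back against x.
    x = np.asarray(x, dtype=float)
    shifted = x - np.max(x, axis=-1, keepdims=True)   # subtract the (row-)max for stability
    e = np.exp(shifted)
    return e / np.sum(e, axis=-1, keepdims=True)

For the arrays used below this produces the same values as softmax, just without the shape diagnostics.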
x = np.array([[1, 2, 3, 4], [1, 2, 3, 4]])
x1 = np.array([1, 2, 3, 4])
print(x)
print(x1)
# output:
[[1 2 3 4]
 [1 2 3 4]]
[1 2 3 4]
#
print(np.max(x, axis=1))
print(np.sum(x, axis=1))
# output:
[4 4]
[10 10]
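These per-row reductions have shape (2,), which is why the function reshapes them to (x.shape[0], 1) before subtracting and dividing: a (2, 1) column broadcasts against the (2, 4) matrix row by row. A small illustration of that step (variable names here are just for the example):

import numpy as np

x = np.array([[1, 2, 3, 4], [1, 2, 3, 4]])
row_max = np.max(x, axis=1)              # shape (2,)
col = row_max.reshape((x.shape[0], 1))   # shape (2, 1)
print(x - col)                           # each row has its own max subtracted
# [[-3 -2 -1  0]
#  [-3 -2 -1  0]]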
#
print(softmax(x))
print(softmax(x1))
# output:
orig_shape (2, 4)
matrix
[[0.0320586 0.08714432 0.23688282 0.64391426]
 [0.0320586 0.08714432 0.23688282 0.64391426]]
orig_shape (4,)
vector
[0.0320586 0.08714432 0.23688282 0.64391426]
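As a quick sanity check (not part of the original post), each output row should sum to 1, and shifting every input by the same constant should leave the result unchanged; softmax will also print its shape diagnostics when called:

probs = softmax(np.array([[1., 2., 3., 4.], [1., 2., 3., 4.]]))
print(np.sum(probs, axis=1))   # [1. 1.]
print(np.allclose(softmax(np.array([1., 2., 3., 4.])),
                  softmax(np.array([101., 102., 103., 104.]))))   # True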