# Softmax
# Softmax 代价函数与 logistic 代价函数在形式上非常类似, 只是在 Softmax 损失函数中
# 对类标记的 $k$ 个可能值进行了累加。注意在 Softmax 回归中将 $x$ 分类为类别 $j$ 的概率为:
#   $P(y = j \mid x) = \dfrac{e^{\theta_j^{T} x}}{\sum_{l=1}^{k} e^{\theta_l^{T} x}}$
"""Softmax."""
scores = [3.0, 1.0, 0.2]
import numpy as np
def softmax(x):
    """Compute softmax values for each set of scores in x.

    Scores are exponentiated and normalised along axis 0 (column-wise),
    so each column of the result sums to 1.  Works for a 1-D score
    vector or a 2-D array whose columns are independent score sets.

    The input is shifted by its column-wise maximum before
    exponentiation: softmax is invariant under a constant shift, so the
    result is mathematically unchanged, but this prevents np.exp from
    overflowing for large scores.
    """
    shifted = x - np.max(x, axis=0)  # broadcasts over columns in the 2-D case
    exps = np.exp(shifted)
    return exps / np.sum(exps, axis=0)
print(softmax(scores))
# Plot softmax curves
import matplotlib.pyplot as plt

x = np.arange(-2.0, 6.0, 0.1)
ones = np.ones_like(x)
# Three score rows stacked vertically into one array: a varying score,
# a constant 1.0, and a constant 0.2.
scores = np.vstack([x, ones, 0.2 * ones])
# Transpose so each class's probability is drawn as its own curve.
plt.plot(x, softmax(scores).T, linewidth=2)
plt.show()
# Expected output: [0.8360188  0.11314284  0.05083836]