This post is the fourth programming assignment of Andrew Ng's Machine Learning course: Neural Network Back Propagation, implemented in Python.
ex4.py is the entry point of the program.
The assignment files and training data can be downloaded at: https://github.com/toanoyx/MachineLearning-AndrewNg-coursera-python/tree/master/ex4%20NN%20back%20propagation
The source code of the files follows.
ex4.py
import numpy as np
import scipy.io as sio
import matplotlib.pyplot as plt
import scipy.optimize as opt  # optimizer for training; replaces the unused tensorflow.contrib import
from sklearn.metrics import classification_report
from loadData import *
from displayData import *
from feedForward import *
from nnCostFunction import *
from computeNumericalGradient import *
from checkNNGradients import *
""" 第1部分 可视化数据集 """
X, _ = loadData('ex4data1.mat')
displayData(X)
plt.show()
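# displayData (its source appears later in this post) renders a grid of the
# 20x20 grayscale digit images so the training set can be inspected visually.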
""" 第2部分 模型表示 """
X_raw, y_raw = loadData('ex4data1.mat', transpose=False)
X = np.insert(X_raw, 0, np.ones(X_raw.shape[0]), axis=1)
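# A column of ones is prepended to X_raw as the bias unit, so X has shape
# (5000, 401): 5000 examples, each with 400 pixel features plus the bias term.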
def expand_y(y):
    """One-hot encode the labels: label k becomes a 10-vector with a 1 at index k - 1."""
    res = []
    for i in y:
        y_array = np.zeros(10)
        y_array[i - 1] = 1
        res.append(y_array)
    return np.array(res)
y = expand_y(y_raw)
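# Illustration (not part of the original script): labels run from 1 to 10,
# with 10 standing for the digit 0, so for example
#   expand_y(np.array([1, 10]))
#   -> [[1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
#       [0, 0, 0, 0, 0, 0, 0, 0, 0, 1]]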
def load_weight(path):
    """Load the pre-trained weight matrices Theta1 and Theta2 from a .mat file."""
    data = sio.loadmat(path)
    return data['Theta1'], data['Theta2']
t1, t2 = load_weight('ex4weights.mat')
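# ex4weights.mat holds pre-trained parameters for the 400-25-10 network used in
# this exercise: Theta1 has shape (25, 401) and Theta2 has shape (10, 26),
# each including a bias column.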
""" 第3部分 前向传播和代价函数 """
theta = np.concatenate((np.ravel(t1), np.ravel(t2)))
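# Both weight matrices are unrolled into a single flat vector, the form that
# feedForward and nnCostFunction expect; h below is the matrix of output-layer
# hypotheses, one 10-vector per training example.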
_, _, _, _, h = feedForward(theta, X)
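# For reference, nnCostFunction implements the cross-entropy cost over all
# m examples and K = 10 output units:
#   J = (1/m) * sum_i sum_k [-y_k(i) * log(h_k(i)) - (1 - y_k(i)) * log(1 - h_k(i))]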
print("cost function: " + str(nnCostFunction(theta, X, y)) + "(this should be 0.287629)")
""" 第4部分 正则化代价函数 """
t1, t2 = deserialize(theta)
m = X.shape[0]
l = 1  # regularization parameter lambda
reg_t1 = (l / (2 * m)) * np.power(t1[:, 1:], 2).sum()
reg_t2 = (l / (2 * m)) * np.power(t2[:, 1:], 2).sum()
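# The penalty sums the squared weights of both layers, skipping the bias
# columns t1[:, 0] and t2[:, 0], and scales by lambda / (2 * m); adding it to
# the unregularized cost yields the regularized cost.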
regularizedCost = nnCostFunction(theta, X, y) + reg_t1 + reg_t2
print("regularized cost function: " + str(regularizedCost) + " (this should be 0.383770)")