参考资料: http://blog.sina.com.cn/s/blog_6ca0f5eb0102wr4j.html#cmt_5A0D972D-72F73880-BE365276-926-938 以及 https://mp.csdn.net/postlist
前面已经提到,后期会使用 dropout 等算法来改进识别猫的神经网络;但在实际操作过程中,反向传播计算却遇到了 NaN(数值溢出/除零)的问题。
def backward_propagation_with_dropout(AL, Y, caches, keep_prob = 0.5):# caches = A, W, b, Z
grads = {}
L = len(caches)
m = AL.shape[1]
# print(Y.shape)
# print(AL.shape)
Y = Y.reshape(AL.shape)
dAL = -np.divide(Y, np.clip(AL,1e-6,1.0) - np.divide(1 - Y, np.clip((1 - AL),1e-6,1.0)))
current_cache = caches[L-1]#有的内容是从0到L-1
grads["dA"+str(L)], grads["dW"+str(L)], grads["db"+str(L)] = activation_backward_dropout(dAL, current_cache, activation = "sigmoid", keep_prob &