def gradient_check_n(parameters, gradients, X, Y, epsilon=1e-7):
    """Verify backward propagation by comparing analytic gradients against a
    two-sided finite-difference approximation of the cost gradient.

    Arguments:
    parameters -- dict of network parameters, flattened via dictionary_to_vector
    gradients -- dict of analytic gradients from backward propagation,
                 flattened via gradients_to_vector
    X -- input data fed to forward_propagation_n
    Y -- labels fed to forward_propagation_n
    epsilon -- perturbation step for the finite-difference approximation

    Returns:
    difference -- relative L2 distance between the analytic and numerical
                  gradients: ||grad - gradapprox|| / (||grad|| + ||gradapprox||)
    """
    # Flatten the parameter and gradient dicts into column vectors so each
    # scalar parameter can be perturbed independently.
    parameters_values, _ = dictionary_to_vector(parameters)
    grad = gradients_to_vector(gradients)
    num_parameters = parameters_values.shape[0]
    J_plus = np.zeros((num_parameters, 1))
    J_minus = np.zeros((num_parameters, 1))
    gradapprox = np.zeros((num_parameters, 1))

    # Two-sided difference per coordinate:
    #   dJ/dtheta_i ~ (J(theta_i + eps) - J(theta_i - eps)) / (2 * eps)
    for i in range(num_parameters):
        parameters_values_plus = np.copy(parameters_values)
        parameters_values_plus[i] = parameters_values_plus[i] + epsilon
        # forward_propagation_n returns (cost, cache); only the cost is needed.
        J_plus[i] = forward_propagation_n(
            X, Y, vector_to_dictionary(parameters_values_plus))[0]

        parameters_values_minus = np.copy(parameters_values)
        parameters_values_minus[i] = parameters_values_minus[i] - epsilon
        J_minus[i] = forward_propagation_n(
            X, Y, vector_to_dictionary(parameters_values_minus))[0]

        gradapprox[i] = (J_plus[i] - J_minus[i]) / (2 * epsilon)

    # Relative difference using the Euclidean norm (np.linalg.norm replaces
    # the equivalent hand-rolled sqrt(sum(power(., 2)))).
    numerator = np.linalg.norm(grad - gradapprox)
    denominator = np.linalg.norm(grad) + np.linalg.norm(gradapprox)
    difference = numerator / denominator

    if difference > 1e-7:
        print("\033[93m" + "There is a mistake in the backward propagation! difference = " + str(difference) + "\033[0m")
    else:
        print("\033[92m" + "Your backward propagation works perfectly fine! difference = " + str(difference) + "\033[0m")
    return difference
# Demo: load the test case, run one forward/backward pass, then compare the
# analytic gradients against the numerical approximation.
X, Y, parameters = gradient_check_n_test_case()
cost, cache = forward_propagation_n(X, Y, parameters)
gradients = backward_propagation_n(X, Y, cache)
difference = gradient_check_n(parameters, gradients, X, Y)
# Result:
# There is a mistake in the backward propagation! difference = 0.2850931567761624