深度学习吴恩达作业
for i in range(num_parameters):
# Compute J_plus[i]. Inputs: "parameters_values, epsilon". Output = "J_plus[i]".
# "_" is used because the function you have to outputs two parameters but we only care about the first one
### START CODE HERE ### (approx. 3 lines)
# Step 1
thetaplus = np.copy(parameters_values)
# Step 2
thetaplus[i][0] = thetaplus[i][0] + epsilon
J_plus[i], cache = forward_propagation_n(X,Y,vector_to_dictionary(thetaplus))
### END CODE HERE ###
# Compute J_minus[i]. Inputs: "parameters_values, epsilon". Output = "J_minus[i]".
### START CODE HERE ### (approx. 3 lines)
# Step 1
thetaminus = np.copy(parameters_values)
# Step 2
thetaminus[i][0] = thetaminus[i][0] - epsilon
J_minus[i], cache = forward_propagation_n(X,Y,vector_to_dictionary(thetaminus))
### END CODE HERE ###
# Compute gradapprox[i]
### START CODE HERE ### (approx. 1 line)
gradapprox[i] = (J_plus[i] - J_minus[i]) / (2 * epsilon)
### END CODE HERE ###
# Compare gradapprox to backward propagation gradients by computing difference.
### START CODE HERE ### (approx. 1 line)
numerator = np.linalg.norm(grad - gradapprox) # Step 1'
denominator = np.linalg.norm(grad) + np.linalg.norm(gradapprox) # Step 2'
difference = numerator / denominator
### END CODE HERE ###
if difference > 1e-7:
print ("\033[93m" + "There is a mistake in the backward propagation! difference = " + str(difference) + "\033[0m")
else:
print ("\033[92m" + "Your backward propagation works perfectly fine! difference = " + str(difference) + "\033[0m")
return difference
请问 thetaminus[i][0] = thetaminus[i][0] - epsilon 这一行中的下标 [i][0] 是什么意思？跪求解答。