import numpy as np
class Logistic(object):
    """Binary logistic-regression classifier trained with batch gradient descent.

    Data layout is column-major: X has shape (n_features, m_samples) and
    Y has shape (1, m_samples) with 0/1 labels.
    """

    def __init__(self, X, Y, num_iterations, learning_rate):
        # Weights drawn from a standard normal, one per feature; bias starts at 0.
        self.w = np.random.normal(size=(X.shape[0], 1))
        self.b = 0
        self.X = X
        self.Y = Y
        self.num_iterations = num_iterations
        self.learning_rate = learning_rate
        self.cost = 0.0   # cross-entropy cost of the most recent propagate()
        self.grads = {}   # {"dw": ..., "db": ...} from the most recent propagate()

    def sigmoid(self, z):
        """Element-wise logistic function 1 / (1 + e^(-z))."""
        return 1 / (1 + np.exp(-z))

    def propagate(self):
        """Run one forward/backward pass; updates self.cost and self.grads."""
        m = self.X.shape[1]
        # Forward propagation: predicted probabilities A, shape (1, m).
        A = self.sigmoid(np.dot(self.w.T, self.X) + self.b)
        # Mean cross-entropy cost; squeeze to a scalar in a single step
        # (the original assigned cost twice).
        self.cost = np.squeeze(
            (1 / m) * np.sum(-self.Y * np.log(A) - (1 - self.Y) * np.log(1 - A))
        )
        # Backward propagation: gradients of the cost w.r.t. w and b.
        dw = (1 / m) * np.dot(self.X, (A - self.Y).T)
        db = (1 / m) * np.sum(A - self.Y)
        self.grads = {"dw": dw, "db": db}

    def run(self):
        """Train for num_iterations steps of batch gradient descent."""
        for i in range(self.num_iterations):
            self.propagate()
            self.w = self.w - self.learning_rate * self.grads["dw"]
            self.b = self.b - self.learning_rate * self.grads["db"]
            if i % 500 == 0:
                print("第", i, "次迭代,cost值为:" + str(self.cost))

    def predict(self, X):
        """Return hard 0/1 predictions for X of shape (n_features, m_samples)."""
        w = self.w.reshape(X.shape[0], 1)
        A = self.sigmoid(np.dot(w.T, X) + self.b)
        return np.round(A)
# This article is just a warm-up; the derivation and the loss function are covered in detail in the next article.