神经网络
反向传播第一层的代码如下:
import numpy as np
class FullyConnect:
    """A fully connected (dense) layer: forward computes y = W·x + b.

    Weights have shape (l_y, l_x), bias has shape (l_y,).
    Assumes 1-D input x of shape (l_x,) and 1-D upstream gradient d of
    shape (l_y,) — TODO confirm against the calling training loop.
    """

    def __init__(self, l_x, l_y):
        # Original had `_init_` (single underscores), so Python never
        # called it as a constructor.
        self.weights = np.random.randn(l_y, l_x)
        # Original bug: np.random.randn(l) referenced an undefined name
        # `l`; the bias length must match the output size l_y.
        self.bias = np.random.randn(l_y)

    def forward(self, x):
        """Return y = W·x + b, caching x for the backward pass."""
        self.x = x
        # Original bug: np.dot(self.weights.x) is an attribute lookup on
        # the weight array, not a matrix-vector product.
        self.y = np.dot(self.weights, x) + self.bias
        return self.y

    def backward(self, d):
        """Given upstream gradient d = dL/dy, compute parameter gradients.

        Returns (dw, db); dx (gradient w.r.t. the input, for the previous
        layer) is stored on the instance as self.dx.
        Original declared the parameter as `x` but used an undefined `d`.
        """
        # dL/dW is the outer product of the output gradient and the cached
        # input (original element-wise `d*self.x` has mismatched shapes).
        self.dw = np.outer(d, self.x)
        # dL/db is the upstream gradient itself.
        self.db = d
        # dL/dx = W^T · d (original `d*self.weights` was element-wise).
        self.dx = np.dot(self.weights.T, d)
        return self.dw, self.db
反向传播第二层(激活函数层)的代码如下:
class Simoid:
    """Sigmoid activation layer.

    NOTE(review): the class name is a typo for "Sigmoid"; it is kept
    unchanged so existing callers keep working.
    """

    def __init__(self):
        # Original had `_init_` (single underscores); there is no state to
        # initialize, so the body is intentionally empty.
        pass

    def sigmoid(self, x):
        """Element-wise logistic function 1 / (1 + e^-x)."""
        return 1 / (1 + np.exp(-x))

    def forward(self, x):
        """Apply the sigmoid to x, caching input and output."""
        self.x = x
        self.y = self.sigmoid(x)
        return self.y

    def backward(self):
        """Return the local derivative sigma(x) * (1 - sigma(x)).

        Original bug: it read `sig.x` before `sig` was defined, raising
        NameError; the cached input is self.x.
        """
        sig = self.sigmoid(self.x)
        self.dx = sig * (1 - sig)
        return self.dx
其实,反向传播算法是用链式法则逐层计算梯度的方法;梯度下降算法再利用这些梯度来更新参数,二者配合完成训练。