Following a modular programming approach, I wrote a fully connected neural network from scratch. It consists of four files, described below.
The first file is layers.py, which implements the basic affine forward pass, the forward pass of the ReLU activation, and the ReLU backward pass:
import numpy as np

def simple_forward(x, w, b):
    # Affine forward pass: out = x.dot(w) + b
    return x.dot(w) + b

def relu_func(x):
    # Element-wise ReLU activation
    return np.maximum(x, 0)

def relu_forward(x, w, b):
    # Affine transform followed by ReLU
    temp = simple_forward(x, w, b)
    return relu_func(temp)
def relu_backward(dout, x, w, b):
    # Backward pass matching relu_forward: zero the upstream gradient
    # wherever the ReLU was inactive (pre-activation <= 0), then
    # backpropagate through the affine transform
    temp = simple_forward(x, w, b)
    dout = dout * (temp > 0)
    dw = x.T.dot(dout)
    db = np.sum(dout, axis=0)
    dx = dout.dot(w.T)
    return dx, dw, db
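To check that relu_backward really is the gradient of relu_forward, the analytic gradients can be compared against numerical ones. The snippet below is a minimal sketch of such a check and is not part of the original four files; the shapes, random seed, and step size h are arbitrary choices of mine:

import numpy as np
from layers import relu_forward, relu_backward

np.random.seed(0)
x = np.random.randn(4, 5)
w = np.random.randn(5, 3)
b = np.random.randn(3)
dout = np.random.randn(4, 3)

# Analytic gradients from the backward pass
dx, dw, db = relu_backward(dout, x, w, b)

# Centered numerical gradient of the scalar loss sum(out * dout) w.r.t. w;
# the gradient of this loss with respect to out is exactly dout
h = 1e-5
dw_num = np.zeros_like(w)
for i in range(w.shape[0]):
    for j in range(w.shape[1]):
        w_plus = w.copy();  w_plus[i, j] += h
        w_minus = w.copy(); w_minus[i, j] -= h
        f_plus = np.sum(relu_forward(x, w_plus, b) * dout)
        f_minus = np.sum(relu_forward(x, w_minus, b) * dout)
        dw_num[i, j] = (f_plus - f_minus) / (2 * h)

print(np.max(np.abs(dw - dw_num)))  # should be very small, e.g. < 1e-7

The same loop can be repeated for b and x to check db and dx.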
The second file is FullyConNet.py, which handles initializing the network, the forward pass, backpropagation, and computing accuracy on the test set:
import numpy as np
import Strategy as st
from layers import *

class FullyConNet:
    # Constants shared by all instances
    lr_decay = 0.95        # multiplicative learning-rate decay factor
    iter_per_ann = 400     # iterations between two annealing (decay) steps
    weight_init = 2e-2     # scale of the random weight initialization

    def __init__(self, input_layer, hidden_layer, output_layer,
                 update_rule, learning_rate, batch_size, epoch, reg):
        self.reg = reg
        self.batch_size = batch_size
        self.epoch = epoch
        # Mutable state lives on the instance, not the class, so that
        # separate networks never share parameters or optimizer configs
        self.parameter = {}
        self.config = {}
        self.update_rule = None
        # Full list of layer sizes: [input, hidden..., output]
        self.layers = [input_layer] + hidden_layer + [output_layer]
        # Resolve the update rule (e.g. "sgd") by name from Strategy.py
        if hasattr(st, update_rule):
            self.update_rule = getattr(st, update_rule)
        length = len(hidden_layer) + 1  # number of weight matrices, e.g. 6
        for i in range(length):  # i = 0, 1, ..., length - 1
            self.parameter['w' + str(i)] = self.weight_init * \
                np.random.randn(self.layers[i], self.layers[i + 1])
            self.parameter['b' + str(i)] = np.zeros(self.layers[i + 1])
        # Per-parameter optimizer configuration
        for name in self.parameter:
            self.config[name] = {"learning_rate": learning_rate}
    def forward_process(self, train_data, cache_output=None):
        if(cache_output