Python3: feedforward implementation of a convolutional neural network's convolution, pooling, and fully connected layers

# -*- coding: utf-8 -*-
"""
Created on Sun Mar  4 09:21:41 2018

@author: markli
"""
import numpy as np

def ReLU(x):
    # np.maximum works element-wise, so this is safe for scalars and arrays
    return np.maximum(0, x)

def logistic(x):
    return 1 / (1 + np.exp(-x))

def logistic_derivative(x):
    return logistic(x) * (1 - logistic(x))

class ConvolutionLayer:
    """
    Convolution layer of a convolutional neural network.
    """
    def __init__(self, shape, padding, filters, stride):
        """
        shape    shape of the layer's input, tuple (rows, cols, channels)
        padding  number of zero rows/columns padded on each side
        filters  shape of the filters, tuple (rows, cols, channels, count)
        stride   step size
        """
        self.shape = shape
        self.padding = padding
        self.stride = stride
        self.filtersize = filters[:3]
        self.filternum = filters[3]
        self.weights = []
        for i in range(filters[3]):
            self.weights.append(np.random.randn(shape[2], filters[0], filters[1]))
        self.biases = list(np.random.randn(filters[3]))
        # output size along each spatial dimension is (n + 2p - f)/s + 1;
        # e.g. a 28x28 input with padding 0, a 5x5 filter, and stride 1
        # gives (28 - 5)/1 + 1 = 24, i.e. a 24x24 feature map
        self.convolutionsize = (int((shape[0] + 2*padding - filters[0]) / stride + 1),
                                int((shape[1] + 2*padding - filters[1]) / stride + 1))
        self.conv = np.ones((filters[3], self.convolutionsize[0], self.convolutionsize[1]))

    def Convolute(self, Data):
        """
        Data: 3-D array (channels, rows, cols); for 2-D input use a channel count of 1.
        """
        # zero-pad every channel into a fresh array (the original wrote the
        # padded plane back into Data[c], which cannot hold the larger shape)
        if self.padding != 0:
            padded = np.zeros((self.shape[2],
                               self.shape[0] + 2*self.padding,
                               self.shape[1] + 2*self.padding))
            for c in range(self.shape[2]):
                padded[c, self.padding:self.padding+self.shape[0],
                          self.padding:self.padding+self.shape[1]] = Data[c]
            Data = padded
        c, m, n = Data.shape

        # iterate over every filter
        for f in range(self.filternum):
            t_conv = self.conv[f]   # result plane for filter f
            w = self.weights[f]     # weight set of filter f
            b = self.biases[f]      # bias of filter f
            # convolution: slide the window over all channels at once
            row = 0
            for i in range(self.convolutionsize[0]):
                col = 0
                for j in range(self.convolutionsize[1]):
                    # data cube under the filter window
                    data = Data[:, row:row+self.filtersize[0], col:col+self.filtersize[1]]
                    s = 0  # accumulated sum of element-wise products
                    # convolve each channel of the extracted cube
                    for t_c in range(c):
                        t_w = w[t_c]
                        t_data = data[t_c]
                        # np.sum, not the built-in sum, which would only
                        # collapse the first axis and leave a 1-D array
                        temp = np.sum(np.multiply(t_w, t_data))
                        s = temp + s
                    t_conv[i, j] = ReLU(s + b)
                    # move the filter right
                    col = col + self.stride
                # move the filter down
                row = row + self.stride
            # store the result for filter f
            self.conv[f] = t_conv

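A minimal usage sketch for the convolution layer (the shapes and random input below are illustrative assumptions, not part of the original code); Convolute leaves its result in self.conv:

layer = ConvolutionLayer(shape=(5, 5, 1), padding=0, filters=(3, 3, 1, 1), stride=1)
x = np.random.randn(1, 5, 5)     # one channel, 5x5, illustrative random data
layer.Convolute(x)
print(layer.conv.shape)          # (1, 3, 3): one filter, (5 - 3)/1 + 1 = 3 per side
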
class PoolLayer:
    """Pooling layer"""
    def __init__(self, shape, poolsize, stride, classic="max"):
        """
        shape    shape of the pooling input, tuple (rows, cols, channels)
        poolsize shape of the pooling window, tuple (rows, cols)
        stride   step size; the stride usually equals the pooling size
        classic  pooling mode: max or average
        """
        self.shape = shape
        self.stride = stride
        self.poolsize = poolsize
        self.classic = classic
        # spatial size of the pooled output; the divisions must be cast to
        # int, since Python 3's / yields floats that np.ones and range reject
        self.c_poolsize = (int((shape[0] - poolsize[0]) / stride + 1),
                           int((shape[1] - poolsize[1]) / stride + 1))
        # container for the pooled result
        self.pool = np.ones((shape[2], self.c_poolsize[0], self.c_poolsize[1]))

    def Pool(self, Data):
        """
        Data: 3-D array (channels, rows, cols); for 2-D input use a channel count of 1.
        """
        c, m, n = Data.shape

        # pool each channel independently
        for k in range(c):
            p_temp = Data[k]
            row = 0
            for i in range(self.c_poolsize[0]):
                col = 0
                for j in range(self.c_poolsize[1]):
                    temp = p_temp[row:row+self.poolsize[0], col:col+self.poolsize[1]]
                    if self.classic == "average":
                        self.pool[k][i][j] = np.sum(temp) / (self.poolsize[0] * self.poolsize[1])
                    elif self.classic == "max":
                        # elif, so that "average" no longer falls through to
                        # the error branch as it did with two separate ifs
                        self.pool[k][i][j] = np.max(temp)
                    else:
                        print("the classic does not exist")

                    col = col + self.stride

                row = row + self.stride

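A matching sketch for the pooling layer (again with illustrative random data); Pool writes its result into self.pool:

pool = PoolLayer(shape=(4, 4, 1), poolsize=(2, 2), stride=2, classic="max")
fmap = np.random.randn(1, 4, 4)
pool.Pool(fmap)
print(pool.pool.shape)           # (1, 2, 2): (4 - 2)/2 + 1 = 2 per side
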
class FullConnectLayer:
    """Fully connected layer"""
    def __init__(self, n_in, n_out, action_fun=logistic, action_fun_der=logistic_derivative, flag="normal"):
        """
        n_in           number of input units
        n_out          number of output units, i.e. the size of the next layer
        action_fun     activation function
        action_fun_der derivative of the activation function
        flag           weight/bias initialization scheme: normal, larger, smaller
                       (flag needs a default value here; a non-default parameter
                       after default ones is a SyntaxError)
        """
        self.action_fun = action_fun
        self.action_fun_der = action_fun_der
        self.n_in = n_in
        self.n_out = n_out
        self.init_weight_biase(flag)   # was missing the self. prefix

    def init_weight_biase(self, init_flag):
        if init_flag == "normal":      # the original tested the misspelling "noraml"
            self.weight = np.random.randn(self.n_out, self.n_in)  # weights drawn from N(0,1)
            self.biase = np.random.randn(self.n_out, 1)
        elif init_flag == "larger":
            # np.random.rand (uniform), not randn, so values really fall in (-1,1)
            self.weight = 2*np.random.rand(self.n_out, self.n_in) - 1  # weights in (-1,1)
            self.biase = 2*np.random.rand(self.n_out, 1) - 1           # biases in (-1,1)
        elif init_flag == "smaller":
            self.weight = np.random.randn(self.n_out, self.n_in) / np.sqrt(self.n_out)  # weights from N(0, 1/n_out)
            self.biase = np.random.randn(self.n_out, 1)

    def Forward(self, inpt):
        """Feedforward pass of the fully connected layer"""
        self.inpt = np.dot(self.weight, inpt) + self.biase
        self.outpt = self.action_fun(self.inpt)

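One plausible way to chain the three layers for a single forward pass, assuming the pooled feature maps are flattened into a column vector before entering the fully connected layer (the shapes here are again only for illustration):

conv = ConvolutionLayer(shape=(6, 6, 1), padding=0, filters=(3, 3, 1, 2), stride=1)
pool = PoolLayer(shape=(4, 4, 2), poolsize=(2, 2), stride=2)
full = FullConnectLayer(n_in=8, n_out=10, flag="normal")

x = np.random.randn(1, 6, 6)
conv.Convolute(x)                        # conv.conv has shape (2, 4, 4)
pool.Pool(conv.conv)                     # pool.pool has shape (2, 2, 2)
full.Forward(pool.pool.reshape(-1, 1))   # flatten to an 8x1 column vector
print(full.outpt.shape)                  # (10, 1)
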
"""Softmax Layer"""
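
The softmax layer is only a stub above. A minimal sketch of the usual softmax computation, shifting by the maximum for numerical stability (this function is an assumption, not part of the original code):

def softmax(x):
    # subtract the max before exponentiating to avoid overflow;
    # the shift cancels out in the normalized result
    e = np.exp(x - np.max(x))
    return e / np.sum(e)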

I still have no clear idea how to implement backpropagation, or how the connections between the three layers should be designed for it. My ability is limited, and exchanges are welcome. My WeChat ID is markli52024.

Reposted from: https://www.cnblogs.com/FightLi/p/8507682.html
