# Implementation of a simple convolutional network, with some personal notes.
'''
Network architecture:
    conv -> activation -> pool -> affine -> activation -> affine -> softmax
'''
import numpy as np
class SimpleConvNet:
    """Simple CNN: conv - relu - pool - affine - relu - affine - softmax.

    Only parameter initialization is shown here; ``self.params`` holds:
        W1, b1 : convolution filters and biases
        W2, b2 : first fully-connected (affine) layer
        W3, b3 : output affine layer
    """

    def __init__(self, input_dim=(1, 28, 28),
                 conv_param=None,
                 hidden_size=100,
                 output_size=10, weight_init_std=0.01):
        """Initialize network weights.

        Parameters
        ----------
        input_dim : tuple
            (channels, height, width) of one input image.
        conv_param : dict or None
            Convolution settings: 'filter_num', 'filter_size', 'pad',
            'stride'.  ``None`` uses the defaults below.  (A ``None``
            sentinel avoids the mutable-default-argument pitfall: a dict
            default would be shared and mutable across all calls.)
        hidden_size : int
            Number of units in the hidden affine layer.
        output_size : int
            Number of output classes.
        weight_init_std : float
            Standard deviation of the Gaussian weight initialization.
        """
        if conv_param is None:
            conv_param = {'filter_num': 30,
                          'filter_size': 5,
                          'pad': 0,
                          'stride': 1}
        filter_num = conv_param['filter_num']
        filter_size = conv_param['filter_size']
        filter_pad = conv_param['pad']
        filter_stride = conv_param['stride']

        # Assumes square input (uses height only) and a fixed 2x2 pooling
        # window, matching the original code's simplification.
        input_size = input_dim[1]
        # Integer division: for valid configs the numerator is divisible by
        # the stride, so this matches the original float formula's value
        # without letting a float leak into downstream shape arithmetic.
        conv_output_size = (input_size - filter_size + 2 * filter_pad) // filter_stride + 1
        # Flattened size after 2x2 pooling of every feature map.  Keeping it
        # flat (instead of a 2-D map) makes the W2 shape below direct.
        pool_output_size = int(filter_num * (conv_output_size // 2) * (conv_output_size // 2))

        # Convolution filters are weights too; they just convolve instead of
        # matrix-multiply.
        self.params = {}
        # Layer 1 (convolution): (num_filters, channels, filter_h, filter_w)
        self.params['W1'] = weight_init_std * np.random.randn(
            filter_num, input_dim[0], filter_size, filter_size)
        self.params['b1'] = np.zeros(filter_num)
        # Layer 2 (affine): input is the flattened pooled feature maps,
        # shape (pool_output_size,) per sample -> output (hidden_size,).
        self.params['W2'] = weight_init_std * np.random.randn(pool_output_size, hidden_size)
        self.params['b2'] = np.zeros(hidden_size)
        # Layer 3 (affine): (hidden_size,) -> (output_size,) class scores.
        self.params['W3'] = weight_init_std * np.random.randn(hidden_size, output_size)
        self.params['b3'] = np.zeros(output_size)