本周搭建了 LeNet。
参考:《TensorFlow 实战 Google 深度学习框架》(郑泽宇)
LeNet paper: LeCun et al., "Gradient-Based Learning Applied to Document Recognition", Proceedings of the IEEE, 1998.
LeNet 模型如下
按照以下的LeNet 架构:
神经网络结构的代码 LeNet_forward.py,主要是完成前向通道的数值计算。
import tensorflow as tf
# ---- Layer hyper-parameters (classic LeNet-5 sizes) ----
CONV1_SIZE = 5      # conv1 kernel: 5x5
CONV1_DEEP = 6      # conv1 output depth (number of filters)
POOL1_SIZE = 2      # pool1 window: 2x2
POOL1_STRIDE = 2    # pool1 stride
CONV2_SIZE = 5      # conv2 kernel: 5x5
CONV2_DEEP = 16     # conv2 output depth
POOL2_SIZE = 2      # pool2 window: 2x2
POOL2_STRIDE = 2    # pool2 stride
FC1_SIZE = 120      # fully-connected layer 1 output nodes
FC2_SIZE = 84       # fully-connected layer 2 output nodes
# ---- Input image description ----
# NOTE(review): the original comment claimed 32*32*1 input (classic LeNet),
# but IMAGE_SIZE is 28, i.e. raw MNIST images — confirm which is intended.
NUM_CHANNAL = 1          # single-channel (grayscale) input
IMAGE_SIZE = 28          # input images are IMAGE_SIZE x IMAGE_SIZE
INPUT_NODE = 784         # flattened input size: 28 * 28
NUM_LABEL = 10           # number of classes; also the output size of the last fully-connected layer
BATCH_SIZE = 50          # mini-batch size
# TensorBoard visualization: distribution, mean and stddev of a tensor.
def variable_summaries(name, var):
    """Attach histogram plus mean/stddev scalar summaries to *var* under *name*."""
    with tf.name_scope(name):
        # Value distribution of every element in the tensor.
        tf.summary.histogram(name, var)
        avg = tf.reduce_mean(var)
        tf.summary.scalar(name + "/mean", avg)
        # stddev = sqrt(E[(var - mean)^2])
        sigma = tf.sqrt(tf.reduce_mean(tf.square(var - avg)))
        tf.summary.scalar(name + "/stddev", sigma)
# 前向通道
def LeNet_forward(INPUT_TENSOR):
with tf.variable_scope("layer1_CONV1"):
CONV1_Weights = tf.get_variable("Weight",[CONV1_SIZE, CONV1_SIZE, NUM_CHANNAL, CONV1_DEEP],
initializer = tf.truncated_normal_initializer(stddev=0.1))
variable_summaries("Weight", CONV1_Weights)
CONV1_bias = tf.get_variable("bias", [CONV1_DEEP], initializer = tf.constant_initializer(0.1))
variable_summaries("bias", CONV1_bias)
CONV1 = tf.nn.conv2d(INPUT_TENSOR, CONV1_Weights,strides=[1,1,1,1], padding='VALID')
with tf.name_scope("Weight_plus_bias"):
pre_activate = tf.nn.bias_add(CONV1, CONV1_bias)