原文地址:AlexNet(注:原始链接在转载时丢失,待补充)
关于文章的理解,网上有很多博客可以参考,这里只给出 LRN(local response normalization,局部响应归一化)的一篇回答,其中形象地解释了 LRN。(注:原文此处附有示意图及回答链接,转载时丢失,待补充。)
实现步骤:
1.构建网络
import tensorflow as tf
from tensorflow.contrib import slim
import numpy as np
def AlexNet(x_input):
with slim.arg_scope([slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.relu,
weights_initializer=tf.truncated_normal_initializer(mean=0.0, stddev=0.01),
weights_regularizer=slim.l2_regularizer(0.0005)):
# layer 1
net = slim.conv2d(x_input, 96, [11,11], padding="VALID", stride=4, scope="conv_1")
net = tf.nn.local_response_normalization(net, depth_radius=5, bias=2, alpha=0.0001, beta=0.75)
net = slim.max_pool2d(net, [3,3], 2, scope="pool_1")
# layer 2
net = slim.conv2d(net, 256, [5,