VGG
重复的简单基础模块来搭建复杂的神经网络
连续使用数个相同的填充为1、窗口形状3x3的卷积层后接上一个步幅为2、窗口形状2x2的max_pooling。卷积层使输入的高和宽不变,池化层则将其减半;各VGG块的输出通道数逐块翻倍,直至达到512后保持不变。
代码
import d2lzh as d2l
from mxnet import gluon, init, autograd
from mxnet.gluon import nn
import time
# VGG网络单元vgg_block
def vgg_block(num_convs, num_channels):
    """Build one VGG block.

    A block is `num_convs` 3x3 convolutions (padding 1, ReLU) that keep
    the spatial size, followed by a 2x2 stride-2 max pool that halves it.
    """
    block = nn.Sequential()
    for _ in range(num_convs):
        block.add(nn.Conv2D(num_channels, kernel_size=3, padding=1,
                            activation='relu'))
    # Halve height and width at the end of the block.
    block.add(nn.MaxPool2D(pool_size=2, strides=2))
    return block
# VGG网络
def vgg(conv_arch):
    """Assemble a VGG network from `conv_arch`.

    `conv_arch` is a sequence of (num_convs, num_channels) pairs, one per
    VGG block; the blocks are followed by the fully-connected classifier.
    """
    model = nn.Sequential()
    # Convolutional stages: one vgg_block per architecture pair.
    for num_convs, num_channels in conv_arch:
        model.add(vgg_block(num_convs, num_channels))
    # Classifier head: two 4096-unit ReLU layers with dropout, then a
    # 10-way output layer (Fashion-MNIST has 10 classes).
    model.add(nn.Dense(4096, activation='relu'), nn.Dropout(0.5),
              nn.Dense(4096, activation='relu'), nn.Dropout(0.5),
              nn.Dense(10))
    return model
# Build a slimmed-down VGG-11: keep the block structure of the full
# architecture but divide every channel count by `ratio`, which makes
# Fashion-MNIST training tractable.
conv_arch = ((1, 64), (1, 128), (2, 256), (2, 512), (2, 512))
ratio = 4
small_conv_arch = [(depth, width // ratio) for depth, width in conv_arch]
net = vgg(small_conv_arch)
# Dataset: Fashion-MNIST, resized to 224x224 to match the VGG input size.
batch_size = 128
train_iter, test_iter = d2l.load_data_fashion_mnist(batch_size, resize=224)
# Prefer the GPU when one is available; initialize weights with Xavier.
ctx = d2l.try_gpu()
net.initialize(ctx=ctx, init=init.Xavier())
print('trying on', ctx)
# Softmax cross-entropy loss and plain SGD with a fixed learning rate.
softmax_cross_entropy = gluon.loss.SoftmaxCrossEntropyLoss()
trainer = gluon.Trainer(net.collect_params(), 'sgd', {'learning_rate': 0.05})
for epoch in range(3):
    # Per-epoch running totals: summed loss, correct predictions, samples.
    loss_total = 0.0
    correct_total = 0.0
    sample_count = 0
    tic = time.time()
    for X, y in train_iter:
        # Move the mini-batch to the training device.
        X, y = X.as_in_context(ctx), y.as_in_context(ctx)
        with autograd.record():
            preds = net(X)
            batch_loss = softmax_cross_entropy(preds, y).sum()
        batch_loss.backward()
        # Normalize the summed gradient by the batch size.
        trainer.step(batch_size)
        y = y.astype('float32')
        loss_total += batch_loss.asscalar()
        correct_total += (preds.argmax(axis=1) == y).sum().asscalar()
        sample_count += y.size
    test_acc = d2l.evaluate_accuracy(test_iter, net, ctx)
    print('epoch %d, loss %.4f, train acc %.3f, test acc %.3f, time %.1f sec'
          % (epoch + 1, loss_total / sample_count,
             correct_total / sample_count, test_acc, time.time() - tic))
结果
trying on gpu(0)
epoch 1, loss 0.9066, train acc 0.671, test acc 0.859, time 160.9 sec
epoch 2, loss 0.4112, train acc 0.850, test acc 0.881, time 154.2 sec
epoch 3, loss 0.3355, train acc 0.878, test acc 0.893, time 154.3 sec