"""
正则化
机器学习中经常会遇到一个问题-----过拟合,过拟合是指由于参数过多或者训练数据过少导致虽然对于训练数据的拟合非常好
但是对于测试数据的拟合非常糟糕,解决这种问题有两种方法:一种是权值衰减,另外一种是dropout方法
权值衰减是指在学习的过程中对权值过大的参数进行惩罚,来抑制过拟合,在代码中即为①处的代码
另外的一种方法为dropout方法,即在学习过程中随机删除一部分神经元,具体实现看类Dropout
超参数的优化
超参数与参数一样,在神经网络的学习中占有很大的地位,对于它的优化我们只能一次次训练来手动得到最优值
步骤0
先初始设置一个范围
步骤1
从设定的范围内随机取得超参数值
步骤2
利用1中的超参数值进行训练并评估精度
步骤3
重复1和2中的操作,并根据他们的结果一步步缩小超参数范围
"""
import os
import sys
sys.path.append(os.pardir)
import numpy as np
import matplotlib.pyplot as plt
from dataset.mnist import load_mnist
from common.multi_layer_net import MultiLayerNet
from common.optimizer import SGD
# Overfitting experiment: train a deep (6 hidden layers) network on only 300
# MNIST samples so the gap between train and test accuracy becomes visible.
(x_train, t_train), (x_test, t_test) = load_mnist(normalize=True)

# Deliberately shrink the training set to provoke overfitting.
x_train = x_train[:300]
t_train = t_train[:300]

network = MultiLayerNet(input_size=784,
                        hidden_size_list=[100, 100, 100, 100, 100, 100],
                        output_size=10)
optimizer = SGD(lr=0.01)

max_epochs = 201
train_size = x_train.shape[0]
batch_size = 100

train_loss_list = []
train_acc_list = []
test_acc_list = []

# Mini-batch iterations per epoch (integer; at least 1 even for tiny sets).
iter_per_epoch = max(train_size // batch_size, 1)

epoch_cnt = 0
i = 0
while True:
    # Sample a random mini-batch and take one SGD step.
    batch_mask = np.random.choice(train_size, batch_size)
    x_batch = x_train[batch_mask]
    t_batch = t_train[batch_mask]
    grads = network.gradient(x_batch, t_batch)
    optimizer.update(network.params, grads)

    # Once per epoch: record train/test accuracy; stop after max_epochs epochs.
    if i % iter_per_epoch == 0:
        train_acc = network.accuracy(x_train, t_train)
        test_acc = network.accuracy(x_test, t_test)
        train_acc_list.append(train_acc)
        test_acc_list.append(test_acc)
        epoch_cnt += 1
        if epoch_cnt >= max_epochs:
            break
    i += 1

# Plot both curves; the widening train/test gap illustrates overfitting.
x = np.arange(max_epochs)
plt.plot(x, train_acc_list, marker='o', label='train', markevery=10)
plt.plot(x, test_acc_list, marker='s', label='test', markevery=10)
plt.xlabel("epochs")
plt.ylabel("accuracy")
plt.ylim(0, 1.0)
plt.legend(loc='lower right')
plt.show()
class Dropout(object):
    """Dropout layer: randomly silences units during training to curb overfitting.

    During training each unit is kept with probability ``1 - dropout_ratio``
    via a random binary mask. At inference time the activations are scaled by
    ``1 - dropout_ratio`` so their expected magnitude matches training.
    """

    def __init__(self, dropout_ratio=0.5):
        # Fraction of units to drop on each forward pass during training.
        self.dropout_ratio = dropout_ratio
        # Boolean keep-mask from the last training forward pass; reused in backward.
        self.mask = None

    def forward(self, x, train_flag=True):
        """Apply dropout to ``x``; pass ``train_flag=False`` at inference."""
        if train_flag:
            # Use rand (uniform in [0, 1)), NOT randn (standard normal), so a
            # unit survives with probability exactly 1 - dropout_ratio.
            self.mask = np.random.rand(*x.shape) > self.dropout_ratio
            return x * self.mask
        # Inference: scale by the keep probability (1 - ratio, not 1 * ratio)
        # to preserve the expected activation seen during training.
        return x * (1.0 - self.dropout_ratio)

    def backward(self, dout):
        """Route gradients only through units that were kept in forward."""
        return dout * self.mask