import scipy.io as sio
import numpy as np
import matplotlib.pylab as plt
# keras import
import keras.utils as kutils
from keras.models import Sequential
from keras.layers import Input, Dense, Dropout, Flatten, Conv2D, MaxPooling2D, BatchNormalization, Activation
from keras.callbacks import ModelCheckpoint
# Fix the RNG so any numpy-driven randomness is reproducible across runs.
np.random.seed(20)

# --- Load the SVHN cropped-digit dataset (.mat files) ---
train_data = sio.loadmat('./dataset/svhn/train_32x32.mat')
test_data = sio.loadmat('./dataset/svhn/test_32x32.mat')
extra_data = sio.loadmat('./dataset/svhn/extra_32x32.mat')

# --- Pull images (key 'X') and labels (key 'y') out of each split ---
train_images = train_data['X']
train_labels = train_data['y']
test_images = test_data['X']
test_labels = test_data['y']
extra_images = extra_data['X']
extra_labels = extra_data['y']

# Shapes as stored on disk: images are (H, W, C, N), labels are (N, 1).
print(train_images.shape, test_images.shape, extra_images.shape)
print(train_labels.shape, test_labels.shape, extra_labels.shape)

# Move the sample axis to the front: (H, W, C, N) -> (N, H, W, C).
train_images = train_images.transpose(3, 0, 1, 2)
test_images = test_images.transpose(3, 0, 1, 2)
extra_images = extra_images.transpose(3, 0, 1, 2)
print(train_images.shape, test_images.shape, extra_images.shape)

# Fold the 'extra' split into the training set for more data.
train_images = np.concatenate((train_images, extra_images), axis=0)
train_labels = np.concatenate((train_labels, extra_labels), axis=0)

# Show a training image and its label further below as a sanity check.
print(train_images.shape, train_labels.shape)

# Scale pixel values from [0, 255] into [0, 1] as float32.
train_images = train_images.astype(np.float32) / 255
test_images = test_images.astype(np.float32) / 255

plt.imshow(train_images[21918])
plt.show()
print('Label: ', train_labels[21918])

# SVHN encodes digit 0 as class 10; remap it so classes run 0..9.
train_labels[train_labels == 10] = 0
test_labels[test_labels == 10] = 0

# One-hot encode the labels for categorical cross-entropy.
print(train_labels[:5])
train_labels = kutils.to_categorical(train_labels)
test_labels = kutils.to_categorical(test_labels)
print(train_labels[:5])
# CNN architecture: three convolutional stages (32/64/64 -> 128/128 ->
# 256/256 filters) with max-pooling and dropout between them, followed by
# a fully-connected classifier head.
model = Sequential([
    Conv2D(32, kernel_size=(3, 3), padding='same', activation='relu',
           input_shape=(32, 32, 3)),
    Conv2D(64, kernel_size=(3, 3), padding='same', activation='relu'),
    Conv2D(64, kernel_size=(3, 3), padding='same', activation='relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Dropout(0.5),
    Conv2D(128, kernel_size=(3, 3), padding='same', activation='relu'),
    Conv2D(128, kernel_size=(3, 3), padding='same', activation='relu'),
    MaxPooling2D(pool_size=(2, 2)),
    Dropout(0.25),
    Conv2D(256, kernel_size=(3, 3), padding='same', activation='relu'),
    Conv2D(256, kernel_size=(3, 3), padding='same', activation='relu'),
    Flatten(),
    Dense(512, activation='relu'),
    Dropout(0.5),
    Dense(128, activation='relu'),
    # 10 output classes (digits 0-9); softmax pairs with the
    # categorical cross-entropy loss below.
    Dense(10, activation='softmax'),
])

# Loss and optimizer: categorical cross-entropy with Adadelta,
# reporting accuracy during training.
model.compile(loss='categorical_crossentropy',
              optimizer='adadelta',
              metrics=['accuracy'])
model.summary()
#模型保存路径
# Checkpoint the best model seen so far during training.
# FIX: monitor validation accuracy, not training accuracy — picking the
# "best" checkpoint by training accuracy just rewards overfitting, and
# validation data is available below. (NOTE(review): on older Keras
# versions the metric key is spelled 'val_acc' — confirm against the
# installed version. 'period' is deprecated in newer Keras in favor of
# save_freq='epoch'; kept here for backward compatibility.)
checkpoint = ModelCheckpoint("./model/best_model.hdf5", monitor='val_accuracy',
                             verbose=1, save_best_only=True, mode='auto',
                             period=1)

# Train, validating against the test split each epoch so the checkpoint
# callback can track val_accuracy. epochs=1 is a smoke-test setting —
# raise it for real training.
history = model.fit(
    train_images,
    train_labels,
    batch_size=128,
    epochs=1,
    validation_data=(test_images, test_labels),
    callbacks=[checkpoint])
# 数据集在这儿 (dataset download, Baidu Pan):
# 链接:https://pan.baidu.com/s/1ije6BnmihLx-fGdF9vV4Tg
# 提取码 (extraction code):xb24
# --来自百度网盘超级会员V1的分享