import keras
import numpy as np
import os
import pickle
from keras import Sequential
from keras.callbacks import ModelCheckpoint
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout
from keras.optimizers import Adam
# 读取数据
def load_data(file):
    """Load one pickled CIFAR-10 batch file.

    Args:
        file: Path to the batch file (e.g. 'cifar-10-batches-py/data_batch_1').

    Returns:
        The unpickled dict; CIFAR-10 batches use bytes keys such as
        b'data' and b'labels'.
    """
    # NOTE(review): pickle.load is unsafe on untrusted files; this assumes
    # the official CIFAR-10 batches downloaded locally.
    with open(file, 'rb') as fo:
        batch = pickle.load(fo, encoding='bytes')  # renamed: don't shadow builtin `dict`
    return batch
# 转换数据格式
def convert_data(data):
images = np.reshape(data[b'data'], (len(data[b'data']), 3, 32, 32)).transpose(0, 2, 3, 1)
labels = np.array(data[b'labels'])
return images, labels
# 加载数据集
def load_dataset():
    """Read the five CIFAR-10 training batches and the test batch from disk.

    Expects the 'cifar-10-batches-py' directory in the current working dir.

    Returns:
        Tuple (X_train, y_train, X_test, y_test); image arrays are shaped
        (N, 32, 32, 3), label arrays are 1-D.
    """
    data_dir = 'cifar-10-batches-py'
    image_chunks = []
    label_chunks = []
    for idx in range(1, 6):
        batch = load_data(os.path.join(data_dir, 'data_batch_%d' % idx))
        images, labels = convert_data(batch)
        image_chunks.append(images)
        label_chunks.append(labels)
    X_train = np.vstack(image_chunks)
    y_train = np.hstack(label_chunks)
    X_test, y_test = convert_data(load_data(os.path.join(data_dir, 'test_batch')))
    return X_train, y_train, X_test, y_test
# 构建CNN模型
def build_model(input_shape, num_classes):
    """Build the CIFAR-10 CNN: two conv/pool/dropout stages and a dense head.

    Args:
        input_shape: Input image shape, e.g. (32, 32, 3).
        num_classes: Number of output classes for the softmax layer.

    Returns:
        An uncompiled keras Sequential model.
    """
    stack = [
        Conv2D(32, (3, 3), activation='relu', input_shape=input_shape),
        Conv2D(32, (3, 3), activation='relu'),
        MaxPooling2D(pool_size=(2, 2)),
        Dropout(0.25),
        Conv2D(64, (3, 3), activation='relu'),
        Conv2D(64, (3, 3), activation='relu'),
        MaxPooling2D(pool_size=(2, 2)),
        Dropout(0.25),
        Flatten(),
        Dense(512, activation='relu'),
        Dropout(0.5),
        Dense(num_classes, activation='softmax'),
    ]
    model = Sequential()
    for layer in stack:
        model.add(layer)
    return model
# 主函数
if __name__ == '__main__':
    # Load the CIFAR-10 data.
    X_train, y_train, X_test, y_test = load_dataset()
    # Scale pixels from [0, 255] to [0, 1]; unnormalized uint8 input
    # trains poorly with this architecture.
    X_train = X_train.astype('float32') / 255.0
    X_test = X_test.astype('float32') / 255.0
    # Hyperparameters.
    input_shape = (32, 32, 3)
    num_classes = 10
    batch_size = 128
    epochs = 50
    # One-hot encode the labels once instead of at every call site.
    y_train_cat = keras.utils.to_categorical(y_train, num_classes)
    y_test_cat = keras.utils.to_categorical(y_test, num_classes)
    # Build and compile the CNN.
    model = build_model(input_shape, num_classes)
    model.summary()
    model.compile(loss='categorical_crossentropy', optimizer=Adam(), metrics=['accuracy'])
    # BUG FIX: modern Keras logs the metric as 'val_accuracy'; the old name
    # 'val_acc' is never found, so save_best_only never triggered a save —
    # which is why no checkpoint file appeared.
    checkpoint = ModelCheckpoint(filepath='weights.hdf5', monitor='val_accuracy',
                                 verbose=1, save_best_only=True)
    # Train.
    history = model.fit(X_train, y_train_cat,
                        batch_size=batch_size, epochs=epochs, verbose=1,
                        validation_data=(X_test, y_test_cat),
                        callbacks=[checkpoint])
    # Explicitly persist the final model so a saved file always exists,
    # independent of the checkpoint callback.
    model.save('my_model.h5')
    # Evaluate on the test set.
    score = model.evaluate(X_test, y_test_cat, verbose=0)
    print('Test loss:', score[0])
    print('Test accuracy:', score[1])
# 问题
# 找不到保存的模型文件
# 分析
# 可能是因为 checkpoint 监控的指标名 'val_acc' 在新版 Keras 中已改为 'val_accuracy'，
# 导致 save_best_only 从未触发保存；或者是只有在 notebook 环境下才能看到保存的文件
# 解决方法
# model.save('my_model.h5')  # 将模型保存到 my_model.h5 文件中
# 使用这个保存