# 一、keras内置经典网络实现 (feature extraction with Keras' built-in classic network, VGG16)
# Pre-trained VGG16 convolutional base with ImageNet weights and no
# fully-connected classifier head (include_top=False); for 200x200 RGB
# inputs its output feature maps are 6x6x512 (see extract_features below).
# NOTE(review): "covn_base" looks like a typo for "conv_base", but the
# name is referenced throughout the file, so it is kept as-is.
covn_base = keras.applications.VGG16(weights='imagenet',
include_top=False,
input_shape=(200, 200, 3))
covn_base.summary()
# Number of samples each generator batch is expected to yield.
batch_size=20
def extract_features(data_generator, sample_count, model=None):
    """Run batches from ``data_generator`` through the convolutional base
    and cache the resulting feature maps.

    Parameters
    ----------
    data_generator : iterable yielding ``(inputs_batch, labels_batch)``
        pairs; assumed to loop indefinitely (Keras generator style).
    sample_count : int
        Total number of samples to extract.
    model : optional
        Feature extractor exposing ``.predict(batch)``; defaults to the
        module-level ``covn_base`` (pre-trained VGG16 base).

    Returns
    -------
    (features, labels)
        ``features`` has shape ``(sample_count, 6, 6, 512)`` — VGG16's
        output for 200x200x3 inputs — and ``labels`` has shape
        ``(sample_count,)``.
    """
    if model is None:
        model = covn_base  # fall back to the global VGG16 base
    features = np.zeros(shape=(sample_count, 6, 6, 512))
    labels = np.zeros(shape=(sample_count,))
    filled = 0
    for inputs_batch, labels_batch in data_generator:
        features_batch = model.predict(inputs_batch)
        # Use the actual batch length instead of assuming the global
        # `batch_size`, and clip the final batch: the original slice
        # assignment raised a broadcast error whenever `sample_count`
        # was not an exact multiple of the batch size.
        n = min(len(features_batch), sample_count - filled)
        features[filled:filled + n] = features_batch[:n]
        labels[filled:filled + n] = labels_batch[:n]
        filled += n
        if filled >= sample_count:
            break
    return features, labels
# Pre-compute the VGG16 features once for 2000 training and 1000 test
# samples; the small classifier head then trains on these cached arrays,
# which is far faster than running the conv base on every epoch.
# NOTE(review): train_generator / test_generator are defined elsewhere
# in the original notebook.
train_features, train_labels = extract_features(train_generator, 2000)
test_features, test_labels = extract_features(test_generator, 1000)
# Small classifier head trained on the pre-extracted 6x6x512 VGG16 features.
model = keras.Sequential()
# Collapse each 6x6x512 feature map to a 512-vector.
model.add(layers.GlobalAveragePooling2D(input_shape=(6, 6, 512)))
model.add(layers.Dense(512, activation='relu'))
model.add(layers.Dropout(0.5))  # regularize the small head
model.add(layers.Dense(1, activation='sigmoid'))  # binary classification
model.summary()
# `learning_rate` replaces the deprecated `lr` keyword, which was removed
# in Keras 3; the value (0.001) is unchanged.
model.compile(optimizer=keras.optimizers.Adam(learning_rate=0.001),
              loss='binary_crossentropy',
              metrics=['acc'])
# Train the classifier head on the cached VGG16 features; the test set
# doubles as validation data, so the 'val_*' entries in `history` track
# test performance after each epoch.
history = model.fit(train_features, train_labels,
epochs=30,
batch_size=50,
validation_data=(test_features, test_labels))
import matplotlib.pyplot as plt
# '%matplotlib inline' is an IPython/Jupyter magic, not valid Python
# syntax; kept as a comment so this file also runs as a plain script.
# %matplotlib inline

# Training vs. validation loss.
plt.plot(history.epoch, history.history['loss'], 'r', label='loss')
plt.plot(history.epoch, history.history['val_loss'], 'b--', label='val_loss')
plt.legend()

# Training vs. validation accuracy on a separate, labeled figure (the
# original drew these curves unlabeled onto the same axes as the loss).
plt.figure()
plt.plot(history.epoch, history.history['acc'], 'r', label='acc')
plt.plot(history.epoch, history.history['val_acc'], 'b--', label='val_acc')
plt.legend()
# 二、特征提取使得训练速度加快 (feature extraction speeds up training)
# NOTE(review): this is a byte-for-byte duplicate of the extract_features
# definition earlier in the file (notebook cell pasted twice); kept so the
# file structure is preserved, with the same final-batch fix applied.
def extract_features(data_generator, sample_count, model=None):
    """Run batches from ``data_generator`` through the convolutional base
    and cache the resulting feature maps.

    Parameters
    ----------
    data_generator : iterable yielding ``(inputs_batch, labels_batch)``
        pairs; assumed to loop indefinitely (Keras generator style).
    sample_count : int
        Total number of samples to extract.
    model : optional
        Feature extractor exposing ``.predict(batch)``; defaults to the
        module-level ``covn_base`` (pre-trained VGG16 base).

    Returns
    -------
    (features, labels)
        ``features`` has shape ``(sample_count, 6, 6, 512)`` — VGG16's
        output for 200x200x3 inputs — and ``labels`` has shape
        ``(sample_count,)``.
    """
    if model is None:
        model = covn_base  # fall back to the global VGG16 base
    features = np.zeros(shape=(sample_count, 6, 6, 512))
    labels = np.zeros(shape=(sample_count,))
    filled = 0
    for inputs_batch, labels_batch in data_generator:
        features_batch = model.predict(inputs_batch)
        # Use the actual batch length instead of assuming the global
        # `batch_size`, and clip the final batch: the original slice
        # assignment raised a broadcast error whenever `sample_count`
        # was not an exact multiple of the batch size.
        n = min(len(features_batch), sample_count - filled)
        features[filled:filled + n] = features_batch[:n]
        labels[filled:filled + n] = labels_batch[:n]
        filled += n
        if filled >= sample_count:
            break
    return features, labels