from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
start_time = time.time()
import numpy as np
import matplotlib.pyplot as plt
from keras.callbacks import Callback,ModelCheckpoint
from keras.models import Model
from keras.layers import Dense, Input,Conv2D, MaxPooling2D, Dropout, Flatten, Activation, BatchNormalization,GlobalAveragePooling2D
import keras as ks
from sklearn.model_selection import train_test_split
from generator import s2_generator
from keras.utils.vis_utils import plot_model
from sklearn.metrics import recall_score,accuracy_score
from sklearn.metrics import precision_score,f1_score
from sklearn.metrics import roc_curve
from sklearn.metrics import auc
from itertools import cycle
from scipy import interp
from sklearn.preprocessing import label_binarize
import Adapt_categorical_crossentropy as ACC
from keras.callbacks import Callback,ModelCheckpoint
from keras.utils import multi_gpu_model
from keras.optimizers import Adam,SGD,sgd
from LRFinder import LRFinder
from clr_callbackR import CyclicLR
from step_decay import step_decay_schedule
from keras.applications.vgg19 import VGG19
from keras.applications.vgg16 import VGG16
from keras.applications.xception import Xception
from keras.applications.inception_v3 import InceptionV3
from keras.applications.inception_resnet_v2 import InceptionResNetV2
from keras.applications.mobilenet import MobileNet
from keras.applications.densenet import DenseNet121,DenseNet169,DenseNet201
from keras.applications.resnet50 import ResNet50
from keras.applications.nasnet import NASNetLarge
from keras.applications.nasnet import NASNetMobile
from keras.layers.core import Reshape
from keras.layers import multiply
def build_model(nb_classes, input_shape=(299, 299, 3)):
    """Build an SE-InceptionV3 classifier.

    An ImageNet-pretrained InceptionV3 backbone (top removed) is followed
    by a squeeze-and-excitation (SE) block that reweights the 2048
    backbone channels, global average pooling, dropout, and a final
    Dense layer with sigmoid activation.

    Args:
        nb_classes: number of output units of the final Dense layer.
        input_shape: input image shape, default (299, 299, 3).

    Returns:
        An uncompiled Keras Model mapping images to nb_classes scores.
    """
    inputs_dim = Input(input_shape)
    # pooling must be None so the backbone keeps its 4-D feature map for
    # the SE block below. The original passed the *builtin* `max` (not the
    # string 'max'), which Keras silently treated as "no pooling" anyway.
    # input_shape is now forwarded instead of being hard-coded.
    x = InceptionV3(include_top=False,
                    weights='imagenet',
                    input_tensor=None,
                    input_shape=input_shape,
                    pooling=None)(inputs_dim)
    print(x.shape)
    # Squeeze-and-excitation: global context -> bottleneck (ratio 16) ->
    # per-channel sigmoid gates, broadcast-multiplied onto the feature map.
    squeeze = GlobalAveragePooling2D()(x)
    excitation = Dense(units=2048 // 16)(squeeze)
    excitation = Activation('relu')(excitation)
    excitation = Dense(units=2048)(excitation)
    excitation = Activation('sigmoid')(excitation)
    excitation = Reshape((1, 1, 2048))(excitation)
    scale = multiply([x, excitation])
    x = GlobalAveragePooling2D()(scale)
    dp_1 = Dropout(0.3)(x)
    fc2 = Dense(nb_classes)(dp_1)
    # NOTE(review): 'sigmoid' with categorical_crossentropy is unusual for
    # single-label multi-class work; 'softmax' is the conventional choice.
    # Kept as-is to preserve training behavior.
    fc2 = Activation('sigmoid')(fc2)
    model = Model(inputs=inputs_dim, outputs=fc2)
    return model
class ParallelModelCheckpoint(ModelCheckpoint):
    """ModelCheckpoint that always saves a given (single-GPU) model.

    When training via `multi_gpu_model`, a plain ModelCheckpoint attached
    to the parallel model would checkpoint the multi-GPU wrapper. This
    subclass pins `set_model` to the original template model so the saved
    .h5 file loads cleanly on a single device.
    """

    def __init__(self, model, filepath, monitor='val_loss', verbose=0,
                 save_best_only=True, save_weights_only=False,
                 mode='auto', period=1):
        # The template (non-parallel) model whose weights get saved.
        self.single_model = model
        # Forward options by keyword so the call is robust to any
        # reordering of ModelCheckpoint's parameters across Keras versions
        # (the original forwarded all of them positionally).
        super(ParallelModelCheckpoint, self).__init__(
            filepath, monitor=monitor, verbose=verbose,
            save_best_only=save_best_only,
            save_weights_only=save_weights_only,
            mode=mode, period=period)

    def set_model(self, model):
        # Ignore the model Keras hands us (the multi-GPU wrapper) and
        # always checkpoint the single-device template instead.
        super(ParallelModelCheckpoint, self).set_model(self.single_model)
if __name__ == '__main__':
    # ---- hyper-parameters -------------------------------------------------
    im_size1 = 299
    im_size2 = 299
    channels = 3
    nb_classes = 5
    epochs = 150
    min_lr = 1e-7   # floor used by ReduceLROnPlateau below
    max_lr = 1e-4   # initial Adam learning rate

    # ---- data -------------------------------------------------------------
    X_train = np.load('../Divide_test/x_train_5_right.npy')
    Y_train = np.load('../Divide_test/y_train_5_right.npy')
    print("全部数据形状")
    print(X_train.shape)
    print(Y_train.shape)
    # Hold out 10% of the training pool for validation (fixed seed).
    X_train, X_valid, Y_train, Y_valid = train_test_split(X_train, Y_train, test_size=0.1, random_state=666)
    X_test = np.load('../Divide_test/x_test_5_right.npy')
    Y_test = np.load('../Divide_test/y_test_5_right.npy')
    print("训练集形状")
    print(X_train.shape)
    print(Y_train.shape)
    print("验证集形状")
    print(X_valid.shape)
    print(Y_valid.shape)
    print("测试集形状")
    print(X_test.shape)
    print(Y_test.shape)
    Y_train = np.asarray(Y_train, np.uint8)
    Y_valid = np.asarray(Y_valid, np.uint8)
    Y_test = np.asarray(Y_test, np.uint8)
    # Scale pixels to [0, 1]. X_train is left raw here — presumably
    # s2_generator normalizes each batch; verify against generator.py.
    X_valid = np.asarray(X_valid, np.float32) / 255.
    X_test = np.asarray(X_test, np.float32) / 255.

    # ---- model ------------------------------------------------------------
    single_model = build_model(nb_classes, input_shape=(im_size1, im_size2, channels))
    plot_model(single_model, to_file="../images/images/SE-InceptionV3_model.png", show_shapes=True, show_layer_names=True)
    model = multi_gpu_model(single_model, 2)
    early_stopping = ks.callbacks.EarlyStopping(monitor='val_acc', patience=5)
    filepath = './model/SE-InceptionV3_model.h5'
    # Checkpoint the single-GPU template so the .h5 loads without the
    # multi-GPU wrapper. (Original repeated the path literal; reuse
    # `filepath` instead.)
    saveBestModel = ParallelModelCheckpoint(single_model, filepath, monitor='val_acc',
                                            verbose=1, save_best_only=True, mode='auto')
    tensorboard = ks.callbacks.TensorBoard(log_dir=r'./RESNET_ray/SE-InceptionV3_model')
    opt = Adam(lr=1e-4)
    model.compile(optimizer=opt, loss='categorical_crossentropy', metrics=['accuracy'])
    model.summary()

    # ---- training ---------------------------------------------------------
    batch_size = 100
    a = X_train.shape[0]
    # Learning-rate scheduling options considered (translated notes):
    #   LRFinder   - systematic search for the best learning rate
    #   reduce_lr  - shrink the LR when validation accuracy plateaus
    #   CyclicLR   - cyclical learning rate (results were mixed; possibly
    #                mis-tuned parameters)
    #   step_decay - classic "step decay" annealing, similar to reduce_lr
    reduce_lr = ks.callbacks.ReduceLROnPlateau(monitor='val_acc', factor=0.3, verbose=1, patience=2,
                                               min_lr=min_lr)
    history = model.fit_generator(generator=s2_generator(X_train, Y_train, batch_size, a),
                                  steps_per_epoch=int(X_train.shape[0] / batch_size) + 1,
                                  epochs=epochs, validation_data=(X_valid, Y_valid), verbose=1,
                                  callbacks=[early_stopping, saveBestModel, tensorboard, reduce_lr])

    # ---- learning curves --------------------------------------------------
    val_loss = history.history['val_loss']
    val_acc = history.history['val_acc']
    train_loss = history.history['loss']
    train_acc = history.history['acc']
    plt.figure(figsize=(10, 4))
    plt.ylim(0, 1.5)
    plt.plot(train_loss, 'b', label='Training loss')
    plt.plot(val_loss, 'r', label='Validation loss')
    plt.xlabel('Epochs')
    plt.ylabel('Loss')
    plt.legend()
    plt.savefig("../images/ROC/SE-InceptionV3_model_5分类_loss.png")
    plt.figure(figsize=(10, 4))
    plt.ylim(0, 1)
    plt.plot(train_acc, 'b', label='Training Accuracy')
    plt.plot(val_acc, 'r', label='Validation Accuracy')
    plt.xlabel('Epochs')
    plt.ylabel('Accuracy')
    plt.legend()
    plt.savefig("../images/ROC/SE-InceptionV3_model_5分类_Accuracy.png")

    # ---- evaluation -------------------------------------------------------
    print("Predicting")
    Y_pred = model.predict(X_test)
    # Reduce predicted scores / one-hot targets to class indices, then
    # re-binarize so the per-class metrics below can be computed.
    Y_pred = [np.argmax(y) for y in Y_pred]
    Y_test = [np.argmax(y) for y in Y_test]
    Y_test = label_binarize(Y_test, classes=[i for i in range(nb_classes)])
    Y_pred = label_binarize(Y_pred, classes=[i for i in range(nb_classes)])
    precision = precision_score(Y_test, Y_pred, average='micro')
    recall = recall_score(Y_test, Y_pred, average='micro')
    # Store results in fresh names: the original rebound the imported
    # sklearn functions (f1_score, accuracy_score), shadowing them.
    f1 = f1_score(Y_test, Y_pred, average='micro')
    accuracy = accuracy_score(Y_test, Y_pred)
    print("Precision_score:", precision)
    print("Recall_score:", recall)
    print("F1_score:", f1)
    print("Accuracy_score:", accuracy)

    # ---- per-class / micro / macro ROC curves -----------------------------
    # NOTE(review): curves are computed from hard 0/1 predictions, not from
    # probabilities, so each per-class "curve" has one operating point.
    fpr = dict()
    tpr = dict()
    roc_auc = dict()
    for i in range(nb_classes):
        fpr[i], tpr[i], _ = roc_curve(Y_test[:, i], Y_pred[:, i])
        roc_auc[i] = auc(fpr[i], tpr[i])
    fpr["micro"], tpr["micro"], _ = roc_curve(Y_test.ravel(), Y_pred.ravel())
    roc_auc["micro"] = auc(fpr["micro"], tpr["micro"])
    # Macro average: interpolate every class's TPR onto a shared FPR grid.
    all_fpr = np.unique(np.concatenate([fpr[i] for i in range(nb_classes)]))
    mean_tpr = np.zeros_like(all_fpr)
    for i in range(nb_classes):
        # np.interp replaces scipy.interp, which is deprecated/removed.
        mean_tpr += np.interp(all_fpr, fpr[i], tpr[i])
    mean_tpr /= nb_classes
    fpr["macro"] = all_fpr
    tpr["macro"] = mean_tpr
    roc_auc["macro"] = auc(fpr["macro"], tpr["macro"])
    lw = 2
    plt.figure()
    plt.plot(fpr["micro"], tpr["micro"],
             label='micro-average ROC curve (area = {0:0.2f})'
                   ''.format(roc_auc["micro"]),
             color='deeppink', linestyle=':', linewidth=4)
    plt.plot(fpr["macro"], tpr["macro"],
             label='macro-average ROC curve (area = {0:0.2f})'
                   ''.format(roc_auc["macro"]),
             color='navy', linestyle=':', linewidth=4)
    colors = cycle(['aqua', 'darkorange', 'cornflowerblue'])
    for i, color in zip(range(nb_classes), colors):
        plt.plot(fpr[i], tpr[i], color=color, lw=lw,
                 label='ROC curve of class {0} (area = {1:0.2f})'
                       ''.format(i, roc_auc[i]))
    plt.plot([0, 1], [0, 1], 'k--', lw=lw)
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.title('Some extension of Receiver operating characteristic to multi-class')
    plt.legend(loc="lower right")
    plt.savefig("../images/ROC/SE-InceptionV3_model_5分类.png")
    print("--- %s seconds ---" % (time.time() - start_time))