# Keras: AlexNet implementation (shared example code)
from keras.models import Sequential
from keras.layers import Convolution2D, MaxPool2D, Flatten, Dense, Dropout
from keras.layers.normalization import BatchNormalization
from keras.models import load_model
from keras.preprocessing.image import load_img, img_to_array, array_to_img, ImageDataGenerator
from keras.optimizers import SGD
# Build AlexNet (Krizhevsky et al., 2012) for 227x227 RGB inputs.
# num_class is the number of target categories; 1000 for ImageNet.
# FIX: was 0, which makes the final Dense(0) layer invalid — no model could be built.
num_class = 1000

model = Sequential()

# Blocks 1 & 2: conv1 (96 filters, 11x11, stride 4) + pool,
#               conv2 (256 filters, 5x5) + pool.
model.add(Convolution2D(input_shape=(227, 227, 3), filters=96, kernel_size=11,
                        strides=4, padding='valid', activation='relu'))
model.add(MaxPool2D(pool_size=3, strides=2, padding='valid'))
model.add(Convolution2D(filters=256, kernel_size=5, strides=1, padding='same',
                        activation='relu'))
model.add(MaxPool2D(pool_size=3, strides=2, padding='valid'))

# Blocks 3 & 4: two 384-filter 3x3 convolutions (no pooling between them).
model.add(Convolution2D(filters=384, kernel_size=3, strides=1, padding='same',
                        activation='relu'))
model.add(Convolution2D(filters=384, kernel_size=3, strides=1, padding='same',
                        activation='relu'))

# Block 5: conv5 (256 filters, 3x3) + pool.
model.add(Convolution2D(filters=256, kernel_size=3, strides=1, padding='same',
                        activation='relu'))
model.add(MaxPool2D(pool_size=3, strides=2, padding='valid'))

# Blocks 6-8: fully connected classifier head.
model.add(Flatten())
model.add(Dense(4096, activation='relu'))
model.add(BatchNormalization())
model.add(Dropout(0.5))
model.add(Dense(4096, activation='relu'))
model.add(BatchNormalization())
model.add(Dropout(0.5))
# Output layer emits class probabilities.
# FIX: the original added BatchNormalization and Dropout AFTER the softmax
# layer, which destroys the probability distribution at inference/training
# time — all regularization now precedes the output layer.
model.add(Dense(num_class, activation='softmax'))

# FIX: SGD was imported but never used — compile the model so it is trainable.
model.compile(optimizer=SGD(learning_rate=0.01, momentum=0.9),
              loss='categorical_crossentropy',
              metrics=['accuracy'])

model.summary()