代码:
import numpy as np
np.random.seed(1337) # for reproducibility
from keras.datasets import mnist
from keras.datasets import cifar10
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from keras import backend as K
import h5py
# --- Global hyperparameters and model configuration ---
batch_size = 256   # samples per gradient update
nb_classes = 10    # number of digit classes (0-9)
epochs = 12        # full passes over the training set

# Input image dimensions (MNIST digits are 28x28 grayscale).
img_rows, img_cols = 28, 28

# Convolution / pooling configuration.
nb_filters = 32       # number of convolutional filters per conv layer
kernel_size = (3, 3)  # convolution kernel size
pool_size = (2, 2)    # max-pooling window size
# --- Load and preprocess the MNIST data ---
# The data, shuffled and split between train and test sets.
(X_train, y_train), (X_test, y_test) = mnist.load_data()
print(np.shape(X_train),np.shape(y_train))
print(np.shape(X_test),np.shape(y_test))
print(type(X_train))
print(X_train.shape[0])
print(np.shape( X_train.reshape(X_train.shape[0], img_rows, img_cols, 1)))
print('标签:',y_train[0:20])

# Place the channel axis where the active backend expects it.
# NOTE: K.image_data_format() supersedes the deprecated K.image_dim_ordering()
# used by Keras 1 ('channels_first' == old 'th', 'channels_last' == old 'tf');
# this file already uses the Keras 2 API elsewhere (padding=, epochs=).
if K.image_data_format() == 'channels_first':
    X_train = X_train.reshape(X_train.shape[0], 1, img_rows, img_cols)
    X_test = X_test.reshape(X_test.shape[0], 1, img_rows, img_cols)
    input_shape = (1, img_rows, img_cols)
else:
    X_train = X_train.reshape(X_train.shape[0], img_rows, img_cols, 1)  # append the single grayscale channel
    X_test = X_test.reshape(X_test.shape[0], img_rows, img_cols, 1)
    input_shape = (img_rows, img_cols, 1)  # tuple: immutable

# Scale pixel intensities from [0, 255] to [0, 1].
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
X_train /= 255
X_test /= 255
print('X_train shape:', X_train.shape)
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')

# Convert integer class labels to one-hot vectors for categorical_crossentropy.
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
print(Y_train.shape,Y_train[0,0:11])
print(Y_test.shape,Y_test[0,0:11])
# --- Build the CNN: [Conv -> ReLU -> Pool] x2 -> Dropout -> Flatten -> Dense ---
# (Dead commented-out Keras 1-style Convolution2D call removed; kernel_size is
# already a tuple, so it is passed directly instead of re-packing its elements.)
model = Sequential()
# Conv layer 1: 'same' padding preserves the spatial dimensions.
model.add(Convolution2D(nb_filters, kernel_size,
                        padding='same',
                        input_shape=input_shape))
model.add(Activation('relu'))                 # activation
model.add(MaxPooling2D(pool_size=pool_size))  # pooling layer
# Conv layer 2: default 'valid' padding.
model.add(Convolution2D(nb_filters, kernel_size))
model.add(Activation('relu'))                 # activation
model.add(MaxPooling2D(pool_size=pool_size))  # pooling layer
model.add(Dropout(0.25))      # randomly drop 25% of units to reduce overfitting
model.add(Flatten())          # flatten feature maps into a 1-D vector
model.add(Dense(128))         # fully connected layer 1
model.add(Activation('relu'))
model.add(Dropout(0.5))       # heavier dropout before the classifier head
model.add(Dense(nb_classes))      # fully connected layer 2: one unit per class
model.add(Activation('softmax'))  # softmax class probabilities
# --- Compile, train, and evaluate the model ---
model.compile(
    loss='categorical_crossentropy',
    optimizer='adadelta',
    metrics=['accuracy'],
)

# Train, validating on the held-out test split after each epoch.
model.fit(
    X_train, Y_train,
    batch_size=batch_size,
    epochs=epochs,
    verbose=1,
    validation_data=(X_test, Y_test),
)

# Report final loss and accuracy on the test set.
score = model.evaluate(X_test, Y_test, verbose=0)
print('Test score:', score[0])
print('Test accuracy:', score[1])
输出结果:
E:\phthon35\python.exe I:/catsVSdogs1-master/catsVSdogs1-master/file01/SparseAutoEncoded.py
Using TensorFlow backend.
(60000, 28, 28) (60000,)
(10000, 28, 28) (10000,)
<class 'numpy.ndarray'>
60000
(60000, 28, 28, 1)
标签: [5 0 4 1 9 2 1 3 1 4 3 5 3 6 1 7 2 8 6 9]
X_train shape: (60000, 28, 28, 1)
60000 train samples
10000 test samples
(60000, 10) [ 0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]
(10000, 10) [ 0. 0. 0. 0. 0. 0. 0. 1. 0. 0.]
Train on 60000 samples, validate on 10000 samples
Epoch 1/12
2018-01-10 16:53:35.490195: I C:\tf_jenkins\home\workspace\rel-win\M\windows-gpu\PY\35\tensorflow\core\platform\cpu_feature_guard.cc:137] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX AVX2
2018-01-10 16:53:35.815757: I C:\tf_jenkins\home\workspace\rel-win\M\windows-gpu\PY\35\tensorflow\core\common_runtime\gpu\gpu_device.cc:1030] Found device 0 with properties:
name: GeForce GTX 1080 Ti major: 6 minor: 1 memoryClockRate(GHz): 1.683
pciBusID: 0000:01:00.0
totalMemory: 11.00GiB freeMemory: 9.10GiB
2018-01-10 16:53:35.816114: I C:\tf_jenkins\home\workspace\rel-win\M\windows-gpu\PY\35\tensorflow\core\common_runtime\gpu\gpu_device.cc:1120] Creating TensorFlow device (/device:GPU:0) -> (device: 0, name: GeForce GTX 1080 Ti, pci bus id: 0000:01:00.0, compute capability: 6.1)
256/60000 [..............................] - ETA: 8:51 - loss: 2.3166 - acc: 0.0977
1536/60000 [..............................] - ETA: 1:29 - loss: 2.2753 - acc: 0.1549
3072/60000 [>.............................] - ETA: 44s - loss: 2.2067 - acc: 0.2230
4096/60000 [=>............................] - ETA: 33s - loss: 2.1455 - acc: 0.2664
5888/60000 [=>............................] - ETA: 23s - loss: 2.0069 - acc: 0.3263
7168/60000 [==>...........................] - ETA: 19s - loss: 1.9077 - acc: 0.3634
8448/60000 [===>..........................] - ETA: 16s - loss: 1.8163 - acc: 0.3965
9984/60000 [===>..........................] - ETA: 13s - loss: 1.7047 - acc: 0.4343
11776/60000 [====>.........................] - ETA: 11s - loss: 1.5922 - acc: 0.4735
13056/60000 [=====>........................] - ETA: 10s - loss: 1.5168 - acc: 0.4998
14848/60000 [======>.......................] - ETA: 8s - loss: 1.4239 - acc: 0.5304
16384/60000 [=======>......................] - ETA: 7s - loss: 1.3560 - acc: 0.5533
17664/60000 [=======>......................] - ETA: 7s - loss: 1.3029 - acc: 0.5716
18688/60000 [========>.....................] - ETA: 6s - loss: 1.2655 - acc: 0.5842
20480/60000 [=========>....................] - ETA: 6s - loss: 1.2021 - acc: 0.6057
21504/60000 [=========>....................] - ETA: 5s - loss: 1.1729 - acc: 0.6154
22784/60000 [==========>...................] - ETA: 5s - loss: 1.1378 - acc: 0.6278
24320/60000 [===========>..................] - ETA: 4s - loss: 1.0990 - acc: 0.6418
26112/60000 [============>.................] - ETA: 4s - loss: 1.0561 - acc: 0.6563
27136/60000 [============>.................] - ETA: 4s - loss: 1.0343 - acc: 0.6636
28416/60000 [=============>................] - ETA: 3s - loss: 1.0064 - acc: 0.6728
29696/60000 [=============>................] - ETA: 3s - loss: 0.9824 - acc: 0.6807
30976/60000 [==============>...............] - ETA: 3s - loss: 0.9594 - acc: 0.6887
32000/60000 [===============>..............] - ETA: 3s - loss: 0.9425 - acc: 0.6943
33280/60000 [===============>..............] - ETA: 3s - loss: 0.9207 - acc: 0.7018
35072/60000 [================>.............] - ETA: 2s - loss: 0.8936 - acc: 0.7109
36096/60000 [=================>............] - ETA: 2s - loss: 0.8791 - acc: 0.7159
37376/60000 [=================>............] - ETA: 2s - loss: 0.8618 - acc: 0.7215
38400/60000 [==================>...........] - ETA: 2s - loss: 0.8482 - acc: 0.7261
39936/60000 [==================>...........] - ETA: 2s - loss: 0.8290 - acc: 0.7328
41472/60000 [===================>..........] - ETA: 1s - loss: 0.8105 - acc: 0.7392
42752/60000 [====================>.........] - ETA: 1s - loss: 0.7972 - acc: 0.7435
44544/60000 [=====================>........] - ETA: 1s - loss: 0.7781 - acc: 0.7499
46336/60000 [======================>.......] - ETA: 1s - loss: 0.7608 - acc: 0.7557
48128/60000 [=======================>......] - ETA: 1s - loss: 0.7444 - acc: 0.7611
49664/60000 [=======================>......] - ETA: 0s - loss: 0.7304 - acc: 0.7659
50432/60000 [========================>.....] - ETA: 0s - loss: 0.7239 - acc: 0.7681
51456/60000 [========================>.....] - ETA: 0s - loss: 0.7157 - acc: 0.7710
52736/60000 [=========================>....] - ETA: 0s - loss: 0.7061 - acc: 0.7743
54784/60000 [==========================>...] - ETA: 0s - loss: 0.6909 - acc: 0.7790
56064/60000 [===========================>..] - ETA: 0s - loss: 0.6810 - acc: 0.7822
57600/60000 [===========================>..] - ETA: 0s - loss: 0.6697 - acc: 0.7860
59392/60000 [============================>.] - ETA: 0s - loss: 0.6578 - acc: 0.7901
60000/60000 [==============================] - 5s 88us/step - loss: 0.6537 - acc: 0.7915 - val_loss: 0.1492 - val_acc: 0.9553
Epoch 2/12
256/60000 [..............................] - ETA: 0s - loss: 0.2639 - acc: 0.9219
1792/60000 [..............................] - ETA: 2s - loss: 0.2680 - acc: 0.9141
3072/60000 [>.............................] - ETA: 2s - loss: 0.2682 - acc: 0.9167
4352/60000 [=>............................] - ETA: 2s - loss: 0.2722 - acc: 0.9173
5888/60000 [=>............................] - ETA: 2s - loss: 0.2795 - acc: 0.9163
6912/60000 [==>...........................] - ETA: 2s - loss: 0.2802 - acc: 0.9151
8448/60000 [===>..........................] - ETA: 2s - loss: 0.2758 - acc: 0.9168
9728/60000 [===>..........................] - ETA: 2s - loss: 0.2712 - acc: 0.9164
11520/60000 [====>.........................] - ETA: 2s - loss: 0.2669 - acc: 0.9187
12800/60000 [=====>........................] - ETA: 2s - loss: 0.2673 - acc: 0.9187
14336/60000 [======>.......................] - ETA: 2s - loss: 0.2651 - acc: 0.9194
15616/60000 [======>.......................] - ETA: 2s - loss: 0.2638 - acc: 0.9195
16896/60000 [=======>......................] - ETA: 1s - loss: 0.2634 - acc: 0.9194
18176/60000 [========>.....................] - ETA: 1s - loss: 0.2606 - acc: 0.9204
19456/60000 [========>.....................] - ETA: 1s - loss: 0.2589 - acc: 0.9211
20992/60000 [=========>....................] - ETA: 1s - loss: 0.2567 - acc: 0.9223
22784/60000 [==========>...................] - ETA: 1s - loss: 0.2531 - acc: 0.9229
24832/60000 [===========>..................] - ETA: 1s - loss: 0.2519 - acc: 0.9235
25856/60000 [===========>..................] - ETA: 1s - loss: 0.2507 - acc: 0.9238
26880/60000 [============>.................] - ETA: 1s - loss: 0.2514 - acc: 0.9238
28416/60000 [=============>................] - ETA: 1s - loss: 0.2480 - acc: 0.9248
29952/60000 [=============>................] - ETA: 1s - loss: 0.2459 - acc: 0.9257
30976/60000 [==============>...............] - ETA: 1s - loss: 0.2447 - acc: 0.9258
32512/60000 [===============>..............] - ETA: 1s - loss: 0.2430 - acc: 0.9261
34048/60000 [================>.............] - ETA: 1s - loss: 0.2400 - acc: 0.9269
35840/60000 [================>.............] - ETA: 1s - loss: 0.2394 - acc: 0.9273
37376/60000 [=================>............] - ETA: 0s - loss: 0.2374 - acc: 0.9280
38912/60000 [==================>...........] - ETA: 0s - loss: 0.2354 - acc: 0.9287
40448/60000 [===================>..........] - ETA: 0s - loss: 0.2348 - acc: 0.9289
41728/60000 [===================>..........] - ETA: 0s - loss: 0.2337 - acc: 0.9292
43008/60000 [====================>.........] - ETA: 0s - loss: 0.2333 - acc: 0.9293
44544/60000 [=====================>........] - ETA: 0s - loss: 0.2320 - acc: 0.9297
45824/60000 [=====================>........] - ETA: 0s - loss: 0.2317 - acc: 0.9300
47616/60000 [======================