CNN Image Recognition

# Load the data
%matplotlib inline
import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
import sklearn

def load_batch(name):
    with open(name, 'rb') as f:
        # Each CIFAR-10 batch file is a pickled dict; np.load needs allow_pickle to read it
        data_dict = np.load(f, encoding='bytes', allow_pickle=True)
        images = data_dict[b'data']
        labels = data_dict[b'labels']

        # Each row is 3072 bytes stored channel-first; reshape to (N, C, H, W)
        images = images.reshape(10000, 3, 32, 32)
        # Transpose to (N, H, W, C) for plotting and for the NHWC input layer
        images = images.transpose(0, 2, 3, 1)

        labels = np.array(labels)
        return images, labels

def load_data():
    images_train = []
    labels_train = []

    for i in range(5):
        f = 'data_batch_%d' % (i+1)
        image_batch, label_batch = load_batch(f)
        images_train.append(image_batch)
        labels_train.append(label_batch)
        del image_batch, label_batch

    # Concatenate the five training batches once, after the loop
    Xtrain = np.concatenate(images_train)
    Ytrain = np.concatenate(labels_train)

    Xtest, Ytest = load_batch('test_batch')
    print('finished loading')

    return Xtrain, Ytrain, Xtest, Ytest

Xtrain,Ytrain,Xtest,Ytest = load_data()

plt.imshow(Xtrain[6])

finished loading





<matplotlib.image.AxesImage at 0xf2e8be0>

[Figure output_1_2.png: sample training image Xtrain[6]]
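As a quick sanity check, the shapes of the loaded arrays can be printed; a minimal sketch (the expected values follow from CIFAR-10's 50,000 training and 10,000 test images of 32x32x3):

# Sanity-check sketch: confirm the shapes produced by load_data()
print(Xtrain.shape)   # expected (50000, 32, 32, 3)
print(Ytrain.shape)   # expected (50000,)
print(Xtest.shape)    # expected (10000, 32, 32, 3)
print(Ytest.shape)    # expected (10000,)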

!pip install sklearn
Collecting sklearn
  Downloading sklearn-0.0.tar.gz (1.1 kB)
Collecting scikit-learn
  Downloading scikit_learn-0.22.2.post1-cp35-cp35m-win_amd64.whl (6.2 MB)
Requirement already satisfied: numpy>=1.11.0 in c:\programdata\anaconda3\envs\tensorflow\lib\site-packages (from scikit-learn->sklearn) (1.18.5)
Collecting joblib>=0.11
  Downloading joblib-0.14.1-py2.py3-none-any.whl (294 kB)
Collecting scipy>=0.17.0
  Downloading scipy-1.4.1-cp35-cp35m-win_amd64.whl (30.8 MB)
Building wheels for collected packages: sklearn
  Building wheel for sklearn (setup.py): started
  Building wheel for sklearn (setup.py): finished with status 'done'
  Created wheel for sklearn: filename=sklearn-0.0-py2.py3-none-any.whl size=1316 sha256=fcaf738561a52706f6b1d4541d249d0d2c8170280ea59d76a4f236c6cdb4d98d
  Stored in directory: c:\users\st\appdata\local\pip\cache\wheels\9e\ec\a6\33cdb5605b0b150074213e154792654a1006e6e6807dc7ca6f
Successfully built sklearn
Installing collected packages: scipy, joblib, scikit-learn, sklearn
Successfully installed joblib-0.14.1 scikit-learn-0.22.2.post1 scipy-1.4.1 sklearn-0.0


# Label dictionary: the class name for each numeric label
label_dict = {0:"airplane",1:"automobile",2:"bird",3:"cat",4:"deer",5:"dog",6:"frog",7:"horse",8:"ship",9:"truck"}

# Function to display images together with their labels and (optionally) predictions
def plot_images_labels_prediction(images,labels,prediction,idx,num=10):
    fig = plt.gcf()
    fig.set_size_inches(12,6)
    if num > 10:
        num = 10
    for i in range(0,num):
        ax = plt.subplot(2,5,1+i)
        ax.imshow(images[idx],cmap="binary")
        
        title = str(i)+','+label_dict[labels[idx]]
        if len(prediction)>0:
            title+='=>'+label_dict[prediction[idx]]
        ax.set_title(title,fontsize=10)
        
        idx += 1
    plt.show()
    
plot_images_labels_prediction(Xtest,Ytest,[],1,10)

[Figure output_3_0.png: the first 10 test images with their class labels]

# Data preprocessing
# Show the first pixel of the first image
Xtrain[0][0][0]
array([59, 62, 63], dtype=uint8)
# Normalize the image data to [0, 1]
Xtrain_normalize = Xtrain.astype('float32')/255.0
Xtest_normalize = Xtest.astype('float32')/255.0

# Inspect the same pixel after preprocessing
Xtrain_normalize[0][0][0]
array([ 0.23137255,  0.24313726,  0.24705882], dtype=float32)
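A quick check (a small sketch, reusing the arrays defined above) that the normalized values now fall inside [0, 1]:

# Sketch: verify the normalized pixel range
print(Xtrain_normalize.min(), Xtrain_normalize.max())   # expected 0.0 1.0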
# One-hot encode the labels
from sklearn.preprocessing import OneHotEncoder

encoder = OneHotEncoder(sparse=False)
yy = [[0],[1],[2],[3],[4],[5],[6],[7],[8],[9]]
encoder.fit(yy)

Ytrain_reshape = Ytrain.reshape(-1,1)
Ytrain_onehot = encoder.transform(Ytrain_reshape)
Ytest_reshape = Ytest.reshape(-1,1)
Ytest_onehot = encoder.transform(Ytest_reshape)

Ytrain[:10]
array([6, 9, 9, 4, 1, 1, 2, 7, 8, 3])
Ytrain_onehot.shape
(50000, 10)
Ytrain[:5]
array([6, 9, 9, 4, 1])
Ytrain_onehot[:5]
array([[ 0.,  0.,  0.,  0.,  0.,  0.,  1.,  0.,  0.,  0.],
       [ 0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  1.],
       [ 0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  1.],
       [ 0.,  0.,  0.,  0.,  1.,  0.,  0.,  0.,  0.,  0.],
       [ 0.,  1.,  0.,  0.,  0.,  0.,  0.,  0.,  0.,  0.]])
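For integer labels 0 to 9 the same encoding can be produced without sklearn; a minimal NumPy sketch (Ytrain_onehot_np and Ytest_onehot_np are hypothetical names introduced here only for comparison):

# Sketch: equivalent one-hot encoding via NumPy indexing
Ytrain_onehot_np = np.eye(10)[Ytrain]
Ytest_onehot_np = np.eye(10)[Ytest]
print((Ytrain_onehot_np == Ytrain_onehot).all())   # expected True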
# Shared helper functions
# Weight initializer: truncated normal
def weight(shape):
    return tf.Variable(tf.truncated_normal(shape, stddev=0.1), name='W')

# Bias initializer: small constant
def bias(shape):
    return tf.Variable(tf.constant(0.1, shape=shape), name='b')

# Convolution op: stride 1 with SAME padding, so the spatial size is unchanged
def conv2d(x, W):
    return tf.nn.conv2d(x, W, strides=[1,1,1,1], padding='SAME')

# 2x2 max pooling with stride 2, which halves the height and width
def max_pool_2x2(x):
    return tf.nn.max_pool(x, ksize=[1,2,2,1], strides=[1,2,2,1], padding='SAME')
# Input layer
with tf.name_scope('input_layer'):
    x = tf.placeholder('float', shape=[None, 32, 32, 3], name='x')

# First convolutional layer
with tf.name_scope('conv_1'):
    W1 = weight([3,3,3,32])
    b1 = bias([32])
    conv_1 = conv2d(x, W1) + b1
    conv_1 = tf.nn.relu(conv_1)

# First pooling layer
with tf.name_scope('pool_1'):
    pool_1 = max_pool_2x2(conv_1)

# Second convolutional layer
with tf.name_scope('conv_2'):
    W2 = weight([3,3,32,64])
    b2 = bias([64])
    conv_2 = conv2d(pool_1, W2) + b2
    conv_2 = tf.nn.relu(conv_2)

# Second pooling layer
with tf.name_scope('pool_2'):
    pool_2 = max_pool_2x2(conv_2)

# Fully connected layer
with tf.name_scope('fc'):
    W3 = weight([4096, 128])
    b3 = bias([128])
    flat = tf.reshape(pool_2, [-1, 4096])
    h = tf.nn.relu(tf.matmul(flat, W3) + b3)
    h_dropout = tf.nn.dropout(h, keep_prob=0.8)
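The flattened size of 4096 used above follows from the two stride-2 poolings: each halves the 32x32 spatial size (32 -> 16 -> 8), and conv_2 produces 64 channels, so 8 * 8 * 64 = 4096. A small sketch of that arithmetic:

# Sketch: trace the spatial size through the two 2x2 poolings
size = 32
for _ in range(2):        # pool_1 and pool_2 each halve height and width
    size = size // 2      # 32 -> 16 -> 8
channels = 64             # output channels of conv_2
print(size * size * channels)   # 4096, the input size of the fully connected layer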
# Output layer
with tf.name_scope('output_layer'):
    W4 = weight([128, 10])
    b4 = bias([10])
    pred = tf.nn.softmax(tf.matmul(h_dropout, W4) + b4)
# Build the model
with tf.name_scope('optimizer'):
    # Placeholder for the labels
    y = tf.placeholder('float', shape=[None, 10], name='label')

    # Loss function
    # Note: tf.nn.softmax_cross_entropy_with_logits expects unscaled logits,
    # whereas pred above has already passed through softmax
    loss_function = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))

    # Optimizer
    optimizer = tf.train.AdamOptimizer(learning_rate=0.0001).minimize(loss_function)

# Accuracy
with tf.name_scope('evaluation'):
    correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_prediction, 'float'))
# Start the session
import os
from time import time

train_epochs = 25
batch_size = 50
total_batch = int(len(Xtrain)/batch_size)

epoch_list = []
accuracy_list = []
loss_list = []

epoch = tf.Variable(0, name='epoch', trainable=False)

startTime = time()

sess = tf.Session()
init = tf.global_variables_initializer()
sess.run(init)
# Checkpoint directory
ckpt_dir = "CIFAR10_log/"
if not os.path.exists(ckpt_dir):
    os.makedirs(ckpt_dir)

# Create the saver
saver = tf.train.Saver(max_to_keep=1)

# If a checkpoint file exists, restore the latest one and recover the variables
ckpt = tf.train.latest_checkpoint(ckpt_dir)
if ckpt != None:
    saver.restore(sess, ckpt)
else:
    print("Training from scratch")

# Get the epoch to resume from
start = sess.run(epoch)
print("Training starts from {} epoch".format(start+1))
Training from scratch
Training starts from 1 epoch
# Iterative training
def get_train_batch(number, batch_size):
    return Xtrain_normalize[number*batch_size:(number+1)*batch_size], Ytrain_onehot[number*batch_size:(number+1)*batch_size]

for ep in range(start, train_epochs):
    for i in range(total_batch):
        batch_x, batch_y = get_train_batch(i,batch_size)
        sess.run(optimizer,feed_dict={x: batch_x, y: batch_y})
        if i %100 == 0:
            print("Step {}".format(i), "finished")
    loss,acc = sess.run([loss_function,accuracy],feed_dict={x: batch_x, y: batch_y})
    epoch_list.append(ep+1)
    loss_list.append(loss)
    accuracy_list.append(acc)
    print("Train epoch:" ,"%02d"%(sess.run(epoch)+1),"Loss=","{:.6f}".format(loss)," Accuracy=",acc)#保存检查点
    
    saver.save(sess,ckpt_dir+"CIFAR10_cnn_model.cpkt",global_step=ep+1)
    sess.run(epoch.assign(ep+1))
duration = time() - startTime
print("Train finished takes:" ,duration)
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 01 Loss= 2.163664  Accuracy= 0.3
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 02 Loss= 2.159031  Accuracy= 0.3
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 03 Loss= 2.152160  Accuracy= 0.3
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 04 Loss= 2.146002  Accuracy= 0.32
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 05 Loss= 2.118356  Accuracy= 0.36
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 06 Loss= 2.115577  Accuracy= 0.36
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 07 Loss= 2.079697  Accuracy= 0.38
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 08 Loss= 2.072792  Accuracy= 0.4
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 09 Loss= 2.049045  Accuracy= 0.42
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 10 Loss= 2.041191  Accuracy= 0.42
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 11 Loss= 2.026438  Accuracy= 0.42
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 12 Loss= 2.034380  Accuracy= 0.46
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 13 Loss= 2.002859  Accuracy= 0.46
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 14 Loss= 1.988719  Accuracy= 0.46
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 15 Loss= 1.987236  Accuracy= 0.5
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 16 Loss= 1.974100  Accuracy= 0.48
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 17 Loss= 2.021529  Accuracy= 0.42
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 18 Loss= 1.996164  Accuracy= 0.44
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 19 Loss= 1.993273  Accuracy= 0.46
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 20 Loss= 2.001982  Accuracy= 0.46
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 21 Loss= 1.966713  Accuracy= 0.5
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 22 Loss= 1.938897  Accuracy= 0.52
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 23 Loss= 1.928690  Accuracy= 0.54
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 24 Loss= 1.972906  Accuracy= 0.48
Step 0 finished
Step 100 finished
Step 200 finished
Step 300 finished
Step 400 finished
Step 500 finished
Step 600 finished
Step 700 finished
Step 800 finished
Step 900 finished
Train epoch: 25 Loss= 1.931121  Accuracy= 0.54
Train finished takes: 4361.418999910355
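Because a checkpoint is saved after every epoch, a later session can restore the trained weights from CIFAR10_log/ without retraining; a minimal sketch reusing the graph, saver, and ckpt_dir defined above (sess2 is a hypothetical name):

# Sketch: restore the latest checkpoint into a fresh session
sess2 = tf.Session()
sess2.run(tf.global_variables_initializer())
latest = tf.train.latest_checkpoint(ckpt_dir)
if latest is not None:
    saver.restore(sess2, latest)
    print("Restored from", latest)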
%matplotlib inline
fig = plt.gcf()
fig.set_size_inches(4,2)
plt.plot(epoch_list,loss_list,label='loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['loss'],loc='upper right')
<matplotlib.legend.Legend at 0xefe2160>

[Figure output_25_1.png: training loss vs. epoch]

# Visualize the accuracy
plt.plot(epoch_list,accuracy_list,label="accuracy")
fig = plt.gcf()
fig.set_size_inches(4,2)
plt.ylim(0.1,1)
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend()
plt.show()

[Figure output_26_0.png: training accuracy vs. epoch]

# Evaluate the model and make predictions
test_total_batch = int(len(Xtest_normalize)/batch_size)
test_acc_sum = 0.0
for i in range(test_total_batch):
    test_image_batch = Xtest_normalize[i*batch_size:(i+1)*batch_size]
    test_label_batch = Ytest_onehot[i*batch_size:(i+1)*batch_size]
    test_batch_acc = sess.run(accuracy,feed_dict = {x:test_image_batch,y:test_label_batch})
    test_acc_sum += test_batch_acc
test_acc = float(test_acc_sum/test_total_batch)
print("Test accuracy:{:.6f}".format(test_acc))
Test accuracy:0.613900
test_pred = sess.run(pred,feed_dict={x:Xtest_normalize[:10]})
prediction_result = sess.run(tf.argmax(test_pred,1))
plot_images_labels_prediction(Xtest,Ytest,prediction_result,0,10)
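To read the result without the plot, the predicted and true class names for the same 10 test images can be printed side by side; a small sketch using label_dict from above:

# Sketch: compare predicted vs. true class names for the first 10 test images
for i in range(10):
    print(i, 'predicted:', label_dict[prediction_result[i]], '| actual:', label_dict[Ytest[i]])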