Implementing CIFAR-10 image classification with a CNN in TensorFlow (painfully slow on CPU)

Code and comments

import cifar10, cifar10_input
import tensorflow as tf
import numpy as np
import time
import os

# Hyperparameters and the default download path for CIFAR-10
max_steps = 3000
batch_size = 128
data_dir = './cifar-10-batches-bin'

def variable_with_weight_loss(shape, stddev, wl):
    # Initialize the weights from a truncated normal distribution
    var = tf.Variable(tf.truncated_normal(shape, stddev=stddev))
    if wl is not None:
        # Apply L2 regularization to the weights. With many features it is
        # easy to overfit, and we don't know in advance which weights to
        # penalize, so regularization penalizes them all.
        # L1 would be too aggressive here: it drives weights sparse, zeroing
        # out the less useful ones, while L2 keeps the penalty spread evenly.
        # wl controls the size of the L2 loss.
        # add_to_collection gathers all the weight losses into a single
        # collection, named 'losses'.
        weight_loss = tf.multiply(tf.nn.l2_loss(var), wl, name='weight_loss')
        tf.add_to_collection('losses', weight_loss)
    return var
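
For reference, tf.nn.l2_loss(w) returns sum(w ** 2) / 2, so wl scales half the squared norm of the weights. A minimal standalone check (not part of the training script above):

import tensorflow as tf

w = tf.constant([1.0, 2.0, 3.0])
with tf.Session() as s:
    print(s.run(tf.nn.l2_loss(w)))  # 7.0 == (1 + 4 + 9) / 2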
​
# Use the cifar10 module to download and extract the dataset to its default location.
# The download failed for me, so I fetched it from the official site instead:
# http://www.cs.toronto.edu/~kriz/cifar.html
# cifar10.maybe_download_and_extract()

# Generate the training data, with preprocessing and data augmentation.
# The returned tensors are already batched: every evaluation produces
# batch_size samples. The augmentation happens inside distorted_inputs:
# horizontal flips, random 24x24 crops, and random brightness changes.
# The images are also standardized.
images_train, labels_train = cifar10_input.distorted_inputs(data_dir=data_dir, batch_size=batch_size)
# For the test data, crop the central 24x24 region and standardize.
# Since training uses random 24x24 crops, evaluation matches that size,
# just without the random distortions.
images_test, labels_test = cifar10_input.inputs(eval_data=True, data_dir=data_dir, batch_size=batch_size)
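
For the curious, the per-image pipeline inside distorted_inputs looks roughly like the sketch below. The exact parameter values are assumptions based on the TensorFlow CIFAR-10 tutorial and may differ between versions:

def distort_one_image(image):
    # image: a [32, 32, 3] float32 tensor decoded from the binary files
    image = tf.random_crop(image, [24, 24, 3])        # random 24x24 crop
    image = tf.image.random_flip_left_right(image)    # random horizontal flip
    image = tf.image.random_brightness(image, max_delta=63)
    image = tf.image.random_contrast(image, lower=0.2, upper=1.8)
    return tf.image.per_image_standardization(image)  # zero mean, unit variance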
​
# (console output) Filling queue with 20000 CIFAR images before starting to train. This will take a few minutes.

# Placeholders for the features and labels. The batch size must be fixed
# here rather than None, because later ops (e.g. the tf.reshape below)
# use batch_size explicitly.
image_holder = tf.placeholder(tf.float32, [batch_size, 24, 24, 3])
label_holder = tf.placeholder(tf.int32, [batch_size])

# First convolutional layer
# No L2 penalty on the conv weights (wl=0.0)
weight1 = variable_with_weight_loss(shape=[5, 5, 3, 64], stddev=5e-2, wl=0.0)
kernel1 = tf.nn.conv2d(image_holder, weight1, [1, 1, 1, 1], padding='SAME')
bias1 = tf.Variable(tf.constant(0.0, shape=[64]))
conv1 = tf.nn.relu(tf.nn.bias_add(kernel1, bias1))
# Max pooling with a 3x3 window and 2x2 stride
pool1 = tf.nn.max_pool(conv1, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME')
# LRN acts as a kind of normalization and can improve generalization
norm1 = tf.nn.lrn(pool1, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75)
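
Concretely, tf.nn.lrn divides each activation by a power of the sum of squared activations over neighboring channels at the same spatial position. A numpy sketch of the formula, using the depth_radius=4 and the constants passed above:

import numpy as np

def lrn_1d(a, depth_radius=4, bias=1.0, alpha=0.001 / 9.0, beta=0.75):
    # a: 1-D float array of channel activations at one (x, y) position
    out = np.empty_like(a)
    for i in range(len(a)):
        lo, hi = max(0, i - depth_radius), min(len(a), i + depth_radius + 1)
        out[i] = a[i] / (bias + alpha * np.sum(a[lo:hi] ** 2)) ** beta
    return out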
​
# Second convolutional layer
weight2 = variable_with_weight_loss(shape=[5, 5, 64, 64], stddev=5e-2, wl=0.0)
kernel2 = tf.nn.conv2d(norm1, weight2, [1, 1, 1, 1], padding='SAME')
bias2 = tf.Variable(tf.constant(0.1, shape=[64]))
conv2 = tf.nn.relu(tf.nn.bias_add(kernel2, bias2))
# Note the order is swapped relative to layer 1: LRN first, then pooling
norm2 = tf.nn.lrn(conv2, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75)
pool2 = tf.nn.max_pool(norm2, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME')

# Fully connected layer
# Flatten first: the first dimension is the batch size, the second takes
# whatever is left
reshape = tf.reshape(pool2, [batch_size, -1])
dim = reshape.get_shape()[1].value
# 384 hidden units; this truncated-normal weight initialization is pretty
# much all-purpose. To keep the fully connected layer from overfitting,
# a nonzero L2 coefficient is set, so every weight in this layer gets
# the L2 penalty.
weight3 = variable_with_weight_loss(shape=[dim, 384], stddev=0.04, wl=0.004)
bias3 = tf.Variable(tf.constant(0.1, shape=[384]))
local3 = tf.nn.relu(tf.matmul(reshape, weight3) + bias3)
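
To see where dim comes from, trace the shapes through the network (pure arithmetic; this check is not in the original script):

# input        24 x 24 x 3
# conv1 SAME   24 x 24 x 64
# pool1 s=2    12 x 12 x 64   (SAME padding: ceil(24 / 2) = 12)
# conv2 SAME   12 x 12 x 64
# pool2 s=2     6 x  6 x 64   (ceil(12 / 2) = 6)
assert 6 * 6 * 64 == 2304  # so dim == 2304 and weight3 has shape [2304, 384]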
​
# Second fully connected layer
weight4 = variable_with_weight_loss(shape=[384, 192], stddev=0.04, wl=0.004)
bias4 = tf.Variable(tf.constant(0.1, shape=[192]))
local4 = tf.nn.relu(tf.matmul(local3, weight4) + bias4)

# Output layer. No softmax is needed here; it is folded into the loss below.
weight5 = variable_with_weight_loss(shape=[192, 10], stddev=1 / 192.0, wl=0.0)
bias5 = tf.Variable(tf.constant(0.0, shape=[10]))
logits = tf.add(tf.matmul(local4, weight5), bias5)

# Compute the loss. This op fuses the softmax with the cross-entropy.
def loss(logits, labels):
    labels = tf.cast(labels, tf.int64)
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=logits, labels=labels, name='cross_entropy_per_example')
    cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
    # Throw the cross-entropy into the 'losses' collection as well
    tf.add_to_collection('losses', cross_entropy_mean)
    # Summing the collection gives the final loss: the cross-entropy plus
    # the L2 losses of the weights in the two fully connected layers
    return tf.add_n(tf.get_collection('losses'), name='total_loss')
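
Per example, the fused op computes -log(softmax(logits)[label]). A numpy sketch of that computation, with the usual max-subtraction for numerical stability:

import numpy as np

def xent_one(logits, label):
    z = logits - np.max(logits)        # stabilize before exponentiating
    p = np.exp(z) / np.sum(np.exp(z))  # softmax
    return -np.log(p[label])           # negative log-likelihood of the true class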
​
# Pass the logits node and the label placeholder in to get the final loss
loss = loss(logits, label_holder)

# Adam optimizer with a learning rate of 1e-3
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)

# Top-k accuracy op; k is 1 here, i.e. how often the single
# highest-scoring class is the correct one
top_k_op = tf.nn.in_top_k(logits, label_holder, 1)
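
in_top_k returns one boolean per example: whether the true label is among the k highest logits. A numpy sketch (TF handles ties slightly differently):

import numpy as np

def in_top_k_np(logits, labels, k=1):
    # logits: [batch, classes]; labels: [batch] integer class ids
    topk = np.argsort(-logits, axis=1)[:, :k]  # indices of the k largest logits
    return np.array([labels[i] in topk[i] for i in range(len(labels))])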
# Create a session and initialize all the parameters
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()

# Start the image-augmentation queue threads mentioned earlier;
# 16 threads are used to speed up preprocessing.
tf.train.start_queue_runners()
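
The script relies on the process exiting to stop those threads. A tidier variant (a sketch, not what the code above does) passes a Coordinator so they can be shut down cleanly:

coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
# ... run the training loop ...
# coord.request_stop()
# coord.join(threads)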

for step in range(max_steps):
    # Record the start time
    start_time = time.time()
    # Run the session to fetch one batch of training data
    image_batch, label_batch = sess.run([images_train, labels_train])
    # Feed that batch into train_op and the loss computation
    _, loss_value = sess.run([train_op, loss], feed_dict={image_holder: image_batch, label_holder: label_batch})
    # See how long training takes.
    # On a GTX GPU this trains roughly 1800 examples per second, about
    # 0.066 s per batch (my CPU manages only about 100 examples/sec, heh).
    # The loss eventually settles around 1.0.
    duration = time.time() - start_time
    if step % 10 == 0:
        examples_per_sec = batch_size / duration
        sec_per_batch = float(duration)
        format_str = ('step %d ,loss=%.2f (%.1f examples/sec;%.3f sec/batch)')
        print(format_str % (step, loss_value, examples_per_sec, sec_per_batch))

step 0 ,loss=6.87 (93.3 examples/sec;1.371 sec/batch)
step 10 ,loss=5.08 (110.8 examples/sec;1.155 sec/batch)
step 20 ,loss=3.99 (105.6 examples/sec;1.212 sec/batch)
step 30 ,loss=3.23 (106.9 examples/sec;1.198 sec/batch)
step 40 ,loss=2.84 (110.4 examples/sec;1.159 sec/batch)
step 50 ,loss=2.51 (105.7 examples/sec;1.211 sec/batch)
step 60 ,loss=2.23 (107.6 examples/sec;1.190 sec/batch)
step 70 ,loss=2.08 (105.6 examples/sec;1.212 sec/batch)
step 80 ,loss=2.06 (107.8 examples/sec;1.187 sec/batch)
step 90 ,loss=1.93 (104.0 examples/sec;1.230 sec/batch)
step 100 ,loss=2.10 (105.3 examples/sec;1.216 sec/batch)
step 110 ,loss=1.98 (93.8 examples/sec;1.364 sec/batch)
step 120 ,loss=2.11 (99.4 examples/sec;1.288 sec/batch)
step 130 ,loss=1.89 (67.0 examples/sec;1.910 sec/batch)
step 140 ,loss=1.80 (107.4 examples/sec;1.192 sec/batch)
step 150 ,loss=1.83 (95.1 examples/sec;1.346 sec/batch)
step 160 ,loss=2.12 (105.0 examples/sec;1.219 sec/batch)
step 170 ,loss=1.69 (108.3 examples/sec;1.182 sec/batch)
step 180 ,loss=1.89 (101.4 examples/sec;1.263 sec/batch)
step 190 ,loss=1.93 (109.2 examples/sec;1.172 sec/batch)
step 200 ,loss=1.66 (107.0 examples/sec;1.197 sec/batch)
step 210 ,loss=1.77 (107.9 examples/sec;1.186 sec/batch)
step 220 ,loss=1.71 (110.9 examples/sec;1.154 sec/batch)
step 230 ,loss=1.55 (108.0 examples/sec;1.185 sec/batch)
step 240 ,loss=1.54 (104.9 examples/sec;1.220 sec/batch)
step 250 ,loss=1.53 (106.9 examples/sec;1.197 sec/batch)
step 260 ,loss=1.73 (105.2 examples/sec;1.217 sec/batch)
step 270 ,loss=1.66 (111.3 examples/sec;1.150 sec/batch)
step 280 ,loss=1.75 (109.0 examples/sec;1.175 sec/batch)
step 290 ,loss=1.54 (109.6 examples/sec;1.168 sec/batch)
step 300 ,loss=1.62 (97.2 examples/sec;1.317 sec/batch)
step 310 ,loss=1.75 (105.9 examples/sec;1.209 sec/batch)
step 320 ,loss=1.74 (108.7 examples/sec;1.177 sec/batch)
step 330 ,loss=1.67 (111.1 examples/sec;1.152 sec/batch)
step 340 ,loss=1.57 (103.4 examples/sec;1.237 sec/batch)
step 350 ,loss=1.62 (105.2 examples/sec;1.217 sec/batch)
step 360 ,loss=1.55 (108.9 examples/sec;1.176 sec/batch)
step 370 ,loss=1.63 (111.1 examples/sec;1.152 sec/batch)
step 380 ,loss=1.48 (105.0 examples/sec;1.219 sec/batch)
step 390 ,loss=1.88 (109.0 examples/sec;1.174 sec/batch)
step 400 ,loss=1.54 (110.1 examples/sec;1.162 sec/batch)
step 410 ,loss=1.51 (105.7 examples/sec;1.210 sec/batch)
step 420 ,loss=1.50 (108.6 examples/sec;1.178 sec/batch)
step 430 ,loss=1.35 (110.2 examples/sec;1.161 sec/batch)
step 440 ,loss=1.50 (109.1 examples/sec;1.173 sec/batch)
step 450 ,loss=1.66 (103.1 examples/sec;1.242 sec/batch)
step 460 ,loss=1.39 (105.3 examples/sec;1.216 sec/batch)
step 470 ,loss=1.30 (110.4 examples/sec;1.160 sec/batch)
step 480 ,loss=1.43 (109.4 examples/sec;1.170 sec/batch)
step 490 ,loss=1.52 (110.6 examples/sec;1.158 sec/batch)
step 500 ,loss=1.45 (107.4 examples/sec;1.192 sec/batch)
step 510 ,loss=1.52 (106.2 examples/sec;1.205 sec/batch)
step 520 ,loss=1.38 (110.9 examples/sec;1.154 sec/batch)
step 530 ,loss=1.58 (107.2 examples/sec;1.194 sec/batch)
step 540 ,loss=1.44 (109.0 examples/sec;1.174 sec/batch)
step 550 ,loss=1.58 (108.0 examples/sec;1.185 sec/batch)
step 560 ,loss=1.56 (79.4 examples/sec;1.612 sec/batch)
step 570 ,loss=1.47 (105.3 examples/sec;1.216 sec/batch)
step 580 ,loss=1.37 (98.2 examples/sec;1.303 sec/batch)
step 590 ,loss=1.51 (97.8 examples/sec;1.308 sec/batch)
step 600 ,loss=1.41 (103.4 examples/sec;1.237 sec/batch)
step 610 ,loss=1.64 (102.9 examples/sec;1.243 sec/batch)
step 620 ,loss=1.52 (106.1 examples/sec;1.207 sec/batch)
step 630 ,loss=1.59 (101.2 examples/sec;1.264 sec/batch)
step 640 ,loss=1.34 (106.0 examples/sec;1.208 sec/batch)
step 650 ,loss=1.47 (106.9 examples/sec;1.198 sec/batch)
step 660 ,loss=1.46 (106.6 examples/sec;1.201 sec/batch)
step 670 ,loss=1.30 (106.0 examples/sec;1.208 sec/batch)
step 680 ,loss=1.25 (110.5 examples/sec;1.159 sec/batch)
step 690 ,loss=1.31 (109.9 examples/sec;1.164 sec/batch)
step 700 ,loss=1.35 (104.8 examples/sec;1.222 sec/batch)
step 710 ,loss=1.32 (91.6 examples/sec;1.397 sec/batch)
step 720 ,loss=1.28 (103.3 examples/sec;1.239 sec/batch)
step 730 ,loss=1.36 (107.7 examples/sec;1.188 sec/batch)
step 740 ,loss=1.29 (106.6 examples/sec;1.201 sec/batch)
step 750 ,loss=1.38 (99.3 examples/sec;1.289 sec/batch)
step 760 ,loss=1.33 (109.4 examples/sec;1.170 sec/batch)
step 770 ,loss=1.39 (108.9 examples/sec;1.175 sec/batch)
step 780 ,loss=1.59 (87.9 examples/sec;1.456 sec/batch)
step 790 ,loss=1.22 (103.3 examples/sec;1.239 sec/batch)
step 800 ,loss=1.31 (88.5 examples/sec;1.447 sec/batch)
step 810 ,loss=1.14 (105.0 examples/sec;1.219 sec/batch)
step 820 ,loss=1.37 (106.5 examples/sec;1.202 sec/batch)
step 830 ,loss=1.30 (106.0 examples/sec;1.207 sec/batch)
step 840 ,loss=1.29 (101.9 examples/sec;1.256 sec/batch)
step 850 ,loss=1.43 (104.3 examples/sec;1.228 sec/batch)
step 860 ,loss=1.21 (104.6 examples/sec;1.224 sec/batch)
step 870 ,loss=1.31 (106.4 examples/sec;1.203 sec/batch)
step 880 ,loss=1.29 (109.3 examples/sec;1.171 sec/batch)
step 890 ,loss=1.27 (104.9 examples/sec;1.220 sec/batch)
step 900 ,loss=1.43 (106.1 examples/sec;1.206 sec/batch)
step 910 ,loss=1.38 (106.7 examples/sec;1.200 sec/batch)
step 920 ,loss=1.27 (105.5 examples/sec;1.214 sec/batch)
step 930 ,loss=1.58 (97.3 examples/sec;1.316 sec/batch)
step 940 ,loss=1.57 (80.4 examples/sec;1.591 sec/batch)
step 950 ,loss=1.28 (83.7 examples/sec;1.529 sec/batch)
step 960 ,loss=1.04 (86.0 examples/sec;1.489 sec/batch)
step 970 ,loss=1.29 (98.0 examples/sec;1.306 sec/batch)
step 980 ,loss=1.40 (107.1 examples/sec;1.195 sec/batch)
step 990 ,loss=1.34 (99.2 examples/sec;1.290 sec/batch)
step 1000 ,loss=1.20 (104.9 examples/sec;1.220 sec/batch)
step 1010 ,loss=1.21 (106.1 examples/sec;1.206 sec/batch)
step 1020 ,loss=1.31 (107.6 examples/sec;1.189 sec/batch)
step 1030 ,loss=1.12 (108.7 examples/sec;1.177 sec/batch)
step 1040 ,loss=1.25 (107.1 examples/sec;1.195 sec/batch)
step 1050 ,loss=1.26 (105.4 examples/sec;1.215 sec/batch)
step 1060 ,loss=1.25 (106.5 examples/sec;1.202 sec/batch)
step 1070 ,loss=1.36 (109.5 examples/sec;1.169 sec/batch)
step 1080 ,loss=1.26 (108.7 examples/sec;1.178 sec/batch)
step 1090 ,loss=1.35 (102.5 examples/sec;1.249 sec/batch)
step 1100 ,loss=1.32 (108.3 examples/sec;1.182 sec/batch)
step 1110 ,loss=1.58 (109.0 examples/sec;1.174 sec/batch)
step 1120 ,loss=1.23 (102.0 examples/sec;1.254 sec/batch)
step 1130 ,loss=1.18 (107.9 examples/sec;1.186 sec/batch)
step 1140 ,loss=1.20 (108.3 examples/sec;1.182 sec/batch)
step 1150 ,loss=1.22 (105.5 examples/sec;1.213 sec/batch)
step 1160 ,loss=1.22 (109.8 examples/sec;1.165 sec/batch)
step 1170 ,loss=1.28 (110.6 examples/sec;1.158 sec/batch)
step 1180 ,loss=1.07 (104.8 examples/sec;1.221 sec/batch)
step 1190 ,loss=1.07 (88.2 examples/sec;1.452 sec/batch)
step 1200 ,loss=1.21 (103.5 examples/sec;1.237 sec/batch)
step 1210 ,loss=1.29 (107.6 examples/sec;1.189 sec/batch)
step 1220 ,loss=1.17 (106.2 examples/sec;1.205 sec/batch)
step 1230 ,loss=1.24 (108.2 examples/sec;1.183 sec/batch)
step 1240 ,loss=1.32 (92.7 examples/sec;1.380 sec/batch)
step 1250 ,loss=1.06 (106.1 examples/sec;1.206 sec/batch)
step 1260 ,loss=1.28 (98.2 examples/sec;1.304 sec/batch)
step 1270 ,loss=1.11 (97.2 examples/sec;1.317 sec/batch)
step 1280 ,loss=1.26 (106.5 examples/sec;1.202 sec/batch)
step 1290 ,loss=1.30 (104.2 examples/sec;1.229 sec/batch)
step 1300 ,loss=1.21 (107.6 examples/sec;1.189 sec/batch)
step 1310 ,loss=1.26 (102.2 examples/sec;1.252 sec/batch)
step 1320 ,loss=1.31 (107.9 examples/sec;1.186 sec/batch)
step 1330 ,loss=1.26 (107.9 examples/sec;1.186 sec/batch)
step 1340 ,loss=1.18 (107.4 examples/sec;1.192 sec/batch)
step 1350 ,loss=1.24 (99.7 examples/sec;1.284 sec/batch)
step 1360 ,loss=1.21 (106.5 examples/sec;1.202 sec/batch)
step 1370 ,loss=1.22 (102.9 examples/sec;1.244 sec/batch)
step 1380 ,loss=1.38 (103.7 examples/sec;1.234 sec/batch)
step 1390 ,loss=1.15 (103.5 examples/sec;1.237 sec/batch)
step 1400 ,loss=1.19 (106.8 examples/sec;1.198 sec/batch)
step 1410 ,loss=1.37 (103.9 examples/sec;1.232 sec/batch)
step 1420 ,loss=1.30 (99.0 examples/sec;1.292 sec/batch)
step 1430 ,loss=1.27 (99.3 examples/sec;1.289 sec/batch)
step 1440 ,loss=1.17 (103.3 examples/sec;1.239 sec/batch)
step 1450 ,loss=1.29 (100.1 examples/sec;1.279 sec/batch)
step 1460 ,loss=1.19 (100.6 examples/sec;1.273 sec/batch)
step 1470 ,loss=1.19 (100.5 examples/sec;1.273 sec/batch)
step 1480 ,loss=1.30 (104.2 examples/sec;1.229 sec/batch)
step 1490 ,loss=1.23 (96.2 examples/sec;1.330 sec/batch)
step 1500 ,loss=1.19 (98.0 examples/sec;1.306 sec/batch)
step 1510 ,loss=1.10 (93.2 examples/sec;1.374 sec/batch)
step 1520 ,loss=1.34 (108.3 examples/sec;1.182 sec/batch)
step 1530 ,loss=1.27 (108.5 examples/sec;1.179 sec/batch)
step 1540 ,loss=1.16 (99.7 examples/sec;1.284 sec/batch)
step 1550 ,loss=1.22 (99.0 examples/sec;1.293 sec/batch)
step 1560 ,loss=1.10 (107.7 examples/sec;1.188 sec/batch)
step 1570 ,loss=1.17 (105.9 examples/sec;1.208 sec/batch)
step 1580 ,loss=1.34 (107.5 examples/sec;1.190 sec/batch)
step 1590 ,loss=1.34 (105.4 examples/sec;1.214 sec/batch)
step 1600 ,loss=1.09 (95.5 examples/sec;1.340 sec/batch)
step 1610 ,loss=1.26 (94.4 examples/sec;1.355 sec/batch)
step 1620 ,loss=1.18 (91.3 examples/sec;1.401 sec/batch)
step 1630 ,loss=1.16 (93.5 examples/sec;1.369 sec/batch)
step 1640 ,loss=1.09 (103.5 examples/sec;1.237 sec/batch)
step 1650 ,loss=1.01 (103.4 examples/sec;1.238 sec/batch)
step 1660 ,loss=1.20 (99.0 examples/sec;1.293 sec/batch)
step 1670 ,loss=1.19 (103.6 examples/sec;1.235 sec/batch)
step 1680 ,loss=1.16 (100.4 examples/sec;1.274 sec/batch)
step 1690 ,loss=1.28 (94.5 examples/sec;1.354 sec/batch)
step 1700 ,loss=1.16 (104.2 examples/sec;1.228 sec/batch)
step 1710 ,loss=1.10 (103.4 examples/sec;1.238 sec/batch)
step 1720 ,loss=1.03 (100.8 examples/sec;1.269 sec/batch)
step 1730 ,loss=1.16 (104.3 examples/sec;1.227 sec/batch)
step 1740 ,loss=0.93 (108.7 examples/sec;1.178 sec/batch)
step 1750 ,loss=1.21 (107.4 examples/sec;1.191 sec/batch)
step 1760 ,loss=1.14 (108.0 examples/sec;1.185 sec/batch)
step 1770 ,loss=1.19 (103.4 examples/sec;1.238 sec/batch)
step 1780 ,loss=1.09 (103.5 examples/sec;1.237 sec/batch)
step 1790 ,loss=1.17 (105.0 examples/sec;1.219 sec/batch)
step 1800 ,loss=1.13 (104.6 examples/sec;1.223 sec/batch)
step 1810 ,loss=1.22 (99.7 examples/sec;1.284 sec/batch)
step 1820 ,loss=1.45 (88.6 examples/sec;1.444 sec/batch)
step 1830 ,loss=1.01 (104.3 examples/sec;1.227 sec/batch)
step 1840 ,loss=1.34 (107.0 examples/sec;1.196 sec/batch)
step 1850 ,loss=1.16 (104.2 examples/sec;1.228 sec/batch)
step 1860 ,loss=1.19 (94.1 examples/sec;1.361 sec/batch)
step 1870 ,loss=1.05 (107.8 examples/sec;1.187 sec/batch)
step 1880 ,loss=1.14 (81.4 examples/sec;1.573 sec/batch)
step 1890 ,loss=1.20 (101.2 examples/sec;1.265 sec/batch)
step 1900 ,loss=1.07 (103.7 examples/sec;1.234 sec/batch)
step 1910 ,loss=1.15 (98.1 examples/sec;1.304 sec/batch)
step 1920 ,loss=1.36 (105.2 examples/sec;1.216 sec/batch)
step 1930 ,loss=1.16 (88.7 examples/sec;1.443 sec/batch)
step 1940 ,loss=1.07 (93.0 examples/sec;1.376 sec/batch)
step 1950 ,loss=1.00 (96.4 examples/sec;1.327 sec/batch)
step 1960 ,loss=0.98 (94.8 examples/sec;1.350 sec/batch)
step 1970 ,loss=1.19 (93.3 examples/sec;1.372 sec/batch)
step 1980 ,loss=1.16 (95.8 examples/sec;1.336 sec/batch)
step 1990 ,loss=1.24 (103.7 examples/sec;1.235 sec/batch)
step 2000 ,loss=1.34 (99.5 examples/sec;1.286 sec/batch)
step 2010 ,loss=1.06 (96.7 examples/sec;1.323 sec/batch)
step 2020 ,loss=1.31 (102.7 examples/sec;1.247 sec/batch)
step 2030 ,loss=1.14 (95.5 examples/sec;1.340 sec/batch)
step 2040 ,loss=1.07 (102.9 examples/sec;1.244 sec/batch)
step 2050 ,loss=1.20 (95.2 examples/sec;1.345 sec/batch)
step 2060 ,loss=0.94 (95.2 examples/sec;1.344 sec/batch)
step 2070 ,loss=1.08 (105.2 examples/sec;1.216 sec/batch)
step 2080 ,loss=1.20 (103.0 examples/sec;1.242 sec/batch)
step 2090 ,loss=1.04 (107.1 examples/sec;1.195 sec/batch)
step 2100 ,loss=1.22 (100.5 examples/sec;1.274 sec/batch)
step 2110 ,loss=1.19 (107.8 examples/sec;1.187 sec/batch)
step 2120 ,loss=1.12 (108.2 examples/sec;1.183 sec/batch)
step 2130 ,loss=1.07 (107.0 examples/sec;1.197 sec/batch)
step 2140 ,loss=1.15 (106.1 examples/sec;1.206 sec/batch)
step 2150 ,loss=1.25 (92.8 examples/sec;1.379 sec/batch)
step 2160 ,loss=1.25 (105.2 examples/sec;1.217 sec/batch)
step 2170 ,loss=1.26 (104.9 examples/sec;1.220 sec/batch)
step 2180 ,loss=1.09 (93.2 examples/sec;1.374 sec/batch)
step 2190 ,loss=1.16 (106.5 examples/sec;1.202 sec/batch)
step 2200 ,loss=1.16 (100.8 examples/sec;1.269 sec/batch)
step 2210 ,loss=0.97 (107.6 examples/sec;1.190 sec/batch)
step 2220 ,loss=1.07 (101.0 examples/sec;1.267 sec/batch)
step 2230 ,loss=1.12 (104.6 examples/sec;1.224 sec/batch)
step 2240 ,loss=1.22 (105.5 examples/sec;1.213 sec/batch)
step 2250 ,loss=1.07 (105.9 examples/sec;1.208 sec/batch)
step 2260 ,loss=1.18 (95.5 examples/sec;1.341 sec/batch)
step 2270 ,loss=1.09 (97.4 examples/sec;1.314 sec/batch)
step 2280 ,loss=1.01 (99.9 examples/sec;1.281 sec/batch)
step 2290 ,loss=1.17 (105.7 examples/sec;1.211 sec/batch)
step 2300 ,loss=1.07 (101.5 examples/sec;1.261 sec/batch)
step 2310 ,loss=1.04 (105.0 examples/sec;1.219 sec/batch)
step 2320 ,loss=0.93 (83.5 examples/sec;1.532 sec/batch)
step 2330 ,loss=1.14 (100.4 examples/sec;1.274 sec/batch)
step 2340 ,loss=1.04 (101.4 examples/sec;1.262 sec/batch)
step 2350 ,loss=1.08 (108.6 examples/sec;1.178 sec/batch)
step 2360 ,loss=1.05 (103.7 examples/sec;1.235 sec/batch)
step 2370 ,loss=1.05 (98.9 examples/sec;1.295 sec/batch)
step 2380 ,loss=0.89 (109.8 examples/sec;1.165 sec/batch)
step 2390 ,loss=1.11 (84.7 examples/sec;1.512 sec/batch)
step 2400 ,loss=1.15 (108.7 examples/sec;1.177 sec/batch)
step 2410 ,loss=1.13 (93.9 examples/sec;1.363 sec/batch)
step 2420 ,loss=1.28 (96.7 examples/sec;1.323 sec/batch)
step 2430 ,loss=1.06 (101.7 examples/sec;1.259 sec/batch)
step 2440 ,loss=1.31 (92.8 examples/sec;1.380 sec/batch)
step 2450 ,loss=1.09 (106.6 examples/sec;1.200 sec/batch)
step 2460 ,loss=1.38 (100.9 examples/sec;1.268 sec/batch)
step 2470 ,loss=1.06 (108.3 examples/sec;1.182 sec/batch)
step 2480 ,loss=0.99 (97.4 examples/sec;1.314 sec/batch)
step 2490 ,loss=1.07 (96.3 examples/sec;1.329 sec/batch)
step 2500 ,loss=1.13 (106.3 examples/sec;1.204 sec/batch)
step 2510 ,loss=1.17 (102.7 examples/sec;1.246 sec/batch)
step 2520 ,loss=0.98 (100.8 examples/sec;1.269 sec/batch)
step 2530 ,loss=1.05 (105.3 examples/sec;1.215 sec/batch)
step 2540 ,loss=1.03 (95.1 examples/sec;1.345 sec/batch)
step 2550 ,loss=1.16 (90.3 examples/sec;1.417 sec/batch)
step 2560 ,loss=1.03 (107.5 examples/sec;1.191 sec/batch)
step 2570 ,loss=1.15 (104.3 examples/sec;1.227 sec/batch)
step 2580 ,loss=1.06 (105.4 examples/sec;1.215 sec/batch)
step 2590 ,loss=1.06 (98.1 examples/sec;1.305 sec/batch)
step 2600 ,loss=1.04 (101.1 examples/sec;1.266 sec/batch)
step 2610 ,loss=1.07 (88.9 examples/sec;1.440 sec/batch)
step 2620 ,loss=1.06 (92.2 examples/sec;1.388 sec/batch)
step 2630 ,loss=0.97 (88.3 examples/sec;1.450 sec/batch)
step 2640 ,loss=1.08 (97.0 examples/sec;1.320 sec/batch)
step 2650 ,loss=1.02 (100.7 examples/sec;1.271 sec/batch)
step 2660 ,loss=1.11 (101.5 examples/sec;1.261 sec/batch)
step 2670 ,loss=1.24 (107.4 examples/sec;1.192 sec/batch)
step 2680 ,loss=1.13 (99.1 examples/sec;1.291 sec/batch)
step 2690 ,loss=1.05 (100.6 examples/sec;1.272 sec/batch)
step 2700 ,loss=1.01 (102.4 examples/sec;1.250 sec/batch)
step 2710 ,loss=1.08 (101.5 examples/sec;1.261 sec/batch)
step 2720 ,loss=1.25 (104.9 examples/sec;1.220 sec/batch)
step 2730 ,loss=0.94 (96.8 examples/sec;1.322 sec/batch)
step 2740 ,loss=1.18 (104.8 examples/sec;1.221 sec/batch)
step 2750 ,loss=0.91 (101.2 examples/sec;1.264 sec/batch)
step 2760 ,loss=0.98 (102.3 examples/sec;1.251 sec/batch)
step 2770 ,loss=1.06 (86.7 examples/sec;1.476 sec/batch)
step 2780 ,loss=1.02 (102.8 examples/sec;1.245 sec/batch)
step 2790 ,loss=0.99 (103.3 examples/sec;1.239 sec/batch)
step 2800 ,loss=1.21 (100.0 examples/sec;1.280 sec/batch)
step 2810 ,loss=1.06 (108.2 examples/sec;1.183 sec/batch)
step 2820 ,loss=0.91 (106.3 examples/sec;1.204 sec/batch)
step 2830 ,loss=1.01 (105.8 examples/sec;1.210 sec/batch)
step 2840 ,loss=1.21 (99.7 examples/sec;1.284 sec/batch)
step 2850 ,loss=1.23 (101.4 examples/sec;1.263 sec/batch)
step 2860 ,loss=1.10 (103.6 examples/sec;1.236 sec/batch)
step 2870 ,loss=1.16 (106.1 examples/sec;1.206 sec/batch)
step 2880 ,loss=0.97 (99.7 examples/sec;1.284 sec/batch)
step 2890 ,loss=1.15 (106.5 examples/sec;1.202 sec/batch)
step 2900 ,loss=0.88 (106.4 examples/sec;1.203 sec/batch)
step 2910 ,loss=1.05 (104.4 examples/sec;1.226 sec/batch)
step 2920 ,loss=1.04 (109.3 examples/sec;1.171 sec/batch)
step 2930 ,loss=1.14 (107.1 examples/sec;1.195 sec/batch)
step 2940 ,loss=1.04 (109.2 examples/sec;1.172 sec/batch)
step 2950 ,loss=1.00 (109.0 examples/sec;1.174 sec/batch)
step 2960 ,loss=1.02 (105.2 examples/sec;1.217 sec/batch)
step 2970 ,loss=1.14 (98.0 examples/sec;1.306 sec/batch)
step 2980 ,loss=1.31 (98.2 examples/sec;1.303 sec/batch)
step 2990 ,loss=1.02 (99.5 examples/sec;1.286 sec/batch)

# Evaluation. The test set is smaller, but we still use the same batch_size,
# so first compute how many batches are needed to cover it, then feed one
# batch at a time. Each step fetches a test batch with session.run, counts
# the top-1 correct predictions with top_k_op, and finally the overall
# precision is computed. Note that ceil(10000 / 128) = 79 batches cover
# 79 * 128 = 10112 samples, slightly more than the 10000 test images.
num_examples = 10000
import math
num_iter = int(math.ceil(num_examples / batch_size))
true_count = 0
total_sample_count = num_iter * batch_size
step = 0
while step < num_iter:
    image_batch, label_batch = sess.run([images_test, labels_test])
    predictions = sess.run([top_k_op], feed_dict={image_holder: image_batch, label_holder: label_batch})
    true_count += np.sum(predictions)
    step += 1

precision = true_count / total_sample_count
print('precision@1 = %.3f' % precision)

precision@1 = 0.705