Getting Started with CIFAR-10 + TensorFlow (Windows 10): Study Notes

After a long struggle, I finally managed to get hold of the CIFAR-10 dataset.

Normally, calling cifar10.maybe_download_and_extract() should be enough; in my case, however, the connection to the download server kept timing out.

So, without further ado, here is what worked.

First, grab the CIFAR-10 helper scripts with git (install git first if you don't have it). In a command prompt, run:

git clone https://github.com/tensorflow/models.git

Then navigate into the following path inside the cloned repository: /models/tutorials/image/cifar10

Create your .py file in this folder.

Then try running the following code:

import cifar10 
cifar10.maybe_download_and_extract()

If that succeeds, you're all set. If not, download the dataset manually. Once downloaded, create this folder (relative to your script):

./tmp/cifar10_data/cifar-10-batches-bin

Extract the downloaded archive and copy its contents into that folder.

Download link: Baidu Netdisk (no download points required, you're welcome)

Extraction code: qsp2
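
Before running the training script, it may help to sanity-check the manual setup. Here is a minimal sketch (my own addition, assuming the data_dir path used in the script below) that verifies the six CIFAR-10 binary files are in place:

import os

# Hypothetical check, not part of the tutorial code: confirm the binary
# batch files landed where cifar10_input expects them.
data_dir = './tmp/cifar10_data/cifar-10-batches-bin'
expected = ['data_batch_%d.bin' % i for i in range(1, 6)] + ['test_batch.bin']
missing = [f for f in expected if not os.path.exists(os.path.join(data_dir, f))]
if missing:
    print('missing files: %s' % ', '.join(missing))
else:
    print('all CIFAR-10 binary files found')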

The explanation is inline with the code as comments, followed by the results from my run:

import cifar10,cifar10_input
import tensorflow as tf
import numpy as np
import time
import math
#cifar10:https://zhuanlan.zhihu.com/p/27017189;
#tf:https://blog.csdn.net/lenbow/article/details/52152766;
max_steps = 3000
batch_size = 128
data_dir= './tmp/cifar10_data/cifar-10-batches-bin'
# Weight-initialization helper; on add_to_collection: https://blog.csdn.net/uestc_c2_403/article/details/72415791;
def variable_with_weight_loss(shape,stddev,w1):
    var = tf.Variable(tf.truncated_normal(shape,stddev=stddev))
    if w1 is not None:
        weight_loss = tf.multiply(tf.nn.l2_loss(var),w1,name='weight_loss')
        # collect this weight_loss into the 'losses' collection so it is included in the total loss
        tf.add_to_collection('losses',weight_loss)
    return var
#cifar10.maybe_download_and_extract()
# Dataset preparation:
# Generate the augmented (distorted) training batches
images_train,labels_train = cifar10_input.distorted_inputs(data_dir=data_dir,batch_size=batch_size)
# Generate the test batches
images_test,labels_test = cifar10_input.inputs(eval_data=True,data_dir=data_dir,batch_size=batch_size)
# Placeholders for input batches: 24x24 image crops with 3 channels, plus integer labels
image_holder = tf.placeholder(tf.float32,[batch_size,24,24,3])
label_holder = tf.placeholder(tf.int32,[batch_size])

# Network construction:
# Layer 1: convolution + max pooling + LRN
# Layer 1 conv: 5x5 kernels, 3 input (color) channels, 64 kernels, stddev 0.05, w1=0; conv2d with stride 1 and 'SAME' padding; biases initialized to 0; ReLU activation; 3x3 max pooling with stride 2x2; LRN after pooling to sharpen the ReLU responses
#lrn:https://blog.csdn.net/banana1006034246/article/details/75204013;
weight1 = variable_with_weight_loss(shape=[5,5,3,64],stddev=5e-2,w1=0)
kernel1 = tf.nn.conv2d(image_holder,weight1,[1,1,1,1],padding='SAME')
bias1 = tf.Variable(tf.constant(0.0,shape=[64]))
conv1 = tf.nn.relu(tf.nn.bias_add(kernel1,bias1))
pool1 = tf.nn.max_pool(conv1, ksize=[1,3,3,1], strides=[1,2,2,1],padding='SAME')
norm1 = tf.nn.lrn(pool1,4,bias=1.0,alpha=0.001/9.0,beta=0.75)
# Layer 2: convolution + LRN + max pooling
# Layer 2 conv: 5x5 kernels, 64 input channels (the 64 feature maps from layer 1, not 3), 64 kernels, stddev 0.05, w1=0; conv2d with stride 1 and 'SAME' padding; biases initialized to 0.1; ReLU activation; LRN applied before 3x3 max pooling with stride 2x2
weight2 = variable_with_weight_loss(shape=[5,5,64,64],stddev=5e-2,w1=0)
kernel2 = tf.nn.conv2d(norm1,weight2,[1,1,1,1],padding='SAME')
bias2 = tf.Variable(tf.constant(0.1,shape=[64]))
conv2 = tf.nn.relu(tf.nn.bias_add(kernel2,bias2))
norm2 = tf.nn.lrn(conv2,4,bias=1.0,alpha=0.001/9.0,beta=0.75)
pool2 = tf.nn.max_pool(norm2,ksize=[1,3,3,1],strides=[1,2,2,1],padding='SAME')
# Layer 3: fully connected
# Layer 3 FC: 384 hidden units, stddev 0.04, bias 0.1, weight loss w1=0.004 to discourage overfitting
reshape = tf.reshape(pool2,[batch_size,-1])  # flatten each example to a single row
dim = reshape.get_shape()[1].value  # flattened feature dimension
weight3 = variable_with_weight_loss(shape=[dim,384],stddev=0.04,w1=0.004)
bias3 = tf.Variable(tf.constant(0.1,shape=[384]))
local3 = tf.nn.relu(tf.matmul(reshape,weight3)+bias3)
# Layer 4: fully connected
# Layer 4 FC: 192 hidden units, stddev 0.04, bias 0.1, weight loss w1=0.004
weight4 = variable_with_weight_loss(shape=[384,192],stddev=0.04,w1=0.004)
bias4 = tf.Variable(tf.constant(0.1,shape=[192]))
local4 = tf.nn.relu(tf.matmul(local3,weight4)+bias4)
# Layer 5: fully connected output layer
# Layer 5 FC: 10 output units (one per class), stddev 1/192.0, bias 0.0
weight5 = variable_with_weight_loss(shape=[192,10],stddev=1/192.0,w1=0.0)
bias5 = tf.Variable(tf.constant(0.0,shape=[10]))
logit5 = tf.add(tf.matmul(local4, weight5),bias5)
# Assembling the loss:
#cross_entropy:https://zhuanlan.zhihu.com/p/27842203;
#tf.add_n:https://blog.csdn.net/uestc_c2_403/article/details/72808839;
def loss(logits,labels):
    labels=tf.cast(labels,tf.int64)
    cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=logits,labels=labels,name='cross_entropy_per_example'
    )
    cross_entropy_mean = tf.reduce_mean(cross_entropy,name='cross_entropy')
    tf.add_to_collection('losses',cross_entropy_mean)
    return tf.add_n(tf.get_collection('losses'),name='total_loss')
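# Note: the next line rebinds the name 'loss' from the function above to the loss tensor; this works, but a distinct name such as total_loss would be clearer.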
loss = loss(logit5,label_holder)
# Optimizer: Adam with learning rate 1e-3
# Overviews of common optimizers: https://blog.csdn.net/xierhacker/article/details/53174558; https://blog.csdn.net/qiurisiyu2016/article/details/80383812;
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)
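# in_top_k produces one bool per example: True if the true label is within the top-1 prediction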
top_k_op = tf.nn.in_top_k(logit5,label_holder,1)
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()
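# Start the threads that fill the input queues; without them, the sess.run calls on the data ops below would block forever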
tf.train.start_queue_runners()
# Training loop
for step in range(max_steps):
    start_time = time.time()
    image_batch,label_batch = sess.run([images_train,labels_train])
    _, loss_value = sess.run([train_op,loss],feed_dict={image_holder:image_batch,label_holder:label_batch})
    duration = time.time()-start_time
    if step % 10 == 0:
        examples_per_sec = batch_size / duration
        sec_per_batch = float(duration)
        format_str=('step %d,loss=%.2f(%.1f examples/sec; %.3f sec/batch)')
        print(format_str % (step,loss_value,examples_per_sec,sec_per_batch))

# Evaluation on the test set
num_examples = 10000
num_iter = int(math.ceil(num_examples/batch_size))  # 79 batches
true_count = 0
total_sample_count = num_iter * batch_size  # 10112, slightly over 10000: a few test images are counted twice
step = 0
while step < num_iter:
    image_batch,label_batch = sess.run([images_test,labels_test])
    predictions = sess.run([top_k_op],feed_dict={image_holder:image_batch,label_holder:label_batch})
    true_count += np.sum(predictions)
    step += 1

precision = true_count / total_sample_count
print('precision @ 1 = %.3f' % precision)


'''
step 0,loss=11.26(3.7 examples/sec; 34.469 sec/batch)
step 10,loss=7.76(504.0 examples/sec; 0.254 sec/batch)
step 20,loss=5.64(1131.9 examples/sec; 0.113 sec/batch)
step 30,loss=4.27(1246.7 examples/sec; 0.103 sec/batch)
step 40,loss=3.46(1088.9 examples/sec; 0.118 sec/batch)
step 50,loss=2.81(1337.1 examples/sec; 0.096 sec/batch)
step 60,loss=2.46(985.0 examples/sec; 0.130 sec/batch)
step 70,loss=2.35(1016.0 examples/sec; 0.126 sec/batch)
step 80,loss=2.16(632.5 examples/sec; 0.202 sec/batch)
step 90,loss=2.08(1372.7 examples/sec; 0.093 sec/batch)
step 100,loss=2.05(1200.3 examples/sec; 0.107 sec/batch)
step 110,loss=2.04(1178.4 examples/sec; 0.109 sec/batch)
step 120,loss=2.12(506.0 examples/sec; 0.253 sec/batch)
step 130,loss=1.84(1211.6 examples/sec; 0.106 sec/batch)
step 140,loss=1.95(1252.7 examples/sec; 0.102 sec/batch)
step 150,loss=1.81(1122.0 examples/sec; 0.114 sec/batch)
step 160,loss=1.86(574.8 examples/sec; 0.223 sec/batch)
step 170,loss=1.92(1194.7 examples/sec; 0.107 sec/batch)
step 180,loss=1.91(1173.0 examples/sec; 0.109 sec/batch)
step 190,loss=1.86(598.8 examples/sec; 0.214 sec/batch)
step 200,loss=1.70(1337.1 examples/sec; 0.096 sec/batch)
step 210,loss=1.83(996.4 examples/sec; 0.128 sec/batch)
step 220,loss=1.87(561.0 examples/sec; 0.228 sec/batch)
step 230,loss=1.75(423.8 examples/sec; 0.302 sec/batch)
step 240,loss=1.85(796.5 examples/sec; 0.161 sec/batch)
step 250,loss=1.57(1252.7 examples/sec; 0.102 sec/batch)
step 260,loss=1.93(819.3 examples/sec; 0.156 sec/batch)
step 270,loss=1.75(597.4 examples/sec; 0.214 sec/batch)
step 280,loss=1.50(1167.7 examples/sec; 0.110 sec/batch)
step 290,loss=1.64(1194.7 examples/sec; 0.107 sec/batch)
step 300,loss=1.77(1240.7 examples/sec; 0.103 sec/batch)
step 310,loss=1.72(948.8 examples/sec; 0.135 sec/batch)
step 320,loss=1.82(925.0 examples/sec; 0.138 sec/batch)
step 330,loss=1.68(1189.2 examples/sec; 0.108 sec/batch)
step 340,loss=1.61(1283.9 examples/sec; 0.100 sec/batch)
step 350,loss=1.73(1146.9 examples/sec; 0.112 sec/batch)
step 360,loss=1.69(779.7 examples/sec; 0.164 sec/batch)
step 370,loss=1.71(860.2 examples/sec; 0.149 sec/batch)
step 380,loss=1.77(525.6 examples/sec; 0.244 sec/batch)
step 390,loss=1.76(497.2 examples/sec; 0.257 sec/batch)
step 400,loss=1.81(1162.5 examples/sec; 0.110 sec/batch)
step 410,loss=1.48(1062.0 examples/sec; 0.121 sec/batch)
step 420,loss=1.78(453.5 examples/sec; 0.282 sec/batch)
step 430,loss=1.81(553.8 examples/sec; 0.231 sec/batch)
step 440,loss=1.84(1075.3 examples/sec; 0.119 sec/batch)
step 450,loss=1.60(355.0 examples/sec; 0.361 sec/batch)
step 460,loss=1.77(1178.4 examples/sec; 0.109 sec/batch)
step 470,loss=1.64(1316.7 examples/sec; 0.097 sec/batch)
step 480,loss=1.60(786.8 examples/sec; 0.163 sec/batch)
step 490,loss=1.58(1136.8 examples/sec; 0.113 sec/batch)
step 500,loss=1.52(399.5 examples/sec; 0.320 sec/batch)
step 510,loss=1.66(570.9 examples/sec; 0.224 sec/batch)
step 520,loss=1.65(1189.2 examples/sec; 0.108 sec/batch)
step 530,loss=1.57(403.2 examples/sec; 0.317 sec/batch)
step 540,loss=1.64(1004.1 examples/sec; 0.127 sec/batch)
step 550,loss=1.53(729.0 examples/sec; 0.176 sec/batch)
step 560,loss=1.57(313.9 examples/sec; 0.408 sec/batch)
step 570,loss=1.73(670.3 examples/sec; 0.191 sec/batch)
step 580,loss=1.62(290.0 examples/sec; 0.441 sec/batch)
step 590,loss=1.70(1205.9 examples/sec; 0.106 sec/batch)
step 600,loss=1.69(593.3 examples/sec; 0.216 sec/batch)
step 610,loss=1.61(1122.0 examples/sec; 0.114 sec/batch)
step 620,loss=1.82(925.0 examples/sec; 0.138 sec/batch)
step 630,loss=1.61(733.1 examples/sec; 0.175 sec/batch)
step 640,loss=1.78(1228.9 examples/sec; 0.104 sec/batch)
step 650,loss=1.48(1122.0 examples/sec; 0.114 sec/batch)
step 660,loss=1.43(724.9 examples/sec; 0.177 sec/batch)
step 670,loss=1.53(1265.0 examples/sec; 0.101 sec/batch)
step 680,loss=1.65(395.2 examples/sec; 0.324 sec/batch)
step 690,loss=1.49(632.5 examples/sec; 0.202 sec/batch)
step 700,loss=1.63(672.0 examples/sec; 0.190 sec/batch)
step 710,loss=1.42(1107.6 examples/sec; 0.116 sec/batch)
step 720,loss=1.69(1258.9 examples/sec; 0.102 sec/batch)
step 730,loss=1.57(1402.5 examples/sec; 0.091 sec/batch)
step 740,loss=1.53(1265.0 examples/sec; 0.101 sec/batch)
step 750,loss=1.67(1358.2 examples/sec; 0.094 sec/batch)
step 760,loss=1.54(1316.7 examples/sec; 0.097 sec/batch)
step 770,loss=1.68(1223.1 examples/sec; 0.105 sec/batch)
step 780,loss=1.51(1088.9 examples/sec; 0.118 sec/batch)
step 790,loss=1.70(1310.0 examples/sec; 0.098 sec/batch)
step 800,loss=1.58(1028.1 examples/sec; 0.124 sec/batch)
step 810,loss=1.54(1152.1 examples/sec; 0.111 sec/batch)
step 820,loss=1.51(1040.6 examples/sec; 0.123 sec/batch)
step 830,loss=1.45(313.9 examples/sec; 0.408 sec/batch)
step 840,loss=1.49(1252.7 examples/sec; 0.102 sec/batch)
step 850,loss=1.58(1211.6 examples/sec; 0.106 sec/batch)
step 860,loss=1.75(726.9 examples/sec; 0.176 sec/batch)
step 870,loss=1.48(529.9 examples/sec; 0.242 sec/batch)
step 880,loss=1.73(1189.2 examples/sec; 0.108 sec/batch)
step 890,loss=1.74(1062.0 examples/sec; 0.121 sec/batch)
step 900,loss=1.54(359.9 examples/sec; 0.356 sec/batch)
step 910,loss=1.63(724.9 examples/sec; 0.177 sec/batch)
step 920,loss=1.59(481.5 examples/sec; 0.266 sec/batch)
step 930,loss=1.68(126.5 examples/sec; 1.012 sec/batch)
step 940,loss=1.53(172.0 examples/sec; 0.744 sec/batch)
step 950,loss=1.63(1234.8 examples/sec; 0.104 sec/batch)
step 960,loss=1.58(653.3 examples/sec; 0.196 sec/batch)
step 970,loss=1.48(583.9 examples/sec; 0.219 sec/batch)
step 980,loss=1.54(1234.8 examples/sec; 0.104 sec/batch)
step 990,loss=1.57(384.0 examples/sec; 0.333 sec/batch)
step 1000,loss=1.68(1157.2 examples/sec; 0.111 sec/batch)
step 1010,loss=1.47(378.4 examples/sec; 0.338 sec/batch)
step 1020,loss=1.45(1136.8 examples/sec; 0.113 sec/batch)
step 1030,loss=1.76(661.7 examples/sec; 0.193 sec/batch)
step 1040,loss=1.48(1234.7 examples/sec; 0.104 sec/batch)
step 1050,loss=1.34(1252.7 examples/sec; 0.102 sec/batch)
step 1060,loss=1.63(365.5 examples/sec; 0.350 sec/batch)
step 1070,loss=1.42(1200.3 examples/sec; 0.107 sec/batch)
step 1080,loss=1.62(402.6 examples/sec; 0.318 sec/batch)
step 1090,loss=1.32(809.0 examples/sec; 0.158 sec/batch)
step 1100,loss=1.52(1157.2 examples/sec; 0.111 sec/batch)
step 1110,loss=1.43(902.3 examples/sec; 0.142 sec/batch)
step 1120,loss=1.44(561.0 examples/sec; 0.228 sec/batch)
step 1130,loss=1.45(476.1 examples/sec; 0.269 sec/batch)
step 1140,loss=1.51(465.8 examples/sec; 0.275 sec/batch)
step 1150,loss=1.46(559.8 examples/sec; 0.229 sec/batch)
step 1160,loss=1.59(829.8 examples/sec; 0.154 sec/batch)
step 1170,loss=1.50(1194.7 examples/sec; 0.107 sec/batch)
step 1180,loss=1.52(390.4 examples/sec; 0.328 sec/batch)
step 1190,loss=1.39(428.0 examples/sec; 0.299 sec/batch)
step 1200,loss=1.42(848.9 examples/sec; 0.151 sec/batch)
step 1210,loss=1.75(1141.9 examples/sec; 0.112 sec/batch)
step 1220,loss=1.37(1211.6 examples/sec; 0.106 sec/batch)
step 1230,loss=1.54(409.0 examples/sec; 0.313 sec/batch)
step 1240,loss=1.60(521.3 examples/sec; 0.246 sec/batch)
step 1250,loss=1.45(784.4 examples/sec; 0.163 sec/batch)
step 1260,loss=1.42(1057.6 examples/sec; 0.121 sec/batch)
step 1270,loss=1.50(1141.9 examples/sec; 0.112 sec/batch)
step 1280,loss=1.56(1032.3 examples/sec; 0.124 sec/batch)
step 1290,loss=1.56(1152.1 examples/sec; 0.111 sec/batch)
step 1300,loss=1.61(701.3 examples/sec; 0.183 sec/batch)
step 1310,loss=1.34(1136.9 examples/sec; 0.113 sec/batch)
step 1320,loss=1.57(281.4 examples/sec; 0.455 sec/batch)
step 1330,loss=1.60(1146.9 examples/sec; 0.112 sec/batch)
step 1340,loss=1.50(543.3 examples/sec; 0.236 sec/batch)
step 1350,loss=1.50(627.9 examples/sec; 0.204 sec/batch)
step 1360,loss=1.39(796.5 examples/sec; 0.161 sec/batch)
step 1370,loss=1.56(263.6 examples/sec; 0.486 sec/batch)
step 1380,loss=1.43(498.2 examples/sec; 0.257 sec/batch)
step 1390,loss=1.34(806.5 examples/sec; 0.159 sec/batch)
step 1400,loss=1.30(645.2 examples/sec; 0.198 sec/batch)
step 1410,loss=1.33(1016.0 examples/sec; 0.126 sec/batch)
step 1420,loss=1.49(705.1 examples/sec; 0.182 sec/batch)
step 1430,loss=1.52(693.7 examples/sec; 0.185 sec/batch)
step 1440,loss=1.49(874.8 examples/sec; 0.146 sec/batch)
step 1450,loss=1.44(1117.2 examples/sec; 0.115 sec/batch)
step 1460,loss=1.43(462.5 examples/sec; 0.277 sec/batch)
step 1470,loss=1.40(405.8 examples/sec; 0.315 sec/batch)
step 1480,loss=1.56(312.8 examples/sec; 0.409 sec/batch)
step 1490,loss=1.42(1173.0 examples/sec; 0.109 sec/batch)
step 1500,loss=1.52(423.1 examples/sec; 0.303 sec/batch)
step 1510,loss=1.60(714.9 examples/sec; 0.179 sec/batch)
step 1520,loss=1.49(985.0 examples/sec; 0.130 sec/batch)
step 1530,loss=1.50(777.3 examples/sec; 0.165 sec/batch)
step 1540,loss=1.42(559.8 examples/sec; 0.229 sec/batch)
step 1550,loss=1.62(756.8 examples/sec; 0.169 sec/batch)
step 1560,loss=1.48(513.0 examples/sec; 0.249 sec/batch)
step 1570,loss=1.39(230.0 examples/sec; 0.557 sec/batch)
step 1580,loss=1.37(375.6 examples/sec; 0.341 sec/batch)
step 1590,loss=1.39(329.2 examples/sec; 0.389 sec/batch)
step 1600,loss=1.48(437.4 examples/sec; 0.293 sec/batch)
step 1610,loss=1.43(1084.3 examples/sec; 0.118 sec/batch)
step 1620,loss=1.59(726.9 examples/sec; 0.176 sec/batch)
step 1630,loss=1.56(1070.8 examples/sec; 0.120 sec/batch)
step 1640,loss=1.43(1152.1 examples/sec; 0.111 sec/batch)
step 1650,loss=1.38(323.4 examples/sec; 0.396 sec/batch)
step 1660,loss=1.43(1098.2 examples/sec; 0.117 sec/batch)
step 1670,loss=1.41(397.6 examples/sec; 0.322 sec/batch)
step 1680,loss=1.55(1200.3 examples/sec; 0.107 sec/batch)
step 1690,loss=1.46(638.8 examples/sec; 0.200 sec/batch)
step 1700,loss=1.44(1131.9 examples/sec; 0.113 sec/batch)
step 1710,loss=1.44(1211.6 examples/sec; 0.106 sec/batch)
step 1720,loss=1.55(801.4 examples/sec; 0.160 sec/batch)
step 1730,loss=1.41(319.0 examples/sec; 0.401 sec/batch)
step 1740,loss=1.58(1240.7 examples/sec; 0.103 sec/batch)
step 1750,loss=1.28(722.9 examples/sec; 0.177 sec/batch)
step 1760,loss=1.48(202.6 examples/sec; 0.632 sec/batch)
step 1770,loss=1.34(794.0 examples/sec; 0.161 sec/batch)
step 1780,loss=1.53(1049.0 examples/sec; 0.122 sec/batch)
step 1790,loss=1.36(307.2 examples/sec; 0.417 sec/batch)
step 1800,loss=1.28(469.2 examples/sec; 0.273 sec/batch)
step 1810,loss=1.40(499.2 examples/sec; 0.256 sec/batch)
step 1820,loss=1.36(1157.2 examples/sec; 0.111 sec/batch)
step 1830,loss=1.47(803.9 examples/sec; 0.159 sec/batch)
step 1840,loss=1.42(1024.1 examples/sec; 0.125 sec/batch)
step 1850,loss=1.33(1173.0 examples/sec; 0.109 sec/batch)
step 1860,loss=1.27(688.2 examples/sec; 0.186 sec/batch)
step 1870,loss=1.56(621.8 examples/sec; 0.206 sec/batch)
step 1880,loss=1.58(1093.5 examples/sec; 0.117 sec/batch)
step 1890,loss=1.43(796.5 examples/sec; 0.161 sec/batch)
step 1900,loss=1.34(467.5 examples/sec; 0.274 sec/batch)
step 1910,loss=1.47(952.3 examples/sec; 0.134 sec/batch)
step 1920,loss=1.40(267.7 examples/sec; 0.478 sec/batch)
step 1930,loss=1.48(1117.2 examples/sec; 0.115 sec/batch)
step 1940,loss=1.55(959.4 examples/sec; 0.133 sec/batch)
step 1950,loss=1.54(1093.5 examples/sec; 0.117 sec/batch)
step 1960,loss=1.46(905.5 examples/sec; 0.141 sec/batch)
step 1970,loss=1.38(555.0 examples/sec; 0.231 sec/batch)
step 1980,loss=1.38(1167.7 examples/sec; 0.110 sec/batch)
step 1990,loss=1.47(1147.0 examples/sec; 0.112 sec/batch)
step 2000,loss=1.37(1117.2 examples/sec; 0.115 sec/batch)
step 2010,loss=1.42(362.5 examples/sec; 0.353 sec/batch)
step 2020,loss=1.31(261.7 examples/sec; 0.489 sec/batch)
step 2030,loss=1.49(1183.8 examples/sec; 0.108 sec/batch)
step 2040,loss=1.46(941.8 examples/sec; 0.136 sec/batch)
step 2050,loss=1.33(256.0 examples/sec; 0.500 sec/batch)
step 2060,loss=1.43(374.5 examples/sec; 0.342 sec/batch)
step 2070,loss=1.53(1147.0 examples/sec; 0.112 sec/batch)
step 2080,loss=1.31(816.7 examples/sec; 0.157 sec/batch)
step 2090,loss=1.36(372.9 examples/sec; 0.343 sec/batch)
step 2100,loss=1.55(1183.8 examples/sec; 0.108 sec/batch)
step 2110,loss=1.35(1070.8 examples/sec; 0.120 sec/batch)
step 2120,loss=1.34(877.8 examples/sec; 0.146 sec/batch)
step 2130,loss=1.49(977.5 examples/sec; 0.131 sec/batch)
step 2140,loss=1.46(351.6 examples/sec; 0.364 sec/batch)
step 2150,loss=1.28(928.3 examples/sec; 0.138 sec/batch)
step 2160,loss=1.50(1283.9 examples/sec; 0.100 sec/batch)
step 2170,loss=1.53(1246.7 examples/sec; 0.103 sec/batch)
step 2180,loss=1.42(319.0 examples/sec; 0.401 sec/batch)
step 2190,loss=1.45(668.6 examples/sec; 0.191 sec/batch)
step 2200,loss=1.41(959.3 examples/sec; 0.133 sec/batch)
step 2210,loss=1.32(1136.8 examples/sec; 0.113 sec/batch)
step 2220,loss=1.29(1205.9 examples/sec; 0.106 sec/batch)
step 2230,loss=1.32(605.8 examples/sec; 0.211 sec/batch)
step 2240,loss=1.42(718.8 examples/sec; 0.178 sec/batch)
step 2250,loss=1.38(896.1 examples/sec; 0.143 sec/batch)
step 2260,loss=1.36(1049.0 examples/sec; 0.122 sec/batch)
step 2270,loss=1.32(1183.8 examples/sec; 0.108 sec/batch)
step 2280,loss=1.46(1200.3 examples/sec; 0.107 sec/batch)
step 2290,loss=1.23(477.9 examples/sec; 0.268 sec/batch)
step 2300,loss=1.42(733.1 examples/sec; 0.175 sec/batch)
step 2310,loss=1.44(338.2 examples/sec; 0.378 sec/batch)
step 2320,loss=1.40(372.4 examples/sec; 0.344 sec/batch)
step 2330,loss=1.39(620.3 examples/sec; 0.206 sec/batch)
step 2340,loss=1.53(1098.1 examples/sec; 0.117 sec/batch)
step 2350,loss=1.50(1093.5 examples/sec; 0.117 sec/batch)
step 2360,loss=1.25(1070.8 examples/sec; 0.120 sec/batch)
step 2370,loss=1.41(443.4 examples/sec; 0.289 sec/batch)
step 2380,loss=1.33(421.0 examples/sec; 0.304 sec/batch)
step 2390,loss=1.42(1107.6 examples/sec; 0.116 sec/batch)
step 2400,loss=1.40(611.5 examples/sec; 0.209 sec/batch)
step 2410,loss=1.46(1167.7 examples/sec; 0.110 sec/batch)
step 2420,loss=1.30(1049.0 examples/sec; 0.122 sec/batch)
step 2430,loss=1.45(1146.9 examples/sec; 0.112 sec/batch)
step 2440,loss=1.31(977.5 examples/sec; 0.131 sec/batch)
step 2450,loss=1.63(915.1 examples/sec; 0.140 sec/batch)
step 2460,loss=1.32(222.7 examples/sec; 0.575 sec/batch)
step 2470,loss=1.59(1141.9 examples/sec; 0.112 sec/batch)
step 2480,loss=1.32(1000.2 examples/sec; 0.128 sec/batch)
step 2490,loss=1.38(275.4 examples/sec; 0.465 sec/batch)
step 2500,loss=1.39(441.1 examples/sec; 0.290 sec/batch)
step 2510,loss=1.52(263.3 examples/sec; 0.486 sec/batch)
step 2520,loss=1.33(536.5 examples/sec; 0.239 sec/batch)
step 2530,loss=1.32(386.9 examples/sec; 0.331 sec/batch)
step 2540,loss=1.47(305.8 examples/sec; 0.419 sec/batch)
step 2550,loss=1.45(292.3 examples/sec; 0.438 sec/batch)
step 2560,loss=1.39(532.1 examples/sec; 0.241 sec/batch)
step 2570,loss=1.14(230.0 examples/sec; 0.557 sec/batch)
step 2580,loss=1.26(1107.6 examples/sec; 0.116 sec/batch)
step 2590,loss=1.29(1200.3 examples/sec; 0.107 sec/batch)
step 2600,loss=1.26(327.9 examples/sec; 0.390 sec/batch)
step 2610,loss=1.37(444.2 examples/sec; 0.288 sec/batch)
step 2620,loss=1.20(1200.3 examples/sec; 0.107 sec/batch)
step 2630,loss=1.33(524.5 examples/sec; 0.244 sec/batch)
step 2640,loss=1.24(227.2 examples/sec; 0.563 sec/batch)
step 2650,loss=1.42(1131.9 examples/sec; 0.113 sec/batch)
step 2660,loss=1.42(423.8 examples/sec; 0.302 sec/batch)
step 2670,loss=1.27(1020.0 examples/sec; 0.125 sec/batch)
step 2680,loss=1.23(538.8 examples/sec; 0.238 sec/batch)
step 2690,loss=1.41(741.6 examples/sec; 0.173 sec/batch)
step 2700,loss=1.51(1012.0 examples/sec; 0.126 sec/batch)
step 2710,loss=1.43(583.9 examples/sec; 0.219 sec/batch)
step 2720,loss=1.34(655.0 examples/sec; 0.195 sec/batch)
step 2730,loss=1.34(1141.9 examples/sec; 0.112 sec/batch)
step 2740,loss=1.29(545.6 examples/sec; 0.235 sec/batch)
step 2750,loss=1.17(1211.6 examples/sec; 0.106 sec/batch)
step 2760,loss=1.44(1200.3 examples/sec; 0.107 sec/batch)
step 2770,loss=1.45(574.8 examples/sec; 0.223 sec/batch)
step 2780,loss=1.23(527.7 examples/sec; 0.243 sec/batch)
step 2790,loss=1.35(418.3 examples/sec; 0.306 sec/batch)
step 2800,loss=1.33(365.0 examples/sec; 0.351 sec/batch)
step 2810,loss=1.28(148.4 examples/sec; 0.863 sec/batch)
step 2820,loss=1.32(1252.8 examples/sec; 0.102 sec/batch)
step 2830,loss=1.44(938.4 examples/sec; 0.136 sec/batch)
step 2840,loss=1.28(1211.6 examples/sec; 0.106 sec/batch)
step 2850,loss=1.34(866.0 examples/sec; 0.148 sec/batch)
step 2860,loss=1.50(1223.0 examples/sec; 0.105 sec/batch)
step 2870,loss=1.34(474.4 examples/sec; 0.270 sec/batch)
step 2880,loss=1.19(811.5 examples/sec; 0.158 sec/batch)
step 2890,loss=1.19(1162.5 examples/sec; 0.110 sec/batch)
step 2900,loss=1.43(768.0 examples/sec; 0.167 sec/batch)
step 2910,loss=1.53(501.1 examples/sec; 0.255 sec/batch)
step 2920,loss=1.44(417.6 examples/sec; 0.307 sec/batch)
step 2930,loss=1.37(1217.3 examples/sec; 0.105 sec/batch)
step 2940,loss=1.40(233.3 examples/sec; 0.549 sec/batch)
step 2950,loss=1.37(172.4 examples/sec; 0.743 sec/batch)
step 2960,loss=1.27(832.5 examples/sec; 0.154 sec/batch)
step 2970,loss=1.28(1057.6 examples/sec; 0.121 sec/batch)
step 2980,loss=1.64(460.8 examples/sec; 0.278 sec/batch)
step 2990,loss=1.39(814.1 examples/sec; 0.157 sec/batch)
precision @ 1 = 0.612
'''
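
One caveat about the script above: tf.train.start_queue_runners() is called without a Coordinator, so the input-reader threads are never stopped and the process may not exit cleanly. A minimal sketch of the usual TF 1.x shutdown pattern (my own addition, not part of the original tutorial):

# Cleaner queue handling with a Coordinator (TF 1.x idiom).
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(sess=sess, coord=coord)
try:
    # ... run the training and evaluation loops exactly as above ...
    pass
finally:
    coord.request_stop()   # ask the input threads to stop
    coord.join(threads)    # wait for them to finish

With the coordinator in place, the script should terminate as soon as the evaluation loop finishes instead of hanging on the reader threads.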



