# Build the classification graph: forward pass through the CNN.
# NOTE(review): conv_net, x, Weights, bias, keep_prob and y are defined
# elsewhere in this file — presumably placeholders/variables; verify.
pred=conv_net(x,Weights,bias,keep_prob)
# Softmax cross-entropy loss averaged over the batch
# (assumes labels `y` are one-hot encoded — TODO confirm against loader).
cost=tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred,labels=y))
tf.summary.histogram('loss', cost)
# Adam optimizer with a fixed learning rate of 0.01
optimizer=tf.train.AdamOptimizer(0.01).minimize(cost)
# Accuracy: fraction of samples whose argmax prediction matches the label
correct_pred=tf.equal(tf.argmax(pred,1),tf.argmax(y,1))
accuracy=tf.reduce_mean(tf.cast(correct_pred,tf.float32))
# Merge all registered summaries (currently the loss histogram) for TensorBoard
merged=tf.summary.merge_all()
def train_and_test(train_x, train_y, test_x, test_y, epochs, batch_size, times=1):
    """Train the CNN for `epochs` epochs, then print the test accuracy.

    Args:
        train_x: training images (list/array convertible via np.array).
        train_y: training labels (one-hot, matching placeholder `y`).
        test_x: test images.
        test_y: test labels.
        epochs: number of full passes over the training data.
        batch_size: samples per mini-batch.
        times: run index; keeps each run's TensorBoard log in its own
            directory so repeated runs do not overwrite each other.
    """
    # Initialize all global variables
    init = tf.global_variables_initializer()
    start_time = time.time()
    # FIX: original had the syntax error `with tf.Session()assess:`
    with tf.Session() as sess:
        sess.run(init)
        # Write the graph and summaries to a per-run TensorBoard directory
        writer = tf.summary.FileWriter(
            'F:\\data\\fruits-360\\tensorboard\\Fruit_graph' + str(times),
            sess.graph)
        for i in range(epochs):
            # Number of full batches; a trailing partial batch is dropped
            batch_num = int(np.array(train_x).shape[0] / batch_size)
            sum_cost = 0
            sum_acc = 0
            for j in range(batch_num):
                batch_x = get_data(train_x, batch_size, j)
                batch_y = get_data(train_y, batch_size, j)
                # Optimization step with dropout enabled (keep_prob=0.75)
                sess.run(optimizer,
                         feed_dict={x: batch_x, y: batch_y, keep_prob: 0.75})
                # Evaluate loss/accuracy on the same batch with dropout off
                loss, acc = sess.run(
                    [cost, accuracy],
                    feed_dict={x: batch_x, y: batch_y, keep_prob: 1.})
                sum_cost += loss
                sum_acc += acc
            # Log summaries once per epoch, computed on the last batch
            # (NOTE(review): original indentation was lost; per-epoch logging
            # matches add_summary's use of `i` as the global step — confirm)
            result = sess.run(merged,
                              feed_dict={x: batch_x, y: batch_y, keep_prob: 0.75})
            writer.add_summary(result, i)
            arg_cost = sum_cost / batch_num
            arg_acc = sum_acc / batch_num
            print("Epoch:", '%04d' % (i + 1),
                  "cost=", "{:.9f}".format(arg_cost),
                  "Training accuracy", "{:.5f}".format(arg_acc))
        end_time = time.time()
        print('Optimization Completed')
        # Final evaluation on the held-out test set, dropout disabled
        print('Testing Accuracy:',
              sess.run(accuracy,
                       feed_dict={x: test_x, y: test_y, keep_prob: 1}))
        print('Total processing time:', end_time - start_time)


# Driver: run 10 independent experiments, each on a random subset of
# fruit classes, logging each run to its own TensorBoard directory.
for i in range(10):
    random_fruits = get_random_fruits(Training, n_classes)
    img_data, img_label, num_label = load(Training, random_fruits)
    crop_img = crop(img_data)
    test_data, test_label, test_num_label = load(Test, random_fruits)
    crop_test = crop(test_data)
    print("TIMES" + str(i + 1))
    fruits_type(random_fruits)
    print("\n")
    # 20 epochs, batch size 26; (i+1) tags the TensorBoard log directory
    train_and_test(crop_img, num_label, crop_test, test_num_label, 20, 26, (i + 1))
    print("\n\n\n")