# train.py
import dataset
import tensorflow as tf
import time
from datetime import timedelta
import math
import random
import numpy as np
# conda install --channel https://conda.anaconda.org/menpo opencv3
# Seed NumPy and TensorFlow so that random weight initialization is
# reproducible across runs.
from numpy.random import seed
seed(10)
# NOTE(review): set_random_seed fixes only the graph-level seed; op-level
# seeds may still vary — confirm this determinism is sufficient.
from tensorflow import set_random_seed
set_random_seed(20)
# Number of images processed per training iteration.
batch_size = 32
# Prepare input data: one class label per sub-directory name.
classes = ['dogs','cats']
num_classes = len(classes)
# Tunable: fraction of the data held out for validation.
# 20% of the data will automatically be used for validation,
# the remaining 80% for training.
validation_size = 0.2
# Images are resized to img_size x img_size (64x64) with 3 colour channels.
img_size = 64
num_channels = 3
train_path='D:/hh/twst/test/training_data'
# We shall load all the training and validation images and labels into memory using openCV and use that during training
data = dataset.read_train_sets(train_path, img_size, classes, validation_size=validation_size)
print("Complete reading input data. Will Now print a snippet of it")
print("Number of files in Training-set:\t\t{}".format(len(data.train.labels)))
print("Number of files in Validation-set:\t{}".format(len(data.valid.labels)))
# TF1-style session; the graph is built below and executed through this session.
session = tf.Session()
# Input images: float batch of shape [batch, img_size, img_size, num_channels] (NHWC).
x = tf.placeholder(tf.float32, shape=[None, img_size, img_size, num_channels], name='x')
## labels: one-hot vectors of length num_classes
y_true = tf.placeholder(tf.float32, shape=[None, num_classes], name='y_true')
# Integer class index per example. `dimension=` is a deprecated alias of
# `axis=` in tf.argmax; use the supported keyword (behavior is identical).
y_true_cls = tf.argmax(y_true, axis=1)
## Network graph params — tunable hyperparameters for the three conv layers.
filter_size_conv1 = 3
num_filters_conv1 = 32
filter_size_conv2 = 3
num_filters_conv2 = 32
filter_size_conv3 = 3
num_filters_conv3 = 64
# Fully connected layer: maps the flattened conv output to 1024 features.
fc_layer_size = 1024
# Weight parameters, randomly initialized.
def create_weights(shape):
    """Return a trainable weight Variable of the given shape.

    Values are drawn from a truncated normal distribution with
    stddev 0.05, the initialization used throughout this network.
    """
    initial_values = tf.truncated_normal(shape, stddev=0.05)
    return tf.Variable(initial_values)
# Bias parameters.
def create_biases(size):
    """Return a trainable bias Variable of length `size`, filled with 0.05."""
    initial_values = tf.constant(0.05, shape=[size])
    return tf.Variable(initial_values)
# Create a convolutional layer.
def create_convolutional_layer(input,
num_input_channels,
conv_filter_size,
num_filters):
## We shall define the weights that will be trained using create_weights function. 3 3 3 32
weights = create_weights(shape=[conv_filter_size, conv_filter_size, num_input_channels, num_filters])
## We create biases using the create_biases function. These are also trained.
biases = create_biases(num_filters)
## Creating the convolutional layer 执行一次卷积
layer = tf.nn.conv2d(input=input,
filter=weights,
strides=[1, 1, 1, 1],
padding='SAME')
layer += biases
#激活
layer = tf.nn.relu(layer)