# A simple program
import tensorflow as tf
# Define the network structure and the forward-propagation algorithm
def get_weight(shape):
    """Create a trainable weight Variable of the given shape.

    Args:
        shape: list/tuple giving the weight tensor's shape, e.g. [in_dim, out_dim].

    Returns:
        A tf.Variable holding small random-normal initial values.
    """
    # Bug fix: tf.Variable() requires an initial value — the original bare call
    # raises and also silently ignored `shape`. Random-normal is the
    # conventional weight init for this kind of tutorial network.
    w = tf.Variable(tf.random_normal(shape, stddev=0.1))
    return w
def get_bias(shape):
    """Create a trainable bias Variable of the given shape, initialized to zeros.

    Args:
        shape: list/tuple giving the bias tensor's shape. NOTE(review): callers
            currently pass the same (2-D) shape used for the weights — unusual
            for a bias, but broadcasting in `matmul(...) + b` still works when
            the trailing dimension matches; confirm intended shape.

    Returns:
        A tf.Variable of zeros (zero init is the standard choice for biases).
    """
    # Bug fix: tf.Variable() requires an initial value — the original bare call
    # raises and also silently ignored `shape`.
    b = tf.Variable(tf.zeros(shape))
    return b
def forward(x, shape):
    """Forward pass of a two-layer network: relu(x @ w1 + b1) @ w2 + b2.

    Args:
        x: input tensor of shape [batch, in_dim].
        shape: weight shape used for BOTH layers. NOTE(review): because the
            same shape feeds both matmuls, it must be square ([d, d]) for the
            second matmul to type-check — confirm this is intended.

    Returns:
        The network output tensor (no activation on the output layer, as is
        standard for a regression head feeding a squared-error loss).
    """
    # Hidden layer with ReLU activation.
    w1 = get_weight(shape)
    b1 = get_bias(shape)
    y1 = tf.nn.relu(tf.matmul(x, w1) + b1)
    # Linear output layer.
    w2 = get_weight(shape)
    b2 = get_bias(shape)
    y = tf.matmul(y1, w2) + b2
    return y
# Define the loss function and the backpropagation (training) algorithm.
# Load dataset X and Y_ here — both must be defined before the Session below.
# NOTE(review): the original placeholders used incomplete rank-1 shapes
# ([None,]) and an undefined bare identifier `name`; assuming 2-feature
# inputs and 2-dim labels to match forward(x, [2, 2]) — confirm dimensions.
x = tf.placeholder(tf.float32, shape=[None, 2], name='x')
y_ = tf.placeholder(tf.float32, shape=[None, 2], name='y_')
# Bug fix: the original never computed `y` before using it in the loss;
# build the prediction with the forward pass defined above.
y = forward(x, [2, 2])
# Mean-squared-error loss, minimized with plain gradient descent.
loss = tf.reduce_mean(tf.square(y - y_))
train_step = tf.train.GradientDescentOptimizer(learning_rate=0.001).minimize(loss)

# Create a session and run the backpropagation training loop.
with tf.Session() as sess:
    init_op = tf.global_variables_initializer()
    sess.run(init_op)
    steps = 20000
    batch_size = 8
    data_size = 128
    for i in range(steps):
        # Cycle mini-batches through the fixed-size dataset.
        start = (i * batch_size) % data_size
        end = min(start + batch_size, data_size)  # bug fix: was `data_szie`
        sess.run(train_step, feed_dict={x: X[start:end], y_: Y_[start:end]})
        if i % 200 == 0:
            # Periodically report the loss over the whole dataset.
            loss_val = sess.run(loss, feed_dict={x: X, y_: Y_})
            # Bug fix: Python 3 print() — the original Python 2 print
            # statement is a syntax error under Python 3.
            print("After %d steps,loss is: %g " % (i, loss_val))