TensorFlow Practice, Part 4: Building a Neural Network
Beyond plain gradient descent, common optimizers include (a sketch of swapping them into the training step appears below):
Momentum gradient descent
AdaGrad
RMSProp
Adam (both fast and effective)
# -*- coding: utf-8 -*-
"""
Created on Mon May 17 09:49:58 2021
@author: monastic
"""
import tensorflow as tf
import numpy as np
### Configure the network layers ###
# in_size: dimension of the input vector, i.e. the number of weights per neuron
# out_size: the number of neurons in this layer
def add_layer(inputs, in_size, out_size, activation_function=None):
    Weights = tf.Variable(tf.random_normal([in_size, out_size]))
    biases = tf.Variable(tf.zeros([1, out_size]) + 0.1)  # small positive initial bias
    Wx_plus_b = tf.matmul(inputs, Weights) + biases
    if activation_function is None:
        outputs = Wx_plus_b
    else:
        outputs = activation_function(Wx_plus_b)
    return outputs
### Generate the training data ###
x_data = np.linspace(-1, 1, 300)[:, np.newaxis]  # 300 samples, shape (300, 1)
noise = np.random.normal(0, 0.05, x_data.shape)
y_data = np.square(x_data) - 0.5 + noise  # target curve y = x^2 - 0.5 plus Gaussian noise
### Build the network ###
xs = tf.placeholder(tf.float32, [None, 1])  # input: one feature per sample
ys = tf.placeholder(tf.float32, [None, 1])  # target: one value per sample
l1 = add_layer(xs, 1, 10, activation_function=tf.nn.relu)    # hidden layer with 10 neurons
prediction = add_layer(l1, 10, 1, activation_function=None)  # linear output layer
# Mean squared error: axis=1 (the modern name for reduction_indices) sums the
# squared error within each sample, then reduce_mean averages over the batch.
loss = tf.reduce_mean(tf.reduce_sum(tf.square(ys - prediction), axis=1))
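As a concrete check of the two reductions, here is what each step would produce for a hypothetical batch of two samples:

# tf.square(ys - prediction)  -> shape (2, 1), e.g. [[0.04], [0.01]]
# tf.reduce_sum(..., axis=1)  -> shape (2,),   i.e. [0.04, 0.01]
# tf.reduce_mean(...)         -> scalar,       i.e. 0.025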
train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)
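The optimizers listed at the top can be dropped in here in place of plain gradient descent. A minimal sketch using the TF1 tf.train API; the learning rates and momentum value are illustrative, not tuned for this problem:

# Uncomment one of these to replace the GradientDescentOptimizer above:
# train_step = tf.train.MomentumOptimizer(learning_rate=0.1, momentum=0.9).minimize(loss)
# train_step = tf.train.AdagradOptimizer(learning_rate=0.1).minimize(loss)
# train_step = tf.train.RMSPropOptimizer(learning_rate=0.01).minimize(loss)
# train_step = tf.train.AdamOptimizer(learning_rate=0.01).minimize(loss)

They are commented out so the listing still runs unchanged with plain gradient descent.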
### Start training ###
init = tf.global_variables_initializer()  # tf.initialize_all_variables() is deprecated
sess = tf.Session()
sess.run(init)
for i in range(1000):
    sess.run(train_step, feed_dict={xs: x_data, ys: y_data})
    if i % 50 == 0:
        # the printed loss should decrease steadily as training progresses
        print(sess.run(loss, feed_dict={xs: x_data, ys: y_data}))
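To see how well the network has fit the curve, you can fetch prediction on the training inputs and plot it against the data. A minimal sketch, assuming matplotlib is installed (not part of the original listing):

import matplotlib.pyplot as plt

pred = sess.run(prediction, feed_dict={xs: x_data})  # final network output
plt.scatter(x_data, y_data, s=10, label='training data')
plt.plot(x_data, pred, 'r-', lw=2, label='network fit')
plt.legend()
plt.show()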