# A very simple example.
# Fit y = x * 3 + 2 (x is one-dimensional, y is a real number).
import tensorflow as tf

# TF 1.x: opt in to eager execution so ops run immediately instead of
# building a graph; must be called before any other TF ops.
tf.enable_eager_execution()
# Returns True once eager mode is active (result unused here — acts as a check).
tf.executing_eagerly()
# Shorthand for the TF 1.x eager-specific utilities (tf.contrib.eager).
# NOTE(review): `tfe` is not used in the visible part of the file.
tfe = tf.contrib.eager
# Number of synthetic training examples.
# Fixed typo: NUM_EAMPLES -> NUM_EXAMPLES (only referenced within this block).
NUM_EXAMPLES = 1000
# Inputs drawn from a standard normal distribution.
training_inputs = tf.random_normal([NUM_EXAMPLES])
# Additive Gaussian noise so the fit is not exact.
noise = tf.random_normal([NUM_EXAMPLES])
# Ground-truth relationship: y = 3x + 2 + noise.
training_outputs = training_inputs*3 + 2 + noise
def prediction(input, weight, bias):
    """Return the linear model output: input * weight + bias.

    Works element-wise on tensors as well as on plain Python numbers.
    NOTE(review): `input` shadows the builtin of the same name; kept so
    keyword callers are not broken.
    """
    return input * weight + bias
def loss(weights, biases):
    """Mean-squared error of the linear model on the module-level training set.

    Reads the module globals `training_inputs` and `training_outputs`.
    Returns a scalar tensor.
    """
    error = prediction(training_inputs, weights, biases) - training_outputs
    return tf.reduce_mean(tf.square(error))
def grad(weights, biases):
    """Return [d(loss)/d(weights), d(loss)/d(biases)] via a GradientTape.

    `weights`/`biases` are expected to be tf.Variable so the tape records
    them automatically. The gradient is taken outside the `with` block,
    after recording has finished.
    """
    with tf.GradientTape() as tape:
        loss_value = loss(weights, biases)
    return tape.gradient(loss_value, [weights, biases])
# Number of gradient-descent iterations.
# NOTE(review): "traing_steps" is a typo for "training_steps"; kept as-is
# because the loop below reads this exact name.
traing_steps = 200
# Step size for the manual gradient-descent updates.
learning_rate = 0.01
# Trainable parameters, both initialized to 0.0.
W = tf.Variable(0.)
B = tf.Variable(0.)
print("Initial loss: {:.3f}".format(loss(W,B)))
for i in range(traing_steps):
dw,db = grad(W,B)
W.