# Iris classification exercise based on TensorFlow 2.1.
# Trains a single dense layer (4 features -> 3 classes) with softmax + MSE
# loss via plain gradient descent, reporting loss and accuracy every epoch.

import tensorflow as tf
import numpy as np
# NOTE(review): load_iris is used below but its import was lost when this
# file was copied from the blog page; restored here.
from sklearn.datasets import load_iris

# Load the iris dataset once: features shape (150, 4), labels shape (150,).
iris = load_iris()
x = iris.data
y = iris.target

# Shuffle features and labels with the same seed so (x, y) pairs stay aligned.
np.random.seed(116)
np.random.shuffle(x)
np.random.seed(116)
np.random.shuffle(y)
tf.random.set_seed(116)

# Train/test split: first 120 samples for training, last 30 for testing.
x_train, x_test = x[:-30], x[-30:]
y_train, y_test = y[:-30], y[-30:]

# Cast features to float32 and integer class labels to int64 for the TF ops.
x_train = tf.cast(x_train, tf.float32)
x_test = tf.cast(x_test, tf.float32)
y_train = tf.cast(y_train, tf.int64)
y_test = tf.cast(y_test, tf.int64)

# Batch both splits: the train set yields 4 batches of 30, the test set one.
batch_size = 30
train_db = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(batch_size)
test_db = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(batch_size)

# Fully-connected layer parameters: weight matrix (4 inputs -> 3 classes) and
# bias, drawn from a truncated normal with a fixed seed for reproducibility.
w1 = tf.Variable(tf.random.truncated_normal([4,3],stddev=0.1,seed=1))
b1 = tf.Variable(tf.random.truncated_normal([3],stddev=0.1,seed=1))

# Hyper-parameters and per-epoch accumulators.
LR_BASE = 0.01  # initial learning rate
LR_DECAY = 0.99  # learning-rate decay factor
LR_STEP = 20  # decay the learning rate once every LR_STEP epochs
epoch = 2000  # total number of training epochs
loss_all_train = 0  # accumulates per-batch training loss within one epoch
loss_all_test = 0  # accumulates per-batch test loss within one epoch
num_classes = 3  # iris has 3 target classes

# Training loop. NOTE(review): the blog scrape flattened all indentation and
# dropped several lines (the matmul forward pass, the GradientTape/update
# section marked only by the "# feedback" comment, and the test-time forward
# pass). They are reconstructed below; verify against the original tutorial.
for cur_epoch in range(epoch):
    # Step-wise exponential learning-rate decay.
    lr = LR_BASE * LR_DECAY ** (cur_epoch // LR_STEP)
    for step, (x_batch, y_batch) in enumerate(train_db):
        with tf.GradientTape() as tape:
            # Forward pass: one dense layer, then softmax to probabilities.
            y = tf.matmul(x_batch, w1) + b1
            y = tf.nn.softmax(y)
            y_ = tf.one_hot(y_batch, depth=num_classes)
            # MSE between one-hot labels and predicted probabilities.
            loss = tf.reduce_mean(tf.square(y_ - y))
            loss_all_train += loss.numpy()
        # Backward pass: plain gradient descent on w1 and b1.
        grads = tape.gradient(loss, [w1, b1])
        w1.assign_sub(lr * grads[0])
        b1.assign_sub(lr * grads[1])
    # Average train loss over the 4 batches (120 samples / batch of 30).
    pri_loss_epoch_train = round(float(loss_all_train) / 4.0, 4)
    loss_all_train = 0

    total_correct = 0
    total_number = 0
    # Evaluate loss and accuracy on the held-out test set.
    for x_batch, y_batch in test_db:
        y = tf.nn.softmax(tf.matmul(x_batch, w1) + b1)
        y_ = tf.one_hot(y_batch, depth=num_classes)
        loss_test = tf.reduce_mean(tf.square(tf.subtract(y_, y)))
        loss_all_test += loss_test
        # Predicted class = argmax probability; compare against int64 labels.
        pred = tf.cast(tf.argmax(y, axis=1), dtype=tf.int64)
        correct = tf.reduce_sum(tf.cast(tf.equal(pred, y_batch), dtype=tf.int32))
        total_correct += int(correct)
        total_number += x_batch.shape[0]
    # The test set is a single batch of 30, hence the / 1.0.
    pri_loss_epoch_test = round(float(loss_all_test) / 1.0, 4)
    loss_all_test = 0
    acc = round(float(total_correct / total_number) * 100, 2)
    print("Epoch:{} val_loss:{} test_loss:{} acc:{}% lr:{}".format(cur_epoch,
                                                                   pri_loss_epoch_train,
                                                                   pri_loss_epoch_test,
                                                                   acc, lr))

# ---------------------------------------------------------------------------
# NOTE(review): everything below is residue from the CSDN blog page this file
# was scraped from (post dates / view counters, a recommended-article title,
# and the page footer). It is not Python and broke parsing; preserved here as
# comments instead of being deleted.
# 04-07 153 | 03-19 416 | 12-10 74 | 06-11 721 | 04-22 168 | 03-20 143
# 03-11 74 | 03-19 105 | 07-11 5729 | 03-24 47 | 04-07 39 | 03-25 707
# 02-09 2054 | 03-27 1133 | 04-30 278 | 05-05 62 | 05-26 400 | 11-27 8370
# 03-11 3033 | 04-14 61万+
# #### 在中国程序员是青春饭吗？
# ©️2019 CSDN 皮肤主题: 游动-白 设计师: 上身试试