import tensorflow as tf
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
import os
def read_data(path='../datas/iris.data'):
    """Load the iris dataset and encode the string class labels as integers.

    :param path: CSV file path (no header row; 4 numeric feature columns
        followed by 1 string label column). Defaults to the original
        hard-coded location so existing callers are unaffected.
    :return: tuple ``(features, classes)`` of numpy arrays — shape (n, 4)
        float features and shape (n,) integer labels.
    """
    iris = pd.read_csv(filepath_or_buffer=path, header=None)
    print(iris.head(), iris[4].value_counts())
    print(iris.columns)
    # Map each class-name string to an integer id, in order of first appearance.
    spec_dict = {v: num for num, v in enumerate(iris[4].unique())}
    print(spec_dict)
    features = iris[[0, 1, 2, 3]]
    classes = iris[4].map(spec_dict)
    print(features, classes)
    return features.values, classes.values
def preprocess_data(features, classes):
    """Split the dataset into train/test partitions and standardize features.

    :param features: feature matrix, shape (n, 4)
    :param classes: integer label vector, shape (n,)
    :return: ``(X_train, X_test, y_train, y_test)`` with features standardized
        to zero mean / unit variance based on the training partition only.
    """
    split = train_test_split(features, classes, test_size=0.2, random_state=42)
    X_train, X_test, y_train, y_test = split
    scaler = StandardScaler()
    # Fit the scaler on the training data only, then reuse the same
    # statistics for the test data to avoid information leakage.
    X_train = scaler.fit_transform(X_train)
    X_test = scaler.transform(X_test)
    print(X_train, X_test)
    return X_train, X_test, y_train, y_test
def model():
    """Build the softmax-regression graph for 4-feature, 3-class iris data.

    Defines placeholders, a single linear layer, the loss, an Adam training
    op, an accuracy metric, and scalar summaries for TensorBoard.

    :return: ``(x, y, logits, predictions, loss, train_opt, accuracy)``.
    """
    with tf.variable_scope('Network'):
        # Inputs: a batch of 4-dimensional feature rows and integer class ids.
        x = tf.placeholder(tf.float32, [None, 4], name='x')
        y = tf.placeholder(tf.int32, [None], name='y')
        # Single linear layer mapping 4 features to 3 class scores.
        w = tf.get_variable('w', shape=[4, 3], dtype=tf.float32,
                            initializer=tf.truncated_normal_initializer(stddev=0.1))
        b = tf.get_variable('b', shape=[3], dtype=tf.float32,
                            initializer=tf.zeros_initializer())
        logits = tf.matmul(x, w) + b
        predictions = tf.nn.softmax(logits)
        # Sparse variant accepts integer labels directly (no one-hot needed).
        cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
            labels=y, logits=logits)
        loss = tf.reduce_mean(cross_entropy)
        # TensorBoard visualization.
        tf.summary.scalar('loss', loss)
        # Optimizer.
        train_opt = tf.train.AdamOptimizer(0.01).minimize(loss)
        # Accuracy: fraction of argmax predictions matching the labels.
        predicted_ids = tf.cast(tf.argmax(logits, axis=1), tf.int32)
        correct = tf.cast(tf.equal(predicted_ids, y), tf.float32)
        accuracy = tf.reduce_mean(correct)
        # TensorBoard visualization.
        tf.summary.scalar('accuracy', accuracy)
    return x, y, logits, predictions, loss, train_opt, accuracy
def train():
    """Train the iris softmax classifier end to end.

    Builds the graph, runs full-batch gradient steps for ``epochs``
    iterations, writes TensorBoard summaries each epoch, evaluates
    accuracy on the held-out split, and checkpoints the final model.
    """
    graph = tf.Graph()
    with graph.as_default():
        x, y, logits, predictions, loss, train_opt, accuracy = model()
        # Run the model in a session bound to this graph.
        with tf.Session(graph=graph) as sess:
            # Variable initialization.
            sess.run(tf.global_variables_initializer())
            # Load and preprocess the data.
            features, classes = read_data()
            X_train, X_test, y_train, y_test = preprocess_data(features, classes)
            # TensorBoard summaries and event-file writer.
            summary = tf.summary.merge_all()
            writer = tf.summary.FileWriter(logdir='./model/iris/graph', graph=sess.graph)
            # Model persistence.
            saver = tf.train.Saver()
            checkpoint_dir = './model/iris/ai20'
            # exist_ok avoids the check-then-create race of the original code.
            os.makedirs(checkpoint_dir, exist_ok=True)
            epochs = 300
            # BUG FIX: range(1, epochs) ran only epochs-1 iterations
            # (the original log stops at "Epochs:299"); include the last epoch.
            for e in range(1, epochs + 1):
                feed = {x: X_train, y: y_train}
                _, train_loss, train_acc, sum_ = sess.run(
                    [train_opt, loss, accuracy, summary], feed)
                writer.add_summary(sum_, global_step=e)
                print('Epochs:{} - Train loss:{:.5f} - Train Acc:{:.4f}'.format(
                    e, train_loss, train_acc
                ))
            # Evaluate on the held-out test split.
            valid_feed = {x: X_test, y: y_test}
            acc_ = sess.run(accuracy, valid_feed)
            print(acc_)
            # Persist the trained model.
            files = 'model.ckpt'
            save_files = os.path.join(checkpoint_dir, files)
            saver.save(sess, save_path=save_files)
            print('Saved model to file:{}'.format(save_files))
            writer.close()
# Script entry point: run the full training pipeline when executed directly.
if __name__ == '__main__':
    train()
D:\Anaconda\python.exe D:/AI20/HJZ/04-深度学习/2-TensorFlow基础/tf-代码/AI20_iris作业实现.py
2019-12-29 16:05:51.324304: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX AVX2
0 1 2 3 4
0 5.1 3.5 1.4 0.2 Iris-setosa
1 4.9 3.0 1.4 0.2 Iris-setosa
2 4.7 3.2 1.3 0.2 Iris-setosa
3 4.6 3.1 1.5 0.2 Iris-setosa
4 5.0 3.6 1.4 0.2 Iris-setosa Iris-setosa 50
Iris-versicolor 50
Iris-virginica 50
Name: 4, dtype: int64
Int64Index([0, 1, 2, 3, 4], dtype='int64')
{'Iris-setosa': 0, 'Iris-versicolor': 1, 'Iris-virginica': 2}
0 1 2 3
0 5.1 3.5 1.4 0.2
1 4.9 3.0 1.4 0.2
2 4.7 3.2 1.3 0.2
3 4.6 3.1 1.5 0.2
4 5.0 3.6 1.4 0.2
5 5.4 3.9 1.7 0.4
6 4.6 3.4 1.4 0.3
7 5.0 3.4 1.5 0.2
8 4.4 2.9 1.4 0.2
9 4.9 3.1 1.5 0.1
10 5.4 3.7 1.5 0.2
11 4.8 3.4 1.6 0.2
12 4.8 3.0 1.4 0.1
13 4.3 3.0 1.1 0.1
14 5.8 4.0 1.2 0.2
15 5.7 4.4 1.5 0.4
16 5.4 3.9 1.3 0.4
17 5.1 3.5 1.4 0.3
18 5.7 3.8 1.7 0.3
19 5.1 3.8 1.5 0.3
20 5.4 3.4 1.7 0.2
21 5.1 3.7 1.5 0.4
22 4.6 3.6 1.0 0.2
23 5.1 3.3 1.7 0.5
24 4.8 3.4 1.9 0.2
25 5.0 3.0 1.6 0.2
26 5.0 3.4 1.6 0.4
27 5.2 3.5 1.5 0.2
28 5.2 3.4 1.4 0.2
29 4.7 3.2 1.6 0.2
.. ... ... ... ...
120 6.9 3.2 5.7 2.3
121 5.6 2.8 4.9 2.0
122 7.7 2.8 6.7 2.0
123 6.3 2.7 4.9 1.8
124 6.7 3.3 5.7 2.1
125 7.2 3.2 6.0 1.8
126 6.2 2.8 4.8 1.8
127 6.1 3.0 4.9 1.8
128 6.4 2.8 5.6 2.1
129 7.2 3.0 5.8 1.6
130 7.4 2.8 6.1 1.9
131 7.9 3.8 6.4 2.0
132 6.4 2.8 5.6 2.2
133 6.3 2.8 5.1 1.5
134 6.1 2.6 5.6 1.4
135 7.7 3.0 6.1 2.3
136 6.3 3.4 5.6 2.4
137 6.4 3.1 5.5 1.8
138 6.0 3.0 4.8 1.8
139 6.9 3.1 5.4 2.1
140 6.7 3.1 5.6 2.4
141 6.9 3.1 5.1 2.3
142 5.8 2.7 5.1 1.9
143 6.8 3.2 5.9 2.3
144 6.7 3.3 5.7 2.5
145 6.7 3.0 5.2 2.3
146 6.3 2.5 5.0 1.9
147 6.5 3.0 5.2 2.0
148 6.2 3.4 5.4 2.3
149 5.9 3.0 5.1 1.8
[150 rows x 4 columns] 0 0
1 0
2 0
3 0
4 0
5 0
6 0
7 0
8 0
9 0
10 0
11 0
12 0
13 0
14 0
15 0
16 0
17 0
18 0
19 0
20 0
21 0
22 0
23 0
24 0
25 0
26 0
27 0
28 0
29 0
..
120 2
121 2
122 2
123 2
124 2
125 2
126 2
127 2
128 2
129 2
130 2
131 2
132 2
133 2
134 2
135 2
136 2
137 2
138 2
139 2
140 2
141 2
142 2
143 2
144 2
145 2
146 2
147 2
148 2
149 2
Name: 4, Length: 150, dtype: int64
[[-1.47393679 1.22037928 -1.5639872 -1.30948358]
[-0.13307079 3.02001693 -1.27728011 -1.04292204]
[ 1.08589829 0.09560575 0.38562104 0.28988568]
[-1.23014297 0.77046987 -1.21993869 -1.30948358]
[-1.7177306 0.32056046 -1.39196294 -1.30948358]
[ 0.59831066 -1.25412249 0.72966956 0.95628954]
[ 0.72020757 0.32056046 0.44296246 0.42316645]
[-0.74255534 0.99542457 -1.27728011 -1.30948358]
[-0.98634915 1.22037928 -1.33462153 -1.30948358]
[-0.74255534 2.34515281 -1.27728011 -1.44276436]
[-0.01117388 -0.80421307 0.78701097 0.95628954]
[ 0.23261993 0.77046987 0.44296246 0.55644722]
[ 1.08589829 0.09560575 0.5576453 0.42316645]
[-0.49876152 1.8952434 -1.39196294 -1.04292204]
[-0.49876152 1.44533399 -1.27728011 -1.30948358]
[-0.37686461 -1.47907719 -0.01576889 -0.24323741]
[ 0.59831066 -0.57925837 0.78701097 0.42316645]
[ 0.72020757 0.09560575 1.01637665 0.82300877]
[ 0.96400139 -0.12934896 0.38562104 0.28988568]
[ 1.69538284 1.22037928 1.36042516 1.75597417]
[-0.13307079 -0.35430366 0.2709382 0.15660491]
[ 2.18297047 -0.12934896 1.64713226 1.22285108]
[-0.2549677 -0.12934896 0.44296246 0.42316645]
[-0.86445224 0.99542457 -1.33462153 -1.30948358]
[ 2.30486738 -0.57925837 1.70447368 1.08957031]
[-0.01117388 -0.80421307 0.21359679 -0.24323741]
[-0.74255534 0.77046987 -1.33462153 -1.30948358]
[-0.98634915 0.99542457 -1.39196294 -1.17620281]
[-0.86445224 1.67028869 -1.04791443 -1.04292204]
[-0.98634915 -2.37889602 -0.13045173 -0.24323741]
[ 0.59831066 -0.80421307 0.67232814 0.82300877]
[-1.23014297 0.77046987 -1.04791443 -1.30948358]
[-0.98634915 -0.12934896 -1.21993869 -1.30948358]
[-0.86445224 0.54551516 -1.16259727 -0.90964127]
[-0.2549677 -0.80421307 0.2709382 0.15660491]
[-0.86445224 0.77046987 -1.27728011 -1.30948358]
[-0.13307079 -0.12934896 0.2709382 0.02332414]
[ 2.30486738 1.67028869 1.70447368 1.35613185]
[-1.47393679 0.32056046 -1.33462153 -1.30948358]
[ 0.47641375 -0.35430366 0.32827962 0.15660491]
[-0.13307079 -1.25412249 0.72966956 1.08957031]
[-0.37686461 2.57010752 -1.33462153 -1.30948358]
[ 0.23261993 -0.12934896 0.61498672 0.82300877]
[-0.01117388 -0.80421307 0.78701097 0.95628954]
[ 0.23261993 -1.9289866 0.15625537 -0.24323741]
[-0.49876152 -0.12934896 0.44296246 0.42316645]
[ 0.47641375 0.77046987 0.95903523 1.48941263]
[-0.37686461 -1.7040319 0.15625537 0.15660491]
[-0.49876152 1.8952434 -1.16259727 -1.04292204]
[-0.98634915 -1.7040319 -0.24513457 -0.24323741]
[ 0.72020757 -0.80421307 0.90169381 0.95628954]
[-0.98634915 0.54551516 -1.33462153 -1.30948358]
[-0.98634915 0.32056046 -1.44930436 -1.30948358]
[-0.37686461 -1.47907719 0.04157253 -0.10995664]
[ 1.08589829 -0.12934896 0.72966956 0.68972799]
[-1.10824606 0.09560575 -1.27728011 -1.44276436]
[-0.01117388 -0.57925837 0.78701097 1.6226934 ]
[-0.98634915 0.77046987 -1.27728011 -1.30948358]
[-0.98634915 0.99542457 -1.21993869 -0.7763605 ]
[ 0.11072303 0.32056046 0.61498672 0.82300877]
[-0.86445224 -1.25412249 -0.41715882 -0.10995664]
[ 1.32969211 0.32056046 1.13105949 1.48941263]
[ 0.23261993 -0.80421307 0.78701097 0.55644722]
[ 0.35451684 -1.02916778 1.07371807 0.28988568]
[ 2.30486738 -0.12934896 1.36042516 1.48941263]
[-0.37686461 -1.25412249 0.15625537 0.15660491]
[-1.7177306 -0.35430366 -1.33462153 -1.30948358]
[-1.83962751 -0.12934896 -1.50664578 -1.44276436]
[ 0.23261993 -1.9289866 0.72966956 0.42316645]
[ 1.69538284 0.32056046 1.30308374 0.82300877]
[-1.47393679 0.09560575 -1.27728011 -1.30948358]
[-0.86445224 0.99542457 -1.33462153 -1.17620281]
[-1.7177306 -0.12934896 -1.39196294 -1.30948358]
[ 0.59831066 -1.25412249 0.67232814 0.42316645]
[ 0.59831066 0.77046987 1.07371807 1.6226934 ]
[-1.47393679 0.77046987 -1.33462153 -1.17620281]
[ 1.2077952 -0.12934896 1.01637665 1.22285108]
[ 0.59831066 0.54551516 1.30308374 1.75597417]
[-1.35203988 0.32056046 -1.39196294 -1.30948358]
[ 0.35451684 -0.35430366 0.5576453 0.28988568]
[ 0.84210448 -0.57925837 0.50030388 0.42316645]
[ 0.47641375 -0.57925837 0.61498672 0.82300877]
[ 1.45158902 0.32056046 0.5576453 0.28988568]
[ 0.72020757 0.32056046 0.90169381 1.48941263]
[-0.86445224 1.67028869 -1.21993869 -1.30948358]
[ 1.32969211 0.09560575 0.95903523 1.22285108]
[ 0.11072303 -0.12934896 0.2709382 0.42316645]
[ 0.84210448 -0.12934896 0.84435239 1.08957031]
[-0.13307079 -1.02916778 -0.13045173 -0.24323741]
[-0.74255534 -0.80421307 0.09891395 0.28988568]
[ 0.35451684 -0.12934896 0.50030388 0.28988568]
[-1.5958337 -1.7040319 -1.39196294 -1.17620281]
[ 0.96400139 -0.35430366 0.50030388 0.15660491]
[-0.37686461 -1.02916778 0.38562104 0.02332414]
[-0.62065843 1.44533399 -1.27728011 -1.30948358]
[-0.2549677 -0.12934896 0.21359679 0.15660491]
[ 1.81727975 -0.35430366 1.475108 0.82300877]
[ 1.08589829 0.54551516 1.13105949 1.22285108]
[-0.86445224 1.44533399 -1.27728011 -1.04292204]
[-1.10824606 -1.47907719 -0.24513457 -0.24323741]
[ 1.08589829 0.54551516 1.13105949 1.75597417]
[ 1.69538284 -0.12934896 1.18840091 0.55644722]
[-1.10824606 0.09560575 -1.27728011 -1.44276436]
[ 1.08589829 0.09560575 1.07371807 1.6226934 ]
[-1.10824606 -0.12934896 -1.33462153 -1.30948358]
[ 1.32969211 0.09560575 0.67232814 0.42316645]
[ 1.93917666 -0.57925837 1.36042516 0.95628954]
[ 0.59831066 -0.35430366 1.07371807 0.82300877]
[-0.13307079 -0.57925837 0.21359679 0.15660491]
[ 0.84210448 -0.12934896 1.01637665 0.82300877]
[ 0.59831066 -1.7040319 0.38562104 0.15660491]
[ 0.72020757 -0.35430366 0.32827962 0.15660491]
[-0.2549677 -0.57925837 0.67232814 1.08957031]
[ 0.11072303 -0.12934896 0.78701097 0.82300877]
[-0.49876152 0.77046987 -1.16259727 -1.30948358]
[ 0.35451684 -0.57925837 0.15625537 0.15660491]
[-1.10824606 -1.25412249 0.44296246 0.68972799]
[-0.01117388 2.1201981 -1.44930436 -1.30948358]
[-0.01117388 -1.02916778 0.15625537 0.02332414]
[ 1.57348593 -0.12934896 1.24574233 1.22285108]] [[ 0.35451684 -0.57925837 0.5576453 0.02332414]
[-0.13307079 1.67028869 -1.16259727 -1.17620281]
[ 2.30486738 -1.02916778 1.81915651 1.48941263]
[ 0.23261993 -0.35430366 0.44296246 0.42316645]
[ 1.2077952 -0.57925837 0.61498672 0.28988568]
[-0.49876152 0.77046987 -1.27728011 -1.04292204]
[-0.2549677 -0.35430366 -0.07311031 0.15660491]
[ 1.32969211 0.09560575 0.78701097 1.48941263]
[ 0.47641375 -1.9289866 0.44296246 0.42316645]
[-0.01117388 -0.80421307 0.09891395 0.02332414]
[ 0.84210448 0.32056046 0.78701097 1.08957031]
[-1.23014297 -0.12934896 -1.33462153 -1.44276436]
[-0.37686461 0.99542457 -1.39196294 -1.30948358]
[-1.10824606 0.09560575 -1.27728011 -1.44276436]
[-0.86445224 1.67028869 -1.27728011 -1.17620281]
[ 0.59831066 0.54551516 0.5576453 0.55644722]
[ 0.84210448 -0.12934896 1.18840091 1.35613185]
[-0.2549677 -1.25412249 0.09891395 -0.10995664]
[-0.13307079 -0.57925837 0.44296246 0.15660491]
[ 0.72020757 -0.57925837 1.07371807 1.35613185]
[-1.35203988 0.32056046 -1.21993869 -1.30948358]
[ 0.35451684 -0.12934896 0.67232814 0.82300877]
[-0.98634915 0.77046987 -1.21993869 -1.04292204]
[ 0.72020757 -0.57925837 1.07371807 1.22285108]
[ 2.5486612 1.67028869 1.53244942 1.08957031]
[ 1.08589829 -0.12934896 0.84435239 1.48941263]
[ 1.08589829 -1.25412249 1.18840091 0.82300877]
[ 1.2077952 0.32056046 1.24574233 1.48941263]
[-1.23014297 -0.12934896 -1.33462153 -1.17620281]
[-1.23014297 0.09560575 -1.21993869 -1.30948358]]
Epochs:1 - Train loss:1.30795 - Train Acc:0.0250
Epochs:2 - Train loss:1.27227 - Train Acc:0.0250
Epochs:3 - Train loss:1.23756 - Train Acc:0.0250
Epochs:4 - Train loss:1.20381 - Train Acc:0.0250
Epochs:5 - Train loss:1.17099 - Train Acc:0.0917
Epochs:6 - Train loss:1.13915 - Train Acc:0.1333
Epochs:7 - Train loss:1.10832 - Train Acc:0.2167
Epochs:8 - Train loss:1.07856 - Train Acc:0.3917
Epochs:9 - Train loss:1.04988 - Train Acc:0.4417
Epochs:10 - Train loss:1.02231 - Train Acc:0.5917
Epochs:11 - Train loss:0.99584 - Train Acc:0.7750
Epochs:12 - Train loss:0.97046 - Train Acc:0.7917
Epochs:13 - Train loss:0.94614 - Train Acc:0.7917
Epochs:14 - Train loss:0.92288 - Train Acc:0.8167
Epochs:15 - Train loss:0.90065 - Train Acc:0.8250
Epochs:16 - Train loss:0.87944 - Train Acc:0.8333
Epochs:17 - Train loss:0.85921 - Train Acc:0.8417
Epochs:18 - Train loss:0.83994 - Train Acc:0.8417
Epochs:19 - Train loss:0.82159 - Train Acc:0.8583
Epochs:20 - Train loss:0.80412 - Train Acc:0.8833
Epochs:21 - Train loss:0.78748 - Train Acc:0.9000
Epochs:22 - Train loss:0.77164 - Train Acc:0.9083
Epochs:23 - Train loss:0.75655 - Train Acc:0.9167
Epochs:24 - Train loss:0.74217 - Train Acc:0.9167
Epochs:25 - Train loss:0.72846 - Train Acc:0.9250
Epochs:26 - Train loss:0.71537 - Train Acc:0.9250
Epochs:27 - Train loss:0.70287 - Train Acc:0.9333
Epochs:28 - Train loss:0.69093 - Train Acc:0.9333
Epochs:29 - Train loss:0.67951 - Train Acc:0.9167
Epochs:30 - Train loss:0.66857 - Train Acc:0.9250
Epochs:31 - Train loss:0.65809 - Train Acc:0.9167
Epochs:32 - Train loss:0.64805 - Train Acc:0.9167
Epochs:33 - Train loss:0.63841 - Train Acc:0.9167
Epochs:34 - Train loss:0.62916 - Train Acc:0.9000
Epochs:35 - Train loss:0.62027 - Train Acc:0.9000
Epochs:36 - Train loss:0.61173 - Train Acc:0.9000
Epochs:37 - Train loss:0.60353 - Train Acc:0.9083
Epochs:38 - Train loss:0.59563 - Train Acc:0.9167
Epochs:39 - Train loss:0.58804 - Train Acc:0.9167
Epochs:40 - Train loss:0.58073 - Train Acc:0.9167
Epochs:41 - Train loss:0.57370 - Train Acc:0.9167
Epochs:42 - Train loss:0.56693 - Train Acc:0.9167
Epochs:43 - Train loss:0.56041 - Train Acc:0.9083
Epochs:44 - Train loss:0.55413 - Train Acc:0.9000
Epochs:45 - Train loss:0.54809 - Train Acc:0.9000
Epochs:46 - Train loss:0.54226 - Train Acc:0.8917
Epochs:47 - Train loss:0.53665 - Train Acc:0.8917
Epochs:48 - Train loss:0.53124 - Train Acc:0.8917
Epochs:49 - Train loss:0.52602 - Train Acc:0.8917
Epochs:50 - Train loss:0.52099 - Train Acc:0.8917
Epochs:51 - Train loss:0.51614 - Train Acc:0.8917
Epochs:52 - Train loss:0.51145 - Train Acc:0.8917
Epochs:53 - Train loss:0.50693 - Train Acc:0.8917
Epochs:54 - Train loss:0.50256 - Train Acc:0.8917
Epochs:55 - Train loss:0.49833 - Train Acc:0.8917
Epochs:56 - Train loss:0.49424 - Train Acc:0.8917
Epochs:57 - Train loss:0.49029 - Train Acc:0.8917
Epochs:58 - Train loss:0.48646 - Train Acc:0.8917
Epochs:59 - Train loss:0.48275 - Train Acc:0.8917
Epochs:60 - Train loss:0.47915 - Train Acc:0.8917
Epochs:61 - Train loss:0.47566 - Train Acc:0.8917
Epochs:62 - Train loss:0.47227 - Train Acc:0.8750
Epochs:63 - Train loss:0.46897 - Train Acc:0.8750
Epochs:64 - Train loss:0.46577 - Train Acc:0.8750
Epochs:65 - Train loss:0.46266 - Train Acc:0.8750
Epochs:66 - Train loss:0.45962 - Train Acc:0.8750
Epochs:67 - Train loss:0.45667 - Train Acc:0.8750
Epochs:68 - Train loss:0.45379 - Train Acc:0.8750
Epochs:69 - Train loss:0.45098 - Train Acc:0.8750
Epochs:70 - Train loss:0.44824 - Train Acc:0.8750
Epochs:71 - Train loss:0.44556 - Train Acc:0.8750
Epochs:72 - Train loss:0.44294 - Train Acc:0.8750
Epochs:73 - Train loss:0.44038 - Train Acc:0.8750
Epochs:74 - Train loss:0.43787 - Train Acc:0.8750
Epochs:75 - Train loss:0.43541 - Train Acc:0.8750
Epochs:76 - Train loss:0.43301 - Train Acc:0.8750
Epochs:77 - Train loss:0.43065 - Train Acc:0.8750
Epochs:78 - Train loss:0.42834 - Train Acc:0.8750
Epochs:79 - Train loss:0.42608 - Train Acc:0.8750
Epochs:80 - Train loss:0.42385 - Train Acc:0.8750
Epochs:81 - Train loss:0.42167 - Train Acc:0.8750
Epochs:82 - Train loss:0.41952 - Train Acc:0.8750
Epochs:83 - Train loss:0.41741 - Train Acc:0.8750
Epochs:84 - Train loss:0.41533 - Train Acc:0.8750
Epochs:85 - Train loss:0.41329 - Train Acc:0.8750
Epochs:86 - Train loss:0.41129 - Train Acc:0.8750
Epochs:87 - Train loss:0.40931 - Train Acc:0.8750
Epochs:88 - Train loss:0.40736 - Train Acc:0.8750
Epochs:89 - Train loss:0.40544 - Train Acc:0.8750
Epochs:90 - Train loss:0.40356 - Train Acc:0.8750
Epochs:91 - Train loss:0.40169 - Train Acc:0.8833
Epochs:92 - Train loss:0.39986 - Train Acc:0.8833
Epochs:93 - Train loss:0.39805 - Train Acc:0.8917
Epochs:94 - Train loss:0.39626 - Train Acc:0.8917
Epochs:95 - Train loss:0.39450 - Train Acc:0.8917
Epochs:96 - Train loss:0.39275 - Train Acc:0.8917
Epochs:97 - Train loss:0.39104 - Train Acc:0.8917
Epochs:98 - Train loss:0.38934 - Train Acc:0.8917
Epochs:99 - Train loss:0.38766 - Train Acc:0.8917
Epochs:100 - Train loss:0.38600 - Train Acc:0.8917
Epochs:101 - Train loss:0.38437 - Train Acc:0.8917
Epochs:102 - Train loss:0.38275 - Train Acc:0.8917
Epochs:103 - Train loss:0.38115 - Train Acc:0.8917
Epochs:104 - Train loss:0.37956 - Train Acc:0.9000
Epochs:105 - Train loss:0.37800 - Train Acc:0.9000
Epochs:106 - Train loss:0.37645 - Train Acc:0.9000
Epochs:107 - Train loss:0.37492 - Train Acc:0.9000
Epochs:108 - Train loss:0.37340 - Train Acc:0.9000
Epochs:109 - Train loss:0.37190 - Train Acc:0.9000
Epochs:110 - Train loss:0.37041 - Train Acc:0.9000
Epochs:111 - Train loss:0.36894 - Train Acc:0.9000
Epochs:112 - Train loss:0.36748 - Train Acc:0.9000
Epochs:113 - Train loss:0.36603 - Train Acc:0.9000
Epochs:114 - Train loss:0.36460 - Train Acc:0.9000
Epochs:115 - Train loss:0.36318 - Train Acc:0.9000
Epochs:116 - Train loss:0.36178 - Train Acc:0.9083
Epochs:117 - Train loss:0.36038 - Train Acc:0.9083
Epochs:118 - Train loss:0.35900 - Train Acc:0.9083
Epochs:119 - Train loss:0.35763 - Train Acc:0.9083
Epochs:120 - Train loss:0.35628 - Train Acc:0.9083
Epochs:121 - Train loss:0.35493 - Train Acc:0.9083
Epochs:122 - Train loss:0.35360 - Train Acc:0.9083
Epochs:123 - Train loss:0.35227 - Train Acc:0.9083
Epochs:124 - Train loss:0.35096 - Train Acc:0.9083
Epochs:125 - Train loss:0.34966 - Train Acc:0.9083
Epochs:126 - Train loss:0.34836 - Train Acc:0.9083
Epochs:127 - Train loss:0.34708 - Train Acc:0.9083
Epochs:128 - Train loss:0.34581 - Train Acc:0.9083
Epochs:129 - Train loss:0.34455 - Train Acc:0.9083
Epochs:130 - Train loss:0.34330 - Train Acc:0.9083
Epochs:131 - Train loss:0.34205 - Train Acc:0.9167
Epochs:132 - Train loss:0.34082 - Train Acc:0.9250
Epochs:133 - Train loss:0.33959 - Train Acc:0.9250
Epochs:134 - Train loss:0.33838 - Train Acc:0.9250
Epochs:135 - Train loss:0.33717 - Train Acc:0.9250
Epochs:136 - Train loss:0.33597 - Train Acc:0.9250
Epochs:137 - Train loss:0.33478 - Train Acc:0.9250
Epochs:138 - Train loss:0.33360 - Train Acc:0.9250
Epochs:139 - Train loss:0.33242 - Train Acc:0.9250
Epochs:140 - Train loss:0.33126 - Train Acc:0.9250
Epochs:141 - Train loss:0.33010 - Train Acc:0.9250
Epochs:142 - Train loss:0.32895 - Train Acc:0.9250
Epochs:143 - Train loss:0.32781 - Train Acc:0.9250
Epochs:144 - Train loss:0.32668 - Train Acc:0.9250
Epochs:145 - Train loss:0.32555 - Train Acc:0.9333
Epochs:146 - Train loss:0.32443 - Train Acc:0.9333
Epochs:147 - Train loss:0.32332 - Train Acc:0.9333
Epochs:148 - Train loss:0.32221 - Train Acc:0.9333
Epochs:149 - Train loss:0.32112 - Train Acc:0.9333
Epochs:150 - Train loss:0.32003 - Train Acc:0.9333
Epochs:151 - Train loss:0.31894 - Train Acc:0.9333
Epochs:152 - Train loss:0.31787 - Train Acc:0.9333
Epochs:153 - Train loss:0.31680 - Train Acc:0.9333
Epochs:154 - Train loss:0.31574 - Train Acc:0.9333
Epochs:155 - Train loss:0.31468 - Train Acc:0.9333
Epochs:156 - Train loss:0.31363 - Train Acc:0.9333
Epochs:157 - Train loss:0.31259 - Train Acc:0.9333
Epochs:158 - Train loss:0.31155 - Train Acc:0.9333
Epochs:159 - Train loss:0.31052 - Train Acc:0.9417
Epochs:160 - Train loss:0.30950 - Train Acc:0.9417
Epochs:161 - Train loss:0.30848 - Train Acc:0.9417
Epochs:162 - Train loss:0.30747 - Train Acc:0.9417
Epochs:163 - Train loss:0.30647 - Train Acc:0.9417
Epochs:164 - Train loss:0.30547 - Train Acc:0.9417
Epochs:165 - Train loss:0.30448 - Train Acc:0.9417
Epochs:166 - Train loss:0.30349 - Train Acc:0.9417
Epochs:167 - Train loss:0.30251 - Train Acc:0.9417
Epochs:168 - Train loss:0.30154 - Train Acc:0.9417
Epochs:169 - Train loss:0.30057 - Train Acc:0.9417
Epochs:170 - Train loss:0.29961 - Train Acc:0.9417
Epochs:171 - Train loss:0.29865 - Train Acc:0.9417
Epochs:172 - Train loss:0.29770 - Train Acc:0.9417
Epochs:173 - Train loss:0.29675 - Train Acc:0.9417
Epochs:174 - Train loss:0.29581 - Train Acc:0.9417
Epochs:175 - Train loss:0.29488 - Train Acc:0.9417
Epochs:176 - Train loss:0.29395 - Train Acc:0.9417
Epochs:177 - Train loss:0.29302 - Train Acc:0.9417
Epochs:178 - Train loss:0.29211 - Train Acc:0.9417
Epochs:179 - Train loss:0.29119 - Train Acc:0.9417
Epochs:180 - Train loss:0.29028 - Train Acc:0.9417
Epochs:181 - Train loss:0.28938 - Train Acc:0.9417
Epochs:182 - Train loss:0.28848 - Train Acc:0.9417
Epochs:183 - Train loss:0.28759 - Train Acc:0.9417
Epochs:184 - Train loss:0.28671 - Train Acc:0.9417
Epochs:185 - Train loss:0.28582 - Train Acc:0.9417
Epochs:186 - Train loss:0.28495 - Train Acc:0.9417
Epochs:187 - Train loss:0.28407 - Train Acc:0.9417
Epochs:188 - Train loss:0.28321 - Train Acc:0.9417
Epochs:189 - Train loss:0.28235 - Train Acc:0.9417
Epochs:190 - Train loss:0.28149 - Train Acc:0.9417
Epochs:191 - Train loss:0.28064 - Train Acc:0.9417
Epochs:192 - Train loss:0.27979 - Train Acc:0.9417
Epochs:193 - Train loss:0.27895 - Train Acc:0.9417
Epochs:194 - Train loss:0.27811 - Train Acc:0.9417
Epochs:195 - Train loss:0.27728 - Train Acc:0.9417
Epochs:196 - Train loss:0.27645 - Train Acc:0.9333
Epochs:197 - Train loss:0.27562 - Train Acc:0.9333
Epochs:198 - Train loss:0.27480 - Train Acc:0.9333
Epochs:199 - Train loss:0.27399 - Train Acc:0.9333
Epochs:200 - Train loss:0.27318 - Train Acc:0.9333
Epochs:201 - Train loss:0.27237 - Train Acc:0.9333
Epochs:202 - Train loss:0.27157 - Train Acc:0.9333
Epochs:203 - Train loss:0.27078 - Train Acc:0.9333
Epochs:204 - Train loss:0.26998 - Train Acc:0.9333
Epochs:205 - Train loss:0.26920 - Train Acc:0.9333
Epochs:206 - Train loss:0.26841 - Train Acc:0.9333
Epochs:207 - Train loss:0.26763 - Train Acc:0.9333
Epochs:208 - Train loss:0.26686 - Train Acc:0.9333
Epochs:209 - Train loss:0.26609 - Train Acc:0.9333
Epochs:210 - Train loss:0.26532 - Train Acc:0.9333
Epochs:211 - Train loss:0.26456 - Train Acc:0.9333
Epochs:212 - Train loss:0.26380 - Train Acc:0.9333
Epochs:213 - Train loss:0.26305 - Train Acc:0.9333
Epochs:214 - Train loss:0.26230 - Train Acc:0.9333
Epochs:215 - Train loss:0.26155 - Train Acc:0.9333
Epochs:216 - Train loss:0.26081 - Train Acc:0.9333
Epochs:217 - Train loss:0.26007 - Train Acc:0.9333
Epochs:218 - Train loss:0.25934 - Train Acc:0.9333
Epochs:219 - Train loss:0.25861 - Train Acc:0.9333
Epochs:220 - Train loss:0.25788 - Train Acc:0.9333
Epochs:221 - Train loss:0.25716 - Train Acc:0.9333
Epochs:222 - Train loss:0.25644 - Train Acc:0.9333
Epochs:223 - Train loss:0.25573 - Train Acc:0.9333
Epochs:224 - Train loss:0.25502 - Train Acc:0.9333
Epochs:225 - Train loss:0.25431 - Train Acc:0.9333
Epochs:226 - Train loss:0.25361 - Train Acc:0.9333
Epochs:227 - Train loss:0.25291 - Train Acc:0.9333
Epochs:228 - Train loss:0.25222 - Train Acc:0.9333
Epochs:229 - Train loss:0.25153 - Train Acc:0.9333
Epochs:230 - Train loss:0.25084 - Train Acc:0.9333
Epochs:231 - Train loss:0.25016 - Train Acc:0.9333
Epochs:232 - Train loss:0.24948 - Train Acc:0.9333
Epochs:233 - Train loss:0.24880 - Train Acc:0.9333
Epochs:234 - Train loss:0.24813 - Train Acc:0.9333
Epochs:235 - Train loss:0.24746 - Train Acc:0.9333
Epochs:236 - Train loss:0.24679 - Train Acc:0.9333
Epochs:237 - Train loss:0.24613 - Train Acc:0.9333
Epochs:238 - Train loss:0.24547 - Train Acc:0.9333
Epochs:239 - Train loss:0.24481 - Train Acc:0.9333
Epochs:240 - Train loss:0.24416 - Train Acc:0.9333
Epochs:241 - Train loss:0.24351 - Train Acc:0.9333
Epochs:242 - Train loss:0.24287 - Train Acc:0.9333
Epochs:243 - Train loss:0.24223 - Train Acc:0.9333
Epochs:244 - Train loss:0.24159 - Train Acc:0.9333
Epochs:245 - Train loss:0.24095 - Train Acc:0.9333
Epochs:246 - Train loss:0.24032 - Train Acc:0.9333
Epochs:247 - Train loss:0.23969 - Train Acc:0.9333
Epochs:248 - Train loss:0.23907 - Train Acc:0.9333
Epochs:249 - Train loss:0.23844 - Train Acc:0.9333
Epochs:250 - Train loss:0.23783 - Train Acc:0.9333
Epochs:251 - Train loss:0.23721 - Train Acc:0.9333
Epochs:252 - Train loss:0.23660 - Train Acc:0.9333
Epochs:253 - Train loss:0.23599 - Train Acc:0.9333
Epochs:254 - Train loss:0.23538 - Train Acc:0.9333
Epochs:255 - Train loss:0.23478 - Train Acc:0.9333
Epochs:256 - Train loss:0.23418 - Train Acc:0.9333
Epochs:257 - Train loss:0.23358 - Train Acc:0.9333
Epochs:258 - Train loss:0.23299 - Train Acc:0.9333
Epochs:259 - Train loss:0.23240 - Train Acc:0.9333
Epochs:260 - Train loss:0.23181 - Train Acc:0.9333
Epochs:261 - Train loss:0.23123 - Train Acc:0.9333
Epochs:262 - Train loss:0.23065 - Train Acc:0.9333
Epochs:263 - Train loss:0.23007 - Train Acc:0.9333
Epochs:264 - Train loss:0.22949 - Train Acc:0.9333
Epochs:265 - Train loss:0.22892 - Train Acc:0.9333
Epochs:266 - Train loss:0.22835 - Train Acc:0.9333
Epochs:267 - Train loss:0.22778 - Train Acc:0.9333
Epochs:268 - Train loss:0.22722 - Train Acc:0.9333
Epochs:269 - Train loss:0.22666 - Train Acc:0.9417
Epochs:270 - Train loss:0.22610 - Train Acc:0.9417
Epochs:271 - Train loss:0.22554 - Train Acc:0.9500
Epochs:272 - Train loss:0.22499 - Train Acc:0.9500
Epochs:273 - Train loss:0.22444 - Train Acc:0.9500
Epochs:274 - Train loss:0.22389 - Train Acc:0.9500
Epochs:275 - Train loss:0.22335 - Train Acc:0.9500
Epochs:276 - Train loss:0.22281 - Train Acc:0.9500
Epochs:277 - Train loss:0.22227 - Train Acc:0.9500
Epochs:278 - Train loss:0.22173 - Train Acc:0.9500
Epochs:279 - Train loss:0.22120 - Train Acc:0.9500
Epochs:280 - Train loss:0.22067 - Train Acc:0.9500
Epochs:281 - Train loss:0.22014 - Train Acc:0.9500
Epochs:282 - Train loss:0.21961 - Train Acc:0.9500
Epochs:283 - Train loss:0.21909 - Train Acc:0.9500
Epochs:284 - Train loss:0.21857 - Train Acc:0.9500
Epochs:285 - Train loss:0.21805 - Train Acc:0.9583
Epochs:286 - Train loss:0.21753 - Train Acc:0.9583
Epochs:287 - Train loss:0.21702 - Train Acc:0.9583
Epochs:288 - Train loss:0.21651 - Train Acc:0.9583
Epochs:289 - Train loss:0.21600 - Train Acc:0.9583
Epochs:290 - Train loss:0.21550 - Train Acc:0.9583
Epochs:291 - Train loss:0.21499 - Train Acc:0.9667
Epochs:292 - Train loss:0.21449 - Train Acc:0.9667
Epochs:293 - Train loss:0.21399 - Train Acc:0.9667
Epochs:294 - Train loss:0.21350 - Train Acc:0.9667
Epochs:295 - Train loss:0.21301 - Train Acc:0.9667
Epochs:296 - Train loss:0.21251 - Train Acc:0.9667
Epochs:297 - Train loss:0.21203 - Train Acc:0.9667
Epochs:298 - Train loss:0.21154 - Train Acc:0.9667
Epochs:299 - Train loss:0.21106 - Train Acc:0.9667
1.0
Saved model to file:./model/iris/ai20\model.ckpt
Process finished with exit code 0
To visualize the training curves, run: tensorboard --logdir=<absolute path to ./model/iris/graph> (right-click the graph directory in the IDE and copy its absolute path).