鸢尾花种类预测

【研究目的】根据已有数据集训练相应的神经网络,能够对鸢尾花种类进行预测【数据集】(感谢https://blog.csdn.net/skyli114/article/details/78515434的分享,分享仅供学习使用,谢谢合作)iris.csv"","Sepal.Length","Sepal.Width","Petal.Length","Petal.Width","Species"...
摘要由CSDN通过智能技术生成

【研究目的】

根据已有数据集训练相应的神经网络,能够对鸢尾花种类进行预测

【数据集】

(感谢https://blog.csdn.net/skyli114/article/details/78515434的分享,分享仅供学习使用,谢谢合作)

iris.csv
"","Sepal.Length","Sepal.Width","Petal.Length","Petal.Width","Species"
"1",5.1,3.5,1.4,0.2,"setosa"
"2",4.9,3,1.4,0.2,"setosa"
"3",4.7,3.2,1.3,0.2,"setosa"
"4",4.6,3.1,1.5,0.2,"setosa"
"5",5,3.6,1.4,0.2,"setosa"
"6",5.4,3.9,1.7,0.4,"setosa"
"7",4.6,3.4,1.4,0.3,"setosa"
"8",5,3.4,1.5,0.2,"setosa"
"9",4.4,2.9,1.4,0.2,"setosa"
"10",4.9,3.1,1.5,0.1,"setosa"
"11",5.4,3.7,1.5,0.2,"setosa"
"12",4.8,3.4,1.6,0.2,"setosa"
"13",4.8,3,1.4,0.1,"setosa"
"14",4.3,3,1.1,0.1,"setosa"
"15",5.8,4,1.2,0.2,"setosa"
"16",5.7,4.4,1.5,0.4,"setosa"
"17",5.4,3.9,1.3,0.4,"setosa"
"18",5.1,3.5,1.4,0.3,"setosa"
"19",5.7,3.8,1.7,0.3,"setosa"
"20",5.1,3.8,1.5,0.3,"setosa"
"21",5.4,3.4,1.7,0.2,"setosa"
"22",5.1,3.7,1.5,0.4,"setosa"
"23",4.6,3.6,1,0.2,"setosa"
"24",5.1,3.3,1.7,0.5,"setosa"
"25",4.8,3.4,1.9,0.2,"setosa"
"26",5,3,1.6,0.2,"setosa"
"27",5,3.4,1.6,0.4,"setosa"
"28",5.2,3.5,1.5,0.2,"setosa"
"29",5.2,3.4,1.4,0.2,"setosa"
"30",4.7,3.2,1.6,0.2,"setosa"
"31",4.8,3.1,1.6,0.2,"setosa"
"32",5.4,3.4,1.5,0.4,"setosa"
"33",5.2,4.1,1.5,0.1,"setosa"
"34",5.5,4.2,1.4,0.2,"setosa"
"35",4.9,3.1,1.5,0.2,"setosa"
"36",5,3.2,1.2,0.2,"setosa"
"37",5.5,3.5,1.3,0.2,"setosa"
"38",4.9,3.6,1.4,0.1,"setosa"
"39",4.4,3,1.3,0.2,"setosa"
"40",5.1,3.4,1.5,0.2,"setosa"
"41",5,3.5,1.3,0.3,"setosa"
"42",4.5,2.3,1.3,0.3,"setosa"
"43",4.4,3.2,1.3,0.2,"setosa"
"44",5,3.5,1.6,0.6,"setosa"
"45",5.1,3.8,1.9,0.4,"setosa"
"46",4.8,3,1.4,0.3,"setosa"
"47",5.1,3.8,1.6,0.2,"setosa"
"48",4.6,3.2,1.4,0.2,"setosa"
"49",5.3,3.7,1.5,0.2,"setosa"
"50",5,3.3,1.4,0.2,"setosa"
"51",7,3.2,4.7,1.4,"versicolor"
"52",6.4,3.2,4.5,1.5,"versicolor"
"53",6.9,3.1,4.9,1.5,"versicolor"
"54",5.5,2.3,4,1.3,"versicolor"
"55",6.5,2.8,4.6,1.5,"versicolor"
"56",5.7,2.8,4.5,1.3,"versicolor"
"57",6.3,3.3,4.7,1.6,"versicolor"
"58",4.9,2.4,3.3,1,"versicolor"
"59",6.6,2.9,4.6,1.3,"versicolor"
"60",5.2,2.7,3.9,1.4,"versicolor"
"61",5,2,3.5,1,"versicolor"
"62",5.9,3,4.2,1.5,"versicolor"
"63",6,2.2,4,1,"versicolor"
"64",6.1,2.9,4.7,1.4,"versicolor"
"65",5.6,2.9,3.6,1.3,"versicolor"
"66",6.7,3.1,4.4,1.4,"versicolor"
"67",5.6,3,4.5,1.5,"versicolor"
"68",5.8,2.7,4.1,1,"versicolor"
"69",6.2,2.2,4.5,1.5,"versicolor"
"70",5.6,2.5,3.9,1.1,"versicolor"
"71",5.9,3.2,4.8,1.8,"versicolor"
"72",6.1,2.8,4,1.3,"versicolor"
"73",6.3,2.5,4.9,1.5,"versicolor"
"74",6.1,2.8,4.7,1.2,"versicolor"
"75",6.4,2.9,4.3,1.3,"versicolor"
"76",6.6,3,4.4,1.4,"versicolor"
"77",6.8,2.8,4.8,1.4,"versicolor"
"78",6.7,3,5,1.7,"versicolor"
"79",6,2.9,4.5,1.5,"versicolor"
"80",5.7,2.6,3.5,1,"versicolor"
"81",5.5,2.4,3.8,1.1,"versicolor"
"82",5.5,2.4,3.7,1,"versicolor"
"83",5.8,2.7,3.9,1.2,"versicolor"
"84",6,2.7,5.1,1.6,"versicolor"
"85",5.4,3,4.5,1.5,"versicolor"
"86",6,3.4,4.5,1.6,"versicolor"
"87",6.7,3.1,4.7,1.5,"versicolor"
"88",6.3,2.3,4.4,1.3,"versicolor"
"89",5.6,3,4.1,1.3,"versicolor"
"90",5.5,2.5,4,1.3,"versicolor"
"91",5.5,2.6,4.4,1.2,"versicolor"
"92",6.1,3,4.6,1.4,"versicolor"
"93",5.8,2.6,4,1.2,"versicolor"
"94",5,2.3,3.3,1,"versicolor"
"95",5.6,2.7,4.2,1.3,"versicolor"
"96",5.7,3,4.2,1.2,"versicolor"
"97",5.7,2.9,4.2,1.3,"versicolor"
"98",6.2,2.9,4.3,1.3,"versicolor"
"99",5.1,2.5,3,1.1,"versicolor"
"100",5.7,2.8,4.1,1.3,"versicolor"
"101",6.3,3.3,6,2.5,"virginica"
"102",5.8,2.7,5.1,1.9,"virginica"
"103",7.1,3,5.9,2.1,"virginica"
"104",6.3,2.9,5.6,1.8,"virginica"
"105",6.5,3,5.8,2.2,"virginica"
"106",7.6,3,6.6,2.1,"virginica"
"107",4.9,2.5,4.5,1.7,"virginica"
"108",7.3,2.9,6.3,1.8,"virginica"
"109",6.7,2.5,5.8,1.8,"virginica"
"110",7.2,3.6,6.1,2.5,"virginica"
"111",6.5,3.2,5.1,2,"virginica"
"112",6.4,2.7,5.3,1.9,"virginica"
"113",6.8,3,5.5,2.1,"virginica"
"114",5.7,2.5,5,2,"virginica"
"115",5.8,2.8,5.1,2.4,"virginica"
"116",6.4,3.2,5.3,2.3,"virginica"
"117",6.5,3,5.5,1.8,"virginica"
"118",7.7,3.8,6.7,2.2,"virginica"
"119",7.7,2.6,6.9,2.3,"virginica"
"120",6,2.2,5,1.5,"virginica"
"121",6.9,3.2,5.7,2.3,"virginica"
"122",5.6,2.8,4.9,2,"virginica"
"123",7.7,2.8,6.7,2,"virginica"
"124",6.3,2.7,4.9,1.8,"virginica"
"125",6.7,3.3,5.7,2.1,"virginica"
"126",7.2,3.2,6,1.8,"virginica"
"127",6.2,2.8,4.8,1.8,"virginica"
"128",6.1,3,4.9,1.8,"virginica"
"129",6.4,2.8,5.6,2.1,"virginica"
"130",7.2,3,5.8,1.6,"virginica"
"131",7.4,2.8,6.1,1.9,"virginica"
"132",7.9,3.8,6.4,2,"virginica"
"133",6.4,2.8,5.6,2.2,"virginica"
"134",6.3,2.8,5.1,1.5,"virginica"
"135",6.1,2.6,5.6,1.4,"virginica"
"136",7.7,3,6.1,2.3,"virginica"
"137",6.3,3.4,5.6,2.4,"virginica"
"138",6.4,3.1,5.5,1.8,"virginica"
"139",6,3,4.8,1.8,"virginica"
"140",6.9,3.1,5.4,2.1,"virginica"
"141",6.7,3.1,5.6,2.4,"virginica"
"142",6.9,3.1,5.1,2.3,"virginica"
"143",5.8,2.7,5.1,1.9,"virginica"
"144",6.8,3.2,5.9,2.3,"virginica"
"145",6.7,3.3,5.7,2.5,"virginica"
"146",6.7,3,5.2,2.3,"virginica"
"147",6.3,2.5,5,1.9,"virginica"
"148",6.5,3,5.2,2,"virginica"
"149",6.2,3.4,5.4,2.3,"virginica"
"150",5.9,3,5.1,1.8,"virginica"

【实现代码】

import numpy as np
import pandas as pd
from keras.models import Sequential
from keras.models import model_from_json
from keras.layers import Dense
from keras.utils import np_utils
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.model_selection import KFold
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import LabelEncoder

# Fix the RNG seed so training runs are reproducible.
seed = 13
np.random.seed(seed)

# Load the iris data set: column 0 is a row index, columns 1-4 are the
# four flower measurements, column 5 is the species name.
df = pd.read_csv("iris.csv")
X = df.iloc[:, 1:5].to_numpy(dtype=float)  # features (150 x 4)
Y = df.iloc[:, 5].to_numpy()               # string labels (150,)

# Map species strings -> integer codes -> one-hot vectors for softmax output.
encoder = LabelEncoder()
Y_encode = encoder.fit_transform(Y)
Y_onehot = np_utils.to_categorical(Y_encode)

def bassline_model():
    """Build and compile the baseline iris classifier.

    Architecture: 4 inputs -> 7-unit tanh hidden layer -> 3-way softmax
    (one output unit per species).

    Returns:
        A compiled Keras ``Sequential`` model ready for training.
    """
    model = Sequential()
    model.add(Dense(7, input_dim=4, activation="tanh"))  # hidden layer
    model.add(Dense(3, activation="softmax"))  # class-probability output
    # Fix: use categorical cross-entropy, the standard loss for a softmax
    # multi-class classifier. The original mean_squared_error trains but
    # gives weak gradients and slower, poorer convergence for classification.
    model.compile(loss="categorical_crossentropy", optimizer="sgd",
                  metrics=["accuracy"])
    return model

# Wrap the model-builder so scikit-learn utilities can drive Keras training.
# epochs = full passes over the data; batch_size = samples per weight update.
estimator = KerasClassifier(build_fn=bassline_model, batch_size=1, epochs=20,
                            verbose=1)

# 10-fold cross validation: shuffle the 150 samples, train on 9 folds,
# score on the held-out fold, repeated for every fold.
kfold = KFold(n_splits=10, shuffle=True, random_state=seed)
result = cross_val_score(estimator, X, Y_onehot, cv=kfold)

# Report the mean accuracy and its spread across the 10 folds.
print("Accuracy of cross validation, mean %.2f, std %.2f" % (result.mean(), result.std()))

# Retrain on the full data set, then persist the result to disk in two
# parts: the architecture as JSON, the learned weights as HDF5.
estimator.fit(X, Y_onehot)

model_json = estimator.model.to_json()
with open("model.json", "w") as json_file:
    json_file.write(model_json)

estimator.model.save_weights("model.h5")
print("save model to disk")

# Reload the persisted network and sanity-check it against the training data.
with open("model.json", "r") as json_file:
    loaded_model_json = json_file.read()

loaded_model = model_from_json(loaded_model_json)  # rebuild the architecture
loaded_model.load_weights("model.h5")              # restore trained weights
print("Loaded model from disk")

# The reloaded model should reproduce the training-set predictions.
predicted = loaded_model.predict(X)  # per-class probabilities
print("predicted probability:" + str(predicted))

predicted_label = loaded_model.predict_classes(X)  # argmax class indices
print("predicted Label:" + str(predicted_label))

【运行结果】

Using TensorFlow backend.
2020-01-12 19:51:09.946191: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library cudart64_101.dll
2020-01-12 19:51:16.791391: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library nvcuda.dll
2020-01-12 19:51:16.819312: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1555] Found device 0 with properties: 
pciBusID: 0000:01:00.0 name: GeForce GTX 1050 computeCapability: 6.1
coreClock: 1.493GHz coreCount: 5 deviceMemorySize: 2.00GiB deviceMemoryBandwidth: 104.43GiB/s
2020-01-12 19:51:16.819748: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library cudart64_101.dll
2020-01-12 19:51:16.829257: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library cublas64_10.dll
2020-01-12 19:51:16.836288: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library cufft64_10.dll
2020-01-12 19:51:16.838789: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library curand64_10.dll
2020-01-12 19:51:16.845896: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library cusolver64_10.dll
2020-01-12 19:51:16.849711: I tensorflow/stream_executor/platform/default/dso_loader.cc:44] Successfully opened dynamic library cusparse64_10.dll
2020-01-12 19:51:16.856122: W tensorflow/stream_executor/platform/default/dso_loader.cc:55] Could not load dynamic library 'cudnn64_7.dll'; dlerror: cudnn64_7.dll not found
2020-01-12 19:51:16.856464: W tensorflow/core/common_runtime/gpu/gpu_device.cc:1592] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.
Skipping registering GPU devices...
2020-01-12 19:51:16.857637: I tensorflow/core/platform/cpu_feature_guard.cc:142] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2
2020-01-12 19:51:16.858641: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1096] Device interconnect StreamExecutor with strength 1 edge matrix:
2020-01-12 19:51:16.859462: I tensorflow/core/common_runtime/gpu/gpu_device.cc:1102]      
Epoch 1/20

  1/135 [..............................] - ETA: 8s - loss: 0.4754 - accuracy: 0.0000e+00
 81/135 [=================>............] - ETA: 0s - loss: 0.2616 - accuracy: 0.3086    
135/135 [==============================] - 0s 1ms/step - loss: 0.2443 - accuracy: 0.3407
Epoch 2/20

  1/135 [..............................] - ETA: 0s - loss: 0.2527 - accuracy: 0.0000e+00
 57/135 [===========>..................] - ETA: 0s - loss: 0.2126 - accuracy: 0.4561    
106/135 [======================>.......] - ETA: 0s - loss: 0.2101 - accuracy: 0.5377
135/135 [==============================] - 0s 949us/step - loss: 0.2072 - accuracy: 0.5704
Epoch 3/20

  1/135 [..............................] - ETA: 0s - loss: 0.1820 - accuracy: 1.0000
 50/135 [==========>...................] - ETA: 0s - loss: 0.1954 - accuracy: 0.6800
124/135 [==========================>...] - ETA: 0s - loss: 0.1946 - accuracy: 0.6532
135/135 [==============================] - 0s 803us/step - loss: 0.1936 - accuracy: 0.6593
Epoch 4/20

  1/135 [..............................] - ETA: 0s - loss: 0.1372 - accuracy: 1.0000
 68/135 [==============>...............] - ETA: 0s - loss: 0.1847 - accuracy: 0.6471
127/135 [===========================>..] - ETA: 0s - loss: 0.1821 - accuracy: 0.6614
135/135 [==============================] - 0s 798us/step - loss: 0.1820 - accuracy: 0.6593
Epoch 5/20

  1/135 [..............................] - ETA: 0s - loss: 0.1353 - accuracy: 1.0000
 73/135 [===============>..............] - ETA: 0s - loss: 0.1688 - accuracy: 0.6849
135/135 [==============================] - 0s 677us/step - loss: 0.1701 - accuracy: 0.6593
Epoch 6/20

  1/135 [..............................] - ETA: 0s - loss: 0.2446 - accuracy: 0.0000e+00
 77/135 [================>.............] - ETA: 0s - loss: 0.1546 - accuracy: 0.7143    
135/135 [==============================] - 0s 754us/step - loss: 0.1584 - accuracy: 0.6593
Epoch 7/20

  1/135 [..............................] - ETA: 0s - loss: 0.2557 - accuracy: 0.0000e+00
 68/135 [==============>...............] - ETA: 0s - loss: 0.1586 - accuracy: 0.6176    
135/135 [==============================] - 0s 735us/step - loss: 0.1492 - accuracy: 0.6593
Epoch 8/20

  1/135 [..............................] - ETA: 0s - loss: 0.0758 - accuracy: 1.0000
 73/135 [===============>..............] - ETA: 0s - loss: 0.1480 - accuracy: 0.6301
127/135 [===========================>..] - ETA: 0s - loss: 0.1384 - accuracy: 0.6772
135/135 [==============================] - 0s 805us/step - loss: 0.1409 - accuracy: 0.6593
Epoch 9/20

  1/135 [..............................] - ETA: 0s - loss: 0.0762 - accuracy: 1.0000
 62/135 [============>.................] - ETA: 0s - loss: 0.1375 - accuracy: 0.6452
135/135 [==============================] - 0s 768us/step - loss: 0.1340 - accuracy: 0.6593
Epoch 10/20

  1/135 [..............................] - ETA: 0s - loss: 0.0511 - accuracy: 1.0000
 73/135 [===============>..............] - ETA: 0s - loss: 0.1272 - accuracy: 0.6986
135/135 [==============================] - 0s 702us/step - loss: 0.1281 - accuracy: 0.6741
Epoch 11/20

  1/135 [..............................] - ETA: 0s - loss: 0.0321 - accuracy: 1.0000
 68/135 [==============>...............] - ETA: 0s - loss: 0.1332 - accuracy: 0.7353
135/135 [==============================] - 0s 761us/step - loss: 0.1231 - accuracy: 0.7407
Epoch 12/20

  1/135 [..............................] - ETA: 0s - loss: 0.2039 - accuracy: 1.0000
 66/135 [=============>................] - ETA: 0s - loss: 0.1254 - accuracy: 0.7879
135/135 [==============================] - 0s 724us/step - loss: 0.1188 - accuracy: 0.7704
Epoch 13/20

  1/135 [..............................] - ETA: 0s - loss: 0.2189 - accuracy: 0.0000e+00
 60/135 [============>.................] - ETA: 0s - loss: 0.1203 - accuracy: 0.7833    
128/135 [===========================>..] - ETA: 0s - loss: 0.1148 - accuracy: 0.7891
135/135 [==============================] - 0s 805us/step - loss: 0.1151 - accuracy: 0.8000
Epoch 14/20

  1/135 [..............................] - ETA: 0s - loss: 0.1044 - accuracy: 1.0000
 71/135 [==============>...............] - ETA: 0s - loss: 0.1130 - accuracy: 0.8451
135/135 [==============================] - 0s 724us/step - loss: 0.1108 - accuracy: 0.8667
Epoch 15/20

  1/135 [..............................] - ETA: 0s - loss: 0.1916 - accuracy: 1.0000
 75/135 [===============>..............] - ETA: 0s - loss: 0.1127 - accuracy: 0.8133
135/135 [==============================] - 0s 761us/step - loss: 0.1084 - accuracy: 0.8296
Epoch 16/20

  1/135 [..............................] - ETA: 0s - loss: 0.1004 - accuracy: 1.0000
 52/135 [==========>...................] - ETA: 0s - loss: 0.1209 - accuracy: 0.8654
111/135 [=======================>......] - ETA: 0s - loss: 0.1031 - accuracy: 0.8649
135/135 [==============================] - 0s 879us/step - loss: 0.1048 - accuracy: 0.8741
Epoch 17/20

  1/135 [..............................] - ETA: 0s - loss: 0.1867 - accuracy: 1.0000
 59/135 [============>.................] - ETA: 0s - loss: 0.0902 - accuracy: 0.9492
126/135 [===========================>..] - ETA: 0s - loss: 0.0998 - accuracy: 0.8889
135/135 [==============================] - 0s 818us/step - loss: 0.1022 - accuracy: 0.8815
Epoch 18/20

  1/135 [..............................] - ETA: 0s - loss: 0.1754 - accuracy: 1.0000
 58/135 [===========>..................] - ETA: 0s - loss: 0.0993 - accuracy: 0.9138
135/135 [==============================] - 0s 768us/step - loss: 0.0991 - accuracy: 0.9185
Epoch 19/20

  1/135 [..............................] - ETA: 0s - loss: 0.0911 - accuracy: 1.0000
 69/135 [==============>...............] - ETA: 0s - loss: 0.0930 - accuracy: 0.8696
128/135 [===========================>..] - ETA: 0s - loss: 0.0950 - accuracy: 0.9141
135/135 [==============================] - 0s 820us/step - loss: 0.0963 - accuracy: 0.9111
Epoch 20/20

  1/135 [..............................] - ETA: 0s - loss: 0.1543 - accuracy: 1.0000
 66/135 [=============>................] - ETA: 0s - loss: 0.0964 - accuracy: 0.8939
135/135 [==============================] - 0s 737us/step - loss: 0.0929 - accuracy: 0.9185

 1/15 [=>............................] - ETA: 0s
15/15 [==============================] - 0s 1ms/step
Epoch 1/20

  1/135 [..............................] - ETA: 5s - loss: 0.0712 - accuracy: 1.0000
 70/135 [==============>...............] - ETA: 0s - loss: 0.2244 - accuracy: 0.3857
135/135 [==============================] - 0s 1ms/step - loss: 0.2182 - accuracy: 0.4593
Epoch 2/20

  1/135 [..............................] - ETA: 0s - loss: 0.1864 - accuracy: 1.0000
 72/135 [===============>..............] - ETA: 0s - loss: 0.1878 - accuracy: 0.6389
135/135 [==============================] - 0s 724us/step - loss: 0.1816 - accuracy: 0.6593
Epoch 3/20

  1/135 [..............................] - ETA: 0s - loss: 0.2754 - accuracy: 0.0000e+00
 51/135 [==========>...................] - ETA: 0s - loss: 0.1854 - accuracy: 0.5686    
108/135 [=======================>......] - ETA: 0s - loss: 0.1713 - accuracy: 0.6111
135/135 [==============================] - 0s 931us/step - loss: 0.1637 - accuracy: 0.6593
Epoch 4/20

  1/135 [..............................] - ETA: 0s - loss: 0.0406 - accuracy: 1.0000
 73/135 [===============>..............] - ETA: 0s - loss: 0.1450 - accuracy: 0.6575
135/135 [==============================] - 0s 746us/step - loss: 0.1527 - accuracy: 0.6519
Epoch 5/20

  1/135 [..............................] - ETA: 0s - loss: 0.2189 - accuracy: 0.0000e+00
 58/135 [===========>..................] - ETA: 0s - loss: 0.1411 - accuracy: 0.7414    
129/135 [===========================>..] - ETA: 0s - loss: 0.1484 - accuracy: 0.6357
135/135 [==============================] - 0s 791us/step - loss: 0.1458 - accuracy: 0.6444
Epoch 6/20

  1/135 [..............................] - ETA: 0s - loss: 0.1792 - accuracy: 1.0000
 76/135 [===============>..............] - ETA: 0s - loss: 0.1359 - accuracy: 0.5526
135/135 [==============================] - 0s 702us/step - loss: 0.1409 - accuracy: 0.5481
Epoch 7/20

  1/135 [..............................] - ETA: 0s - loss: 0.1807 - accuracy: 0.0000e+00
 56/135 [===========>..................] - ETA: 0s - loss: 0.1338 - accuracy: 0.5893    
122/135 [==========================>...] - ETA: 0s - loss: 0.1358 - accuracy: 0.5820
135/135 [==============================] - 0s 824us/step - loss: 0.1372 - accuracy: 0.5630
Epoch 8/20

  1/135 [..............................] - ETA: 0s - loss: 0.0365 - accuracy: 1.0000
 78/135 [================>.............] - ETA: 0s - loss: 0.1302 - accuracy: 0.6538
135/135 [==============================] - 0s 707us/step - loss: 0.1343 - accuracy: 0.5926
Epoch 9/20

  1/135 [..............................] - ETA: 0s - loss: 0.1899 - accuracy: 0.0000e+00
 80/135 [================>.............] - ETA: 0s - loss: 0.1254 - accuracy: 0.5875    
135/135 [==============================] - 0s 716us/step - loss: 0.1319 - accuracy: 0.5630
Epoch 10/20

  1/135 [..............................] - ETA: 0s - loss: 0.1748 - accuracy: 1.0000
 57/135 [===========>..................] - ETA: 0s - loss: 0.1357 - accuracy: 0.5965
110/135 [=======================>......] - ETA: 0s - loss: 0.1268 - accuracy: 0.6636
135/135 [==============================] - 0s 894us/step - loss: 0.1299 - accuracy: 0.6222
Epoch 11/20

  1/135 [..............................] - ETA: 0s - loss: 0.0270 - accuracy: 1.0000
 64/135 [=============>................] - ETA: 0s - loss: 0.1223 - accuracy: 0.5781
117/135 [=========================>....] - ETA: 0s - loss: 0.1271 - accuracy: 0.6154
135/135 [==============================] - 0s 887us/step - loss: 0.1284 - accuracy: 0.6148
Epoch 12/20

  1/135 [..............................] - ETA: 0s - loss: 0.1717 - accuracy: 1.0000
 56/135 [===========>..................] - ETA: 0s - loss: 0.1204 - accuracy: 0.7143
124/135 [==========================>...] - ETA: 0s - loss: 0.1267 - accuracy: 0.6452
135/135 [==============================] - 0s 827us/step - loss: 0.1267 - accuracy: 0.6370
Epoch 13/20

  1/135 [..............................] - ETA: 0s - loss: 0.1665 - accuracy: 1.0000
 62/135 [============>.................] - ETA: 0s - loss: 0.1276 - accuracy: 0.6129
117/135 [=========================>....] - ETA: 0s - loss: 0.1243 - accuracy: 0.6239
135/135 [==============================] - 0s 867us/step - loss: 0.1258 - accuracy: 0.6222
Epoch 14/20

  1/135 [..............................] - ETA: 0s - loss: 0.0186 - accuracy: 1.0000
 67/135 [=============>................] - ETA: 0s - loss: 0.1257 - accuracy: 0.6716
135/135 [==============================] - 0s 711us/step - loss: 0.1246 - accuracy: 0.6741
Epoch 15/20

  1/135 [..............................] - ETA: 0s - loss: 0.1856 - accuracy: 0.0000e+00
 71/135 [==============>...............] - ETA: 0s - loss: 0.1271 - accuracy: 0.7042    
135/135 [==============================] - 0s 761us/step - loss: 0.1234 - accuracy: 0.6741
Epoch 16/20

  1/135 [..............................] - ETA: 0s - loss: 0.1698 - accuracy: 1.0000
 54/135 [===========>..................] - ETA: 0s - loss: 0.1135 - accuracy: 0.7593
131/135 [============================>.] - ETA: 0s - loss: 0.1224 - accuracy: 0.6794
135/135 [==============================] - 0s 776us/step - loss: 0.1229 - accuracy: 0.6667
Epoch 17/20

  1/135 [..............................] - ETA: 0s - loss: 0.0161 - accuracy: 1.0000
 77/135 [================>.............] - ETA: 0s - loss: 0.1172 - accuracy: 0.6623
135/135 [==============================] - 0s 724us/step - loss: 0.1222 - accuracy: 0.6593
Epoch 18/20

  1/135 [..............................] - ETA: 0s - loss: 0.0122 - accuracy: 1.0000
 58/135 [===========>..................] - ETA: 0s - loss: 0.1168 - accuracy: 0.6552
126/135 [===========================>..] - ETA: 0s - loss: 0.1243 - accuracy: 0.6349
135/135 [
  • 0
    点赞
  • 2
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值