三十三.TF2自动梯度计算实现神经网络分类

#数据预处理
#载入数据
from sklearn.datasets import load_iris
iris = load_iris()
x,y=iris.data,iris.target
print(x.shape,y.shape)
输出:
(150, 4) (150,)
#打乱数据
import numpy as np
import tensorflow as tf
np.random.seed(100)
np.random.shuffle(x)
np.random.seed(100)
np.random.shuffle(y)
tf.random.set_seed(100)
#划分训练集和测试集
#转换数据类型
from sklearn.model_selection import train_test_split
x_train,x_test,y_train,y_test = train_test_split(x,y)
x_train = tf.cast(x_train,tf.float32)
x_test = tf.cast(x_test,tf.float32)
#切片配对和设定batch大小
train_db = tf.data.Dataset.from_tensor_slices((x_train,y_train)).batch(32)
test_db = tf.data.Dataset.from_tensor_slices((x_test,y_test)).batch(32)
#初始化神经网络中的所有可训练参数
#神经网络只有输入层和输出层两层,样本有4个维度,所以输入层有4个神经元,类别有3种,所以输出有三个维度,则权重系数为[4,3]矩阵
w1 = tf.Variable(tf.random.truncated_normal([4,3],stddev=0.1,seed=1))
b1 = tf.Variable(tf.random.truncated_normal([3],stddev=0.1,seed=1))
#设置超参数和记录结果
lr = 0.1
test_acc = []
train_loss_result = []
epochs = 100
loss_all=0
#开始训练
for epoch in range(epochs):
    for step,(x_train,y_train) in enumerate(train_db):
        with tf.GradientTape() as tape:
            y=tf.matmul(x_train,w1)+b1#前项传播过程
            y = tf.nn.softmax(y)#将输出映射为概率分布
            y_= tf.one_hot(y_train,depth=3)
            loss = tf.reduce_mean(tf.square(y_-y))
        grads = tape.gradient(loss,[w1,b1])#计算loss对各个参数的梯度
        w1.assign_sub(lr*grads[0])
        b1.assign_sub(lr*grads[1])
        loss_all +=loss.numpy()
        train_loss_result.append(loss_all/4)
        
        #测试
        total_correct,total_number = 0,0
        for x_test,y_test in test_db:
            y=tf.matmul(x_test,w1)+b1#进行预测
            y=tf.nn.softmax(y)
            pred=tf.argmax(y,axis=1)
            pred = tf.cast(pred,dtype=y_test.dtype)
            correct=tf.cast(tf.equal(pred,y_test),dtype=tf.int32)
            correct=tf.reduce_sum(correct)
            total_correct+=int(correct)
            total_number+=x_test.shape[0]
        acc=total_correct/total_number
        test_acc.append(acc)
        print('Epoch:%d,loss:%f,test accuracy:%f'%(epoch,loss_all/4,acc))
        loss_all = 0
输出:
Epoch:0,loss:0.065097,test accuracy:0.000000
Epoch:0,loss:0.061090,test accuracy:0.263158
Epoch:0,loss:0.059538,test accuracy:0.473684
Epoch:0,loss:0.056154,test accuracy:0.394737
Epoch:1,loss:0.059555,test accuracy:0.394737
Epoch:1,loss:0.061746,test accuracy:0.421053
Epoch:1,loss:0.055985,test accuracy:0.473684
Epoch:1,loss:0.050054,test accuracy:0.394737
Epoch:2,loss:0.057971,test accuracy:0.394737
Epoch:2,loss:0.059468,test accuracy:0.394737
Epoch:2,loss:0.053694,test accuracy:0.421053
Epoch:2,loss:0.047514,test accuracy:0.394737
Epoch:3,loss:0.055079,test accuracy:0.394737
Epoch:3,loss:0.056426,test accuracy:0.394737
Epoch:3,loss:0.051332,test accuracy:0.394737
Epoch:3,loss:0.044981,test accuracy:0.394737
Epoch:4,loss:0.052072,test accuracy:0.710526
Epoch:4,loss:0.053397,test accuracy:0.710526
Epoch:4,loss:0.048963,test accuracy:0.710526
Epoch:4,loss:0.042474,test accuracy:0.710526
Epoch:5,loss:0.049186,test accuracy:0.710526
Epoch:5,loss:0.050594,test accuracy:0.710526
Epoch:5,loss:0.046696,test accuracy:0.710526
Epoch:5,loss:0.040187,test accuracy:0.710526
Epoch:6,loss:0.046545,test accuracy:0.710526
Epoch:6,loss:0.048110,test accuracy:0.710526
Epoch:6,loss:0.044611,test accuracy:0.710526
Epoch:6,loss:0.038230,test accuracy:0.710526
Epoch:7,loss:0.044211,test accuracy:0.710526
Epoch:7,loss:0.045956,test accuracy:0.710526
Epoch:7,loss:0.042742,test accuracy:0.710526
Epoch:7,loss:0.036619,test accuracy:0.710526
Epoch:8,loss:0.042196,test accuracy:0.710526
Epoch:8,loss:0.044105,test accuracy:0.710526
Epoch:8,loss:0.041092,test accuracy:0.710526
Epoch:8,loss:0.035316,test accuracy:0.710526
Epoch:9,loss:0.040477,test accuracy:0.710526
Epoch:9,loss:0.042516,test accuracy:0.710526
Epoch:9,loss:0.039645,test accuracy:0.736842
Epoch:9,loss:0.034263,test accuracy:0.710526
Epoch:10,loss:0.039018,test accuracy:0.710526
Epoch:10,loss:0.041146,test accuracy:0.710526
Epoch:10,loss:0.038377,test accuracy:0.763158
Epoch:10,loss:0.033406,test accuracy:0.710526
Epoch:11,loss:0.037778,test accuracy:0.710526
Epoch:11,loss:0.039956,test accuracy:0.710526
Epoch:11,loss:0.037263,test accuracy:0.789474
Epoch:11,loss:0.032698,test accuracy:0.710526
Epoch:12,loss:0.036718,test accuracy:0.710526
Epoch:12,loss:0.038913,test accuracy:0.710526
Epoch:12,loss:0.036279,test accuracy:0.815789
Epoch:12,loss:0.032106,test accuracy:0.710526
Epoch:13,loss:0.035805,test accuracy:0.710526
Epoch:13,loss:0.037991,test accuracy:0.710526
Epoch:13,loss:0.035404,test accuracy:0.842105
Epoch:13,loss:0.031601,test accuracy:0.710526
Epoch:14,loss:0.035013,test accuracy:0.710526
Epoch:14,loss:0.037167,test accuracy:0.736842
Epoch:14,loss:0.034620,test accuracy:0.894737
Epoch:14,loss:0.031164,test accuracy:0.710526
Epoch:15,loss:0.034318,test accuracy:0.710526
Epoch:15,loss:0.036425,test accuracy:0.789474
Epoch:15,loss:0.033913,test accuracy:0.894737
Epoch:15,loss:0.030780,test accuracy:0.710526
Epoch:16,loss:0.033704,test accuracy:0.710526
Epoch:16,loss:0.035750,test accuracy:0.789474
Epoch:16,loss:0.033271,test accuracy:0.894737
Epoch:16,loss:0.030437,test accuracy:0.710526
Epoch:17,loss:0.033157,test accuracy:0.710526
Epoch:17,loss:0.035131,test accuracy:0.815789
Epoch:17,loss:0.032683,test accuracy:0.921053
Epoch:17,loss:0.030126,test accuracy:0.710526
Epoch:18,loss:0.032664,test accuracy:0.710526
Epoch:18,loss:0.034560,test accuracy:0.842105
Epoch:18,loss:0.032142,test accuracy:0.947368
Epoch:18,loss:0.029840,test accuracy:0.710526
Epoch:19,loss:0.032216,test accuracy:0.710526
Epoch:19,loss:0.034028,test accuracy:0.842105
Epoch:19,loss:0.031641,test accuracy:0.973684
Epoch:19,loss:0.029575,test accuracy:0.710526
Epoch:20,loss:0.031808,test accuracy:0.710526
Epoch:20,loss:0.033530,test accuracy:0.894737
Epoch:20,loss:0.031175,test accuracy:0.973684
Epoch:20,loss:0.029327,test accuracy:0.710526
Epoch:21,loss:0.031431,test accuracy:0.710526
Epoch:21,loss:0.033062,test accuracy:0.894737
Epoch:21,loss:0.030738,test accuracy:0.973684
Epoch:21,loss:0.029091,test accuracy:0.710526
Epoch:22,loss:0.031082,test accuracy:0.710526
Epoch:22,loss:0.032618,test accuracy:0.894737
Epoch:22,loss:0.030328,test accuracy:0.973684
Epoch:22,loss:0.028866,test accuracy:0.710526
Epoch:23,loss:0.030757,test accuracy:0.710526
Epoch:23,loss:0.032197,test accuracy:0.894737
Epoch:23,loss:0.029941,test accuracy:0.973684
Epoch:23,loss:0.028650,test accuracy:0.710526
Epoch:24,loss:0.030452,test accuracy:0.710526
Epoch:24,loss:0.031794,test accuracy:0.894737
Epoch:24,loss:0.029574,test accuracy:0.973684
Epoch:24,loss:0.028441,test accuracy:0.710526
Epoch:25,loss:0.030164,test accuracy:0.710526
Epoch:25,loss:0.031409,test accuracy:0.921053
Epoch:25,loss:0.029225,test accuracy:0.973684
Epoch:25,loss:0.028238,test accuracy:0.710526
Epoch:26,loss:0.029891,test accuracy:0.710526
Epoch:26,loss:0.031039,test accuracy:0.973684
Epoch:26,loss:0.028893,test accuracy:0.973684
Epoch:26,loss:0.028039,test accuracy:0.710526
Epoch:27,loss:0.029632,test accuracy:0.710526
Epoch:27,loss:0.030681,test accuracy:0.973684
Epoch:27,loss:0.028575,test accuracy:0.973684
Epoch:27,loss:0.027845,test accuracy:0.710526
Epoch:28,loss:0.029384,test accuracy:0.710526
Epoch:28,loss:0.030336,test accuracy:0.973684
Epoch:28,loss:0.028270,test accuracy:0.973684
Epoch:28,loss:0.027654,test accuracy:0.710526
Epoch:29,loss:0.029146,test accuracy:0.710526
Epoch:29,loss:0.030002,test accuracy:0.973684
Epoch:29,loss:0.027977,test accuracy:0.973684
Epoch:29,loss:0.027466,test accuracy:0.710526
Epoch:30,loss:0.028918,test accuracy:0.710526
Epoch:30,loss:0.029678,test accuracy:0.973684
Epoch:30,loss:0.027695,test accuracy:0.973684
Epoch:30,loss:0.027281,test accuracy:0.710526
Epoch:31,loss:0.028697,test accuracy:0.710526
Epoch:31,loss:0.029362,test accuracy:0.973684
Epoch:31,loss:0.027422,test accuracy:0.973684
Epoch:31,loss:0.027098,test accuracy:0.710526
Epoch:32,loss:0.028484,test accuracy:0.710526
Epoch:32,loss:0.029055,test accuracy:0.973684
Epoch:32,loss:0.027159,test accuracy:0.973684
Epoch:32,loss:0.026917,test accuracy:0.710526
Epoch:33,loss:0.028277,test accuracy:0.710526
Epoch:33,loss:0.028756,test accuracy:0.973684
Epoch:33,loss:0.026905,test accuracy:0.973684
Epoch:33,loss:0.026738,test accuracy:0.710526
Epoch:34,loss:0.028075,test accuracy:0.710526
Epoch:34,loss:0.028464,test accuracy:0.973684
Epoch:34,loss:0.026658,test accuracy:0.973684
Epoch:34,loss:0.026560,test accuracy:0.710526
Epoch:35,loss:0.027879,test accuracy:0.710526
Epoch:35,loss:0.028178,test accuracy:0.973684
Epoch:35,loss:0.026418,test accuracy:0.973684
Epoch:35,loss:0.026384,test accuracy:0.710526
Epoch:36,loss:0.027688,test accuracy:0.710526
Epoch:36,loss:0.027899,test accuracy:0.973684
Epoch:36,loss:0.026185,test accuracy:0.973684
Epoch:36,loss:0.026210,test accuracy:0.710526
Epoch:37,loss:0.027501,test accuracy:0.710526
Epoch:37,loss:0.027625,test accuracy:0.973684
Epoch:37,loss:0.025958,test accuracy:0.973684
Epoch:37,loss:0.026036,test accuracy:0.710526
Epoch:38,loss:0.027318,test accuracy:0.710526
Epoch:38,loss:0.027357,test accuracy:0.973684
Epoch:38,loss:0.025737,test accuracy:0.973684
Epoch:38,loss:0.025864,test accuracy:0.710526
Epoch:39,loss:0.027138,test accuracy:0.710526
Epoch:39,loss:0.027095,test accuracy:0.973684
Epoch:39,loss:0.025522,test accuracy:0.973684
Epoch:39,loss:0.025693,test accuracy:0.710526
Epoch:40,loss:0.026962,test accuracy:0.710526
Epoch:40,loss:0.026837,test accuracy:0.973684
Epoch:40,loss:0.025312,test accuracy:0.973684
Epoch:40,loss:0.025524,test accuracy:0.710526
Epoch:41,loss:0.026789,test accuracy:0.736842
Epoch:41,loss:0.026584,test accuracy:0.973684
Epoch:41,loss:0.025106,test accuracy:0.973684
Epoch:41,loss:0.025356,test accuracy:0.710526
Epoch:42,loss:0.026619,test accuracy:0.736842
Epoch:42,loss:0.026336,test accuracy:0.973684
Epoch:42,loss:0.024905,test accuracy:0.973684
Epoch:42,loss:0.025188,test accuracy:0.710526
Epoch:43,loss:0.026452,test accuracy:0.763158
Epoch:43,loss:0.026092,test accuracy:0.973684
Epoch:43,loss:0.024709,test accuracy:0.973684
Epoch:43,loss:0.025022,test accuracy:0.710526
Epoch:44,loss:0.026287,test accuracy:0.789474
Epoch:44,loss:0.025853,test accuracy:0.973684
Epoch:44,loss:0.024517,test accuracy:0.973684
Epoch:44,loss:0.024858,test accuracy:0.710526
Epoch:45,loss:0.026125,test accuracy:0.789474
Epoch:45,loss:0.025617,test accuracy:0.973684
Epoch:45,loss:0.024328,test accuracy:0.973684
Epoch:45,loss:0.024694,test accuracy:0.710526
Epoch:46,loss:0.025964,test accuracy:0.789474
Epoch:46,loss:0.025386,test accuracy:0.973684
Epoch:46,loss:0.024144,test accuracy:0.973684
Epoch:46,loss:0.024532,test accuracy:0.710526
Epoch:47,loss:0.025806,test accuracy:0.789474
Epoch:47,loss:0.025158,test accuracy:0.973684
Epoch:47,loss:0.023963,test accuracy:0.973684
Epoch:47,loss:0.024370,test accuracy:0.710526
Epoch:48,loss:0.025651,test accuracy:0.789474
Epoch:48,loss:0.024934,test accuracy:0.973684
Epoch:48,loss:0.023786,test accuracy:0.973684
Epoch:48,loss:0.024210,test accuracy:0.710526
Epoch:49,loss:0.025497,test accuracy:0.842105
Epoch:49,loss:0.024714,test accuracy:0.973684
Epoch:49,loss:0.023611,test accuracy:0.973684
Epoch:49,loss:0.024052,test accuracy:0.710526
Epoch:50,loss:0.025344,test accuracy:0.842105
Epoch:50,loss:0.024497,test accuracy:0.973684
Epoch:50,loss:0.023441,test accuracy:0.973684
Epoch:50,loss:0.023894,test accuracy:0.710526
Epoch:51,loss:0.025194,test accuracy:0.842105
Epoch:51,loss:0.024284,test accuracy:0.973684
Epoch:51,loss:0.023273,test accuracy:0.973684
Epoch:51,loss:0.023738,test accuracy:0.710526
Epoch:52,loss:0.025046,test accuracy:0.842105
Epoch:52,loss:0.024074,test accuracy:0.973684
Epoch:52,loss:0.023108,test accuracy:0.973684
Epoch:52,loss:0.023583,test accuracy:0.710526
Epoch:53,loss:0.024899,test accuracy:0.842105
Epoch:53,loss:0.023867,test accuracy:0.973684
Epoch:53,loss:0.022945,test accuracy:0.973684
Epoch:53,loss:0.023429,test accuracy:0.710526
Epoch:54,loss:0.024754,test accuracy:0.842105
Epoch:54,loss:0.023664,test accuracy:0.973684
Epoch:54,loss:0.022786,test accuracy:0.973684
Epoch:54,loss:0.023276,test accuracy:0.710526
Epoch:55,loss:0.024610,test accuracy:0.842105
Epoch:55,loss:0.023463,test accuracy:0.973684
Epoch:55,loss:0.022629,test accuracy:0.973684
Epoch:55,loss:0.023125,test accuracy:0.710526
Epoch:56,loss:0.024468,test accuracy:0.842105
Epoch:56,loss:0.023266,test accuracy:0.973684
Epoch:56,loss:0.022475,test accuracy:0.973684
Epoch:56,loss:0.022975,test accuracy:0.710526
Epoch:57,loss:0.024327,test accuracy:0.868421
Epoch:57,loss:0.023072,test accuracy:0.973684
Epoch:57,loss:0.022323,test accuracy:0.973684
Epoch:57,loss:0.022826,test accuracy:0.710526
Epoch:58,loss:0.024188,test accuracy:0.868421
Epoch:58,loss:0.022881,test accuracy:0.973684
Epoch:58,loss:0.022174,test accuracy:0.973684
Epoch:58,loss:0.022679,test accuracy:0.710526
Epoch:59,loss:0.024051,test accuracy:0.868421
Epoch:59,loss:0.022693,test accuracy:0.973684
Epoch:59,loss:0.022027,test accuracy:0.973684
Epoch:59,loss:0.022533,test accuracy:0.710526
Epoch:60,loss:0.023914,test accuracy:0.894737
Epoch:60,loss:0.022507,test accuracy:0.973684
Epoch:60,loss:0.021882,test accuracy:0.973684
Epoch:60,loss:0.022388,test accuracy:0.710526
Epoch:61,loss:0.023780,test accuracy:0.894737
Epoch:61,loss:0.022324,test accuracy:0.973684
Epoch:61,loss:0.021740,test accuracy:0.973684
Epoch:61,loss:0.022245,test accuracy:0.710526
Epoch:62,loss:0.023646,test accuracy:0.894737
Epoch:62,loss:0.022144,test accuracy:0.973684
Epoch:62,loss:0.021599,test accuracy:0.973684
Epoch:62,loss:0.022102,test accuracy:0.710526
Epoch:63,loss:0.023514,test accuracy:0.894737
Epoch:63,loss:0.021967,test accuracy:0.973684
Epoch:63,loss:0.021461,test accuracy:0.973684
Epoch:63,loss:0.021961,test accuracy:0.710526
Epoch:64,loss:0.023383,test accuracy:0.894737
Epoch:64,loss:0.021793,test accuracy:0.973684
Epoch:64,loss:0.021325,test accuracy:0.973684
Epoch:64,loss:0.021822,test accuracy:0.736842
Epoch:65,loss:0.023254,test accuracy:0.894737
Epoch:65,loss:0.021620,test accuracy:0.973684
Epoch:65,loss:0.021191,test accuracy:0.973684
Epoch:65,loss:0.021683,test accuracy:0.736842
Epoch:66,loss:0.023126,test accuracy:0.894737
Epoch:66,loss:0.021451,test accuracy:0.973684
Epoch:66,loss:0.021058,test accuracy:0.973684
Epoch:66,loss:0.021546,test accuracy:0.736842
Epoch:67,loss:0.022999,test accuracy:0.894737
Epoch:67,loss:0.021284,test accuracy:0.973684
Epoch:67,loss:0.020928,test accuracy:0.973684
Epoch:67,loss:0.021410,test accuracy:0.763158
Epoch:68,loss:0.022873,test accuracy:0.894737
Epoch:68,loss:0.021119,test accuracy:0.973684
Epoch:68,loss:0.020799,test accuracy:0.973684
Epoch:68,loss:0.021276,test accuracy:0.763158
Epoch:69,loss:0.022749,test accuracy:0.894737
Epoch:69,loss:0.020957,test accuracy:0.973684
Epoch:69,loss:0.020672,test accuracy:0.973684
Epoch:69,loss:0.021143,test accuracy:0.789474
Epoch:70,loss:0.022626,test accuracy:0.894737
Epoch:70,loss:0.020798,test accuracy:0.973684
Epoch:70,loss:0.020547,test accuracy:0.973684
Epoch:70,loss:0.021011,test accuracy:0.789474
Epoch:71,loss:0.022504,test accuracy:0.894737
Epoch:71,loss:0.020640,test accuracy:0.973684
Epoch:71,loss:0.020424,test accuracy:0.973684
Epoch:71,loss:0.020880,test accuracy:0.789474
Epoch:72,loss:0.022383,test accuracy:0.894737
Epoch:72,loss:0.020485,test accuracy:0.973684
Epoch:72,loss:0.020303,test accuracy:0.973684
Epoch:72,loss:0.020751,test accuracy:0.789474
Epoch:73,loss:0.022264,test accuracy:0.894737
Epoch:73,loss:0.020332,test accuracy:0.973684
Epoch:73,loss:0.020183,test accuracy:0.973684
Epoch:73,loss:0.020622,test accuracy:0.789474
Epoch:74,loss:0.022145,test accuracy:0.894737
Epoch:74,loss:0.020182,test accuracy:0.973684
Epoch:74,loss:0.020064,test accuracy:0.973684
Epoch:74,loss:0.020495,test accuracy:0.789474
Epoch:75,loss:0.022028,test accuracy:0.894737
Epoch:75,loss:0.020033,test accuracy:0.973684
Epoch:75,loss:0.019948,test accuracy:0.973684
Epoch:75,loss:0.020370,test accuracy:0.789474
Epoch:76,loss:0.021913,test accuracy:0.894737
Epoch:76,loss:0.019887,test accuracy:0.973684
Epoch:76,loss:0.019832,test accuracy:0.973684
Epoch:76,loss:0.020245,test accuracy:0.815789
Epoch:77,loss:0.021798,test accuracy:0.894737
Epoch:77,loss:0.019742,test accuracy:0.973684
Epoch:77,loss:0.019719,test accuracy:0.973684
Epoch:77,loss:0.020122,test accuracy:0.815789
Epoch:78,loss:0.021684,test accuracy:0.894737
Epoch:78,loss:0.019600,test accuracy:0.973684
Epoch:78,loss:0.019607,test accuracy:0.973684
Epoch:78,loss:0.020000,test accuracy:0.842105
Epoch:79,loss:0.021572,test accuracy:0.894737
Epoch:79,loss:0.019460,test accuracy:0.973684
Epoch:79,loss:0.019496,test accuracy:0.973684
Epoch:79,loss:0.019879,test accuracy:0.842105
Epoch:80,loss:0.021460,test accuracy:0.921053
Epoch:80,loss:0.019322,test accuracy:0.973684
Epoch:80,loss:0.019387,test accuracy:0.973684
Epoch:80,loss:0.019759,test accuracy:0.842105
Epoch:81,loss:0.021350,test accuracy:0.973684
Epoch:81,loss:0.019186,test accuracy:0.973684
Epoch:81,loss:0.019279,test accuracy:0.973684
Epoch:81,loss:0.019641,test accuracy:0.842105
Epoch:82,loss:0.021241,test accuracy:0.973684
Epoch:82,loss:0.019051,test accuracy:0.973684
Epoch:82,loss:0.019173,test accuracy:0.973684
Epoch:82,loss:0.019523,test accuracy:0.842105
Epoch:83,loss:0.021133,test accuracy:0.973684
Epoch:83,loss:0.018919,test accuracy:0.973684
Epoch:83,loss:0.019068,test accuracy:0.973684
Epoch:83,loss:0.019407,test accuracy:0.842105
Epoch:84,loss:0.021026,test accuracy:0.973684
Epoch:84,loss:0.018788,test accuracy:0.973684
Epoch:84,loss:0.018964,test accuracy:0.973684
Epoch:84,loss:0.019292,test accuracy:0.842105
Epoch:85,loss:0.020921,test accuracy:0.973684
Epoch:85,loss:0.018659,test accuracy:0.973684
Epoch:85,loss:0.018862,test accuracy:0.973684
Epoch:85,loss:0.019178,test accuracy:0.842105
Epoch:86,loss:0.020816,test accuracy:0.973684
Epoch:86,loss:0.018532,test accuracy:0.973684
Epoch:86,loss:0.018761,test accuracy:0.973684
Epoch:86,loss:0.019065,test accuracy:0.842105
Epoch:87,loss:0.020712,test accuracy:0.973684
Epoch:87,loss:0.018407,test accuracy:0.973684
Epoch:87,loss:0.018661,test accuracy:0.973684
Epoch:87,loss:0.018954,test accuracy:0.842105
Epoch:88,loss:0.020610,test accuracy:0.973684
Epoch:88,loss:0.018284,test accuracy:0.973684
Epoch:88,loss:0.018562,test accuracy:0.973684
Epoch:88,loss:0.018843,test accuracy:0.842105
Epoch:89,loss:0.020508,test accuracy:0.973684
Epoch:89,loss:0.018162,test accuracy:0.973684
Epoch:89,loss:0.018465,test accuracy:0.973684
Epoch:89,loss:0.018734,test accuracy:0.868421
Epoch:90,loss:0.020408,test accuracy:0.973684
Epoch:90,loss:0.018042,test accuracy:0.973684
Epoch:90,loss:0.018369,test accuracy:0.973684
Epoch:90,loss:0.018625,test accuracy:0.868421
Epoch:91,loss:0.020308,test accuracy:0.973684
Epoch:91,loss:0.017923,test accuracy:0.973684
Epoch:91,loss:0.018274,test accuracy:0.973684
Epoch:91,loss:0.018518,test accuracy:0.868421
Epoch:92,loss:0.020210,test accuracy:0.973684
Epoch:92,loss:0.017806,test accuracy:0.973684
Epoch:92,loss:0.018180,test accuracy:0.973684
Epoch:92,loss:0.018412,test accuracy:0.868421
Epoch:93,loss:0.020113,test accuracy:0.973684
Epoch:93,loss:0.017691,test accuracy:0.973684
Epoch:93,loss:0.018088,test accuracy:0.973684
Epoch:93,loss:0.018307,test accuracy:0.868421
Epoch:94,loss:0.020016,test accuracy:0.973684
Epoch:94,loss:0.017577,test accuracy:0.973684
Epoch:94,loss:0.017996,test accuracy:0.973684
Epoch:94,loss:0.018203,test accuracy:0.868421
Epoch:95,loss:0.019921,test accuracy:0.973684
Epoch:95,loss:0.017465,test accuracy:0.973684
Epoch:95,loss:0.017906,test accuracy:0.973684
Epoch:95,loss:0.018100,test accuracy:0.868421
Epoch:96,loss:0.019827,test accuracy:0.973684
Epoch:96,loss:0.017354,test accuracy:0.973684
Epoch:96,loss:0.017817,test accuracy:0.973684
Epoch:96,loss:0.017997,test accuracy:0.868421
Epoch:97,loss:0.019733,test accuracy:0.973684
Epoch:97,loss:0.017245,test accuracy:0.973684
Epoch:97,loss:0.017729,test accuracy:0.973684
Epoch:97,loss:0.017896,test accuracy:0.894737
Epoch:98,loss:0.019641,test accuracy:0.973684
Epoch:98,loss:0.017137,test accuracy:0.973684
Epoch:98,loss:0.017642,test accuracy:0.973684
Epoch:98,loss:0.017796,test accuracy:0.894737
Epoch:99,loss:0.019549,test accuracy:0.973684
Epoch:99,loss:0.017031,test accuracy:0.973684
Epoch:99,loss:0.017556,test accuracy:0.973684
Epoch:99,loss:0.017697,test accuracy:0.894737
​
​

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值