3-Tensorflow-demo_0802: Complex Saver


import tensorflow as tf
import os
import numpy as np

"""
实现模型的断点继续训练。
"""
tf.set_random_seed(42)
np.random.seed(42)

def reg():
    with tf.Graph().as_default():
        # I. Build the model graph
        with tf.variable_scope('network'):
            # 1. Create the input placeholders
            input_x = tf.placeholder(
                dtype=tf.float32, shape=[None, 1], name='input_x'
            )
            input_y = tf.placeholder(
                dtype=tf.float32, shape=[None, 1], name='input_y'
            )

            # 2. Create the model variables
            w = tf.get_variable(
                name='w', shape=[1, 1], dtype=tf.float32,
                initializer=tf.random_normal_initializer(stddev=0.1)
            )
            b = tf.get_variable(
                name='b', shape=[1], dtype=tf.float32,
                initializer=tf.zeros_initializer()
            )

            # 3. Forward pass to get the predictions.
            y_pred = tf.matmul(input_x, w) + b

        with tf.variable_scope('loss'):
            # 4. Compute the model loss (MSE)
            loss = tf.reduce_mean(tf.square(input_y - y_pred))

        with tf.variable_scope('optimizer'):
            # 5. Define the optimizer (i.e. use gradient descent to find the model parameters that minimize the loss).
            optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
            train_opt = optimizer.minimize(loss=loss)
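            # For reference, minimize() here applies the plain gradient-descent update
            # w <- w - learning_rate * d(loss)/dw (and likewise for b) each time it runs.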
        print(input_x, input_y, loss)
        print(train_opt.name)
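        # The prints above expose the tensor/op names (e.g. 'network/input_x:0',
        # 'optimizer/GradientDescent') that restore_method2() later looks up by name.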
        # Create the persistence (Saver) object
        """
        tf.train.Saver(
               var_list=None,        # which variables to persist; by default all variables (those involved in training) are saved
               reshape=False,
               sharded=False,
               max_to_keep=5,        # keep at most this many of the most recent checkpoints
               keep_checkpoint_every_n_hours=10000.0,
               name=None,
               restore_sequentially=False,
               saver_def=None,
               builder=None,
               defer_build=False,
               allow_empty=False,
               write_version=saver_pb2.SaverDef.V2,
               pad_step_number=False,
               save_relative_paths=False,
               filename=None)
        """
        saver = tf.train.Saver(max_to_keep=2)

        # Build the checkpoint file path
        save_file = './models/ai20/11/model.ckpt'
        dirpath = os.path.dirname(save_file)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
            print('Created directory: {}'.format(dirpath))

        # II. Create the session
        with tf.Session() as sess:
            # a. Initialize the variables
            sess.run(tf.global_variables_initializer())
            # b. Load data (generate the training data)
            N = 100
            train_x = np.linspace(0, 6, N) + np.random.normal(0, 2.0, N)
            train_y = train_x * 14 + 7 + np.random.normal(0, 5.0, N)
            train_x.shape = -1, 1
            train_y.shape = -1, 1
            print(train_x.shape, train_y.shape)
            # c. Train the model
            step = 1
            for e in range(1, 200):
                # Run the training op
                feed = {input_x: train_x, input_y: train_y}
                _, train_loss = sess.run([train_opt, loss], feed_dict=feed)
                print('Epoch:{} - Train Loss:{:.5f}'.format(e, train_loss))
                step += 1

            # Persist the model
            # saver.save(sess, save_path=save_file, global_step=step)
            # print('Model saved to: {}'.format(save_file))
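            # Once the save call above is enabled, saver.save(..., global_step=step) writes
            # model.ckpt-<step>.index / .data-* / .meta files plus a small text file named
            # 'checkpoint' recording the latest paths; tf.train.get_checkpoint_state() reads
            # that file in the restore functions below.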


def restore_method1():
    """
    恢复模型第一种方式: 需要定义和 saver保存模型时候的一样的模型图,然后调用saver.restore()进行恢复
    :return:
    """
    with tf.Graph().as_default():
        # I. Build the model graph
        with tf.variable_scope('network'):
            # 1. Create the input placeholders
            input_x = tf.placeholder(
                dtype=tf.float32, shape=[None, 1], name='input_x'
            )
            input_y = tf.placeholder(
                dtype=tf.float32, shape=[None, 1], name='input_y'
            )

            # 2. Create the model variables
            w = tf.get_variable(
                name='w', shape=[1, 1], dtype=tf.float32,
                initializer=tf.random_normal_initializer(stddev=0.1)
            )
            b = tf.get_variable(
                name='b', shape=[1], dtype=tf.float32,
                initializer=tf.zeros_initializer()
            )

            # 3. Forward pass to get the predictions.
            y_pred = tf.matmul(input_x, w) + b

        with tf.variable_scope('loss'):
            # 4. Compute the model loss (MSE)
            loss = tf.reduce_mean(tf.square(input_y - y_pred))

        with tf.variable_scope('optimizer'):
            # 5. Define the optimizer (i.e. use gradient descent to find the model parameters that minimize the loss).
            optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.01)
            train_opt = optimizer.minimize(loss=loss)

        # Create the persistence (Saver) object
        saver = tf.train.Saver(max_to_keep=2)

        # Build the checkpoint file path
        save_file = './models/ai20/11/model.ckpt'
        dirpath = os.path.dirname(save_file)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
            print('Created directory: {}'.format(dirpath))

        # II. Create the session
        with tf.Session() as sess:
            # Restore the model
            # a. Get the checkpoint state object.
            ckpt = tf.train.get_checkpoint_state(dirpath)
            if ckpt and ckpt.model_checkpoint_path:
                # Restore the previously persisted variables
                saver.restore(sess, ckpt.model_checkpoint_path)
                # If there are several checkpoint files, let the Saver track all of them so
                # that max_to_keep bookkeeping continues from the most recent ones.
                saver.recover_last_checkpoints(ckpt.all_model_checkpoint_paths)
                print('Method 1: loaded checkpoint, resuming training!!')
            else:
                # Initialize the variables
                sess.run(tf.global_variables_initializer())
                print('No checkpoint found, training from scratch!')
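            # Note: tf.train.latest_checkpoint(dirpath) is an equivalent way to obtain the
            # newest checkpoint path recorded in the 'checkpoint' file.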

            # b. Load data (generate the training data)
            N = 100
            train_x = np.linspace(0, 6, N) + np.random.normal(0, 2.0, N)
            train_y = train_x * 14 + 7 + np.random.normal(0, 5.0, N)
            train_x.shape = -1, 1
            train_y.shape = -1, 1
            print(train_x.shape, train_y.shape)
            # c. Train the model
            step = 1
            for e in range(1, 200):
                # Run the training op
                feed = {input_x: train_x, input_y: train_y}
                _, train_loss = sess.run([train_opt, loss], feed_dict=feed)
                print('Epoch:{} - Train Loss:{:.5f}'.format(e, train_loss))
                step += 1


def restore_method2():
    """
    恢复模型第2种方式: 直接从持久化的模型图文件中加载图,然后调用saver.restore()进行恢复变量
    :return:
    """
    # II. Create the session
    with tf.Session() as sess:
        # Restore the model
        # Build the checkpoint file path
        save_file = './models/ai20/11/model.ckpt'
        dirpath = os.path.dirname(save_file)
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
            print('Created directory: {}'.format(dirpath))

        # a. Get the checkpoint state object.
        ckpt = tf.train.get_checkpoint_state(dirpath)
        if ckpt is None or not ckpt.model_checkpoint_path:
            raise Exception('No checkpoint file found!!')

        # b. Load the model graph from the persisted .meta file
        saver = tf.train.import_meta_graph(
            meta_graph_or_file='{}.meta'.format(ckpt.model_checkpoint_path)
        )
        # c. Restore the model variables
        saver.restore(sess, ckpt.model_checkpoint_path)
        print('Method 2: loaded persisted model, resuming training!!')

        # d. Load data (generate the training data)
        N = 100
        train_x = np.linspace(0, 6, N) + np.random.normal(0, 2.0, N)
        train_y = train_x * 14 + 7 + np.random.normal(0, 5.0, N)
        train_x.shape = -1, 1
        train_y.shape = -1, 1
        print(train_x.shape, train_y.shape)

        # e. Fetch the ops/tensors needed to resume training (their names were printed in reg()):
        """
        Tensor("network/input_x:0", shape=(?, 1), dtype=float32) 
        Tensor("network/input_y:0", shape=(?, 1), dtype=float32) 
        Tensor("loss/Mean:0", shape=(), dtype=float32)
        optimizer/GradientDescent
        """
        input_x = tf.get_default_graph().get_tensor_by_name('network/input_x:0')
        input_y = tf.get_default_graph().get_tensor_by_name('network/input_y:0')
        loss = tf.get_default_graph().get_tensor_by_name('loss/Mean:0')
        train_opt = tf.get_default_graph().get_operation_by_name('optimizer/GradientDescent')
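        # If the exact names were unknown, they could be discovered with e.g.
        # [op.name for op in tf.get_default_graph().get_operations()].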

        # f. Train the model
        step = 1
        for e in range(1, 200):
            # 执行模型训练操作
            feed = {input_x: train_x, input_y: train_y}
            _, train_loss = sess.run([train_opt, loss], feed_dict=feed)
            print('Epoch:{} - Train Loss:{:.5f}'.format(e, train_loss))
            step += 1


if __name__ == '__main__':
    # reg()
    restore_method1()
    restore_method2()

Program output:

Method 1: loaded checkpoint, resuming training!!
(100, 1) (100, 1)
Epoch:1 - Train Loss:22.61078
Epoch:2 - Train Loss:22.60567
Epoch:3 - Train Loss:22.60065
Epoch:4 - Train Loss:22.59573
Epoch:5 - Train Loss:22.59088
Epoch:6 - Train Loss:22.58613
Epoch:7 - Train Loss:22.58146
Epoch:8 - Train Loss:22.57687
Epoch:9 - Train Loss:22.57236
Epoch:10 - Train Loss:22.56793
Epoch:11 - Train Loss:22.56358
Epoch:12 - Train Loss:22.55930
Epoch:13 - Train Loss:22.55511
Epoch:14 - Train Loss:22.55098
Epoch:15 - Train Loss:22.54693
Epoch:16 - Train Loss:22.54294
Epoch:17 - Train Loss:22.53903
Epoch:18 - Train Loss:22.53518
Epoch:19 - Train Loss:22.53142
Epoch:20 - Train Loss:22.52770
Epoch:21 - Train Loss:22.52406
Epoch:22 - Train Loss:22.52048
Epoch:23 - Train Loss:22.51696
Epoch:24 - Train Loss:22.51351
Epoch:25 - Train Loss:22.51011
Epoch:26 - Train Loss:22.50677
Epoch:27 - Train Loss:22.50350
Epoch:28 - Train Loss:22.50028
Epoch:29 - Train Loss:22.49712
Epoch:30 - Train Loss:22.49402
Epoch:31 - Train Loss:22.49097
Epoch:32 - Train Loss:22.48797
Epoch:33 - Train Loss:22.48502
Epoch:34 - Train Loss:22.48212
Epoch:35 - Train Loss:22.47928
Epoch:36 - Train Loss:22.47649
Epoch:37 - Train Loss:22.47375
Epoch:38 - Train Loss:22.47105
Epoch:39 - Train Loss:22.46841
Epoch:40 - Train Loss:22.46581
Epoch:41 - Train Loss:22.46325
Epoch:42 - Train Loss:22.46074
Epoch:43 - Train Loss:22.45827
Epoch:44 - Train Loss:22.45585
Epoch:45 - Train Loss:22.45347
Epoch:46 - Train Loss:22.45113
Epoch:47 - Train Loss:22.44883
Epoch:48 - Train Loss:22.44658
Epoch:49 - Train Loss:22.44436
Epoch:50 - Train Loss:22.44218
Epoch:51 - Train Loss:22.44004
Epoch:52 - Train Loss:22.43794
Epoch:53 - Train Loss:22.43587
Epoch:54 - Train Loss:22.43385
Epoch:55 - Train Loss:22.43185
Epoch:56 - Train Loss:22.42989
Epoch:57 - Train Loss:22.42797
Epoch:58 - Train Loss:22.42608
Epoch:59 - Train Loss:22.42422
Epoch:60 - Train Loss:22.42240
Epoch:61 - Train Loss:22.42060
Epoch:62 - Train Loss:22.41885
Epoch:63 - Train Loss:22.41712
Epoch:64 - Train Loss:22.41542
Epoch:65 - Train Loss:22.41375
Epoch:66 - Train Loss:22.41211
Epoch:67 - Train Loss:22.41050
Epoch:68 - Train Loss:22.40891
Epoch:69 - Train Loss:22.40736
Epoch:70 - Train Loss:22.40583
Epoch:71 - Train Loss:22.40433
Epoch:72 - Train Loss:22.40286
Epoch:73 - Train Loss:22.40141
Epoch:74 - Train Loss:22.39999
Epoch:75 - Train Loss:22.39858
Epoch:76 - Train Loss:22.39721
Epoch:77 - Train Loss:22.39586
Epoch:78 - Train Loss:22.39454
Epoch:79 - Train Loss:22.39324
Epoch:80 - Train Loss:22.39196
Epoch:81 - Train Loss:22.39070
Epoch:82 - Train Loss:22.38946
Epoch:83 - Train Loss:22.38825
Epoch:84 - Train Loss:22.38706
Epoch:85 - Train Loss:22.38589
Epoch:86 - Train Loss:22.38474
Epoch:87 - Train Loss:22.38361
Epoch:88 - Train Loss:22.38250
Epoch:89 - Train Loss:22.38141
Epoch:90 - Train Loss:22.38034
Epoch:91 - Train Loss:22.37929
Epoch:92 - Train Loss:22.37825
Epoch:93 - Train Loss:22.37724
Epoch:94 - Train Loss:22.37624
Epoch:95 - Train Loss:22.37526
Epoch:96 - Train Loss:22.37430
Epoch:97 - Train Loss:22.37335
Epoch:98 - Train Loss:22.37242
Epoch:99 - Train Loss:22.37151
Epoch:100 - Train Loss:22.37061
Epoch:101 - Train Loss:22.36973
Epoch:102 - Train Loss:22.36886
Epoch:103 - Train Loss:22.36801
Epoch:104 - Train Loss:22.36718
Epoch:105 - Train Loss:22.36636
Epoch:106 - Train Loss:22.36555
Epoch:107 - Train Loss:22.36476
Epoch:108 - Train Loss:22.36398
Epoch:109 - Train Loss:22.36321
Epoch:110 - Train Loss:22.36246
Epoch:111 - Train Loss:22.36173
Epoch:112 - Train Loss:22.36100
Epoch:113 - Train Loss:22.36029
Epoch:114 - Train Loss:22.35958
Epoch:115 - Train Loss:22.35890
Epoch:116 - Train Loss:22.35823
Epoch:117 - Train Loss:22.35756
Epoch:118 - Train Loss:22.35691
Epoch:119 - Train Loss:22.35627
Epoch:120 - Train Loss:22.35564
Epoch:121 - Train Loss:22.35502
Epoch:122 - Train Loss:22.35441
Epoch:123 - Train Loss:22.35382
Epoch:124 - Train Loss:22.35323
Epoch:125 - Train Loss:22.35266
Epoch:126 - Train Loss:22.35209
Epoch:127 - Train Loss:22.35154
Epoch:128 - Train Loss:22.35099
Epoch:129 - Train Loss:22.35046
Epoch:130 - Train Loss:22.34992
Epoch:131 - Train Loss:22.34941
Epoch:132 - Train Loss:22.34890
Epoch:133 - Train Loss:22.34840
Epoch:134 - Train Loss:22.34791
Epoch:135 - Train Loss:22.34743
Epoch:136 - Train Loss:22.34696
Epoch:137 - Train Loss:22.34649
Epoch:138 - Train Loss:22.34604
Epoch:139 - Train Loss:22.34558
Epoch:140 - Train Loss:22.34514
Epoch:141 - Train Loss:22.34471
Epoch:142 - Train Loss:22.34428
Epoch:143 - Train Loss:22.34387
Epoch:144 - Train Loss:22.34346
Epoch:145 - Train Loss:22.34305
Epoch:146 - Train Loss:22.34265
Epoch:147 - Train Loss:22.34227
Epoch:148 - Train Loss:22.34188
Epoch:149 - Train Loss:22.34150
Epoch:150 - Train Loss:22.34114
Epoch:151 - Train Loss:22.34078
Epoch:152 - Train Loss:22.34042
Epoch:153 - Train Loss:22.34007
Epoch:154 - Train Loss:22.33972
Epoch:155 - Train Loss:22.33938
Epoch:156 - Train Loss:22.33905
Epoch:157 - Train Loss:22.33873
Epoch:158 - Train Loss:22.33841
Epoch:159 - Train Loss:22.33809
Epoch:160 - Train Loss:22.33778
Epoch:161 - Train Loss:22.33748
Epoch:162 - Train Loss:22.33718
Epoch:163 - Train Loss:22.33689
Epoch:164 - Train Loss:22.33660
Epoch:165 - Train Loss:22.33631
Epoch:166 - Train Loss:22.33603
Epoch:167 - Train Loss:22.33576
Epoch:168 - Train Loss:22.33549
Epoch:169 - Train Loss:22.33523
Epoch:170 - Train Loss:22.33497
Epoch:171 - Train Loss:22.33472
Epoch:172 - Train Loss:22.33447
Epoch:173 - Train Loss:22.33422
Epoch:174 - Train Loss:22.33398
Epoch:175 - Train Loss:22.33374
Epoch:176 - Train Loss:22.33351
Epoch:177 - Train Loss:22.33328
Epoch:178 - Train Loss:22.33306
Epoch:179 - Train Loss:22.33284
Epoch:180 - Train Loss:22.33262
Epoch:181 - Train Loss:22.33240
Epoch:182 - Train Loss:22.33220
Epoch:183 - Train Loss:22.33199
Epoch:184 - Train Loss:22.33179
Epoch:185 - Train Loss:22.33159
Epoch:186 - Train Loss:22.33139
Epoch:187 - Train Loss:22.33120
Epoch:188 - Train Loss:22.33102
Epoch:189 - Train Loss:22.33083
Epoch:190 - Train Loss:22.33065
Epoch:191 - Train Loss:22.33047
Epoch:192 - Train Loss:22.33029
Epoch:193 - Train Loss:22.33012
Epoch:194 - Train Loss:22.32995
Epoch:195 - Train Loss:22.32978
Epoch:196 - Train Loss:22.32962
Epoch:197 - Train Loss:22.32946
Epoch:198 - Train Loss:22.32931
Epoch:199 - Train Loss:22.32915
Method 2: loaded persisted model, resuming training!!
(100, 1) (100, 1)
Epoch:1 - Train Loss:19.94881
Epoch:2 - Train Loss:19.73273
Epoch:3 - Train Loss:19.63893
Epoch:4 - Train Loss:19.59705
Epoch:5 - Train Loss:19.57723
Epoch:6 - Train Loss:19.56680
Epoch:7 - Train Loss:19.56039
Epoch:8 - Train Loss:19.55572
Epoch:9 - Train Loss:19.55181
Epoch:10 - Train Loss:19.54827
Epoch:11 - Train Loss:19.54490
Epoch:12 - Train Loss:19.54164
Epoch:13 - Train Loss:19.53845
Epoch:14 - Train Loss:19.53533
Epoch:15 - Train Loss:19.53226
Epoch:16 - Train Loss:19.52924
Epoch:17 - Train Loss:19.52627
Epoch:18 - Train Loss:19.52335
Epoch:19 - Train Loss:19.52048
Epoch:20 - Train Loss:19.51765
Epoch:21 - Train Loss:19.51487
Epoch:22 - Train Loss:19.51213
Epoch:23 - Train Loss:19.50944
Epoch:24 - Train Loss:19.50679
Epoch:25 - Train Loss:19.50418
Epoch:26 - Train Loss:19.50162
Epoch:27 - Train Loss:19.49909
Epoch:28 - Train Loss:19.49660
Epoch:29 - Train Loss:19.49416
Epoch:30 - Train Loss:19.49177
Epoch:31 - Train Loss:19.48940
Epoch:32 - Train Loss:19.48707
Epoch:33 - Train Loss:19.48478
Epoch:34 - Train Loss:19.48252
Epoch:35 - Train Loss:19.48031
Epoch:36 - Train Loss:19.47813
Epoch:37 - Train Loss:19.47598
Epoch:38 - Train Loss:19.47387
Epoch:39 - Train Loss:19.47179
Epoch:40 - Train Loss:19.46975
Epoch:41 - Train Loss:19.46773
Epoch:42 - Train Loss:19.46576
Epoch:43 - Train Loss:19.46381
Epoch:44 - Train Loss:19.46189
Epoch:45 - Train Loss:19.46001
Epoch:46 - Train Loss:19.45816
Epoch:47 - Train Loss:19.45633
Epoch:48 - Train Loss:19.45453
Epoch:49 - Train Loss:19.45277
Epoch:50 - Train Loss:19.45103
Epoch:51 - Train Loss:19.44932
Epoch:52 - Train Loss:19.44764
Epoch:53 - Train Loss:19.44598
Epoch:54 - Train Loss:19.44435
Epoch:55 - Train Loss:19.44275
Epoch:56 - Train Loss:19.44117
Epoch:57 - Train Loss:19.43962
Epoch:58 - Train Loss:19.43810
Epoch:59 - Train Loss:19.43659
Epoch:60 - Train Loss:19.43511
Epoch:61 - Train Loss:19.43366
Epoch:62 - Train Loss:19.43223
Epoch:63 - Train Loss:19.43082
Epoch:64 - Train Loss:19.42944
Epoch:65 - Train Loss:19.42808
Epoch:66 - Train Loss:19.42673
Epoch:67 - Train Loss:19.42542
Epoch:68 - Train Loss:19.42412
Epoch:69 - Train Loss:19.42284
Epoch:70 - Train Loss:19.42158
Epoch:71 - Train Loss:19.42035
Epoch:72 - Train Loss:19.41913
Epoch:73 - Train Loss:19.41793
Epoch:74 - Train Loss:19.41676
Epoch:75 - Train Loss:19.41560
Epoch:76 - Train Loss:19.41446
Epoch:77 - Train Loss:19.41334
Epoch:78 - Train Loss:19.41223
Epoch:79 - Train Loss:19.41114
Epoch:80 - Train Loss:19.41008
Epoch:81 - Train Loss:19.40903
Epoch:82 - Train Loss:19.40799
Epoch:83 - Train Loss:19.40697
Epoch:84 - Train Loss:19.40597
Epoch:85 - Train Loss:19.40499
Epoch:86 - Train Loss:19.40402
Epoch:87 - Train Loss:19.40306
Epoch:88 - Train Loss:19.40212
Epoch:89 - Train Loss:19.40120
Epoch:90 - Train Loss:19.40029
Epoch:91 - Train Loss:19.39940
Epoch:92 - Train Loss:19.39852
Epoch:93 - Train Loss:19.39766
Epoch:94 - Train Loss:19.39680
Epoch:95 - Train Loss:19.39597
Epoch:96 - Train Loss:19.39514
Epoch:97 - Train Loss:19.39433
Epoch:98 - Train Loss:19.39353
Epoch:99 - Train Loss:19.39275
Epoch:100 - Train Loss:19.39198
Epoch:101 - Train Loss:19.39122
Epoch:102 - Train Loss:19.39047
Epoch:103 - Train Loss:19.38973
Epoch:104 - Train Loss:19.38901
Epoch:105 - Train Loss:19.38829
Epoch:106 - Train Loss:19.38759
Epoch:107 - Train Loss:19.38691
Epoch:108 - Train Loss:19.38623
Epoch:109 - Train Loss:19.38556
Epoch:110 - Train Loss:19.38490
Epoch:111 - Train Loss:19.38425
Epoch:112 - Train Loss:19.38362
Epoch:113 - Train Loss:19.38300
Epoch:114 - Train Loss:19.38238
Epoch:115 - Train Loss:19.38177
Epoch:116 - Train Loss:19.38118
Epoch:117 - Train Loss:19.38059
Epoch:118 - Train Loss:19.38002
Epoch:119 - Train Loss:19.37945
Epoch:120 - Train Loss:19.37889
Epoch:121 - Train Loss:19.37834
Epoch:122 - Train Loss:19.37780
Epoch:123 - Train Loss:19.37727
Epoch:124 - Train Loss:19.37675
Epoch:125 - Train Loss:19.37623
Epoch:126 - Train Loss:19.37572
Epoch:127 - Train Loss:19.37523
Epoch:128 - Train Loss:19.37473
Epoch:129 - Train Loss:19.37425
Epoch:130 - Train Loss:19.37378
Epoch:131 - Train Loss:19.37331
Epoch:132 - Train Loss:19.37285
Epoch:133 - Train Loss:19.37240
Epoch:134 - Train Loss:19.37195
Epoch:135 - Train Loss:19.37151
Epoch:136 - Train Loss:19.37108
Epoch:137 - Train Loss:19.37066
Epoch:138 - Train Loss:19.37024
Epoch:139 - Train Loss:19.36983
Epoch:140 - Train Loss:19.36943
Epoch:141 - Train Loss:19.36903
Epoch:142 - Train Loss:19.36864
Epoch:143 - Train Loss:19.36826
Epoch:144 - Train Loss:19.36788
Epoch:145 - Train Loss:19.36751
Epoch:146 - Train Loss:19.36714
Epoch:147 - Train Loss:19.36678
Epoch:148 - Train Loss:19.36642
Epoch:149 - Train Loss:19.36608
Epoch:150 - Train Loss:19.36573
Epoch:151 - Train Loss:19.36540
Epoch:152 - Train Loss:19.36506
Epoch:153 - Train Loss:19.36473
Epoch:154 - Train Loss:19.36441
Epoch:155 - Train Loss:19.36409
Epoch:156 - Train Loss:19.36378
Epoch:157 - Train Loss:19.36348
Epoch:158 - Train Loss:19.36318
Epoch:159 - Train Loss:19.36288
Epoch:160 - Train Loss:19.36259
Epoch:161 - Train Loss:19.36230
Epoch:162 - Train Loss:19.36202
Epoch:163 - Train Loss:19.36174
Epoch:164 - Train Loss:19.36147
Epoch:165 - Train Loss:19.36120
Epoch:166 - Train Loss:19.36094
Epoch:167 - Train Loss:19.36067
Epoch:168 - Train Loss:19.36042
Epoch:169 - Train Loss:19.36016
Epoch:170 - Train Loss:19.35992
Epoch:171 - Train Loss:19.35967
Epoch:172 - Train Loss:19.35943
Epoch:173 - Train Loss:19.35920
Epoch:174 - Train Loss:19.35896
Epoch:175 - Train Loss:19.35874
Epoch:176 - Train Loss:19.35851
Epoch:177 - Train Loss:19.35829
Epoch:178 - Train Loss:19.35807
Epoch:179 - Train Loss:19.35786
Epoch:180 - Train Loss:19.35764
Epoch:181 - Train Loss:19.35744
Epoch:182 - Train Loss:19.35723
Epoch:183 - Train Loss:19.35703
Epoch:184 - Train Loss:19.35684
Epoch:185 - Train Loss:19.35664
Epoch:186 - Train Loss:19.35645
Epoch:187 - Train Loss:19.35626
Epoch:188 - Train Loss:19.35608
Epoch:189 - Train Loss:19.35589
Epoch:190 - Train Loss:19.35571
Epoch:191 - Train Loss:19.35553
Epoch:192 - Train Loss:19.35536
Epoch:193 - Train Loss:19.35519
Epoch:194 - Train Loss:19.35502
Epoch:195 - Train Loss:19.35486
Epoch:196 - Train Loss:19.35469
Epoch:197 - Train Loss:19.35453
Epoch:198 - Train Loss:19.35438
Epoch:199 - Train Loss:19.35422

Process finished with exit code 0
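
As a quick sanity check (a minimal sketch, not part of the original demo; it assumes TF 1.x, the checkpoint directory used above, and the tf.train.latest_checkpoint / tf.train.list_variables utilities), the variables stored in the newest checkpoint can be listed like this:

import tensorflow as tf

# Find the newest checkpoint recorded under the directory and list its (name, shape) entries.
ckpt_path = tf.train.latest_checkpoint('./models/ai20/11')
if ckpt_path is not None:
    for name, shape in tf.train.list_variables(ckpt_path):
        print(name, shape)
else:
    print('No checkpoint found in ./models/ai20/11')

For the graph built above, the expected entries are network/w with shape [1, 1] and network/b with shape [1].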
