The set_optimizer function in py-MDNet/modules/models.py

This function is used twice: once to build the optimizer for initialization, and once to rebuild it for online updates.

During initialization, fc4 and fc5 use lr = 0.0005 and fc6 uses lr = 0.005.

When the optimizer is rebuilt for updates, fc4 and fc5 use lr = 0.0001 and fc6 uses lr = 0.001. (A sketch of these two calls follows the walkthrough below.)

import torch
import torch.nn as nn
import torch.optim as optim
from collections import OrderedDict

def append_params(params, module, prefix):
    # Collect every learnable parameter of `module` into the `params` dict,
    # keyed '<prefix>_<param>'; BatchNorm parameters get an extra '_bn_' tag
    # so their names stay unique.
    for child in module.children():
        for k, p in child._parameters.items():
            if p is None: continue

            if isinstance(child, nn.BatchNorm2d):
                name = prefix + '_bn_' + k
            else:
                name = prefix + '_' + k

            if name not in params:
                params[name] = p
            else:
                raise RuntimeError('Duplicated param name: {:s}'.format(name))
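
# (my addition, not in the original file) A quick check of the naming scheme,
# including the '_bn_' branch that the MDNet layers below never exercise:
demo_params = OrderedDict()
demo_block = nn.Sequential(nn.Conv2d(3, 8, kernel_size=3), nn.BatchNorm2d(8))
append_params(demo_params, demo_block, 'demo')
print(list(demo_params.keys()))
# -> ['demo_weight', 'demo_bias', 'demo_bn_weight', 'demo_bn_bias']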



layers = nn.Sequential(OrderedDict([
                ('conv1', nn.Sequential(nn.Conv2d(3, 96, kernel_size=7, stride=2),
                                        nn.ReLU(inplace=True),  # activation; inplace=True overwrites the input tensor to save memory
                                        nn.LocalResponseNorm(2),  # local response normalization
                                        nn.MaxPool2d(kernel_size=3, stride=2))),
                ('conv2', nn.Sequential(nn.Conv2d(96, 256, kernel_size=5, stride=2),
                                        nn.ReLU(inplace=True),
                                        nn.LocalResponseNorm(2),
                                        nn.MaxPool2d(kernel_size=3, stride=2))),
                ('conv3', nn.Sequential(nn.Conv2d(256, 512, kernel_size=3, stride=1),
                                        nn.ReLU(inplace=True))),
                ('fc4',   nn.Sequential(nn.Linear(512 * 3 * 3, 512),
                                        nn.ReLU(inplace=True))),
                ('fc5',   nn.Sequential(nn.Dropout(0.5),
                                        nn.Linear(512, 512),
                                        nn.ReLU(inplace=True)))]))
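
# (my addition) Shape sanity check, assuming MDNet's usual 3x107x107 input
# crops: conv1-conv3 reduce the crop to 512x3x3, which matches fc4's
# in_features of 512 * 3 * 3.
x = torch.randn(1, 3, 107, 107)
feat = layers[2](layers[1](layers[0](x)))  # conv1 -> conv2 -> conv3
print(feat.shape)  # torch.Size([1, 512, 3, 3])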
params = OrderedDict()
for name, module in layers.named_children():
    append_params(params, module, name)
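
# (my addition) The collected keys follow the '<layer>_<param>' convention:
print(list(params.keys()))
# -> ['conv1_weight', 'conv1_bias', 'conv2_weight', 'conv2_bias',
#     'conv3_weight', 'conv3_bias', 'fc4_weight', 'fc4_bias',
#     'fc5_weight', 'fc5_bias']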


# The body of set_optimizer:
lr_base = 0.0005   # 0.0005 when initializing; 0.0001 when rebuilding for updates
lr_mult = {'fc6': 10}
param_list = []
for k, p in params.items():  # in the original code this iterates over the learnable parameters only; all parameters are used here to keep the demo simple
    lr = lr_base  # every parameter starts at the base learning rate
    for l, m in lr_mult.items():  # l = 'fc6', m = 10
        if k.startswith(l):  # fc6 parameters get a 10x rate: 0.0005 * 10 = 0.005 at init, 0.0001 * 10 = 0.001 during updates
            lr = lr_base * m
    param_list.append({'params': [p], 'lr': lr})  # one param group per parameter, each carrying its own lr
print(param_list)
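
# (my addition) Inspect the lr assigned to each group. Because this demo stack
# stops at fc5, no key starts with 'fc6' and every group stays at lr_base;
# in the full MDNet model the fc6 parameters would get the 10x rate.
for (k, _), group in zip(params.items(), param_list):
    print(k, group['lr'])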

 

momentum = 0.9    # assumption: the defaults carried in py-MDNet's options
w_decay = 0.0005
optimizer = optim.SGD(param_list, lr=lr, momentum=momentum, weight_decay=w_decay)  # plain SGD; each group's own 'lr' overrides the default passed here
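
# (my addition) A minimal sketch of the full function, showing the two calls
# described at the top; the signature only approximates the py-MDNet source,
# which takes the model and pulls its learnable parameters itself.
def set_optimizer(params, lr_base, lr_mult={'fc6': 10}, momentum=0.9, w_decay=0.0005):
    param_list = []
    for k, p in params.items():
        lr = lr_base
        for l, m in lr_mult.items():
            if k.startswith(l):
                lr = lr_base * m
        param_list.append({'params': [p], 'lr': lr})
    return optim.SGD(param_list, lr=lr_base, momentum=momentum, weight_decay=w_decay)

init_optimizer = set_optimizer(params, 0.0005)    # init: fc4/fc5 -> 0.0005, fc6 -> 0.005
update_optimizer = set_optimizer(params, 0.0001)  # update: fc4/fc5 -> 0.0001, fc6 -> 0.001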

The full output is too long, so here is just the entry for a single parameter:

{'params': [Parameter containing:
tensor([-9.7996e-04, -1.3808e-02, -4.5937e-03, -6.0048e-03,  1.2511e-02,
        -2.1006e-03, -1.0121e-02, -7.6366e-03,  6.7936e-03, -3.5825e-03,
        ...
        -1.1488e-02, -1.2153e-02], requires_grad=True)], 'lr': 0.0005}

 
