from torch.optim import RAdam  # built into torch.optim since PyTorch 1.10

# model is assumed to be defined elsewhere in the article
init_lr = 0.01
optimizer = RAdam(model.parameters(), lr=init_lr, weight_decay=0.0004)

def adjust_learning_rate(optimizer, epoch, init_lr):
    """Set the learning rate to the initial LR decayed by a factor of 0.95 every 20 epochs."""
    lr = init_lr * (0.95 ** (epoch // 20))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
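
A minimal sketch of how this schedule could be driven from a training loop; train_loader, criterion, and num_epochs are hypothetical names not defined in the original:

for epoch in range(num_epochs):
    adjust_learning_rate(optimizer, epoch, init_lr)  # apply step decay before each epoch
    for inputs, targets in train_loader:
        optimizer.zero_grad()
        loss = criterion(model(inputs), targets)
        loss.backward()
        optimizer.step()

The same step decay can also be expressed with PyTorch's built-in StepLR scheduler, calling scheduler.step() once per epoch instead of adjust_learning_rate:

from torch.optim.lr_scheduler import StepLR
scheduler = StepLR(optimizer, step_size=20, gamma=0.95)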