def StepLRTest():
    """Plot the MultiStepLR learning-rate schedule over 100 epochs.

    The LR starts at 0.01 and is multiplied by gamma=0.1 each time a
    milestone epoch (5, 20, 40, 80) is reached, producing a staircase
    curve. No real training is done; the model/optimizer exist only so
    the scheduler has parameter groups to drive.
    """
    model = AlexNet(num_classes=2)
    optimizer = optim.SGD(params=model.parameters(), lr=0.01)
    scheduler = lr_scheduler.MultiStepLR(
        optimizer, milestones=[5, 20, 40, 80], gamma=0.1
    )

    plt.figure()
    x = list(range(100))
    y = []
    for epoch in range(100):
        # Record the LR in effect for *this* epoch before stepping.
        # get_last_lr() is the supported accessor; calling get_lr()
        # outside of step() is deprecated (PyTorch >= 1.4) and may
        # return wrong values for closed-form schedulers.
        lr = scheduler.get_last_lr()[0]
        print('epoch:{}, lr:{} '.format(epoch, lr))
        y.append(lr)
        # Since PyTorch 1.1, scheduler.step() belongs at the end of the
        # epoch (after optimizer.step() in real training); stepping
        # first would skip the initial LR and shift the curve.
        scheduler.step()

    plt.xlabel('epoch')
    plt.ylabel('lr')
    plt.plot(x, y)
    plt.show()
效果图 (resulting learning-rate curve):
def StepLRTest():
    """Plot a LambdaLR learning-rate schedule over 100 epochs.

    LambdaLR multiplies the base LR (0.01) by a user-supplied factor:
    here a linear decay from 1.0 to 0.0 over the first 1000 steps,
    clamped to 0 afterwards. Only the first 100 epochs are plotted, so
    the visible curve is the initial linear ramp-down. No real training
    is done; the model/optimizer exist only to supply parameter groups.
    """
    model = AlexNet(num_classes=2)
    optimizer = optim.SGD(params=model.parameters(), lr=0.01)
    scheduler = torch.optim.lr_scheduler.LambdaLR(
        optimizer,
        # Multiplicative factor: linear 1.0 -> 0.0 over 1000 steps.
        lambda step: (1.0 - step / 1000) if step <= 1000 else 0,
        last_epoch=-1,
    )

    plt.figure()
    x = list(range(100))
    y = []
    for epoch in range(100):
        # Record the LR for this epoch before stepping; get_last_lr()
        # is the supported accessor (get_lr() outside of step() is
        # deprecated since PyTorch 1.4).
        lr = scheduler.get_last_lr()[0]
        print('epoch:{}, lr:{} '.format(epoch, lr))
        y.append(lr)
        # Step at the end of the epoch (PyTorch >= 1.1 convention) so
        # the initial LR is included in the plot.
        scheduler.step()

    plt.xlabel('epoch')
    plt.ylabel('lr')
    plt.plot(x, y)
    plt.show()
效果图 (resulting learning-rate curve):
参考:
PyTorch torch.optim.lr_scheduler 学习率 - LambdaLR;StepLR;MultiStepLR;ExponentialLR_zisuina_2的博客-CSDN博客_lambdalr
torch.optim.lr_scheduler:调整学习率_qyhaill的博客-CSDN博客_lr_scheduler 相关参数解析