import math

import matplotlib.pyplot as plt
import torch
import torch.nn as nn
from torch.optim import Adam, lr_scheduler
class net(nn.Module):
    def __init__(self):
        super(net, self).__init__()
        self.fc = nn.Linear(1, 10)

    def forward(self, x):
        return self.fc(x)
lr_list = []
model = net()
LR = 0.01
optimizer = Adam(model.parameters(), lr=LR)
# Alternative schedulers (uncomment one at a time; each assignment would overwrite the previous):
# scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda epoch: 0.8 ** (epoch // 5))  # lambda-based step decay
# scheduler = lr_scheduler.StepLR(optimizer, step_size=5, gamma=0.8)             # multiply lr by 0.8 every 5 epochs
# scheduler = lr_scheduler.MultiStepLR(optimizer, milestones=[20, 80], gamma=0.9)  # decay at epochs 20 and 80
# scheduler = lr_scheduler.ExponentialLR(optimizer, gamma=0.9)                   # multiply lr by 0.9 every epoch
# scheduler = lr_scheduler.CosineAnnealingLR(optimizer, T_max=100)               # cosine decay over 100 epochs

# Linear warm-up over the first 5 epochs, then cosine annealing over the remaining 95.
# (epoch + 1) / 5 avoids a zero learning rate at epoch 0.
def warm_up_with_cosine_lr(epoch):
    if epoch < 5:
        return (epoch + 1) / 5
    return 0.5 * (math.cos((epoch - 5) / (100 - 5) * math.pi) + 1)

scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=warm_up_with_cosine_lr)
for epoch in range(100):
    optimizer.step()   # in real training this follows a forward pass and loss.backward()
    scheduler.step()   # since PyTorch 1.1, step the scheduler after the optimizer
    # equivalently: lr_list.append(optimizer.state_dict()['param_groups'][0]['lr'])
    lr_list.append(scheduler.get_last_lr()[0])
plt.plot(range(100), lr_list)
plt.show()
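On PyTorch 1.10 and later, the same warm-up-plus-cosine schedule can also be composed from built-in schedulers via SequentialLR instead of a hand-written lambda. A minimal sketch reusing the optimizer defined above; the start_factor and milestone values are chosen here to roughly mirror the lambda, not taken from any reference implementation:

# Sketch using composable built-in schedulers (assumes PyTorch >= 1.10).
warmup = lr_scheduler.LinearLR(optimizer, start_factor=0.2, total_iters=5)  # lr ramps 0.2*LR -> LR over 5 epochs
cosine = lr_scheduler.CosineAnnealingLR(optimizer, T_max=95)                # cosine decay over the remaining 95 epochs
scheduler = lr_scheduler.SequentialLR(optimizer, schedulers=[warmup, cosine], milestones=[5])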