Using timm's optimizer and lr_scheduler factories
Cosine learning-rate schedule:
from timm.optim import create_optimizer_v2, optimizer_kwargs
from timm.scheduler import create_scheduler
from torch import nn
from matplotlib import pyplot as plt
%matplotlib inline
class arg:
    """Hyperparameter container consumed by timm's ``create_optimizer_v2``
    and ``create_scheduler``.

    It mimics an ``argparse.Namespace``: both timm factories read plain
    attributes from the object they are given, so a bare class with class
    attributes is sufficient here.
    """

    # --- optimizer settings (read by create_optimizer_v2) ---
    opt = 'sgd'            # optimizer name
    lr = 0.01              # base (peak) learning rate
    weight_decay = 0
    momentum = 0.9

    # --- scheduler settings (read by create_scheduler) ---
    epochs = 100           # total training epochs for the schedule
    sched = 'cosine'       # cosine annealing schedule
    min_lr = 0.002         # lower bound the lr anneals down to
    warmup_lr = 0.005      # starting lr during the warmup phase
    warmup_epochs = 10     # epochs spent warming up from warmup_lr to lr
    cooldown_epochs = 50   # extra epochs held at min_lr after annealing ends
model = nn.Conv2d(1,