# Build the ResNet model (ResNet / num_classes / args are defined elsewhere in this file).
resnet = ResNet(num_classes, args)

# Two objectives: cross-entropy on the first head, MSE on the second head.
# NOTE(review): MSELoss against `targets` assumes outputs2 and targets are
# float tensors of matching shape — confirm against the model/dataloader.
loss_fn1 = nn.CrossEntropyLoss()
loss_fn2 = nn.MSELoss()

# Optimizer 1 updates only the conv front-end and cross-attention modules.
optimizer1 = optim.Adam(
    [
        {'params': resnet.conv1.parameters()},
        {'params': resnet.conv2.parameters()},
        {'params': resnet.cross_attention.parameters()},
    ],
    lr=0.001,  # adjust the learning rate as needed
)
# Optimizer 2 updates the backbone and the two output heads.
optimizer2 = optim.Adam(
    [
        {'params': resnet.backbone.parameters()},
        {'params': resnet.fc.parameters()},
        {'params': resnet.fc2.parameters()},
    ],
    lr=0.001,
)

# Training loop: each loss is backpropagated separately and each optimizer
# steps only its own parameter groups (the two groups are disjoint).
for epoch in range(num_epochs):
    total_loss1 = 0.0
    total_loss2 = 0.0
    for inputs, targets in dataloader:
        optimizer1.zero_grad()
        optimizer2.zero_grad()
        outputs1, outputs2 = resnet(inputs)
        loss1 = loss_fn1(outputs1, targets)
        loss2 = loss_fn2(outputs2, targets)
        # BUG FIX: both losses share one forward graph, so the first
        # backward() must retain it — otherwise the second backward()
        # raises "Trying to backward through the graph a second time".
        loss1.backward(retain_graph=True)
        loss2.backward()
        optimizer1.step()
        optimizer2.step()
        # BUG FIX: the epoch accumulators were initialized but never
        # updated; .item() detaches so no graph is kept alive.
        total_loss1 += loss1.item()
        total_loss2 += loss2.item()
# Trains two modules of the neural network independently with two optimizers.