Activations
1. Swish
# Swish activation
class Swish(nn.Module):
    """Swish activation: f(x) = x * sigmoid(x).

    Implemented as an nn.Module so it can be registered in a
    Sequential via ``add_module`` like the built-in activations.
    """

    def __init__(self):
        super().__init__()

    def forward(self, x):
        # torch.sigmoid replaces F.sigmoid, which is deprecated.
        return x * torch.sigmoid(x)
2. Mish
# Mish activation
class Mish(nn.Module):
    """Mish activation: f(x) = x * tanh(softplus(x))."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        return x * torch.tanh(F.softplus(x))
(以上两个cls全部放在model.py内)
3. 接口设置:
主要方法就是在create_modules的时候再传入一个参数 act_opt,默认是leaky,就是用pytorch中自带的leaky relu去做,如果指定为mish或者是swish就用自己写的方法用作激活函数。
def create_modules(module_defs, act_opt="leaky"):
再加上一些判断的条件:
# Swap the cfg's "leaky" activation for the one requested via act_opt.
if module_def["activation"] == "leaky":
    if act_opt == "swish":
        # BUG FIX: add_module expects an nn.Module *instance*, not the
        # class object — instantiate Swish()/Mish() before registering.
        modules.add_module(f"swish_{module_i}", Swish())
    elif act_opt == "mish":
        modules.add_module(f"mish_{module_i}", Mish())
    else:
        # Default: PyTorch's built-in LeakyReLU with slope 0.1.
        modules.add_module(f"leaky_{module_i}", nn.LeakyReLU(0.1))
(前面有很多空格是因为这部分是从代码段中间截下来的)本来是想改module_def中的activation的,但是这样一来好像会涉及cfg文件的修改,每次换激活函数还得重写生成cfg的bash,感觉反而不是很实用。
IoU Loss
(这一部分的代码都写在losses.py文件内了,用的时候直接
from losses import *
就好)
1. IoU loss
# IoU Loss
def iou(bboxes1, bboxes2):
    """Elementwise IoU loss between two sets of axis-aligned boxes.

    Boxes are ``(x1, y1, x2, y2)`` tensors of shape ``(N, 4)``. The two
    sets are matched elementwise (or broadcast when one set contains a
    single box) — despite the pairwise-looking shapes in the original,
    the arithmetic below is elementwise, not an N x M matrix.

    Returns a scalar tensor: ``sum(1 - IoU)`` over the matched pairs.
    """
    rows = bboxes1.shape[0]
    cols = bboxes2.shape[0]
    if rows * cols == 0:
        # No boxes -> zero loss. Returned as a scalar tensor so the
        # return type matches the non-empty path (the original returned
        # a (rows, cols) zero matrix here, inconsistent with sum()).
        return torch.zeros(())
    # Keep the smaller set first so (1, 2) vs (N, 2) broadcasting in the
    # min/max below works regardless of argument order.
    if rows > cols:
        bboxes1, bboxes2 = bboxes2, bboxes1
    area1 = (bboxes1[:, 2] - bboxes1[:, 0]) * (bboxes1[:, 3] - bboxes1[:, 1])
    area2 = (bboxes2[:, 2] - bboxes2[:, 0]) * (bboxes2[:, 3] - bboxes2[:, 1])
    # Intersection rectangle: max of top-left corners, min of bottom-right.
    inter_max_xy = torch.min(bboxes1[:, 2:], bboxes2[:, 2:])
    inter_min_xy = torch.max(bboxes1[:, :2], bboxes2[:, :2])
    inter = torch.clamp(inter_max_xy - inter_min_xy, min=0)
    inter_area = inter[:, 0] * inter[:, 1]
    union = area1 + area2 - inter_area
    # Epsilon guards against 0/0 -> NaN for degenerate (zero-area) boxes.
    ious = inter_area / (union + 1e-16)
    ious = torch.clamp(ious, min=0, max=1.0)
    # Result is 1-D, so the original's post-swap transpose was a no-op
    # (and torch warns on .T for non-2D tensors); it is dropped.
    return torch.sum(1 - ious)
2. GIoU Loss
# GIoU Loss
def