A code implementation of the YOLOv3 backbone network (Darknet-53); every version below can be run directly.

Contents

 1. Backbone, version 1
 2. Backbone, version 2
 3. Putting all stages in one list and slicing for the outputs
 4. Reading the stage configuration from a YAML file


 1. Backbone, version 1
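This first version writes every stage out by hand: a 3x3 stem convolution, then five stages that each start with a stride-2 downsample and stack 1, 2, 8, 8, and 4 residual blocks, the Darknet-53 layout.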

import torch
from torch import nn
import torch.nn.functional as F


class ConvBnLeakRelu(nn.Module):
    # basic Darknet block: Conv -> BatchNorm -> LeakyReLU
    def __init__(self, in_ch, out_ch, k=3, s=1):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=k, padding=k // 2, stride=s),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(0.1),  # Darknet uses a negative slope of 0.1
        )

    def forward(self, x):
        return self.layer(x)


class Resnet(nn.Module):
    # residual block: 1x1 bottleneck down to ch//2, then 3x3 back up to ch
    def __init__(self, ch):
        super().__init__()
        self.resblock = nn.Sequential(
            ConvBnLeakRelu(ch, ch // 2, k=1),
            ConvBnLeakRelu(ch // 2, ch, k=3),
        )

    def forward(self, x):
        return F.relu(self.resblock(x) + x)


class Downsample(nn.Module):
    # stride-2 3x3 convolution that halves the spatial resolution
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.layer = ConvBnLeakRelu(in_ch, out_ch, k=3, s=2)

    def forward(self, x):
        return self.layer(x)

class Net(nn.Module):
    def __init__(self):
        super().__init__()
        # stem: padding=1 keeps the 416x416 input resolution
        self.input_layer = nn.Conv2d(3, 32, 3, 1, 1)
        self.layer1 = nn.Sequential(
            Downsample(32, 64),
            Resnet(64),
        )
        self.layer2 = nn.Sequential(
            Downsample(64, 128),
            Resnet(128),
            Resnet(128),
        )
        self.layer3 = nn.Sequential(
            Downsample(128, 256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
        )

        self.layer4 = nn.Sequential(
            Downsample(256, 512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
        )
        self.layer5 = nn.Sequential(
            Downsample(512, 1024),
            Resnet(1024),
            Resnet(1024),
            Resnet(1024),
            Resnet(1024),
        )
    def forward(self, x):
        x = self.input_layer(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x_52 = self.layer3(x)     # 52x52 scale
        x_26 = self.layer4(x_52)  # 26x26 scale
        x_13 = self.layer5(x_26)  # 13x13 scale
        return x_52, x_26, x_13
if __name__ == '__main__':
    x = torch.randn(1, 3, 416, 416)
    net = Net()
    out = net(x)
    for i in out:
        print(i.shape)
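Running the script prints the three feature maps that feed the YOLOv3 detection heads; with the padded stem above, a 416x416 input yields:

torch.Size([1, 256, 52, 52])
torch.Size([1, 512, 26, 26])
torch.Size([1, 1024, 13, 13])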

 

 2. Backbone, version 2
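Same network as version 1; the only change is a make_layer helper that builds a whole stage (one Downsample plus block_num residual blocks) from three numbers, removing the copy-pasted Resnet lines.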

import torch
from torch import nn
import torch.nn.functional as F


class ConvBnLeakRelu(nn.Module):
    # basic Darknet block: Conv -> BatchNorm -> LeakyReLU
    def __init__(self, in_ch, out_ch, k=3, s=1):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=k, padding=k // 2, stride=s),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(0.1),  # Darknet uses a negative slope of 0.1
        )

    def forward(self, x):
        return self.layer(x)


class Resnet(nn.Module):
    # residual block: 1x1 bottleneck down to ch//2, then 3x3 back up to ch
    def __init__(self, ch):
        super().__init__()
        self.resblock = nn.Sequential(
            ConvBnLeakRelu(ch, ch // 2, k=1),
            ConvBnLeakRelu(ch // 2, ch, k=3),
        )

    def forward(self, x):
        return F.relu(self.resblock(x) + x)


class Downsample(nn.Module):
    # stride-2 3x3 convolution that halves the spatial resolution
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.layer = ConvBnLeakRelu(in_ch, out_ch, k=3, s=2)

    def forward(self, x):
        return self.layer(x)

class Net(nn.Module):
    def __init__(self):
        super().__init__()
        # stem: padding=1 keeps the 416x416 input resolution
        self.input_layer = nn.Conv2d(3, 32, 3, 1, 1)
        self.layer1 = self.make_layer(32, 64, 1)
        self.layer2 = self.make_layer(64, 128, 2)
        self.layer3 = self.make_layer(128, 256, 8)
        self.layer4 = self.make_layer(256, 512, 8)
        self.layer5 = self.make_layer(512, 1024, 4)

    def make_layer(self, in_ch, out_ch, block_num):
        # one stride-2 downsample followed by block_num residual blocks
        layers = [Downsample(in_ch, out_ch)]
        for _ in range(block_num):
            layers += [Resnet(out_ch)]
        return nn.Sequential(*layers)
    def forward(self, x):
        x = self.input_layer(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x_52 = self.layer3(x)     # 52x52 scale
        x_26 = self.layer4(x_52)  # 26x26 scale
        x_13 = self.layer5(x_26)  # 13x13 scale
        return x_52, x_26, x_13
if __name__ == '__main__':
    x = torch.randn(1, 3, 416, 416)
    net = Net()
    out = net(x)
    for i in out:
        print(i.shape)
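make_layer changes nothing about the architecture, only how it is written. A minimal sanity check (run inside either script) is to compare the total parameter count, which comes out identical for versions 1 and 2:

net = Net()
print(sum(p.numel() for p in net.parameters()))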


 3. Putting all stages in one list and slicing for the outputs
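Instead of five named attributes, the stages are now built from a config table and stored in a single nn.Sequential, so the forward pass can pick its three outputs by slicing.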

 

import torch
from torch import nn
import torch.nn.functional as F


class ConvBnLeakRelu(nn.Module):
    # basic Darknet block: Conv -> BatchNorm -> LeakyReLU
    def __init__(self, in_ch, out_ch, k=3, s=1):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=k, padding=k // 2, stride=s),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(0.1),  # Darknet uses a negative slope of 0.1
        )

    def forward(self, x):
        return self.layer(x)


class Resnet(nn.Module):
    # residual block: 1x1 bottleneck down to ch//2, then 3x3 back up to ch
    def __init__(self, ch):
        super().__init__()
        self.resblock = nn.Sequential(
            ConvBnLeakRelu(ch, ch // 2, k=1),
            ConvBnLeakRelu(ch // 2, ch, k=3),
        )

    def forward(self, x):
        return F.relu(self.resblock(x) + x)


class Downsample(nn.Module):
    # stride-2 3x3 convolution that halves the spatial resolution
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.layer = ConvBnLeakRelu(in_ch, out_ch, k=3, s=2)

    def forward(self, x):
        return self.layer(x)


# (in_ch, out_ch, number of residual blocks) for each of the five stages
cif = [[32, 64, 1],
       [64, 128, 2],
       [128, 256, 8],
       [256, 512, 8],
       [512, 1024, 4]]


class Net(nn.Module):
    def __init__(self):
        super().__init__()
        # stem: padding=1 keeps the 416x416 input resolution
        self.input_layer = nn.Conv2d(3, 32, 3, 1, 1)
        # self.layer1 = self.make_layer(32, 64, 1)
        # self.layer2 = self.make_layer(64, 128, 2)
        # self.layer3 = self.make_layer(128, 256, 8)
        # self.layer4 = self.make_layer(256, 512, 8)
        # self.layer5 = self.make_layer(512, 1024, 4)
        # build the five stages from the config table; wrapping the list in
        # nn.Sequential registers them all as submodules
        self.layers = []
        for in_ch, out_ch, block_num in cif:
            self.layers += [self.make_layer(in_ch, out_ch, block_num)]
        self.layers = nn.Sequential(*self.layers)
    def make_layer(self, in_ch, out_ch, block_num):
        # one stride-2 downsample followed by block_num residual blocks
        layers = [Downsample(in_ch, out_ch)]
        for _ in range(block_num):
            layers += [Resnet(out_ch)]
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.input_layer(x)
        # slicing an nn.Sequential returns a new (callable) nn.Sequential
        x_52 = self.layers[0:3](x)     # stages 1-3 -> 52x52
        x_26 = self.layers[3:4](x_52)  # stage 4 -> 26x26
        x_13 = self.layers[4:5](x_26)  # stage 5 -> 13x13
        return x_52, x_26, x_13


if __name__ == '__main__':
    x = torch.randn(1, 3, 416, 416)
    net = Net()
    out = net(x)
    for i in out:
        print(i.shape)
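The forward pass above leans on the fact that slicing an nn.Sequential returns a new nn.Sequential containing the selected modules, which is itself callable. A minimal standalone sketch (toy sizes, illustrative names only):

import torch
from torch import nn

seq = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 2))
head = seq[0:2]                        # new nn.Sequential with the first two modules
print(head(torch.randn(1, 4)).shape)   # torch.Size([1, 8])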

 4. Reading the stage configuration from a YAML file
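Last step: the config table moves out of the Python file into tt.yaml, so the stage widths and depths can be changed without touching the code.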

import torch
import yaml
from torch import nn
import torch.nn.functional as F


class ConvBnLeakRelu(nn.Module):
    # basic Darknet block: Conv -> BatchNorm -> LeakyReLU
    def __init__(self, in_ch, out_ch, k=3, s=1):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=k, padding=k // 2, stride=s),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(0.1),  # Darknet uses a negative slope of 0.1
        )

    def forward(self, x):
        return self.layer(x)


class Resnet(nn.Module):
    # residual block: 1x1 bottleneck down to ch//2, then 3x3 back up to ch
    def __init__(self, ch):
        super().__init__()
        self.resblock = nn.Sequential(
            ConvBnLeakRelu(ch, ch // 2, k=1),
            ConvBnLeakRelu(ch // 2, ch, k=3),
        )

    def forward(self, x):
        return F.relu(self.resblock(x) + x)


class Downsample(nn.Module):
    # stride-2 3x3 convolution that halves the spatial resolution
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.layer = ConvBnLeakRelu(in_ch, out_ch, k=3, s=2)

    def forward(self, x):
        return self.layer(x)


# cif = [[32, 64, 1],
#        [64, 128, 2],
#        [128, 256, 8],
#        [256, 512, 8],
#        [512, 1024, 4]]
# the same table, now read from a YAML file instead of being hard-coded
with open('tt.yaml', 'r') as file:
    cif = yaml.safe_load(file)['conf']  # safe_load is enough for plain config data
print(cif)

class Net(nn.Module):
    def __init__(self):
        super().__init__()
        # stem: padding=1 keeps the 416x416 input resolution
        self.input_layer = nn.Conv2d(3, 32, 3, 1, 1)
        # self.layer1 = self.make_layer(32, 64, 1)
        # self.layer2 = self.make_layer(64, 128, 2)
        # self.layer3 = self.make_layer(128, 256, 8)
        # self.layer4 = self.make_layer(256, 512, 8)
        # self.layer5 = self.make_layer(512, 1024, 4)
        # build the five stages from the YAML config, as in version 3
        self.layers = []
        for in_ch, out_ch, block_num in cif:
            self.layers += [self.make_layer(in_ch, out_ch, block_num)]
        self.layers = nn.Sequential(*self.layers)
    def make_layer(self, in_ch, out_ch, block_num):
        # one stride-2 downsample followed by block_num residual blocks
        layers = [Downsample(in_ch, out_ch)]
        for _ in range(block_num):
            layers += [Resnet(out_ch)]
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.input_layer(x)
        # slicing an nn.Sequential returns a new (callable) nn.Sequential
        x_52 = self.layers[0:3](x)     # stages 1-3 -> 52x52
        x_26 = self.layers[3:4](x_52)  # stage 4 -> 26x26
        x_13 = self.layers[4:5](x_26)  # stage 5 -> 13x13
        return x_52, x_26, x_13


if __name__ == '__main__':
    x = torch.randn(1, 3, 416, 416)
    net = Net()
    out = net(x)
    for i in out:
        print(i.shape)

 tt.yaml (the config file read above; the commented lines show the equivalent flow-style syntax):

#conf:
#  [ [ 32, 64, 1 ],
#    [ 64, 128, 2 ],
#    [ 128, 256, 8 ],
#    [ 256, 512, 8 ],
#    [ 512, 1024, 4 ] ]
conf:
  - [ 32, 64, 1 ]
  - [ 64, 128, 2 ]
  - [ 128, 256, 8 ]
  - [ 256, 512, 8 ]
  - [ 512, 1024, 4 ]
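Both YAML spellings parse to the same Python object; loading the file returns exactly the table that was hard-coded in version 3:

import yaml
with open('tt.yaml') as f:
    print(yaml.safe_load(f)['conf'])
# [[32, 64, 1], [64, 128, 2], [128, 256, 8], [256, 512, 8], [512, 1024, 4]]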
