BN in a Fully Connected Layer (pseudocode)

'''
Batch normalization in a fully connected layer
'''
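# Training-mode BN normalizes each feature with the statistics of the current
# mini-batch: y = gamma * (x - mean) / sqrt(var + eps) + beta, where var is the
# biased (divide-by-N) estimate. Eval-mode BN substitutes the running mean and
# variance accumulated during training; the two helpers below redo both paths
# by hand.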
import torch
import torch.nn as nn
class Net(nn.Module):
    def __init__(self, dim, pretrained):
        super(Net, self).__init__()
        # one (gamma, beta) pair per input feature, default eps=1e-5
        self.bn = nn.BatchNorm1d(dim)
        if pretrained:
            self.pretrained()

    def forward(self, input):
        return self.bn(input)

    def pretrained(self):
        # identity affine transform: gamma = 1, beta = 0
        nn.init.constant_(self.bn.weight, 1)
        nn.init.constant_(self.bn.bias, 0)

def bn_train_fc(input, model):
    '''Training-mode BN by hand: normalize with the current batch statistics.'''
    state_dict = model.state_dict()
    weights = state_dict['bn.weight'].view(1, -1).expand(input.shape)
    bias = state_dict['bn.bias'].view(1, -1).expand(input.shape)
    # per-feature statistics over the batch dimension; training-mode BN
    # normalizes with the biased variance, hence unbiased=False
    batch_mean = torch.mean(input, dim=0).view(1, -1).expand(input.shape)
    batch_var = torch.var(input, dim=0, unbiased=False).view(1, -1).expand(input.shape)
    output = (input - batch_mean) / (batch_var + 1e-5).sqrt()
    output = output * weights + bias
    return output

def bn_test_fc(input, model):
    '''Eval-mode BN by hand: normalize with the accumulated running statistics.'''
    state_dict = model.state_dict()
    weights = state_dict['bn.weight'].view(1, -1).expand(input.shape)
    bias = state_dict['bn.bias'].view(1, -1).expand(input.shape)
    run_mean = state_dict['bn.running_mean'].view(1, -1).expand(input.shape)
    run_var = state_dict['bn.running_var'].view(1, -1).expand(input.shape)
    output = (input - run_mean) / (run_var + 1e-5).sqrt()
    output = output * weights + bias
    return output

if __name__ == '__main__':
    model = Net(dim=5, pretrained=False)
    input = torch.randn((3, 5))
    output = model(input)  # training mode: this also updates the running stats
    print(model.state_dict()['bn.running_mean'])
    print(model.state_dict()['bn.running_var'])
    '''
    A fully connected layer takes input of shape [batch_size, num_dims].
    BN is applied per node: the batch_size values of each node are reduced
    to one mean and one variance. Each FC node plays the role of one
    convolution kernel (one channel) in a CNN, so:
        FC  BN: per node,   over the batch dimension
        CNN BN: per kernel, over the batch, H and W dimensions
    '''
    output2 = bn_train_fc(input, model)
    print(output, output2)  # the two outputs should match
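
    # Eval-mode check, a minimal sketch (output3/output4 are illustrative
    # names): in eval() BN switches to the running statistics, which
    # bn_test_fc mirrors.
    model.eval()
    output3 = model(input)
    output4 = bn_test_fc(input, model)
    print(output3, output4)  # these should match as well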

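For the CNN case in the note above, BN keeps one mean and one variance per channel, reduced over the batch, H and W dimensions. A minimal self-contained sketch, checking the manual computation against nn.BatchNorm2d in training mode (the shapes and names here are illustrative, not from the original post):

import torch
import torch.nn as nn

bn2d = nn.BatchNorm2d(4)            # 4 channels, default eps=1e-5
x = torch.randn(3, 4, 8, 8)         # [batch_size, channels, H, W]

# one mean/variance per channel, reduced over batch, H and W (biased variance)
mean = x.mean(dim=(0, 2, 3), keepdim=True)
var = x.var(dim=(0, 2, 3), unbiased=False, keepdim=True)
manual = (x - mean) / (var + bn2d.eps).sqrt()
manual = manual * bn2d.weight.view(1, -1, 1, 1) + bn2d.bias.view(1, -1, 1, 1)

print(torch.allclose(bn2d(x), manual, atol=1e-6))  # expected: True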