1. 3维数据的 row normalization（按行归一化）
import torch

# Row-normalize a 3-D tensor along dim=1, so that every (i, :, k) slice
# sums to 1.
a, b, c = 20, 20, 30
t0 = torch.rand(a, b, c)
# keepdim=True keeps the reduced axis as size 1, so broadcasting works
# directly — no manual .view(a, 1, c) needed.
t1 = t0 / t0.sum(dim=1, keepdim=True)
print(t1.sum(1))  # every entry should be ~1.0
2. batch norm
import torch
import numpy as np
from torch import nn
import torch.nn.functional as F

## batch norm
# Reproduce F.batch_norm by hand: per channel, normalize over (N, H, W).
gamma = 1.0
beta = 0
eps = 1e-5

# Example input matching the printed output below: batch=2, channels=2, 3x3.
# (The original snippet used an `x` defined elsewhere; define one so the
# snippet is self-contained and runnable.)
x = torch.randn(2, 2, 3, 3)
C = x.shape[1]

means = torch.mean(x, dim=(0, 2, 3)).reshape(1, C, 1, 1)
var = torch.var(x, dim=(0, 2, 3), unbiased=False).reshape(1, C, 1, 1)
# BUG FIX: eps belongs INSIDE the sqrt — batch norm is
# (x - mean) / sqrt(var + eps), which is what F.batch_norm computes.
# The original `torch.sqrt(var) + eps` caused the tiny mismatches visible
# in the printed output (e.g. 0.2575 vs 0.2576).
y1 = gamma * (x - means) / torch.sqrt(var + eps) + beta
# training=True with momentum=0 makes F.batch_norm use the batch statistics,
# so y2 should match the manual computation y1 exactly.
y2 = F.batch_norm(x, running_mean=torch.zeros(C), running_var=torch.ones(C), weight=torch.ones(C),
                  bias=torch.zeros(C), training=True, momentum=0., eps=eps)
print(y1)
print(" ")
print(y2)
结果输出
tensor([[[[-0.6857, 0.1656, 0.7152],
[-2.3615, 0.4585, -0.5597],
[ 0.4091, 0.2575, 1.4793]],
[[-1.4772, 0.6332, 1.8835],
[ 0.7574, 0.2748, -0.3092],
[-0.4099, 0.1309, 2.2694]]],
[[[ 1.0750, 0.5130, -1.6202],
[-0.6978, -1.1462, 0.3473],
[ 0.8907, 1.2229, -0.4630]],
[[-0.2802, -1.3265, -1.0468],
[ 1.0219, 0.1826, -0.0740],
[-0.7218, -0.5156, -0.9925]]]])
tensor([[[[-0.6857, 0.1656, 0.7152],
[-2.3615, 0.4585, -0.5597],
[ 0.4091, 0.2576, 1.4793]],
[[-1.4772, 0.6332, 1.8835],
[ 0.7574, 0.2748, -0.3092],
[-0.4099, 0.1309, 2.2694]]],
[[[ 1.0750, 0.5130, -1.6202],
[-0.6978, -1.1462, 0.3473],
[ 0.8907, 1.2229, -0.4630]],
[[-0.2802, -1.3265, -1.0468],
[ 1.0219, 0.1826, -0.0740],
[-0.7218, -0.5156, -0.9925]]]])
3. instance norm
# instance norm
# Reproduce nn.InstanceNorm2d by hand: normalize over (H, W) separately for
# every sample and every channel.
gamma = 1.0
beta = 0.0
eps = 1e-5

# Example input matching the printed output below: batch=2, channels=2, 3x3.
# (The original snippet used `x`/`B`/`C` defined elsewhere; define them so
# the snippet is self-contained and runnable.)
x = torch.randn(2, 2, 3, 3)
B, C = x.shape[0], x.shape[1]

means = torch.mean(x, dim=(2, 3)).reshape(B, C, 1, 1)
var = torch.var(x, dim=(2, 3), unbiased=False).reshape(B, C, 1, 1)
# BUG FIX: eps goes inside the sqrt — (x - mean) / sqrt(var + eps) is what
# nn.InstanceNorm2d computes. The original `torch.sqrt(var) + eps` caused
# the last-digit mismatches in the printed output (1.0827 vs 1.0828).
y1 = gamma * (x - means) / torch.sqrt(var + eps) + beta
y1
y3 = nn.InstanceNorm2d(C)(x)
y3
结果输出
tensor([[[[-1.0038, 1.4137, -2.0396],
[ 0.0739, 0.4103, -0.6672],
[ 0.3224, 0.7337, 0.7567]],
[[-0.4517, 0.2687, -0.9556],
[-1.0445, -0.3434, 2.0049],
[ 0.5106, -1.0717, 1.0827]]],
[[[ 0.6794, -0.7771, 0.2650],
[-1.5320, 2.0234, -0.0900],
[ 0.6411, -0.9737, -0.2360]],
[[-0.2841, 0.9513, 0.9278],
[-1.8077, 0.3377, 1.5133],
[-1.1645, -0.1577, -0.3162]]]])
###
tensor([[[[-1.0038, 1.4137, -2.0396],
[ 0.0739, 0.4103, -0.6672],
[ 0.3224, 0.7337, 0.7567]],
[[-0.4517, 0.2687, -0.9556],
[-1.0445, -0.3434, 2.0049],
[ 0.5106, -1.0717, 1.0828]]],
[[[ 0.6794, -0.7771, 0.2650],
[-1.5321, 2.0234, -0.0900],
[ 0.6411, -0.9737, -0.2360]],
[[-0.2841, 0.9513, 0.9278],
[-1.8077, 0.3377, 1.5133],
[-1.1645, -0.1577, -0.3162]]]])
双随机归一化（doubly stochastic normalization）
def _doubly_stocahstic_normalize(e):
    # NOTE(review): "stocahstic" is a typo for "stochastic"; kept as-is to
    # avoid breaking existing callers.
    """Doubly-stochastic normalization of a 2-D or 3-D tensor `e`.

    Computes out[i, j, k] = sum_m( e'[i, m, k] * e'[j, m, k] / sum_v e'[v, m, k] )
    where e' is `e` normalized so each column (dim=1 slice) sums to 1.

    NOTE(review): the repeat/repeat_interleave/view arithmetic below only
    lines up when the first two dimensions are equal (a == b) — e.g. a square
    attention/edge matrix. For a != b the intermediate shapes (a*a vs a*b
    rows, and the final view of a**2*c elements as (a, b, c)) are
    inconsistent. TODO confirm callers always pass square inputs.
    """
    # Promote a 2-D matrix to 3-D with a trailing singleton so the same
    # code path handles both shapes; it is squeezed back out at the end.
    if len(e.size()) == 2:
        e = torch.unsqueeze(e, -1)
    a, b, c = e.size()
    # Column-normalize: each (i, :, k) fiber divided by its dim=1 sum.
    e_ = e / (e.sum(dim=1).view(a, 1, c))
    # Build all pairwise products e_[i] * e_[j] as a (a**2, b, c) tensor:
    # repeat_interleave gives rows [0,0,...,1,1,...], repeat gives
    # [0,1,...,0,1,...]; their elementwise product at row i*a+j is
    # e_[i] * e_[j]. Then divide by the per-column totals sum_v e_[v].
    e_1 = (e_.view(a, b*c).repeat_interleave(a, dim=0) * e_.view(a, b*c).repeat(b, 1)).view(a**2, b, c) / e_.sum(dim=0).view(1, b, c)
    # Sum over the shared index m and reshape the a**2 pair axis back into
    # (a, b); squeeze drops the singleton added for 2-D inputs.
    return e_1.sum(dim=1).view(a, b, c).squeeze(2)