File "/home/sla/.conda/envs/sla_envs/lib/python3.8/site-packages/timm/models/layers/norm.py", line 23, in forward
return F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)
File "/home/sla/.conda/envs/sla_envs/lib/python3.8/site-packages/torch/nn/functional.py", line 2347, in layer_norm
return torch.layer_norm(input, normalized_shape, weight, bias, eps, torch.backends.cudnn.enabled)
RuntimeError: Given normalized_shape=[64, 1, 1], expected input with shape [*, 64, 1, 1], but got input of size[32, 64, 56, 56]
安装的 timm 版本是 0.4.12,其中的实现如下:
class LayerNorm2d(nn.LayerNorm):
    """ LayerNorm for channels of '2d' spatial BCHW tensors.

    NOTE(review): this is the timm 0.4.12 implementation quoted above. It
    passes normalized_shape=[C, 1, 1] to nn.LayerNorm, so F.layer_norm
    requires the input's trailing dims to be exactly (C, 1, 1) — a
    (B, C, H, W) tensor with H, W > 1 raises the RuntimeError shown in the
    traceback at the top of this post.
    """

    def __init__(self, num_channels):
        # normalized_shape = [C, 1, 1]: only matches inputs shaped (*, C, 1, 1).
        super().__init__([num_channels, 1, 1])

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Normalizes over the last three dims (C, 1, 1) of x.
        return F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)
而在 timm 0.8.6.dev0 版本中,实现改成了:
class LayerNorm2d(nn.LayerNorm):
    """ LayerNorm for channels of '2D' spatial NCHW tensors """

    def __init__(self, num_channels, eps=1e-6, affine=True):
        # Normalize over the channel dim only: normalized_shape == (num_channels,).
        super().__init__(num_channels, eps=eps, elementwise_affine=affine)
        self._fast_norm = is_fast_norm()  # can't script unless we have these flags here (no globals)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # NCHW -> NHWC so the channel dim is last, which is what
        # F.layer_norm normalizes with a 1-element normalized_shape.
        x = x.permute(0, 2, 3, 1)
        if self._fast_norm:
            # fast_layer_norm is a timm-internal helper; selected once at init.
            x = fast_layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)
        else:
            x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)
        # NHWC -> NCHW restores the caller's layout.
        x = x.permute(0, 3, 1, 2)
        return x
所以我直接打开 /home/sla/.conda/envs/sla_envs/lib/python3.8/site-packages/timm/models/layers/norm.py,把第 23 行附近的实现修改为:
class LayerNorm2d(nn.LayerNorm):
    """ LayerNorm for channels of '2d' spatial BCHW tensors.

    Normalizes over the channel dimension only, matching the timm 0.8.6
    implementation quoted above.
    """

    def __init__(self, num_channels, eps=1e-6, affine=True):
        # normalized_shape == (num_channels,): layer_norm acts on the LAST dim.
        super().__init__(num_channels, eps=eps, elementwise_affine=affine)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # BCHW -> BHWC so channels are last, then permute back. Without this
        # round-trip the original snippet asked F.layer_norm to normalize the
        # W dimension and raised a shape RuntimeError whenever W != num_channels.
        x = x.permute(0, 2, 3, 1)
        x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)
        return x.permute(0, 3, 1, 2)
如果有问题, 请留言