File "/home/sla/.conda/envs/sla_envs/lib/python3.8/site-packages/timm/models/layers/norm.py", line 23, in forward
return F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)
File "/home/sla/.conda/envs/sla_envs/lib/python3.8/site-packages/torch/nn/functional.py", line 2347, in layer_norm
return torch.layer_norm(input, normalized_shape, weight, bias, eps, torch.backends.cudnn.enabled)
RuntimeError: Given normalized_shape=[64, 1, 1], expected input with shape [*, 64, 1, 1], but got input of size[32, 64, 56, 56]
The installed timm version is 0.4.12.
class LayerNorm2d(nn.LayerNorm):
    """LayerNorm over the channel dimension of 2D spatial BCHW tensors.

    The broken version passed ``[num_channels, 1, 1]`` as ``normalized_shape``,
    which makes ``F.layer_norm`` expect input shaped ``[*, C, 1, 1]`` and raise
    a RuntimeError on a ``[B, C, H, W]`` tensor (exactly the traceback above).
    The fix: normalize over channels only by permuting BCHW -> BHWC, applying
    layer norm over the last (channel) dim, then permuting back.
    """

    def __init__(self, num_channels: int) -> None:
        # normalized_shape is just the channel count; weight/bias are (C,).
        super().__init__(num_channels)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Apply per-position channel LayerNorm to a (B, C, H, W) tensor.

        Returns a tensor of the same (B, C, H, W) shape.
        """
        # Move channels last so F.layer_norm normalizes over them, then restore.
        x = x.permute(0, 2, 3, 1)
        x = F.layer_norm(x, self.normalized_shape, self.weight, self.bias, self.eps)
        return x.permute(0, 3, 1, 2)