1、报错:expected scalar type Long but found Int
原因:nn.CrossEntropyLoss 要求 target 的数据类型是 int64(Long),而实际传入的 target 是 int32(Int)。
参考:第一篇参考文章里的方法我没有实验成功;最后参考另一篇文章成功解决(原文链接在转贴时丢失)。
修改的代码:在 target.view(-1) 后面添加 .type(torch.LongTensor),把 target 转成 Long 类型
class MySoftmaxCrossEntropyLoss(nn.Module):
    """Cross-entropy loss that accepts (N, C, H, W) segmentation logits.

    Flattens the spatial dimensions so that ``nn.CrossEntropyLoss`` sees one
    row of C class scores per pixel, and casts the target to int64, which
    fixes "RuntimeError: expected scalar type Long but found Int".

    Args:
        nbclasses: number of classes C (the channel count of the logits).
    """

    def __init__(self, nbclasses):
        super(MySoftmaxCrossEntropyLoss, self).__init__()
        self.nbclasses = nbclasses

    def forward(self, inputs, target):
        """Return the mean cross-entropy between ``inputs`` and ``target``.

        Args:
            inputs: raw (un-softmaxed) logits, shape (N, C) or (N, C, H, W).
            target: integer class indices; any integer dtype is accepted.
        """
        if inputs.dim() > 2:
            # N,C,H,W -> N,C,H*W -> N,H*W,C -> N*H*W,C
            # transpose() produces a non-contiguous tensor, hence the
            # .contiguous() before the final view().
            inputs = inputs.view(inputs.size(0), inputs.size(1), -1)
            inputs = inputs.transpose(1, 2)
            inputs = inputs.contiguous().view(-1, self.nbclasses)
        # .long() casts to int64 *on the tensor's current device*. The
        # original .type(torch.LongTensor) also did the cast, but silently
        # moved a CUDA target back to the CPU, breaking GPU training.
        target = target.view(-1).long()
        # CrossEntropyLoss = log-softmax + NLL in one call; mean over pixels.
        return nn.CrossEntropyLoss(reduction="mean")(inputs, target)
2、IndexError: Target 1 is out of bounds.
原因:target 中出现的类别编号(这里是 1)大于等于传给损失函数的类别数 nbclasses,即模型输出的通道数小于实际类别数。解决:把 nbclasses(网络最后一层的输出通道数)设为不小于「最大类别编号 + 1」。