使用视差回归的方式来估算连续的视差图:对预测代价 $c_d$ 做 softmax 操作,得到每一个候选视差值 $d$ 的概率;预测视差 $\hat{d}$ 由每一个候选视差值与其对应概率的乘积求和得到,如下式:
$$\hat{d}=\sum_{d=0}^{D_{max}-1} d\times\operatorname{softmax}(-c_d)$$
视差值回归比基于分类的立体匹配方法鲁棒性更强
1.submodule.py
class disparityregression(nn.Module):
    """Soft-argmin disparity regression (PSMNet).

    Converts a per-disparity probability volume (softmax over the
    disparity dimension) into a continuous disparity map by taking the
    probability-weighted sum of the candidate disparities:
    d_hat = sum_d d * p(d).
    """

    def __init__(self, maxdisp):
        """
        Args:
            maxdisp: number of disparity candidates; candidates are the
                integers 0 .. maxdisp-1.
        """
        super(disparityregression, self).__init__()
        # Register the candidate disparities [0, 1, ..., maxdisp-1] with
        # shape (1, maxdisp, 1, 1) as a non-trainable buffer.  A buffer
        # moves with the module on .cpu()/.cuda()/.to(), removing the
        # hard-coded device choice of the original (which also used the
        # deprecated autograd.Variable wrapper).
        self.register_buffer(
            'disp',
            torch.arange(maxdisp, dtype=torch.float32).view(1, maxdisp, 1, 1))

    def forward(self, x):
        """
        Args:
            x: (B, maxdisp, H, W) probability volume, softmax-normalized
               along dim 1.

        Returns:
            (B, H, W) tensor of expected (sub-pixel) disparities.
        """
        # Broadcasting expands (1, maxdisp, 1, 1) to (B, maxdisp, H, W)
        # implicitly, so the original's explicit .repeat(...) (and its
        # debug prints) are unnecessary.
        return torch.sum(x * self.disp, 1)
打印结果:
submodule.py disparityregression x
1
192
384
1248
submodule.py disparityregression self.disp
1
192
1
1
submodule.py disparityregression disp
1
192
384
1248
2.
# NOTE(review): truncated excerpt of disparityregression.forward above,
# instrumented with print(self.disp) to show the buffer contents (output
# follows below); the full method goes on to return torch.sum(x*disp, 1).
def forward(self, x):
print(self.disp)
disp = self.disp.repeat(x.size()[0],1,x.size()[2],x.size()[3])
submodule.py disparityregression self.disp
tensor([[[[ 0.]],
[[ 1.]],
[[ 2.]],
[[ 3.]],
[[ 4.]],