Over-smoothing: MADGap

import torch
import torch.nn.functional as F

# Tensor version of MADGap (differentiable, so gradients can flow through it).
# intensor: [node_num, hidden_dim], the node feature matrix;
# neb_mask, rmt_mask: [node_num, node_num], mask matrices of the neighbor and remote relations;
# target_idx: [1, 2, 3, ..., n], indices of the nodes for which the MADGap value is computed.
def mad_gap_regularizer(intensor, neb_mask, rmt_mask, target_idx):
    node_num, feat_num = intensor.size()

    # Build all node pairs: input1[i, j] = intensor[j], input2[i, j] = intensor[i].
    input1 = intensor.expand(node_num, node_num, feat_num)
    input2 = input1.transpose(0, 1)

    input1 = input1.contiguous().view(-1, feat_num)
    input2 = input2.contiguous().view(-1, feat_num)

    # Pairwise cosine distance matrix: dist_tensor[i, j] = 1 - cos(intensor[i], intensor[j]).
    simi_tensor = F.cosine_similarity(input1, input2, dim=1, eps=1e-8).view(node_num, node_num)
    dist_tensor = 1 - simi_tensor

    # Keep only the distances to neighbor / remote nodes.
    neb_dist = torch.mul(dist_tensor, neb_mask)
    rmt_dist = torch.mul(dist_tensor, rmt_mask)

    # Per-node counts of non-zero entries, used as denominators for the row-wise means.
    # (The original snippet hard-coded `.type(torch.FloatTensor).cuda()`; `.float()` keeps
    # the result on whatever device `intensor` lives on.)
    divide_neb = (neb_dist != 0).sum(1).float() + 1e-8
    divide_rmt = (rmt_dist != 0).sum(1).float() + 1e-8

    neb_mean_list = neb_dist.sum(1) / divide_neb
    rmt_mean_list = rmt_dist.sum(1) / divide_rmt

    # Mean average distance (MAD) over the target nodes for each relation, and their gap.
    neb_mad = torch.mean(neb_mean_list[target_idx])
    rmt_mad = torch.mean(rmt_mean_list[target_idx])

    mad_gap = rmt_mad - neb_mad

    return mad_gap
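The snippet above does not show how neb_mask and rmt_mask are built or how the returned value is used. Below is a minimal usage sketch, assuming the masks come from a node-to-node hop-distance matrix; the hop thresholds (<=2 hops as "neighbor", >=3 hops as "remote"), the random features, and the 0.01 regularization weight are illustrative assumptions, not part of the original code.

# Minimal usage sketch (illustrative assumptions only, not from the original post).
import torch

node_num, hidden_dim = 6, 16
feats = torch.randn(node_num, hidden_dim, requires_grad=True)   # stand-in for GNN outputs

# Hypothetical symmetric hop-distance matrix with zeros on the diagonal.
hop = torch.randint(1, 6, (node_num, node_num)).float()
hop = torch.triu(hop, diagonal=1)
hop = hop + hop.t()

neb_mask = ((hop > 0) & (hop <= 2)).float()   # "neighbor" pairs: within 2 hops (assumed threshold)
rmt_mask = (hop >= 3).float()                 # "remote" pairs: 3 hops or more (assumed threshold)

target_idx = torch.arange(node_num)           # evaluate MADGap over all nodes

mad_gap = mad_gap_regularizer(feats, neb_mask, rmt_mask, target_idx)

# Used as a regularizer: a larger MADGap (remote nodes farther apart than neighbors) is the
# desired behavior, so it is subtracted from the task loss with some weight.
task_loss = torch.tensor(0.0)                 # placeholder for a real supervised loss
loss = task_loss - 0.01 * mad_gap
loss.backward()
print(mad_gap.item())

Because the distance is cosine-based, MADGap measures how the directions of node representations spread out; the 1e-8 terms in the function simply avoid division by zero when a node has no neighbor or remote counterpart under the given masks.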