class GParam(nn.Module):
    """Learnable softmax-normalized mixing weights.

    Holds a vector of ``length`` trainable logits; ``forward`` softmaxes them
    into convex-combination coefficients and returns the weighted sum of the
    first ``length`` entries of ``x``.
    """

    def __init__(self, length=3):
        """Create the module.

        Args:
            length: number of mixing weights (default 3, matching the
                original hard-coded size, so existing callers are unaffected).
        """
        super(GParam, self).__init__()
        # Uniform init: softmax of equal logits gives equal weights.
        self.weight = nn.Parameter(torch.ones(length), requires_grad=True)

    def forward(self, x):
        """Return sum_i softmax(weight)[i] * x[i].

        Args:
            x: a tensor (indexed along dim 0) or a sequence of tensors with
               at least ``len(self.weight)`` entries.

        Returns:
            The weighted sum of the entries of ``x``.
        """
        weight = F.softmax(self.weight, 0)
        s = 0
        # zip bounds the loop by len(weight), same as the original
        # range(len(weight)) index loop; works for tensors and sequences.
        for xi, wi in zip(x, weight):
            s = s + xi * wi
        return s