-- Reference: https://github.com/torch/nn/blob/master/doc/module.md
-- Demo: clone a network while SHARING its parameter tensors.
-- Build a small MLP.
local mlp1 = nn.Sequential()
mlp1:add(nn.Linear(100, 10))
-- clone('weight', 'bias') deep-copies the module, then re-shares the listed
-- tensors, so both modules reference the same weight/bias storage.
local mlp2 = mlp1:clone('weight', 'bias')
-- Mutate a bias through the first network...
mlp1:get(1).bias[1] = 99
-- ...and observe the change through the second one (prints 99),
-- proving the storages are shared.
print(mlp2:get(1).bias[1])
-- Reference: http://blog.csdn.net/u010167269/article/details/52073136
require 'nn'
-- Demo: clone BEFORE getParameters() — the share is broken by flattening.
local convNet = nn.Sequential()
convNet:add(nn.Linear(2, 3))
-- convNet2 shares convNet's parameter/gradient tensors at this point.
local convNet2 = convNet:clone('weight', 'bias', 'gradWeight', 'gradBias')
-- getParameters() flattens all parameters into one new contiguous tensor;
-- NOTE(review): Torch documents it as call-once — presumably convNet's
-- modules now view the flat storage, detaching them from convNet2's copies.
local params, gradParams = convNet:getParameters()
print(params)
-- Zero convNet's (flattened) parameters.
params:fill(0)
-- Calling getParameters() a second time (re-declares/shadows the locals);
-- prints the zeroed values for convNet.
local params, gradParams = convNet:getParameters()
print(params)
-- convNet2 flattens its OWN (no longer shared) tensors, so its values are
-- expected to be unaffected by the fill(0) above — the share was broken.
local params2, gradParams2 = convNet2:getParameters()
print(params2)
-- Pay attention to getParameters(): its call order relative to clone() matters.
-- In the next snippet (clone after getParameters), convNet2 DOES change when convNet changes.
require 'nn'
-- Demo: clone AFTER getParameters() — the share survives.
local convNet = nn.Sequential()
convNet:add(nn.Linear(2, 3))
convNet:add(nn.Tanh())
-- Flatten first: convNet's modules now view one contiguous parameter tensor.
local params, gradParams = convNet:getParameters()
-- Cloning after flattening shares the already-flattened tensors, so both
-- networks view the same storage.
local convNet2 = convNet:clone('weight', 'bias', 'gradWeight', 'gradBias')
-- Zeroing through the flat view is therefore visible in the clone:
params:fill(0)
-- Expected to print an all-zero weight matrix for convNet2's first layer.
print(convNet2:get(1).weight)