List comprehension
self.fc = nn.Sequential(
    BasicBlock(input_dim, hidden_dim),
    # unpack the list comprehension to repeat the block hidden_layers times (e.g. 1024*1024 fully connected layers)
    *[BasicBlock(hidden_dim, hidden_dim) for _ in range(hidden_layers)],
    nn.Linear(hidden_dim, output_dim)
)
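A minimal self-contained sketch of how this pattern can be used end to end, assuming BasicBlock is a plain Linear + ReLU block (BasicBlock, Classifier, and the default dimensions below are illustrative assumptions, not taken from the snippet above):

import torch.nn as nn

class BasicBlock(nn.Module):
    # assumed definition: one fully connected layer followed by ReLU
    def __init__(self, input_dim, output_dim):
        super().__init__()
        self.block = nn.Sequential(nn.Linear(input_dim, output_dim), nn.ReLU())

    def forward(self, x):
        return self.block(x)

class Classifier(nn.Module):
    def __init__(self, input_dim, output_dim, hidden_layers=2, hidden_dim=1024):
        super().__init__()
        self.fc = nn.Sequential(
            BasicBlock(input_dim, hidden_dim),
            # the * operator unpacks the list comprehension into hidden_layers separate modules
            *[BasicBlock(hidden_dim, hidden_dim) for _ in range(hidden_layers)],
            nn.Linear(hidden_dim, output_dim)
        )

    def forward(self, x):
        return self.fc(x)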
repeat()
https://blog.csdn.net/m0_46412065/article/details/128043821
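The linked post covers torch.Tensor.repeat(); a quick example of how the repeat counts map to dimensions:

import torch

x = torch.tensor([1, 2, 3])   # shape (3,)
x.repeat(2)                   # tensor([1, 2, 3, 1, 2, 3]), shape (6,)
x.repeat(4, 2)                # shape (4, 6): tile 4 times along dim 0, 2 times along dim 1
x.repeat(4, 2, 1)             # shape (4, 2, 3): extra counts prepend new dimensions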
view()
# N = batch size, L = flattened feature length, H_in = feature size per frame
N, L, H_in = 420, 39 * 21, 39
x = torch.randn(N, L)
x.view(N, -1, H_in)  # (420, 21, 39)
permute()
https://www.jb51.net/article/246243.htm
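The linked article covers permute(); a short example of reordering dimensions (unlike view(), permute() moves axes and returns a non-contiguous view):

import torch

x = torch.randn(420, 21, 39)    # (N, L, H_in)
y = x.permute(0, 2, 1)          # (420, 39, 21): swap the last two dimensions
# call .contiguous() before .view() on a permuted tensor
y.contiguous().view(420, -1)    # (420, 819)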
BatchNorm1d/BatchNorm2d
# BatchNorm1d expects 2D or 3D input, i.e. (N, C) or (N, C, L)
# BatchNorm2d expects 4D input, i.e. (N, C, H, W)
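A minimal sketch pairing each layer with the input shapes it accepts:

import torch
import torch.nn as nn

bn1d = nn.BatchNorm1d(num_features=64)
bn1d(torch.randn(32, 64))          # (N, C)
bn1d(torch.randn(32, 64, 100))     # (N, C, L)

bn2d = nn.BatchNorm2d(num_features=3)
bn2d(torch.randn(32, 3, 28, 28))   # (N, C, H, W)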
RNN/LSTM/GRU
# RNN 1982
model_base = nn.RNN(
    input_size=4,
    hidden_size=256,
    # number of stacked recurrent layers
    num_layers=1,
    # whether to use a bidirectional RNN
    bidirectional=False
)
x = torch.randn(size=(365, 8, 4))
# With the default batch_first=False, the input layout is (L, N, H_in):
# L: sequence length, e.g. number of hours, days, or months
# N: batch size
# H_in: feature size of a single time step
output, h_n = model_base(x)
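# output stacks the top-layer hidden state at every time step: (L, N, H_out) = (365, 8, 256)
# h_n is the final hidden state of each layer: (num_layers, N, H_out) = (1, 8, 256)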
# LSTM 1997
model_lstm = nn.LSTM(
    input_size=4,
    hidden_size=256,
    num_layers=1,
    bidirectional=False
)
output, (h_n, c_n) = model_lstm(x)
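# The LSTM additionally returns the cell state c_n, with the same shape as h_n: (num_layers, N, H_out)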
# GRU 2014
model_gru = nn.GRU(
    input_size=4,
    hidden_size=256,
    num_layers=1,
    bidirectional=False
)
output, h_n = model_gru(x)
Counting model parameters
print("base's parameters : {}".format( sum([p.numel() for p in model_base.parameters() if p.requires_grad])))