Preface
This post records some PyTorch tensor-manipulation code, mainly around nn.LSTM: output shapes, squeeze, stack/transpose, and flatten_parameters.
Content
import torch
import torch.nn as nn

a = torch.randn([1, 2, 512])   # shape: (batch=1, seq_len=2, features=512)
print(a.size(2))               # length of dimension 2
print(a)
'''
512
tensor([[[-1.1323, 0.3696, -1.9115, ..., 0.4906, 1.0524, -0.2123],
[ 1.1486, 1.3612, 0.9830, ..., -1.4560, -0.3854, -1.1252]]])
'''
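As a side note, size() and shape report the same thing; a minimal sketch of the common shape queries (the variable name t is mine):
import torch

t = torch.randn([1, 2, 512])
print(t.size())    # torch.Size([1, 2, 512])
print(t.shape)     # same as size()
print(t.size(2))   # 512, the length of the last dimension
print(t.dim())     # 3, the number of dimensions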
lstm = nn.LSTM(
    input_size=512,
    hidden_size=512,
    batch_first=True,
)
print(lstm)
# LSTM(512, 512, batch_first=True)
b, (hn, cn) = lstm(a)   # b: hidden state at every step; hn/cn: final hidden/cell state
print(b)
print(b.size(0))
print(b.view(b.size(0), -1))   # keep the batch dim, flatten the rest: (1, 2*512)
'''
tensor([[[ 1.1483e-01, -2.2099e-01, -3.3913e-02, ..., -2.8261e-02,
8.0818e-02, -4.9915e-02],
[-6.6520e-02, -2.8783e-01, 7.3646e-02, ..., 1.4219e-04,
3.5501e-01, 5.9964e-02]]], grad_fn=<TransposeBackward0>)
1
tensor([[ 1.1483e-01, -2.2099e-01, -3.3913e-02, ..., 1.4219e-04,
3.5501e-01, 5.9964e-02]], grad_fn=<ViewBackward0>)
'''
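With batch_first=True the shapes line up as follows; a minimal sketch to verify them (the assert checks are mine, not from the original run):
import torch
import torch.nn as nn

lstm = nn.LSTM(input_size=512, hidden_size=512, batch_first=True)
a = torch.randn(1, 2, 512)                 # (batch, seq_len, input_size)
b, (hn, cn) = lstm(a)

assert b.shape == (1, 2, 512)              # (batch, seq_len, hidden_size)
assert hn.shape == (1, 1, 512)             # (num_layers, batch, hidden_size)
assert cn.shape == (1, 1, 512)             # same layout as hn
assert b.view(b.size(0), -1).shape == (1, 1024)   # batch kept, rest flattened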
print(hn)
print(hn.shape)
hn = hn.squeeze(0)   # squeeze is not in-place, so reassign to keep the result
print(hn)
print(hn.shape)
# squeeze(0) removes dimension 0, but only if that dimension has size 1.
'''
tensor([[[-1.6775e-02, -1.2802e-01, -1.3298e-01, 1.6421e-02, -1.8329e-02,
-4.3580e-02, 1.4466e-01, -4.0687e-02, 2.9165e-02, 1.5078e-01,
-1.8954e-01, 1.7006e-01, 2.7011e-01, -1.6784e-01, 1.0218e-01,
-1.0139e-02, -1.6341e-01, 3.6628e-02, 1.2854e-02, 9.4384e-02,
4.2753e-02, 1.6124e-01, -3.8185e-02, -2.3212e-01, 4.5247e-02,
-1.3699e-01, 3.7054e-02, 1.1712e-01, -1.8006e-01, 2.4981e-01,
-2.2181e-01, -2.1069e-02, 2.3976e-01, 6.8467e-02, 2.9459e-02,
9.2270e-02, 8.4774e-02, -7.3295e-03, 9.5728e-02, 2.1479e-02,
7.4260e-02, 1.4771e-01, -3.2441e-01, -4.3296e-02, -1.0209e-02,
-1.1378e-02, 2.6153e-01, -1.5116e-01, 1.4122e-02, 2.5741e-02,
6.1708e-02, -1.8396e-01, -1.0477e-01, 1.1969e-01, 1.2648e-01,
-1.0754e-02, 1.1686e-01, 1.3573e-01, 6.9055e-02, -1.2007e-01,
8.1360e-03, 1.4355e-01, 1.2352e-01, -5.4311e-02, -3.8756e-02,
3.9205e-03, -9.9260e-02, 4.7536e-02, 5.1142e-02, 1.8423e-01,
-1.0731e-01, 1.1135e-01, -1.3549e-01, 1.7294e-01, 5.0986e-02,
-5.8934e-02, -3.4282e-01, -1.1393e-01, -4.6791e-02, 1.1893e-01,
-6.2751e-02, 1.2634e-01, -7.9654e-04, -1.0954e-01, 1.0032e-01,
-1.0314e-01, -1.2881e-01, 7.8082e-02, -8.4047e-02, 2.1715e-02,
2.3799e-02, 2.3186e-01, 8.1507e-02, -8.3323e-02, -1.4407e-01,
8.3925e-02, 7.2382e-02, -4.1981e-03, -7.1413e-02, -1.4803e-01,
-1.3128e-01, -1.8841e-01, 2.1212e-02, 5.0904e-02, -3.0373e-01,
-1.0145e-01, -5.2514e-02, -1.6187e-01, 2.4256e-02, -1.9084e-01,
1.0873e-01, 2.9643e-01, -1.2656e-01, -5.0081e-02, -1.7037e-01,
4.3655e-02, -4.5883e-02, 1.6412e-01, 2.8270e-02, 1.4275e-02,
1.4415e-01, 8.1914e-02, 3.6579e-01, 1.9107e-01, 3.4559e-02,
3.0687e-02, 8.3171e-02, 2.4629e-01, -1.1412e-01, -1.7214e-01,
2.4416e-01, 3.4949e-02, 1.6710e-01, -1.5360e-01, 1.9276e-03,
9.7000e-02, -8.0130e-03, -6.1881e-02, -5.3881e-02, -5.5072e-02,
-1.0772e-01, 2.1949e-01, 2.6226e-03, -2.0730e-01, -1.0347e-01,
-1.1284e-01, -4.3615e-02, 6.7544e-02, 9.0735e-02, -6.0498e-02,
7.6948e-02, 4.8204e-02, 2.9848e-01, 1.0635e-01, -1.3626e-01,
4.9391e-02, -6.8483e-02, 3.4272e-01, 5.5965e-03, -1.0638e-01,
-2.3370e-01, -4.0659e-03, -1.6091e-01, -1.6466e-01, 3.3220e-02,
6.5826e-02, -2.3473e-01, 7.4325e-02, 1.3120e-02, 1.4068e-01,
4.2596e-02, -1.9515e-01, -2.4903e-02, -6.1892e-02, 9.8676e-02,
-5.4812e-02, 1.5949e-01, -7.3396e-02, -4.5775e-02, -3.5854e-02,
5.6608e-02, 5.5453e-02, -5.5210e-02, -2.6580e-01, 2.4316e-02,
-1.0547e-01, 1.6184e-01, -1.4431e-01, -1.7595e-01, -2.6681e-02,
6.1187e-02, 1.8197e-01, -3.4382e-02, -2.4445e-01, -1.2798e-02,
2.0844e-01, 1.0961e-02, 2.3830e-01, 1.5412e-02, 4.7803e-02,
-4.9469e-02, 8.6480e-02, 1.0502e-01, 1.3683e-01, -1.0906e-01,
1.8315e-01, -1.6926e-01, 6.7685e-02, 1.4302e-01, 2.0200e-01,
-1.6581e-02, 2.3473e-02, -5.4680e-02, -9.1343e-02, -1.1577e-01,
-1.7815e-01, -2.4094e-03, -2.5826e-01, -1.1775e-01, 5.1905e-02,
3.6086e-02, -1.0750e-01, -1.2831e-01, 8.3533e-02, -1.4438e-01,
1.2324e-01, -6.4327e-02, -3.0302e-02, 7.1690e-02, -3.6331e-02,
1.6484e-01, -1.9270e-01, -1.1841e-01, 4.5019e-02, -4.9667e-02,
-1.8155e-01, 4.6446e-02, -4.9312e-02, 1.0905e-01, 2.0723e-02,
3.1146e-01, -1.0456e-01, -4.3192e-02, -5.3976e-02, 3.8233e-02,
8.8835e-02, -5.4876e-02, -1.2498e-01, 8.2719e-04, 9.4209e-02,
-1.1003e-01, -2.6216e-04, 6.7120e-02, -1.3812e-01, 8.5963e-02,
1.1459e-01, 2.4391e-01, -8.1725e-02, 2.8770e-02, 1.7611e-01,
3.3859e-03, -4.2708e-02, 3.2480e-01, -7.4312e-02, -1.8125e-01,
-1.6728e-02, -2.2103e-01, 1.1631e-01, 3.0838e-01, -1.4409e-01,
3.0490e-02, -1.7704e-02, -2.5283e-02, -7.8298e-02, 8.4382e-02,
1.5057e-01, 3.0552e-02, -2.1087e-01, 3.6355e-02, 9.8925e-02,
-1.7509e-02, 8.3993e-02, 4.7230e-02, -9.8825e-02, -1.7501e-01,
-1.1874e-02, 3.4072e-03, -5.7396e-03, 3.3018e-02, 2.0779e-03,
-6.9407e-02, -1.3767e-01, -6.6382e-02, 4.7882e-02, 2.3917e-02,
-4.6292e-02, 9.7735e-02, 2.0505e-01, 1.2554e-01, 7.7993e-02,
-1.0793e-01, 8.8213e-02, 1.2033e-01, 2.1842e-01, 6.4340e-03,
2.4654e-02, 3.5552e-03, -1.4628e-01, 2.0085e-02, 4.5753e-02,
4.3009e-02, -9.7676e-02, 4.6977e-02, -2.2229e-01, -1.4192e-02,
-1.0132e-01, 2.9819e-01, -1.0419e-01, 2.0494e-01, 3.6330e-02,
2.7305e-01, -8.9728e-02, 1.2522e-01, -6.3107e-02, 1.3669e-01,
1.8770e-02, -2.0930e-01, -2.3184e-01, 2.4479e-02, -8.2672e-02,
1.6979e-01, 1.0288e-01, -1.0814e-01, -7.1540e-02, -2.0300e-01,
-2.9115e-02, -4.4196e-02, 3.0109e-02, -1.0013e-01, -1.8419e-02,
-1.1248e-01, -4.2762e-02, -1.2878e-01, -2.4389e-02, 3.9541e-02,
-2.3708e-02, -6.2289e-02, 1.1509e-01, -1.0722e-02, -9.7608e-02,
2.1784e-01, 8.2911e-02, -5.4663e-02, 8.7219e-02, 2.2724e-01,
9.9705e-03, 5.5121e-02, 1.7778e-02, -2.2188e-02, -2.2488e-01,
-1.1970e-01, -1.6762e-01, 3.7462e-01, -1.6029e-01, 2.6230e-01,
5.5045e-02, 8.5112e-02, 1.6099e-01, -1.2228e-01, 1.7173e-01,
1.4112e-01, -2.4059e-01, -8.8582e-02, -3.5767e-02, 1.2705e-01,
7.8592e-02, 1.9212e-01, 2.6424e-02, -9.0852e-02, 2.3156e-02,
1.1583e-01, 1.2757e-01, 8.8223e-02, 1.7917e-01, -3.6271e-02,
-1.0862e-01, -1.4201e-01, 6.0964e-02, 2.7103e-01, 5.8236e-02,
-8.9240e-02, 2.6828e-01, 4.9628e-02, 9.9134e-03, -2.4928e-01,
1.1214e-01, 1.3677e-01, 6.3401e-02, 8.1496e-02, -1.8777e-01,
-7.0221e-02, -1.9381e-01, 2.1204e-02, 2.8126e-01, 1.9171e-02,
2.3216e-01, -7.3388e-02, -2.2626e-01, -1.8087e-02, 1.1417e-02,
2.7308e-03, 1.4678e-01, -3.2881e-02, -1.5486e-01, 2.1136e-01,
5.0693e-02, -3.5543e-02, -5.2943e-02, -3.1434e-02, 8.3841e-02,
1.8363e-02, -1.1190e-01, -1.8980e-01, 1.3968e-01, 2.0669e-01,
-1.8052e-01, 2.1495e-02, 1.7563e-02, -4.1377e-02, 6.0014e-02,
-2.5489e-02, 1.8321e-01, -1.0703e-01, 2.8876e-02, 2.3822e-01,
1.1765e-01, -6.9066e-02, -6.3046e-02, -8.4153e-02, 6.2055e-02,
-1.7554e-01, 9.7331e-03, 1.2741e-01, -1.4436e-01, -2.1517e-02,
6.3535e-02, 2.0248e-01, 1.6203e-01, 1.8814e-01, -8.0166e-02,
-1.0553e-01, 1.3414e-02, 5.0031e-02, -2.7672e-01, -9.3413e-02,
-3.0474e-01, -4.3589e-02, 4.4321e-02, -2.1993e-01, -1.3792e-01,
-3.3862e-02, -9.6375e-04, 1.8912e-01, 8.3477e-02, -1.4530e-01,
1.6552e-01, 2.0678e-01, 6.0893e-02, -2.0629e-01, 4.7654e-02,
1.3139e-01, -2.8164e-03, -2.7888e-03, 1.9645e-01, 4.6857e-02,
1.4539e-01, -2.8893e-01, -6.1499e-02, -6.2819e-02, -2.1821e-01,
5.5194e-02, -6.8829e-03, -4.0185e-02, 1.0161e-01, -1.1229e-01,
9.4012e-04, 8.6136e-02, 1.5603e-01, 5.7312e-02, 1.8660e-01,
-8.9664e-02, 5.4440e-02, -2.7999e-01, -7.4824e-02, 1.2205e-01,
7.0123e-02, 4.6657e-02, -1.7813e-01, -2.2433e-01, -2.9011e-01,
-5.5910e-02, -2.7735e-03, -1.3472e-02, -1.0402e-01, -1.4841e-01,
3.2440e-01, 1.6229e-01, 1.6840e-01, 1.6917e-01, -1.1784e-01,
1.1972e-01, -5.0820e-02]]], grad_fn=<StackBackward0>)
torch.Size([1, 1, 512])
tensor([[-1.6775e-02, -1.2802e-01, -1.3298e-01, 1.6421e-02, -1.8329e-02,
-4.3580e-02, 1.4466e-01, -4.0687e-02, 2.9165e-02, 1.5078e-01,
-1.8954e-01, 1.7006e-01, 2.7011e-01, -1.6784e-01, 1.0218e-01,
-1.0139e-02, -1.6341e-01, 3.6628e-02, 1.2854e-02, 9.4384e-02,
4.2753e-02, 1.6124e-01, -3.8185e-02, -2.3212e-01, 4.5247e-02,
-1.3699e-01, 3.7054e-02, 1.1712e-01, -1.8006e-01, 2.4981e-01,
-2.2181e-01, -2.1069e-02, 2.3976e-01, 6.8467e-02, 2.9459e-02,
9.2270e-02, 8.4774e-02, -7.3295e-03, 9.5728e-02, 2.1479e-02,
7.4260e-02, 1.4771e-01, -3.2441e-01, -4.3296e-02, -1.0209e-02,
-1.1378e-02, 2.6153e-01, -1.5116e-01, 1.4122e-02, 2.5741e-02,
6.1708e-02, -1.8396e-01, -1.0477e-01, 1.1969e-01, 1.2648e-01,
-1.0754e-02, 1.1686e-01, 1.3573e-01, 6.9055e-02, -1.2007e-01,
8.1360e-03, 1.4355e-01, 1.2352e-01, -5.4311e-02, -3.8756e-02,
3.9205e-03, -9.9260e-02, 4.7536e-02, 5.1142e-02, 1.8423e-01,
-1.0731e-01, 1.1135e-01, -1.3549e-01, 1.7294e-01, 5.0986e-02,
-5.8934e-02, -3.4282e-01, -1.1393e-01, -4.6791e-02, 1.1893e-01,
-6.2751e-02, 1.2634e-01, -7.9654e-04, -1.0954e-01, 1.0032e-01,
-1.0314e-01, -1.2881e-01, 7.8082e-02, -8.4047e-02, 2.1715e-02,
2.3799e-02, 2.3186e-01, 8.1507e-02, -8.3323e-02, -1.4407e-01,
8.3925e-02, 7.2382e-02, -4.1981e-03, -7.1413e-02, -1.4803e-01,
-1.3128e-01, -1.8841e-01, 2.1212e-02, 5.0904e-02, -3.0373e-01,
-1.0145e-01, -5.2514e-02, -1.6187e-01, 2.4256e-02, -1.9084e-01,
1.0873e-01, 2.9643e-01, -1.2656e-01, -5.0081e-02, -1.7037e-01,
4.3655e-02, -4.5883e-02, 1.6412e-01, 2.8270e-02, 1.4275e-02,
1.4415e-01, 8.1914e-02, 3.6579e-01, 1.9107e-01, 3.4559e-02,
3.0687e-02, 8.3171e-02, 2.4629e-01, -1.1412e-01, -1.7214e-01,
2.4416e-01, 3.4949e-02, 1.6710e-01, -1.5360e-01, 1.9276e-03,
9.7000e-02, -8.0130e-03, -6.1881e-02, -5.3881e-02, -5.5072e-02,
-1.0772e-01, 2.1949e-01, 2.6226e-03, -2.0730e-01, -1.0347e-01,
-1.1284e-01, -4.3615e-02, 6.7544e-02, 9.0735e-02, -6.0498e-02,
7.6948e-02, 4.8204e-02, 2.9848e-01, 1.0635e-01, -1.3626e-01,
4.9391e-02, -6.8483e-02, 3.4272e-01, 5.5965e-03, -1.0638e-01,
-2.3370e-01, -4.0659e-03, -1.6091e-01, -1.6466e-01, 3.3220e-02,
6.5826e-02, -2.3473e-01, 7.4325e-02, 1.3120e-02, 1.4068e-01,
4.2596e-02, -1.9515e-01, -2.4903e-02, -6.1892e-02, 9.8676e-02,
-5.4812e-02, 1.5949e-01, -7.3396e-02, -4.5775e-02, -3.5854e-02,
5.6608e-02, 5.5453e-02, -5.5210e-02, -2.6580e-01, 2.4316e-02,
-1.0547e-01, 1.6184e-01, -1.4431e-01, -1.7595e-01, -2.6681e-02,
6.1187e-02, 1.8197e-01, -3.4382e-02, -2.4445e-01, -1.2798e-02,
2.0844e-01, 1.0961e-02, 2.3830e-01, 1.5412e-02, 4.7803e-02,
-4.9469e-02, 8.6480e-02, 1.0502e-01, 1.3683e-01, -1.0906e-01,
1.8315e-01, -1.6926e-01, 6.7685e-02, 1.4302e-01, 2.0200e-01,
-1.6581e-02, 2.3473e-02, -5.4680e-02, -9.1343e-02, -1.1577e-01,
-1.7815e-01, -2.4094e-03, -2.5826e-01, -1.1775e-01, 5.1905e-02,
3.6086e-02, -1.0750e-01, -1.2831e-01, 8.3533e-02, -1.4438e-01,
1.2324e-01, -6.4327e-02, -3.0302e-02, 7.1690e-02, -3.6331e-02,
1.6484e-01, -1.9270e-01, -1.1841e-01, 4.5019e-02, -4.9667e-02,
-1.8155e-01, 4.6446e-02, -4.9312e-02, 1.0905e-01, 2.0723e-02,
3.1146e-01, -1.0456e-01, -4.3192e-02, -5.3976e-02, 3.8233e-02,
8.8835e-02, -5.4876e-02, -1.2498e-01, 8.2719e-04, 9.4209e-02,
-1.1003e-01, -2.6216e-04, 6.7120e-02, -1.3812e-01, 8.5963e-02,
1.1459e-01, 2.4391e-01, -8.1725e-02, 2.8770e-02, 1.7611e-01,
3.3859e-03, -4.2708e-02, 3.2480e-01, -7.4312e-02, -1.8125e-01,
-1.6728e-02, -2.2103e-01, 1.1631e-01, 3.0838e-01, -1.4409e-01,
3.0490e-02, -1.7704e-02, -2.5283e-02, -7.8298e-02, 8.4382e-02,
1.5057e-01, 3.0552e-02, -2.1087e-01, 3.6355e-02, 9.8925e-02,
-1.7509e-02, 8.3993e-02, 4.7230e-02, -9.8825e-02, -1.7501e-01,
-1.1874e-02, 3.4072e-03, -5.7396e-03, 3.3018e-02, 2.0779e-03,
-6.9407e-02, -1.3767e-01, -6.6382e-02, 4.7882e-02, 2.3917e-02,
-4.6292e-02, 9.7735e-02, 2.0505e-01, 1.2554e-01, 7.7993e-02,
-1.0793e-01, 8.8213e-02, 1.2033e-01, 2.1842e-01, 6.4340e-03,
2.4654e-02, 3.5552e-03, -1.4628e-01, 2.0085e-02, 4.5753e-02,
4.3009e-02, -9.7676e-02, 4.6977e-02, -2.2229e-01, -1.4192e-02,
-1.0132e-01, 2.9819e-01, -1.0419e-01, 2.0494e-01, 3.6330e-02,
2.7305e-01, -8.9728e-02, 1.2522e-01, -6.3107e-02, 1.3669e-01,
1.8770e-02, -2.0930e-01, -2.3184e-01, 2.4479e-02, -8.2672e-02,
1.6979e-01, 1.0288e-01, -1.0814e-01, -7.1540e-02, -2.0300e-01,
-2.9115e-02, -4.4196e-02, 3.0109e-02, -1.0013e-01, -1.8419e-02,
-1.1248e-01, -4.2762e-02, -1.2878e-01, -2.4389e-02, 3.9541e-02,
-2.3708e-02, -6.2289e-02, 1.1509e-01, -1.0722e-02, -9.7608e-02,
2.1784e-01, 8.2911e-02, -5.4663e-02, 8.7219e-02, 2.2724e-01,
9.9705e-03, 5.5121e-02, 1.7778e-02, -2.2188e-02, -2.2488e-01,
-1.1970e-01, -1.6762e-01, 3.7462e-01, -1.6029e-01, 2.6230e-01,
5.5045e-02, 8.5112e-02, 1.6099e-01, -1.2228e-01, 1.7173e-01,
1.4112e-01, -2.4059e-01, -8.8582e-02, -3.5767e-02, 1.2705e-01,
7.8592e-02, 1.9212e-01, 2.6424e-02, -9.0852e-02, 2.3156e-02,
1.1583e-01, 1.2757e-01, 8.8223e-02, 1.7917e-01, -3.6271e-02,
-1.0862e-01, -1.4201e-01, 6.0964e-02, 2.7103e-01, 5.8236e-02,
-8.9240e-02, 2.6828e-01, 4.9628e-02, 9.9134e-03, -2.4928e-01,
1.1214e-01, 1.3677e-01, 6.3401e-02, 8.1496e-02, -1.8777e-01,
-7.0221e-02, -1.9381e-01, 2.1204e-02, 2.8126e-01, 1.9171e-02,
2.3216e-01, -7.3388e-02, -2.2626e-01, -1.8087e-02, 1.1417e-02,
2.7308e-03, 1.4678e-01, -3.2881e-02, -1.5486e-01, 2.1136e-01,
5.0693e-02, -3.5543e-02, -5.2943e-02, -3.1434e-02, 8.3841e-02,
1.8363e-02, -1.1190e-01, -1.8980e-01, 1.3968e-01, 2.0669e-01,
-1.8052e-01, 2.1495e-02, 1.7563e-02, -4.1377e-02, 6.0014e-02,
-2.5489e-02, 1.8321e-01, -1.0703e-01, 2.8876e-02, 2.3822e-01,
1.1765e-01, -6.9066e-02, -6.3046e-02, -8.4153e-02, 6.2055e-02,
-1.7554e-01, 9.7331e-03, 1.2741e-01, -1.4436e-01, -2.1517e-02,
6.3535e-02, 2.0248e-01, 1.6203e-01, 1.8814e-01, -8.0166e-02,
-1.0553e-01, 1.3414e-02, 5.0031e-02, -2.7672e-01, -9.3413e-02,
-3.0474e-01, -4.3589e-02, 4.4321e-02, -2.1993e-01, -1.3792e-01,
-3.3862e-02, -9.6375e-04, 1.8912e-01, 8.3477e-02, -1.4530e-01,
1.6552e-01, 2.0678e-01, 6.0893e-02, -2.0629e-01, 4.7654e-02,
1.3139e-01, -2.8164e-03, -2.7888e-03, 1.9645e-01, 4.6857e-02,
1.4539e-01, -2.8893e-01, -6.1499e-02, -6.2819e-02, -2.1821e-01,
5.5194e-02, -6.8829e-03, -4.0185e-02, 1.0161e-01, -1.1229e-01,
9.4012e-04, 8.6136e-02, 1.5603e-01, 5.7312e-02, 1.8660e-01,
-8.9664e-02, 5.4440e-02, -2.7999e-01, -7.4824e-02, 1.2205e-01,
7.0123e-02, 4.6657e-02, -1.7813e-01, -2.2433e-01, -2.9011e-01,
-5.5910e-02, -2.7735e-03, -1.3472e-02, -1.0402e-01, -1.4841e-01,
3.2440e-01, 1.6229e-01, 1.6840e-01, 1.6917e-01, -1.1784e-01,
1.1972e-01, -5.0820e-02]], grad_fn=<SqueezeBackward1>)
torch.Size([1, 512])
'''
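A minimal sketch of squeeze's behavior, including the no-op case and its inverse unsqueeze (the toy tensor x is mine):
import torch

x = torch.zeros(1, 1, 512)
print(x.squeeze(0).shape)               # torch.Size([1, 512]): dim 0 had size 1
print(x.squeeze(2).shape)               # torch.Size([1, 1, 512]): dim 2 is 512, no-op
print(x.squeeze().shape)                # torch.Size([512]): all size-1 dims removed
print(x.squeeze(0).unsqueeze(0).shape)  # torch.Size([1, 1, 512]): unsqueeze undoes it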
bl = []
bl.append(b)
bl.append(b)
print(bl)
bl = torch.stack(bl, dim=0)   # stack inserts a brand-new dimension at dim=0
print(bl)
print(bl.shape)
'''
[tensor([[[ 1.1483e-01, -2.2099e-01, -3.3913e-02, ..., -2.8261e-02,
8.0818e-02, -4.9915e-02],
[-6.6520e-02, -2.8783e-01, 7.3646e-02, ..., 1.4219e-04,
3.5501e-01, 5.9964e-02]]], grad_fn=<TransposeBackward0>), tensor([[[ 1.1483e-01, -2.2099e-01, -3.3913e-02, ..., -2.8261e-02,
8.0818e-02, -4.9915e-02],
[-6.6520e-02, -2.8783e-01, 7.3646e-02, ..., 1.4219e-04,
3.5501e-01, 5.9964e-02]]], grad_fn=<TransposeBackward0>)]
tensor([[[[ 1.1483e-01, -2.2099e-01, -3.3913e-02, ..., -2.8261e-02,
8.0818e-02, -4.9915e-02],
[-6.6520e-02, -2.8783e-01, 7.3646e-02, ..., 1.4219e-04,
3.5501e-01, 5.9964e-02]]],
[[[ 1.1483e-01, -2.2099e-01, -3.3913e-02, ..., -2.8261e-02,
8.0818e-02, -4.9915e-02],
[-6.6520e-02, -2.8783e-01, 7.3646e-02, ..., 1.4219e-04,
3.5501e-01, 5.9964e-02]]]], grad_fn=<StackBackward0>)
torch.Size([2, 1, 2, 512])
'''
tp_bl = bl.transpose_(0, 1)   # in-place: transpose_ mutates bl and returns it, so tp_bl is bl
print(tp_bl)
print(tp_bl.shape)
'''
tensor([[[[ 1.1483e-01, -2.2099e-01, -3.3913e-02, ..., -2.8261e-02,
8.0818e-02, -4.9915e-02],
[-6.6520e-02, -2.8783e-01, 7.3646e-02, ..., 1.4219e-04,
3.5501e-01, 5.9964e-02]],
[[ 1.1483e-01, -2.2099e-01, -3.3913e-02, ..., -2.8261e-02,
8.0818e-02, -4.9915e-02],
[-6.6520e-02, -2.8783e-01, 7.3646e-02, ..., 1.4219e-04,
3.5501e-01, 5.9964e-02]]]], grad_fn=<TransposeBackward1>)
torch.Size([1, 2, 2, 512])
'''
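The contrast with torch.cat is worth keeping in mind: stack creates a new dimension, while cat joins along an existing one. A minimal sketch (the toy tensor t is mine):
import torch

t = torch.randn(1, 2, 512)
stacked = torch.stack([t, t], dim=0)   # new leading dim: (2, 1, 2, 512)
catted = torch.cat([t, t], dim=0)      # existing dim grows: (2, 2, 512)
print(stacked.shape)   # torch.Size([2, 1, 2, 512])
print(catted.shape)    # torch.Size([2, 2, 512])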
lstm.flatten_parameters()
print(lstm)
# flatten_parameters() does not change what the network computes; it only
# repacks the weights into one contiguous block of memory (which cuDNN wants).
# LSTM(512, 512, batch_first=True)
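In practice flatten_parameters() is typically called at the top of forward() to silence the "weights are not part of single contiguous chunk of memory" warning after the module has been moved to GPU or wrapped in nn.DataParallel. A minimal sketch (the Encoder class and its sizes are mine, not from the original):
import torch
import torch.nn as nn

class Encoder(nn.Module):
    def __init__(self):
        super().__init__()
        self.lstm = nn.LSTM(input_size=512, hidden_size=512, batch_first=True)

    def forward(self, x):
        # Re-compact the weights in case they were scattered (e.g. by .to(device)
        # or nn.DataParallel); a no-op on CPU, and behavior is unchanged either way.
        self.lstm.flatten_parameters()
        out, (hn, cn) = self.lstm(x)
        return out, hn

enc = Encoder()
out, hn = enc(torch.randn(1, 2, 512))
print(out.shape, hn.shape)   # torch.Size([1, 2, 512]) torch.Size([1, 1, 512])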