import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
# Demo: collect per-score attention tensors in a list, concatenate along the
# last axis, then normalise with softmax. With a single (3, 1) tensor the
# concatenation is a no-op, and softmax over a size-1 axis yields all ones.
attns = []
attni = torch.tensor([[-0.3], [-0.4], [5]])
print(attni)
attns.append(attni)
print(attns)
merged = torch.cat(attns, dim=-1)
print(merged)
print(merged.shape)
print(F.softmax(merged, dim=-1))
print(F.softmax(merged, dim=-1).shape)
# ------------
import numpy as np
import torch

# Demo: wrap one 2-D tensor in a list, stack it to gain a leading batch axis,
# then swap the first two axes: (4, 3) -> stack -> (1, 4, 3) -> transpose -> (4, 1, 3).
temps = []
hidi = torch.tensor([[1, 5, 6], [6, 2, 5], [8, 0, 8], [2, 3, 0]])
print(hidi)
print(hidi.shape)
temps.append(hidi)
hids = torch.stack(temps, dim=0)
print(hids)
print(hids.shape)
hids = torch.stack(temps, dim=0).transpose(0, 1)
print(hids)
print(hids.shape)
# ----------
import numpy as np
import torch

# Demo: torch.stack always inserts a NEW axis (a 2-D matrix becomes 3-D no
# matter which dim is chosen), while torch.cat joins along an EXISTING axis
# and, with a single-element list, returns the tensor unchanged.
a = []
b = torch.tensor([[1, 2], [3, 4], [5, 6]])
a.append(b)
print(torch.stack(a, dim=2))
c = []
c.append(b)
joined = torch.cat(c, dim=-1)
print(joined)
print(joined.shape)
# -------------
import torch

# Demo: a plain python list holding a float tensor, and the types involved.
# NOTE: torch.Tensor(...) (capital T) always produces float32, unlike
# torch.tensor(...), which would infer int64 from these integer literals.
hidi = torch.Tensor([[3, 2], [2, 5], [2, 2], [8, 8]])
temps = [hidi]
print(temps)
print(type(temps))
print(temps[-1])
print(type(temps[-1]))
# --------
import numpy as np
import torch

# Demo: fancy indexing a tensor with a numpy integer array.
# Column 0 of pos_train is used as a row index into out_s, gathering one
# row of out_s per sample (result shape: (5, 3)).
# NOTE(review): the trailing comments suggest the real data is (40252, 2)
# and (31092, 64) — these literals are a toy stand-in; confirm with caller.
pos_train = np.array([[1, 1520], [0, 1666], [1, 8250], [2, 9210], [1, 1777]])  # (40252,2)
out_s = torch.Tensor([[-0.1523, 0.4822, 0.1230],
                      [-0.0937, 1.5249, 0.6794],
                      [-0.1603, 1.4735, 0.4970]])  # (31092,64)
rows = pos_train[:, 0]
print(out_s[rows])
# ------------
import torch.nn as nn
import torch

# Demo: applying nn.Linear layers to a small (2, 3) batch.
in_dims = 3
n_hid = 2

# A ModuleList registers its layers as sub-modules so their parameters are
# tracked. Use the `nn` alias consistently (was `torch.nn.ModuleList()`).
ws = nn.ModuleList()
ws.append(nn.Linear(in_dims, n_hid))
print(ws[0])

x = torch.Tensor([[1, 2, 3],
                  [4, 5, 6]])
# Linear maps (batch, in_dims) -> (batch, n_hid); weights are randomly
# initialised, so the printed values vary from run to run.
print(ws[0](x))

n_hid_prev = 3
n_hid = 3
affine = nn.Linear(n_hid_prev, n_hid)
x = torch.Tensor([[1, 2, 3],
                  [4, 5, 6]])
print(affine(x))