import torch
from torchvision import transforms
from torchvision import datasets
from torch.utils.data import DataLoader
import torch.nn.functional as F
import torch.optim as optim
# Mini-batch size shared by the training and test loaders.
batch_size = 64
# Convert PIL images to float tensors in [0, 1], then normalize with
# 0.1307 / 0.3081 (the commonly used MNIST mean/std values).
transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.1307, ), (0.3081, ))
])
# Training split: downloaded on first run into ./dataset/mnist/.
train_dataset = datasets.MNIST(root='./dataset/mnist/',
train=True,
download=True,
transform=transform)
# Shuffle each epoch so mini-batches differ between epochs.
train_loader = DataLoader(train_dataset,
shuffle=True,
batch_size=batch_size)
# Test split: same normalization as training data.
test_dataset = datasets.MNIST(root='./dataset/mnist/',
train=False,
download=True,
transform=transform)
# Fixed order for deterministic evaluation.
test_loader = DataLoader(test_dataset,
shuffle=False,
batch_size=batch_size)
class Net(torch.nn.Module):
    """Small CNN classifier for 28x28 single-channel MNIST images.

    Two conv + max-pool + ReLU stages feed one linear layer that emits
    raw logits for the 10 digit classes (no softmax; pair with
    CrossEntropyLoss).
    """

    def __init__(self):
        super(Net, self).__init__()
        # 28x28x1 -> 24x24x10 (5x5 kernel, no padding)
        self.conv1 = torch.nn.Conv2d(1, 10, kernel_size=5)
        # 12x12x10 -> 8x8x20 after the first pool
        self.conv2 = torch.nn.Conv2d(10, 20, kernel_size=5)
        # Shared 2x2 max-pool used after each convolution.
        self.pooling = torch.nn.MaxPool2d(2)
        # 20 channels * 4 * 4 spatial = 320 features in.
        self.fc = torch.nn.Linear(320, 10)

    def forward(self, x):
        n = x.size(0)
        out = F.relu(self.pooling(self.conv1(x)))
        out = F.relu(self.pooling(self.conv2(out)))
        # Flatten to (n, 320) for the classifier head.
        out = out.view(n, -1)
        return self.fc(out)
# Build the network and place it on the GPU when one is present.
model = Net()
if torch.cuda.is_available():
    device = torch.device("cuda:0")
else:
    device = torch.device("cpu")
model.to(device)

# Cross-entropy over raw logits; plain SGD with momentum.
criterion = torch.nn.CrossEntropyLoss()
optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.5)
def train(epoch):
    """Run one training epoch over train_loader.

    Prints the mean loss of every 300 consecutive mini-batches;
    `epoch` is only used (1-based) in that log line.
    """
    loss_sum = 0.0
    for step, (inputs, target) in enumerate(train_loader):
        inputs = inputs.to(device)
        target = target.to(device)

        optimizer.zero_grad()
        loss = criterion(model(inputs), target)
        loss.backward()
        optimizer.step()

        loss_sum += loss.item()
        # Every 300 batches, report the running average and reset it.
        if (step + 1) % 300 == 0:
            print('[%d, %5d] loss: %.3f' % (epoch + 1, step + 1, loss_sum / 300))
            loss_sum = 0.0
def test():
    """Evaluate the model on test_loader and print the accuracy (truncated %)."""
    correct, total = 0, 0
    # No gradients needed for evaluation.
    with torch.no_grad():
        for inputs, target in test_loader:
            inputs = inputs.to(device)
            target = target.to(device)
            # Predicted class = index of the largest logit per row.
            predicted = model(inputs).argmax(dim=1)
            total += target.size(0)
            correct += (predicted == target).sum().item()
    print('Accuracy on test set: %d %%' % (100 * correct / total))
if __name__ == '__main__':
    # Inspect the model's learnable parameters (no training is run here;
    # the pasted output below this block was produced by these prints).
    params = list(model.parameters())
    print("length of params:{}".format(len(params)))
    for param_name, param_value in model.named_parameters():
        print(param_name, ":", param_value.size(), "values:", param_value)
# NOTE(review): everything below is console output from a previous run that
# was pasted after the code; as bare text it makes this module a SyntaxError.
# It is wrapped in a string literal here so the file parses — consider
# deleting it or moving it to a separate log file.
_SAMPLE_OUTPUT = """
length of params:6
conv1.weight : torch.Size([10, 1, 5, 5]) values: Parameter containing:
tensor([[[[ 0.0292, -0.1148, -0.0155, -0.1542, -0.0642],
[ 0.1563, 0.1593, 0.0421, 0.0064, -0.0413],
[-0.0512, -0.1027, 0.1217, 0.0769, -0.1619],
[ 0.0971, -0.1814, -0.1111, 0.0227, -0.1429],
[-0.1311, -0.1288, 0.0786, 0.1557, -0.1380]]],
[[[-0.1490, -0.1661, 0.1592, -0.1834, -0.0604],
[ 0.1499, 0.1580, -0.0498, -0.0208, 0.0102],
[-0.1532, -0.0275, -0.1507, 0.0455, 0.1321],
[-0.0655, 0.0359, 0.1715, 0.1594, -0.0239],
[-0.0198, 0.0505, 0.1103, 0.0207, 0.1015]]],
[[[-0.0818, -0.1510, 0.1396, -0.0151, 0.0119],
[-0.0443, 0.1819, 0.1518, 0.1544, 0.1078],
[-0.0325, -0.0857, -0.1713, -0.1537, 0.1546],
[-0.1783, -0.1008, 0.0156, -0.1134, 0.1781],
[-0.0690, -0.1707, -0.0203, 0.1112, -0.1106]]],
[[[ 0.1699, 0.1946, 0.0837, -0.0528, -0.1850],
[-0.1715, 0.0184, -0.1637, 0.1184, 0.0194],
[-0.1666, 0.1877, -0.1423, 0.0742, 0.1340],
[-0.0158, 0.0251, 0.1079, -0.1123, 0.0050],
[-0.1145, 0.0858, 0.1148, 0.1656, -0.1704]]],
[[[ 0.0305, -0.1555, -0.0259, -0.0981, 0.0180],
[ 0.1186, 0.0850, 0.1193, 0.1757, -0.1128],
[-0.1621, 0.1792, -0.1562, -0.1266, 0.1293],
[-0.0334, -0.1747, -0.0385, -0.1260, -0.1633],
[ 0.0512, 0.0287, -0.1757, -0.0872, -0.0906]]],
[[[ 0.1485, -0.0537, 0.1543, 0.0719, 0.0233],
[ 0.1325, 0.1277, 0.0307, -0.0970, 0.0722],
[-0.1042, 0.0687, 0.1692, -0.0762, 0.0279],
[-0.0478, -0.0325, -0.1661, -0.1942, -0.1265],
[ 0.1228, 0.0873, 0.0225, 0.0592, 0.0716]]],
[[[-0.1059, 0.0920, 0.1661, -0.0101, -0.0377],
[ 0.0480, 0.0999, 0.0434, -0.0047, -0.0520],
[-0.0541, 0.1078, 0.0733, -0.1655, -0.0624],
[ 0.0647, 0.0793, -0.0082, 0.0932, 0.0217],
[ 0.1159, -0.1487, -0.0984, -0.0615, -0.0867]]],
[[[ 0.1851, 0.0249, -0.0024, 0.0110, 0.1678],
[-0.1211, -0.1862, -0.1676, 0.0258, -0.0749],
[ 0.1309, -0.1548, 0.1669, 0.0081, 0.0921],
[-0.1081, -0.0686, 0.1135, -0.0269, 0.0289],
[-0.0014, 0.0757, -0.1843, -0.0605, 0.1531]]],
[[[ 0.1794, 0.0711, 0.0297, 0.1064, -0.1433],
[-0.0614, -0.0482, 0.1997, -0.0697, 0.1592],
[-0.1439, 0.1009, -0.1246, 0.1187, -0.0678],
[ 0.0934, 0.1738, -0.0657, 0.1890, 0.0004],
[ 0.0722, 0.1557, -0.1722, -0.1403, -0.1126]]],
[[[-0.0196, 0.1592, -0.0318, 0.0382, -0.0635],
[-0.0452, -0.1104, 0.0558, -0.1944, -0.0693],
[-0.1448, 0.1235, 0.1222, -0.1038, 0.0207],
[ 0.1631, -0.0482, 0.0538, -0.0039, 0.0798],
[ 0.1920, -0.1149, -0.1949, 0.1732, 0.1329]]]], device='cuda:0',
requires_grad=True)
conv1.bias : torch.Size([10]) values: Parameter containing:
tensor([-0.1944, -0.1169, -0.1010, -0.1470, 0.0047, 0.0386, -0.1319, 0.0324,
-0.0498, 0.0420], device='cuda:0', requires_grad=True)
conv2.weight : torch.Size([20, 10, 5, 5]) values: Parameter containing:
tensor([[[[-5.2368e-02, 3.1554e-02, -5.4118e-02, 2.8604e-02, -2.4989e-02],
[ 5.4621e-02, -2.9379e-02, 1.0100e-02, 2.5552e-02, 5.3117e-02],
[ 6.7153e-03, -3.2922e-02, 1.4443e-02, 5.9338e-02, 4.6442e-02],
[-4.4611e-02, -3.3952e-02, -2.4117e-02, 1.1620e-02, -5.9166e-02],
[ 1.2533e-02, -1.2082e-02, 3.8800e-02, -3.4241e-02, 8.9317e-03]],
[[ 4.1846e-02, 3.7014e-02, -5.0647e-02, -9.1374e-03, 3.1137e-02],
[ 5.7464e-02, -1.3512e-02, -5.9673e-02, -4.0360e-04, -5.8839e-02],
[ 4.8177e-02, -1.7239e-02, -1.6065e-03, 1.5424e-02, 2.5880e-02],
[-4.5047e-02, 2.1510e-03, 1.1369e-02, 5.6708e-03, 2.5710e-02],
[ 2.0581e-02, 3.3126e-02, -4.6665e-02, -8.5124e-04, 1.8519e-02]],
[[ 2.9420e-02, -5.2846e-03, 3.5169e-02, -1.8284e-02, 2.7788e-02],
[-5.3204e-02, 4.3202e-02, 1.8915e-02, -2.5574e-02, -4.3234e-02],
[ 6.2644e-02, 4.4236e-02, -3.4475e-02, -6.2509e-02, -3.1133e-02],
[ 9.3859e-04, -2.0049e-02, -3.6657e-02, -2.3389e-02, -4.9290e-02],
[ 5.8727e-02, -3.1091e-02, -5.7634e-03, 3.9177e-02, 5.7926e-02]],
...,
[[ 2.3437e-02, 1.4578e-02, 5.1117e-02, 2.4858e-02, 4.9612e-02],
[-3.9311e-02, 5.0185e-02, 2.5639e-02, 2.1394e-02, -5.6477e-02],
[-3.4678e-02, 4.1966e-03, -4.5253e-02, 1.6301e-02, -3.8402e-02],
[-5.5444e-02, -5.2513e-02, -5.1018e-02, -6.1785e-02, -3.3350e-02],
[-3.9832e-02, 2.7723e-03, 1.4727e-02, 6.1696e-03, 3.0029e-02]],
[[ 2.4824e-02, 4.8004e-02, -1.5382e-02, -6.1675e-02, 4.4248e-02],
[-3.3983e-02, -8.7866e-03, 1.1843e-02, -4.8161e-02, -1.4101e-02],
[-5.5533e-03, 4.6829e-02, 5.9879e-02, -4.5932e-02, 1.5345e-02],
[ 6.2069e-02, -6.0446e-02, -5.4356e-02, -1.8430e-02, 5.8237e-03],
[-1.4261e-02, -6.1078e-03, -4.5779e-02, -4.6705e-03, 7.9402e-03]],
[[-4.9596e-02, -1.2698e-02, 4.9327e-02, 3.5700e-02, -3.4350e-02],
[-3.8808e-02, 1.4130e-02, -8.1186e-03, -3.6734e-02, -4.3259e-03],
[-2.0556e-02, 2.2191e-02, 2.7012e-02, 8.8912e-03, -5.9635e-02],
[-2.9462e-02, 3.0790e-02, 4.4507e-02, 3.0119e-02, 9.7185e-03],
[ 5.2443e-02, -5.4204e-03, 6.7616e-03, -2.7343e-02, -4.6512e-02]]],
[[[ 5.9749e-02, -3.4324e-02, 3.8714e-02, -2.1660e-02, 4.8476e-02],
[ 8.1200e-04, 2.4718e-02, 3.9875e-02, 9.9266e-03, 3.6421e-04],
[-4.1626e-02, -5.8603e-02, -6.9467e-03, 5.6937e-02, 3.5564e-02],
[-1.2004e-02, 1.1038e-02, 3.6674e-02, -4.9492e-02, 1.2047e-02],
[ 2.7506e-02, 4.0841e-02, -2.2967e-02, 5.4300e-02, 4.5586e-03]],
[[ 3.7827e-02, 1.8058e-02, 4.2477e-02, -4.5514e-02, -6.0702e-02],
[ 2.1211e-02, -5.9180e-02, 1.0838e-02, 1.4655e-02, 2.8223e-02],
[-5.2725e-02, 1.8744e-02, 2.1331e-02, 1.2605e-02, -1.8589e-03],
[-2.4681e-02, -4.1699e-02, 5.3656e-02, -5.3694e-02, -1.4808e-02],
[-3.3128e-02, -6.1342e-02, 4.1222e-02, 2.1920e-02, -3.0237e-02]],
[[-4.0631e-02, 3.2673e-02, -3.7668e-03, -5.6161e-02, -2.5354e-02],
[ 3.7301e-02, 3.9205e-02, -2.7044e-03, 3.2688e-02, 4.1842e-02],
[-3.2785e-03, 3.9226e-02, -2.3104e-02, -4.4690e-02, -1.3086e-02],
[-5.8275e-02, 4.2288e-02, -3.5103e-02, -3.7395e-02, 7.9434e-03],
[ 2.2111e-02, 2.3425e-02, -4.3885e-02, -4.8912e-02, 4.4387e-02]],
...,
[[-1.1500e-02, 3.7778e-03, -1.0640e-02, 4.5538e-02, 5.0465e-02],
[-4.7463e-02, 6.2900e-02, -5.8583e-02, -5.7497e-02, -2.0911e-02],
[-3.7290e-02, -4.2980e-02, 4.6220e-02, -1.3599e-02, 2.0710e-03],
[-1.3804e-02, -7.4738e-03, 5.9528e-02, 5.8424e-02, 2.8293e-03],
[-7.1658e-04, 8.4385e-03, -4.8809e-02, -2.4261e-02, 1.0562e-02]],
[[-5.6798e-02, -9.6285e-03, -6.1419e-02, -3.4784e-02, 4.3185e-02],
[-6.2391e-02, 1.6141e-03, 3.3683e-02, -3.3232e-02, -4.1071e-02],
[-4.0307e-02, -4.1360e-02, -4.7162e-02, 1.7653e-02, 3.3942e-02],
[ 4.6899e-02, 3.3870e-02, -2.7944e-02, -4.5826e-02, -5.4681e-02],
[-1.9034e-02, 3.0065e-02, -1.2766e-02, -2.8829e-02, -1.8029e-02]],
[[ 5.6210e-02, -4.0113e-02, -4.8251e-02, 3.6768e-03, -2.8708e-02],
[-5.4318e-02, 3.5962e-02, 1.9123e-02, 5.3562e-02, 2.5681e-02],
[ 4.7974e-04, -2.4335e-02, -5.1696e-02, 1.5568e-02, -5.9729e-02],
[-5.1589e-02, -2.5462e-02, -2.9544e-02, -4.1907e-02, -3.4104e-02],
[-3.8630e-02, -3.0189e-02, 1.9416e-02, -3.9969e-02, -5.0012e-02]]],
[[[ 1.8875e-02, 1.2668e-02, 2.9536e-02, 5.2968e-02, 4.8621e-02],
[ 1.0259e-02, -4.2283e-02, -1.5881e-02, -4.0137e-02, 2.0386e-02],
[-1.4541e-02, 5.5391e-02, -1.5400e-02, -1.1771e-02, 5.9276e-02],
[ 2.9438e-02, -1.0177e-03, 8.9482e-03, -3.9851e-02, 1.2374e-02],
[ 4.5095e-02, 5.6810e-02, -4.3297e-02, -5.0333e-02, -1.0831e-02]],
[[-3.5740e-02, 4.8352e-02, 5.0192e-04, 2.2286e-02, 3.8444e-02],
[ 5.4620e-02, -4.8168e-02, -5.1637e-02, 6.0479e-02, -3.5714e-02],
[ 2.7755e-02, -4.6374e-03, -5.8292e-02, -2.9264e-02, 5.8959e-02],
[-5.5255e-02, 4.8488e-02, -3.6999e-02, 2.2060e-02, 3.5020e-03],
[ 2.3997e-02, -8.6682e-03, -3.9716e-02, -5.9086e-02, -2.6488e-02]],
[[-3.8342e-02, 5.3473e-02, -1.7118e-03, 3.9043e-02, -4.2491e-02],
[ 4.9556e-02, -1.4256e-02, -5.8375e-03, 1.7318e-02, 5.3478e-02],
[-2.2852e-02, -2.2135e-02, 1.3626e-02, 6.2688e-02, -5.4085e-03],
[ 3.0429e-02, -1.1710e-02, 2.2301e-02, -5.9121e-02, 4.3087e-02],
[-3.8621e-02, 3.6340e-02, -3.8902e-02, -4.1751e-02, 2.5260e-02]],
...,
[[ 4.4129e-02, -1.7677e-02, 1.3659e-02, -5.9717e-02, -2.8333e-02],
[ 4.3234e-02, 6.1530e-02, -1.5638e-02, -6.2452e-02, -1.1310e-02],
[-4.1910e-02, 5.7290e-02, 1.8888e-02, -5.8616e-02, 2.7848e-03],
[ 4.7585e-02, -4.0041e-02, 2.4481e-02, -1.0197e-02, 5.6576e-02],
[ 3.7881e-02, 1.8084e-02, 1.6202e-02, -3.2640e-02, 5.0570e-03]],
[[ 6.0793e-02, 5.5923e-02, -5.4141e-02, 5.9115e-03, 1.8392e-02],
[-6.3995e-04, -5.4829e-02, -4.5618e-03, 5.2113e-02, 2.3171e-02],
[ 3.4625e-02, 4.8866e-02, 2.4438e-02, -7.9352e-03, -1.2292e-02],
[-5.7379e-02, -2.8284e-02, 2.9702e-02, 8.0225e-03, -1.7087e-02],
[-1.2084e-03, -3.0352e-02, -4.9397e-02, -2.3069e-02, -3.4533e-02]],
[[ 8.4772e-03, -1.1037e-02, 8.2330e-03, -1.4237e-02, 4.1052e-03],
[-5.0191e-02, -5.4346e-02, 5.5931e-03, 3.1324e-02, -6.1376e-02],
[-1.7859e-02, -5.9138e-02, -2.3151e-02, -4.4270e-04, 2.3552e-02],
[-8.1268e-03, -1.8367e-02, -4.7919e-02, 5.3359e-02, 2.7391e-02],
[ 4.7805e-02, 2.3470e-02, -2.9792e-03, 5.0512e-02, -1.5744e-02]]],
...,
[[[ 4.3788e-02, 1.1773e-02, -1.9859e-03, 3.4032e-02, 5.2554e-02],
[-1.6575e-03, -8.9330e-03, 3.6900e-02, 1.6530e-02, -1.4109e-02],
[-4.0862e-02, 5.2959e-02, -1.5769e-02, 1.9714e-02, 4.9050e-02],
[-5.2912e-02, 6.0175e-02, -1.6552e-02, -5.5575e-03, -1.2666e-02],
[ 3.9843e-02, 4.8994e-02, 6.3026e-03, 3.1719e-02, 4.4317e-02]],
[[ 5.9765e-02, -2.9124e-05, 3.7130e-02, -4.0320e-02, 2.8108e-02],
[ 6.8713e-03, 1.1782e-02, -2.0284e-02, 2.0401e-02, -4.9913e-02],
[-3.9153e-02, -6.2478e-02, 3.5344e-02, 1.5019e-02, -3.1088e-02],
[ 1.6529e-02, 1.9522e-02, 6.9483e-03, 1.1334e-03, 6.0805e-02],
[ 2.3773e-02, -1.5872e-02, 1.5955e-02, 1.9066e-02, 5.4981e-02]],
[[ 8.0943e-03, 5.1840e-02, 6.1965e-02, -2.4831e-02, 5.4198e-02],
[-1.3582e-02, 6.1034e-02, 4.1037e-02, 9.0570e-03, -3.3151e-02],
[-4.7179e-02, -3.0043e-02, 3.3698e-02, -2.0000e-02, -7.1394e-03],
[-3.3714e-02, -5.2707e-02, 5.3939e-02, -5.4213e-02, 4.1943e-02],
[-2.1934e-03, -5.0298e-02, 2.5775e-03, -1.7821e-02, 3.0552e-02]],
...,
[[-2.2315e-02, 1.5523e-02, 5.6068e-02, -6.3239e-02, 2.5698e-02],
[-4.4381e-02, 3.9510e-05, -1.6564e-02, -1.5760e-02, -2.2124e-02],
[-2.0979e-02, 4.3864e-02, -4.5107e-03, -5.6620e-02, 2.9139e-02],
[-1.4656e-02, -1.5456e-02, -3.0923e-02, 4.1544e-02, -3.9379e-02],
[-4.6234e-02, -3.6687e-02, -3.1174e-02, -6.0573e-02, 4.1418e-02]],
[[ 2.9600e-02, 2.6317e-02, -2.6230e-02, 3.0193e-02, 6.2804e-02],
[-4.0259e-02, -4.1753e-02, -4.2367e-02, 3.9166e-02, 6.6288e-03],
[-1.4645e-02, -1.9352e-02, 1.1461e-02, 5.5662e-02, -4.4850e-02],
[-3.2785e-02, -1.1640e-02, -5.3309e-02, -4.6422e-02, 4.3497e-03],
[ 5.8036e-02, -2.5314e-02, -1.9412e-02, -5.5456e-03, 2.3219e-02]],
[[ 2.6274e-02, -2.8481e-02, -1.0018e-02, 2.0311e-02, -3.7622e-02],
[ 2.3305e-02, -5.6636e-02, -4.5737e-02, 5.4595e-02, 4.7142e-02],
[-4.1472e-02, 5.8590e-04, 1.1027e-02, -2.6692e-02, 4.1540e-03],
[ 2.2730e-02, 5.0395e-02, 4.3297e-02, 4.0691e-02, 5.0566e-02],
[-1.9606e-02, -1.9264e-02, 2.9296e-02, 3.0899e-02, 3.0212e-02]]],
[[[-4.2170e-02, 1.1396e-02, -2.7098e-02, 5.4463e-02, 6.3175e-02],
[-2.0579e-02, -2.0069e-02, -4.4863e-02, 5.5597e-02, 2.3287e-02],
[ 3.0968e-02, -2.8856e-02, 5.7500e-02, -5.3603e-02, 3.0844e-02],
[ 2.9263e-03, 2.7712e-02, -2.8336e-02, 2.6889e-02, 2.9511e-02],
[-3.8393e-02, 5.0375e-02, 5.5281e-02, -2.6216e-02, 1.2935e-02]],
[[ 3.3712e-02, 2.3145e-02, -8.2533e-03, 4.2996e-02, 5.1002e-02],
[-5.4223e-02, 3.0516e-02, -2.5553e-02, -5.3590e-02, 1.9521e-02],
[-5.2095e-02, -3.4188e-02, 7.8968e-03, -7.2009e-03, -1.2043e-02],
[ 4.9380e-02, 1.2707e-02, 6.0095e-02, 1.1709e-02, -2.8002e-02],
[ 4.7874e-02, -4.3280e-03, 2.3758e-02, -6.1083e-02, 1.8665e-03]],
[[-6.2303e-03, -3.7240e-02, 2.6535e-03, 2.2819e-02, 1.5594e-02],
[-5.5811e-02, -3.7369e-02, -3.8774e-02, -1.2823e-02, -4.7457e-02],
[ 3.5825e-02, 2.2103e-02, 3.0501e-02, 2.5271e-03, 2.9810e-02],
[ 2.8184e-02, 3.2689e-02, -5.7110e-02, 3.0984e-03, -4.2065e-02],
[ 5.9532e-02, -9.9016e-03, 2.3054e-02, -1.5889e-02, -1.7150e-02]],
...,
[[-1.2491e-02, -4.5455e-02, 2.6438e-02, -1.7727e-03, -2.8818e-02],
[ 1.3458e-02, 5.1609e-02, -4.1369e-02, 4.8613e-02, -2.0304e-02],
[ 5.2768e-02, 6.2602e-02, -6.4756e-03, 9.6404e-03, -1.4051e-02],
[ 4.6155e-03, -3.4282e-02, -9.0568e-03, 9.0611e-03, 6.2874e-02],
[ 2.4995e-02, 2.6157e-02, -3.0200e-02, 9.0291e-03, -2.8310e-02]],
[[ 6.2724e-02, -2.2696e-02, 6.0710e-02, -3.4729e-02, -2.5444e-02],
[ 6.8124e-03, 4.5257e-02, 5.9886e-02, -5.5591e-02, -1.8090e-02],
[ 1.1808e-02, 9.0323e-03, -4.9131e-02, -2.0279e-02, -2.5376e-02],
[ 5.9254e-02, -2.9263e-02, 1.3205e-02, 7.8838e-03, -6.1559e-02],
[ 5.3340e-02, -2.1244e-02, 6.0115e-02, -3.9059e-02, 4.2499e-02]],
[[ 8.1047e-03, 3.9263e-02, 2.4664e-03, 3.4638e-02, 1.2273e-02],
[ 2.0263e-02, -4.0178e-02, 5.0428e-02, -4.6262e-02, -5.7125e-02],
[ 2.5478e-04, -5.2544e-02, 6.1997e-02, 1.1998e-02, 4.3741e-02],
[ 5.7119e-02, 6.0448e-02, 5.6355e-02, 3.7687e-03, 3.7294e-03],
[-2.4424e-02, -5.6543e-02, 4.6519e-02, 6.0415e-02, -5.3495e-02]]],
[[[-3.8466e-02, -1.7578e-02, 5.7745e-03, -5.6509e-02, -4.1998e-02],
[ 3.4366e-02, -5.3207e-02, -5.1829e-02, -4.2150e-02, -2.0275e-02],
[-4.5197e-02, 3.4126e-02, 4.0532e-02, 3.0810e-02, -5.5078e-02],
[ 4.1298e-02, 1.7359e-02, -4.9447e-02, -4.4042e-02, -2.6199e-02],
[ 1.1152e-02, -3.9997e-02, 7.7384e-03, -4.1174e-02, -2.6033e-02]],
[[-2.9997e-02, -1.2136e-02, 2.9759e-02, -1.7717e-02, 5.2896e-02],
[-5.4648e-02, -4.8484e-02, -3.5876e-03, 4.6508e-02, -1.0753e-02],
[ 4.2723e-03, 4.9155e-02, 8.3463e-04, 4.5282e-02, -4.9390e-02],
[ 4.1561e-02, -4.4283e-02, 5.1784e-02, -1.7539e-02, 3.3454e-02],
[ 1.1207e-02, 5.4565e-02, 4.9700e-02, 1.7071e-03, -5.2736e-02]],
[[ 3.7429e-02, 9.2725e-03, -5.9805e-02, -1.1457e-02, 2.8986e-02],
[-1.8773e-02, -4.7642e-02, 1.0300e-03, -1.8093e-03, -2.5260e-02],
[ 4.4488e-03, -2.9149e-02, 1.1524e-02, -5.1723e-02, -1.6372e-02],
[-4.6572e-02, -6.5025e-04, 5.3897e-02, -4.4050e-02, 6.1977e-02],
[ 4.9635e-02, -5.2397e-02, 3.4928e-04, 5.0795e-02, -6.6452e-03]],
...,
[[-2.6595e-02, -2.6740e-03, 3.8268e-04, -6.1627e-02, 1.7806e-02],
[ 1.3060e-02, -6.9344e-03, 4.7959e-02, 3.9956e-02, 9.8456e-03],
[ 3.8722e-02, -8.7205e-03, 4.5907e-02, -5.6536e-02, -3.9802e-02],
[ 2.1997e-02, 4.5718e-02, -4.1491e-04, 1.9693e-02, 5.0694e-03],
[-1.7540e-02, 3.3420e-02, 5.6518e-02, -2.2284e-02, 2.6917e-02]],
[[ 2.1929e-02, -2.1110e-02, 5.1832e-02, 5.8693e-02, 2.7636e-02],
[-1.4571e-02, -6.0895e-02, -4.5327e-02, 2.3749e-02, -1.0829e-02],
[ 1.7146e-02, -5.9555e-02, -4.3269e-02, -3.4748e-02, 6.4499e-03],
[ 5.8877e-02, 5.5910e-02, 2.5082e-02, -4.6437e-02, -3.6999e-02],
[-3.6002e-02, 5.0111e-02, 1.1110e-03, -4.7821e-02, -4.2575e-02]],
[[-8.5748e-03, -5.4568e-02, 5.6058e-02, 2.8580e-03, 2.2698e-02],
[ 3.6471e-02, -5.0648e-02, 5.7448e-02, 9.0434e-03, -5.9686e-02],
[ 4.0076e-03, -1.7306e-02, 1.9660e-02, -8.7202e-03, -1.9142e-02],
[ 3.5483e-02, 4.8521e-02, -3.7919e-02, -2.6546e-02, -1.2540e-02],
[-4.6048e-03, -1.5638e-02, 2.6315e-02, 4.5429e-03, -3.4280e-03]]]],
device='cuda:0', requires_grad=True)
conv2.bias : torch.Size([20]) values: Parameter containing:
tensor([ 0.0088, -0.0576, -0.0200, -0.0618, 0.0362, -0.0358, 0.0356, 0.0391,
0.0190, 0.0471, 0.0130, 0.0433, 0.0533, 0.0139, -0.0531, 0.0445,
0.0421, -0.0414, -0.0449, -0.0462], device='cuda:0',
requires_grad=True)
fc.weight : torch.Size([10, 320]) values: Parameter containing:
tensor([[-0.0181, 0.0105, 0.0539, ..., 0.0189, 0.0501, -0.0400],
[-0.0311, 0.0237, 0.0181, ..., -0.0480, -0.0082, 0.0199],
[-0.0059, -0.0422, 0.0020, ..., 0.0078, -0.0445, -0.0014],
...,
[-0.0547, 0.0252, -0.0203, ..., 0.0212, -0.0054, -0.0410],
[-0.0329, -0.0288, 0.0495, ..., 0.0549, 0.0448, -0.0438],
[ 0.0068, -0.0067, -0.0553, ..., -0.0470, -0.0394, -0.0304]],
device='cuda:0', requires_grad=True)
fc.bias : torch.Size([10]) values: Parameter containing:
tensor([-0.0496, 0.0287, 0.0346, -0.0194, -0.0078, 0.0319, 0.0379, -0.0283,
0.0217, -0.0406], device='cuda:0', requires_grad=True)
"""