Inception-v1 reimplemented from scratch. Feel free to ask if you have any questions.

Code:

import torch
import torch.nn as nn
import torch.nn.functional as F



class inceptionModuleV1(nn.Module):
    def __init__(self, inputs, outputs1, outputs2, outputs3, outputs4, outputs5, outputs6):
        super(inceptionModuleV1, self).__init__()

        # Branch 1: 1x1 convolution + ReLU (GoogLeNet applies ReLU after every conv,
        # so the activation belongs here as well)
        self.blocks1 = nn.Sequential(
            nn.Conv2d(in_channels=inputs, out_channels=outputs1, kernel_size=1, stride=1),
            nn.ReLU(inplace=True)
        )
        # Branch 2: 1x1 reduction followed by a 3x3 convolution
        self.blocks2 = nn.Sequential(
            nn.Conv2d(in_channels=inputs, out_channels=outputs2, kernel_size=1, stride=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=outputs2, out_channels=outputs3, kernel_size=3, stride=1, padding=1),
            nn.ReLU(inplace=True)
        )
        # Branch 3: 1x1 reduction followed by a 5x5 convolution
        self.blocks3 = nn.Sequential(
            nn.Conv2d(in_channels=inputs, out_channels=outputs4, kernel_size=1, stride=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=outputs4, out_channels=outputs5, kernel_size=5, stride=1, padding=2),
            nn.ReLU(inplace=True)
        )
        # Branch 4: 3x3 max pooling followed by a 1x1 projection
        self.blocks4 = nn.Sequential(
            nn.MaxPool2d(kernel_size=3, stride=1, padding=1),
            nn.Conv2d(in_channels=inputs, out_channels=outputs6, kernel_size=1, stride=1),
            nn.ReLU(inplace=True)
        )

    def forward(self, x):
        x1 = self.blocks1(x)
        x2 = self.blocks2(x)
        x3 = self.blocks3(x)
        x4 = self.blocks4(x)
        print(x1.shape, x2.shape, x3.shape, x4.shape)  # debug: per-branch output shapes
        # Concatenate the four branches along the channel dimension
        xM = torch.cat([x1, x2, x3, x4], dim=1)
        return xM
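
Since every branch runs at stride 1 with padding chosen to preserve the spatial size, concatenating along dim=1 simply adds the branch channel counts: outputs1 + outputs3 + outputs5 + outputs6. A quick standalone sanity check for the 3a configuration (a sketch, separate from the model file; the 28x28 input size just mirrors the shapes printed in the output below):

block3a = inceptionModuleV1(inputs=192, outputs1=64, outputs2=96, outputs3=128,
                            outputs4=16, outputs5=32, outputs6=32)
y = block3a(torch.randn(1, 192, 28, 28))
assert y.shape == (1, 64 + 128 + 32 + 32, 28, 28)  # 256 channels, spatial size unchanged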




class classifier(nn.Module):
    """Auxiliary classifier attached to the intermediate inception blocks."""
    def __init__(self, inputs, outs):
        super(classifier, self).__init__()
        self.block = nn.AvgPool2d(kernel_size=5, stride=3)
        self.conv = nn.Conv2d(in_channels=inputs, out_channels=128, kernel_size=1, stride=1)
        # A 14x14 map becomes 4x4 after the 5x5/3 average pooling, hence 128*4*4
        self.fc1 = nn.Linear(128 * 4 * 4, 1024)
        self.fc2 = nn.Linear(1024, outs)

    def forward(self, x):
        x = self.block(x)
        x = self.conv(x)
        x = F.relu(x)
        x = torch.flatten(x, start_dim=1)
        print(x.shape)  # debug: flattened feature size
        x = F.relu(self.fc1(x))
        # Pass self.training so dropout is disabled in eval mode
        x = F.dropout(x, p=0.5, training=self.training)
        x = self.fc2(x)
        return x
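
The hard-coded 128*4*4 in fc1 comes from the pooling arithmetic: both auxiliary heads see 14x14 feature maps, and AvgPool2d(kernel_size=5, stride=3) maps 14 to floor((14-5)/3)+1 = 4, so the 128 channels from the 1x1 conv flatten to 128*4*4 = 2048. A standalone check of that assumption:

aux = classifier(inputs=512, outs=1000)
logits = aux(torch.randn(1, 512, 14, 14))  # forward() prints torch.Size([1, 2048])
assert logits.shape == (1, 1000)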






class inceptionMid(nn.Module):

    def __init__(self, outputs_Numclass=1000, trains=True):
        super(inceptionMid, self).__init__()
        # Stem: conv7x7/2 -> maxpool -> conv1x1 -> conv3x3 -> maxpool
        self.inputsinceptions = nn.Sequential(
            nn.Conv2d(in_channels=3, out_channels=64, kernel_size=7, stride=2, padding=3),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=1, stride=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=64, out_channels=192, kernel_size=3, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        )

        self.traini = trains

        # Inception 3a / 3b
        self.block1 = inceptionModuleV1(inputs=192, outputs1=64, outputs2=96, outputs3=128, outputs4=16, outputs5=32, outputs6=32)
        self.block2 = inceptionModuleV1(inputs=256, outputs1=128, outputs2=128, outputs3=192, outputs4=32, outputs5=96, outputs6=64)
        # Inception 4a
        self.block3 = inceptionModuleV1(inputs=480, outputs1=192, outputs2=96, outputs3=208, outputs4=16, outputs5=48, outputs6=64)

        # Auxiliary classifiers sit after 4a (512 channels) and 4d (528 channels);
        # use outputs_Numclass instead of hard-coding 1000
        self.classif1 = classifier(512, outputs_Numclass)
        self.classif2 = classifier(528, outputs_Numclass)
        self.maxpooling1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        # Inception 4b-4e
        self.block4 = inceptionModuleV1(inputs=512, outputs1=160, outputs2=112, outputs3=224, outputs4=24, outputs5=64, outputs6=64)
        self.block5 = inceptionModuleV1(inputs=512, outputs1=128, outputs2=128, outputs3=256, outputs4=24, outputs5=64, outputs6=64)
        self.block6 = inceptionModuleV1(inputs=512, outputs1=112, outputs2=144, outputs3=288, outputs4=32, outputs5=64, outputs6=64)
        self.block7 = inceptionModuleV1(inputs=528, outputs1=256, outputs2=160, outputs3=320, outputs4=32, outputs5=128, outputs6=128)
        # Inception 5a / 5b
        self.block8 = inceptionModuleV1(inputs=832, outputs1=256, outputs2=160, outputs3=320, outputs4=32, outputs5=128, outputs6=128)
        self.block9 = inceptionModuleV1(inputs=832, outputs1=384, outputs2=192, outputs3=384, outputs4=48, outputs5=128, outputs6=128)

        # Define these once here instead of instantiating fresh modules inside forward()
        self.maxpooling2 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.avgpool = nn.AvgPool2d(kernel_size=7, stride=1)
        self.dropout = nn.Dropout(p=0.4)  # as an nn.Module it is disabled by model.eval()

        self.lastFc = nn.Linear(1024, outputs_Numclass)

    def forward(self, x):
        x = self.inputsinceptions(x)
        x = self.block1(x)
        x = self.block2(x)
        x = self.maxpooling1(x)
        x = self.block3(x)
        aur = self.classif1(x)   # auxiliary output 1 (after 4a)
        x = self.block4(x)
        x = self.block5(x)
        x = self.block6(x)
        aur1 = self.classif2(x)  # auxiliary output 2 (after 4d)
        x = self.block7(x)
        x = self.maxpooling2(x)
        x = self.block8(x)
        x = self.block9(x)
        x = self.avgpool(x)
        x = self.dropout(x)
        x = torch.flatten(x, start_dim=1)
        x = self.lastFc(x)

        if self.traini:
            return aur, aur1, x
        else:
            return x



if __name__ == '__main__':
    model = inceptionMid()
    x = torch.randn(1, 3, 224, 224)  # avoid shadowing the builtin input()
    aux1, aux2, out = model(x)
    print(aux1.shape)
    print(aux2.shape)
    print(out.shape)
    print(out)
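
For inference, build the model with trains=False so forward() returns only the final logits, and call .eval() so the dropout layers are disabled. A usage sketch under the same code as above (the parameter-count print is just a quick sanity check, no reference value implied):

model = inceptionMid(outputs_Numclass=1000, trains=False)
model.eval()
print(sum(p.numel() for p in model.parameters()), 'parameters')
with torch.no_grad():
    logits = model(torch.randn(1, 3, 224, 224))
print(logits.shape)  # torch.Size([1, 1000])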

Output:

torch.Size([1, 64, 28, 28]) torch.Size([1, 128, 28, 28]) torch.Size([1, 32, 28, 28]) torch.Size([1, 32, 28, 28])
torch.Size([1, 128, 28, 28]) torch.Size([1, 192, 28, 28]) torch.Size([1, 96, 28, 28]) torch.Size([1, 64, 28, 28])
torch.Size([1, 192, 14, 14]) torch.Size([1, 208, 14, 14]) torch.Size([1, 48, 14, 14]) torch.Size([1, 64, 14, 14])
torch.Size([1, 2048])
torch.Size([1, 160, 14, 14]) torch.Size([1, 224, 14, 14]) torch.Size([1, 64, 14, 14]) torch.Size([1, 64, 14, 14])
torch.Size([1, 128, 14, 14]) torch.Size([1, 256, 14, 14]) torch.Size([1, 64, 14, 14]) torch.Size([1, 64, 14, 14])
torch.Size([1, 112, 14, 14]) torch.Size([1, 288, 14, 14]) torch.Size([1, 64, 14, 14]) torch.Size([1, 64, 14, 14])
torch.Size([1, 2048])
torch.Size([1, 256, 14, 14]) torch.Size([1, 320, 14, 14]) torch.Size([1, 128, 14, 14]) torch.Size([1, 128, 14, 14])
torch.Size([1, 256, 7, 7]) torch.Size([1, 320, 7, 7]) torch.Size([1, 128, 7, 7]) torch.Size([1, 128, 7, 7])
torch.Size([1, 384, 7, 7]) torch.Size([1, 384, 7, 7]) torch.Size([1, 128, 7, 7]) torch.Size([1, 128, 7, 7])
torch.Size([1, 1000])
torch.Size([1, 1000])
torch.Size([1, 1000])
tensor([[ 2.4082e-02, -5.8420e-02, -1.6949e-02,  ..., -1.5809e-02,
         -2.5390e-02,  4.0849e-02]],
       grad_fn=<AddmmBackward>)

It took a long time of debugging, but I finally figured it out.
