If x is a Tensor with x.requires_grad == True, then after backpropagation
x.grad will be another Tensor holding the gradient with respect to x.
(A Tensor is the PyTorch analogue of np.array, and torch is the analogue of np.)
# Example: reshape a 1-D range into a 4-D tensor, then flatten everything
# after the batch dimension.
x = torch.arange(12).view(2, 1, 3, 2)  # shape (2, 1, 3, 2)
N = x.shape[0]            # batch size (was undefined in the original note)
x_flat = x.view(N, -1)    # -1 infers the remaining size -> shape (2, 6)
# Example: two equivalent ways to create trainable weight tensors.
shape = (4, 3)                 # example weight shape — adjust per layer
fan_in = shape[1]              # number of inputs feeding each output unit
device = torch.device('cpu')   # example device — use 'cuda' when available
dtype = torch.float32

# Kaiming/He initialization: scale a standard normal by sqrt(2 / fan_in),
# then enable gradient tracking after construction.
w1 = torch.randn(shape, device=device, dtype=dtype) * np.sqrt(2. / fan_in)
w1.requires_grad = True  # fixed: original set `w.requires_grad` on an undefined name `w`

# Alternatively, request gradient tracking at construction time.
w2 = torch.zeros(shape, device=device, dtype=dtype, requires_grad=True)
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.optim import lr_scheduler
from torch.utils.data import DataLoader
from torch.utils.data import sampler
from torchvision import datasets, models, transforms
# Transfer learning: start from an ImageNet-pretrained ResNet-50.
# (Original line had prose fused onto the code — "使用预训练的网络",
# i.e. "use a pretrained network" — which made it a syntax error.)
# NOTE(review): `pretrained=True` is deprecated in newer torchvision in
# favor of `weights=...`; kept as-is to match the file's torchvision version.
net = models.resnet50(pretrained=True)  # use a pretrained network
# Adaptive pooling to 1x1 lets the network accept arbitrary input sizes.
net.avgpool = nn.AdaptiveAvgPool2d(1)
# Replace the classification head for our own number of output categories.
# NOTE(review): `classes` must be defined earlier by the notebook/caller.
net.fc = nn.Linear(net.fc.in_features, classes)
Defining a model with the PyTorch Sequential API: nn.Sequential merges the __init__ and forward() steps into one.
Defining a model with the PyTorch Module API: subclass nn.Module and implement __init__ and forward() separately.