Measuring the time for one forward pass of a ResNet34 model in PyTorch

import time

import torch
import torchvision.models as models

class Timer(object):
    """A simple timer."""
    def __init__(self):
        self.total_time = 0.
        self.calls = 0
        self.start_time = 0.
        self.diff = 0.
        self.average_time = 0.

    def tic(self):
        # use time.time instead of time.clock because time.clock
        # does not normalize for multithreading
        self.start_time = time.time()

    def toc(self, average=True):
        self.diff = time.time() - self.start_time
        self.total_time += self.diff
        self.calls += 1
        self.average_time = self.total_time / self.calls
        if average:
            return self.average_time
        else:
            return self.diff

GPUID = 1

resnet34 = models.resnet34(pretrained=True)
resnet34.cuda(GPUID)
resnet34.eval()  # inference mode: fixes BatchNorm statistics and disables dropout

x = torch.rand(1, 3, 400, 400).cuda(GPUID)

# Warm-up pass: the first forward call pays one-off CUDA initialization costs,
# so it is excluded from the measurement.
with torch.no_grad():
    y = resnet34(x)

timer = Timer()
torch.cuda.synchronize(GPUID)  # CUDA kernels run asynchronously; drain the queue before timing
timer.tic()
with torch.no_grad():
    for i in range(100):
        y = resnet34(x)
torch.cuda.synchronize(GPUID)  # wait for all queued kernels to finish
timer.toc()

print('One forward pass takes {:.3f} ms'.format(timer.total_time * 1000 / 100.0))
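
An alternative worth noting: `torch.cuda.Event` timestamps are recorded on the GPU stream itself, which avoids host-side scheduling jitter. Below is a minimal sketch of the same benchmark using CUDA events; the device index and iteration count are illustrative assumptions, not part of the original script:

```python
import torch
import torchvision.models as models

# Sketch: time ResNet34 forward passes with CUDA events.
# Assumes a CUDA-capable GPU; any available device index works.
device = torch.device('cuda:0')
model = models.resnet34(pretrained=True).to(device).eval()
x = torch.rand(1, 3, 400, 400, device=device)

start = torch.cuda.Event(enable_timing=True)
end = torch.cuda.Event(enable_timing=True)

with torch.no_grad():
    model(x)           # warm-up pass, excluded from timing
    start.record()     # timestamp recorded on the GPU stream
    for _ in range(100):
        model(x)
    end.record()

torch.cuda.synchronize(device)  # wait until both events have occurred
print('One forward pass takes {:.3f} ms'.format(start.elapsed_time(end) / 100.0))
```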
Sure, here is a walkthrough of how a ResNet18 model is built in PyTorch and what the code means.

ResNet18 is the 18-layer version of the deep residual network (ResNet). Its main idea is to introduce residual (shortcut) connections to alleviate the vanishing and exploding gradient problems of deep neural networks.

We can load a pretrained ResNet18 through PyTorch's built-in torchvision library:

```python
import torch
import torchvision.models as models

# Load the pretrained resnet18 model
resnet18 = models.resnet18(pretrained=True)
```

Here `models.resnet18(pretrained=True)` loads ImageNet-pretrained weights. If we want to train a ResNet18 from scratch instead, we can define the network ourselves:

```python
import torch
import torch.nn as nn

# The basic residual block of resnet18: two 3x3 convolutions plus a shortcut connection
class ResidualBlock(nn.Module):
    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(ResidualBlock, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample

    def forward(self, x):
        residual = x
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        if self.downsample is not None:
            residual = self.downsample(x)  # reshape the shortcut branch to match
        out += residual
        out = self.relu(out)
        return out

# Define the resnet18 model
class ResNet18(nn.Module):
    def __init__(self, num_classes=1000):
        super(ResNet18, self).__init__()
        self.inplanes = 64  # channels entering the first residual stage
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.layer1 = self._make_layer(64, 2)
        self.layer2 = self._make_layer(128, 2, stride=2)
        self.layer3 = self._make_layer(256, 2, stride=2)
        self.layer4 = self._make_layer(512, 2, stride=2)
        self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
        self.fc = nn.Linear(512, num_classes)

    def _make_layer(self, planes, blocks, stride=1):
        # Downsample the shortcut when the spatial size or channel count changes
        downsample = None
        if stride != 1 or self.inplanes != planes:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes)
            )
        layers = []
        layers.append(ResidualBlock(self.inplanes, planes, stride, downsample))
        self.inplanes = planes
        for i in range(1, blocks):
            layers.append(ResidualBlock(self.inplanes, planes))
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.maxpool(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)
        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)
        return x
```

In the code above we define a ResNet18 class with a `_make_layer` method and a `forward` method. `_make_layer` builds one residual stage out of several ResidualBlocks, inserting a 1x1 downsampling convolution on the shortcut whenever the stride or channel count changes. `forward` then chains the stages together in order to form the complete model.

The ResidualBlock class implements a single residual block: two convolutional layers whose output is added to the (possibly downsampled) shortcut connection.

Hopefully this walkthrough clarifies how the ResNet18 model is put together and what each part of the code does.
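
As a quick sanity check of the hand-written model above, the following sketch runs one forward pass and prints the output shape; the 224x224 input size and 1000-class head are the usual ImageNet defaults, assumed here for illustration:

```python
# Instantiate the hand-written ResNet18 and verify the output shape.
model = ResNet18(num_classes=1000)
model.eval()

dummy = torch.rand(1, 3, 224, 224)  # one ImageNet-sized RGB image
with torch.no_grad():
    out = model(dummy)

print(out.shape)  # expected: torch.Size([1, 1000])
```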