PyTorch AlexNet implementation example


import torch
import torch.nn as nn
import torchvision


class AlexNet(nn.Module):
    def __init__(self, num_classes=1000):
        super(AlexNet, self).__init__()
        # Convolutional feature extractor: five conv layers with ReLU activations,
        # interleaved with three max-pooling layers.
        self.feature_extraction = nn.Sequential(
            nn.Conv2d(in_channels=3, out_channels=96, kernel_size=11, stride=4, padding=2, bias=False),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=0),
            nn.Conv2d(in_channels=96, out_channels=192, kernel_size=5, stride=1, padding=2, bias=False),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=0),
            nn.Conv2d(in_channels=192, out_channels=384, kernel_size=3, stride=1, padding=1, bias=False),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=384, out_channels=256, kernel_size=3, stride=1, padding=1, bias=False),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=1, bias=False),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=0),
        )
        # Classifier head: dropout plus three fully connected layers.
        # For a 224x224 input, the feature extractor outputs a 256x6x6 feature map.
        self.classifier = nn.Sequential(
            nn.Dropout(p=0.5),
            nn.Linear(in_features=256 * 6 * 6, out_features=4096),
            nn.ReLU(inplace=True),
            nn.Dropout(p=0.5),
            nn.Linear(in_features=4096, out_features=4096),
            nn.ReLU(inplace=True),
            nn.Linear(in_features=4096, out_features=num_classes),
        )

    def forward(self, x):
        x = self.feature_extraction(x)
        # Flatten the 256x6x6 feature map before the fully connected layers.
        x = x.view(x.size(0), 256 * 6 * 6)
        x = self.classifier(x)
        return x


if __name__ == '__main__':
    # model = torchvision.models.AlexNet()
    model = AlexNet()
    print(model)
    input = torch.randn(8, 3, 224, 224)  # a batch of 8 random 224x224 RGB images
    out = model(input)
    print(out.shape)  # torch.Size([8, 1000])
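As a follow-up, here is a minimal sketch (not part of the original post) of how one training step with this AlexNet might look, using CrossEntropyLoss and SGD. The random images and integer labels below are hypothetical stand-ins for batches coming from a real DataLoader.

# Minimal single-training-step sketch; the data below is a hypothetical stand-in
# for a real dataset/DataLoader.
import torch
import torch.nn as nn

model = AlexNet(num_classes=1000)
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.01, momentum=0.9)

images = torch.randn(8, 3, 224, 224)   # hypothetical input batch
labels = torch.randint(0, 1000, (8,))  # hypothetical class labels

optimizer.zero_grad()
logits = model(images)                 # shape: (8, 1000)
loss = criterion(logits, labels)
loss.backward()
optimizer.step()
print(loss.item())

Note that the hard-coded view to 256*6*6 in forward assumes 224x224 inputs; torchvision's own AlexNet inserts an adaptive average pooling layer before the classifier so that other input sizes also work.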

That is the entire PyTorch AlexNet example. We hope it serves as a useful reference, and we appreciate your continued support.

Date: 2020-01-13
