PyTorch implementation of AlexNet

```python
import torch
from torch import nn


class AlexNet(nn.Module):
    def __init__(self, num_class=1000):
        super().__init__()
        # Input: 3 x 224 x 224
        self.features = nn.Sequential(
            nn.Conv2d(3, 96, kernel_size=11, stride=4, padding=0),  # output: 96 x 54 x 54
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=3, stride=2),                  # output: 96 x 26 x 26
            nn.BatchNorm2d(96),
            nn.Conv2d(96, 256, kernel_size=5, padding=2),           # output: 256 x 26 x 26
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=3, stride=2),                  # output: 256 x 12 x 12
            nn.BatchNorm2d(256),
            nn.Conv2d(256, 384, kernel_size=3, padding=1),          # output: 384 x 12 x 12
            nn.ReLU(),
            nn.Conv2d(384, 384, kernel_size=3, padding=1),          # output: 384 x 12 x 12
            nn.ReLU(),
            nn.Conv2d(384, 256, kernel_size=3, padding=1),          # output: 256 x 12 x 12
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=3, stride=2),                  # output: 256 x 5 x 5
            nn.Flatten()
        )
        self.classifier = nn.Sequential(
            nn.Linear(256 * 5 * 5, 4096),
            nn.ReLU(),
            nn.Dropout(p=0.5),
            nn.Linear(4096, 4096),
            nn.ReLU(),
            nn.Dropout(p=0.5),
            nn.Linear(4096, num_class)
        )

    def forward(self, x):
        x = self.features(x)
        x = self.classifier(x)
        return x
```
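Since `features` ends with `nn.Flatten()`, the output sizes noted in the comments can be sanity-checked by pushing a dummy batch through it layer by layer (a quick sketch, assuming a single 3 x 224 x 224 input):

```python
net = AlexNet()
X = torch.randn(1, 3, 224, 224)  # dummy batch: one 224 x 224 RGB image
for layer in net.features:
    X = layer(X)
    print(f"{layer.__class__.__name__:12} output shape: {tuple(X.shape)}")
print(f"classifier output shape: {tuple(net.classifier(X).shape)}")
```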

Network structure (as printed by `print(AlexNet())`):

```
AlexNet(
  (features): Sequential(
    (0): Conv2d(3, 96, kernel_size=(11, 11), stride=(4, 4))
    (1): ReLU()
    (2): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)
    (3): BatchNorm2d(96, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (4): Conv2d(96, 256, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2))
    (5): ReLU()
    (6): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)
    (7): BatchNorm2d(256, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
    (8): Conv2d(256, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (9): ReLU()
    (10): Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (11): ReLU()
    (12): Conv2d(384, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
    (13): ReLU()
    (14): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)
    (15): Flatten(start_dim=1, end_dim=-1)
  )
  (classifier): Sequential(
    (0): Linear(in_features=6400, out_features=4096, bias=True)
    (1): ReLU()
    (2): Dropout(p=0.5, inplace=False)
    (3): Linear(in_features=4096, out_features=4096, bias=True)
    (4): ReLU()
    (5): Dropout(p=0.5, inplace=False)
    (6): Linear(in_features=4096, out_features=1000, bias=True)
  )
)
```
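The listing also shows where the parameters live: more than half of them sit in the first fully connected layer (6400 x 4096). A quick count (a sketch; the total assumes the default `num_class=1000`):

```python
net = AlexNet()
n_params = sum(p.numel() for p in net.parameters())
print(f"total parameters: {n_params:,}")  # roughly 51 million with num_class=1000
```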

AlexNet is a classic deep-learning model for image classification tasks. Below is a basic PyTorch implementation of the AlexNet structure (note the halved channel counts of 48/128/192/192/128, matching a single GPU stream of the original two-GPU paper):

```python
import torch
import torch.nn as nn


class My_AlexNet(nn.Module):
    def __init__(self, num_classes=1000):
        super(My_AlexNet, self).__init__()
        # Feature extractor: five convolutions with ReLU, three max-pool stages
        self.features = nn.Sequential(
            nn.Conv2d(3, 48, kernel_size=11, stride=4, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
            nn.Conv2d(48, 128, kernel_size=5, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
            nn.Conv2d(128, 192, kernel_size=3, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(192, 192, kernel_size=3, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(192, 128, kernel_size=3, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2)  # yields 128 x 6 x 6 for a 224 x 224 input
        )
        # Classifier: fully connected layers with dropout against overfitting
        self.classifier = nn.Sequential(
            nn.Linear(128 * 6 * 6, 4096),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(4096, 4096),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(4096, 1000),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(1000, num_classes)
        )

    def forward(self, x):
        x = self.features(x)
        x = torch.flatten(x, start_dim=1)
        x = self.classifier(x)
        return x
```

In the forward pass, the model first extracts image features through a series of convolution and pooling layers, then classifies them with the fully connected layers. Each convolution is followed by a ReLU activation to introduce non-linearity, and Dropout layers between the fully connected layers help prevent overfitting. [1][2][3]

References:
1. pytorch实战2:基于pytorch实现AlexNet模型, https://blog.csdn.net/weixin_46676835/article/details/128730161
2., 3. pytorch实现AlexNet(含完整代码), https://blog.csdn.net/weixin_45836809/article/details/121690604
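Finally, to confirm the model trains end to end, here is a minimal single-step smoke test (a sketch: the random tensors stand in for a real dataset, and the 10-class setting, learning rate, and momentum are assumptions, not values from the cited posts):

```python
model = My_AlexNet(num_classes=10)    # hypothetical 10-class task
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.01, momentum=0.9)

images = torch.randn(8, 3, 224, 224)  # fake batch: 8 RGB images of 224 x 224
labels = torch.randint(0, 10, (8,))   # fake integer class labels in [0, 10)

model.train()                         # enable dropout during training
optimizer.zero_grad()
loss = criterion(model(images), labels)
loss.backward()
optimizer.step()
print(f"loss after one step: {loss.item():.4f}")
```

Remember to call `model.eval()` before validation or inference so the Dropout layers are disabled.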