A quick-and-dirty way to extract proxy IPs and ports from Xici (西刺), source code attached (crawler beginner here, please go easy)

```python
import re
from urllib import request

import chardet

# Optionally route the request itself through a known proxy.
proxy = {"http": "123.207.30.131:80"}
proxy_support = request.ProxyHandler(proxy)
opener = request.build_opener(proxy_support)
request.install_opener(opener)

url = "http://www.xicidaili.com/nn"
headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36"}

# IPs and ports sit inside <td> cells; pick them out with lookaround assertions.
patternIP = re.compile(r'(?<=<td>)\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}')
patternPORT = re.compile(r'(?<=<td>)\d{2,5}(?=</td>)')

req = request.Request(url, headers=headers)
response = request.urlopen(req)
html = response.read()

# Detect the page encoding and decode before matching, rather than regexing over str(bytes).
charset = chardet.detect(html)['encoding']
text = html.decode(charset or 'utf-8')

findIP = patternIP.findall(text)
findPORT = patternPORT.findall(text)

# Pair each IP with its port as "ip:port".
IP_data = []
for ip, port in zip(findIP, findPORT):
    IP_data.append(ip + ":" + port)

print(charset)
print(IP_data)
```
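Many of the proxies scraped this way are dead on arrival, so it usually pays to probe them before use. Below is a minimal sketch (not part of the original script) that tries each scraped `ip:port` against a test URL and keeps only the ones that respond; the `filter_working_proxies` helper name, the httpbin.org test URL, and the 5-second timeout are all illustrative assumptions.

```python
from urllib import request, error

def filter_working_proxies(proxies, test_url="http://httpbin.org/ip", timeout=5):
    """Return the subset of 'ip:port' strings that answer a simple HTTP request."""
    working = []
    for addr in proxies:
        handler = request.ProxyHandler({"http": addr})
        opener = request.build_opener(handler)
        try:
            opener.open(test_url, timeout=timeout)
            working.append(addr)
        except (error.URLError, OSError):
            # Dead, slow, or unreachable proxy: skip it.
            pass
    return working

# Example usage with the IP_data list produced above:
# good_proxies = filter_working_proxies(IP_data)
# print(good_proxies)
```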

 

LeNet is a classic convolutional neural network proposed by Yann LeCun in 1998 and widely used for handwritten digit recognition, text recognition, and similar tasks. LeNet5 is an improved variant of LeNet; the main changes are the added convolutional and pooling layers and the use of activation functions such as Sigmoid and Tanh. The structures of LeNet and LeNet5 and their code implementations are given below.

LeNet structure:
input layer (32x32 image) => conv layer 1 (six 5x5 kernels) => pooling layer 1 (2x2 max pooling) => conv layer 2 (sixteen 5x5 kernels) => pooling layer 2 (2x2 max pooling) => fully connected layer 1 (120 neurons) => fully connected layer 2 (84 neurons) => output layer (10 neurons)

LeNet5 structure:
input layer (32x32 image) => conv layer 1 (six 5x5 kernels) => pooling layer 1 (2x2 max pooling) => conv layer 2 (sixteen 5x5 kernels) => pooling layer 2 (2x2 max pooling) => conv layer 3 (one hundred twenty 5x5 kernels) => fully connected layer 1 (84 neurons) => output layer (10 neurons)

Full code implementation:

```python
import torch
import torch.nn as nn
import torch.nn.functional as F


class LeNet(nn.Module):
    def __init__(self):
        super(LeNet, self).__init__()
        self.conv1 = nn.Conv2d(1, 6, kernel_size=5)
        self.pool1 = nn.MaxPool2d(kernel_size=2)
        self.conv2 = nn.Conv2d(6, 16, kernel_size=5)
        self.pool2 = nn.MaxPool2d(kernel_size=2)
        self.fc1 = nn.Linear(16 * 5 * 5, 120)
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        x = F.relu(self.conv1(x))
        x = self.pool1(x)
        x = F.relu(self.conv2(x))
        x = self.pool2(x)
        x = x.view(-1, 16 * 5 * 5)
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.fc3(x)
        return x


class LeNet5(nn.Module):
    def __init__(self):
        super(LeNet5, self).__init__()
        self.conv1 = nn.Conv2d(1, 6, kernel_size=5)
        self.pool1 = nn.MaxPool2d(kernel_size=2)
        self.conv2 = nn.Conv2d(6, 16, kernel_size=5)
        self.pool2 = nn.MaxPool2d(kernel_size=2)
        self.conv3 = nn.Conv2d(16, 120, kernel_size=5)
        self.fc1 = nn.Linear(120, 84)
        self.fc2 = nn.Linear(84, 10)

    def forward(self, x):
        x = F.relu(self.conv1(x))
        x = self.pool1(x)
        x = F.relu(self.conv2(x))
        x = self.pool2(x)
        x = F.relu(self.conv3(x))
        x = x.view(-1, 120)
        x = F.relu(self.fc1(x))
        x = self.fc2(x)
        return x
```

That is the structural breakdown and full code implementation of LeNet and LeNet5.
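To sanity-check the layer arithmetic (with a 32x32 input, LeNet's feature maps shrink to 5x5 before the 16*5*5 flatten, and LeNet5's conv3 reduces them to 1x1), a quick forward pass with a dummy batch can be run. The snippet below is a minimal sketch added for illustration, not part of the original post.

```python
import torch

# Illustrative shape check: feed a dummy batch of four 1x32x32 images through each network.
if __name__ == "__main__":
    dummy = torch.randn(4, 1, 32, 32)

    lenet = LeNet()
    lenet5 = LeNet5()

    out1 = lenet(dummy)
    out2 = lenet5(dummy)

    # Both models should map (4, 1, 32, 32) -> (4, 10) class scores.
    print(out1.shape)  # torch.Size([4, 10])
    print(out2.shape)  # torch.Size([4, 10])
```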