在爬取类似 起点 色魔张大妈 这样的网站时,会被网站的反爬虫机制识别出来,这时需要伪装成浏览器以及使用IP代理的方式来爬取正常内容。
- 实例
-
# Scrape the Qidian "hot sales" ranking page through an HTTP proxy while
# spoofing a browser User-Agent, then extract book info with XPath.
# FIX: the original was collapsed onto one line, where the first inline `#`
# comment swallowed the rest of the statement stream; reconstructed here as
# runnable code with the same URL, proxy, headers and XPath expressions.
import re
import requests
import urllib.request
from lxml import etree

url = 'https://www.qidian.com/rank/hotsales'

# Proxy setup: route HTTP traffic through this address.
proxy = {'http': '125.127.24.239:808'}
proxy_support = urllib.request.ProxyHandler(proxy)
opener = urllib.request.build_opener(proxy_support)
# Browser spoofing: present a desktop Chrome User-Agent.
opener.addheaders = [('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36')]
# Install globally so plain urlopen() calls use the proxy + headers.
urllib.request.install_opener(opener)

html = urllib.request.urlopen(url).read().decode('utf-8')
# Alternative without the opener machinery:
# html = requests.get(url).text

# Each ranked book lives in a div.book-mid-info container.
selector = etree.HTML(html)
info = selector.xpath('//div[@class="book-mid-info"]')
for each in info:
    name = each.xpath('h4/a[@data-eid="qd_C40"]/text()')
    author = each.xpath('p[@class="author"]/a[@class="name"]/text()')
    target = each.xpath('p[@class="author"]/a[@data-eid="qd_C42"]/text()')
    intro = each.xpath('p[@class="intro"]/text()')[0]
    print(name)
    print(author)
    print(target)
    print(intro + '\n')
- 爬虫浏览器伪装
-
# Browser spoofing with urllib.request: install a global opener whose
# User-Agent header imitates a desktop Chrome browser.
import urllib.request

# Header tuple used to impersonate a real browser.
headers = ("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")

# Build an opener, attach the header, and install it globally so every
# subsequent urlopen() call carries the spoofed User-Agent.
opener = urllib.request.build_opener()
opener.addheaders = [headers]
urllib.request.install_opener(opener)

# Fetch the page; `url` is expected to be defined by the surrounding context.
data = urllib.request.urlopen(url).read().decode('utf-8', 'ignore')
- 设置代理
-
# Route all urllib requests through an HTTP proxy.
# FIX: urllib.request exposes ProxyHandler and HTTPHandler — the original
# called the nonexistent names `ProxyHandle` / `HTTPHandle`, which would
# raise AttributeError at runtime.

# Proxy address (host:port).
proxy_addr = "122.241.72.191:808"

# Map the scheme to the proxy and build an opener that uses it.
proxy = urllib.request.ProxyHandler({'http': proxy_addr})
opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)

# Install globally so plain urlopen() calls go through the proxy.
urllib.request.install_opener(opener)

# Fetch the page; `url` is expected to be defined by the surrounding context.
data = urllib.request.urlopen(url).read().decode('utf-8', 'ignore')
- 同时设置用代理和模拟浏览器访问
-
# Combine an HTTP proxy with browser User-Agent spoofing on one request.
# FIXES vs. the original:
#  - the add_header() call was truncated (missing closing quote and paren);
#  - ProxyHandler takes a dict, not a bare `"http":addr` expression;
#  - the correct names are ProxyHandler / HTTPHandler, not *Handle.

# Proxy address (host:port).
proxy_addr = "122.241.72.191:808"

# Build the request and attach a browser-like User-Agent header.
req = urllib.request.Request(url)
req.add_header("User-Agent", "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.221 Safari/537.36 SE 2.X MetaSr 1.0")

# Build and globally install an opener that routes through the proxy.
proxy = urllib.request.ProxyHandler({'http': proxy_addr})
opener = urllib.request.build_opener(proxy, urllib.request.HTTPHandler)
urllib.request.install_opener(opener)

# Open the prepared Request object (headers travel with it).
data = urllib.request.urlopen(req).read().decode('utf-8', 'ignore')
代理IP可以参考http://www.xicidaili.com/
内容参考于http://blog.csdn.net/d1240673769/article/details/74295148 感谢