Crawling WeChat Official Account Articles with Python

Sogou's WeChat search (weixin.sogou.com) indexes WeChat official accounts and their articles, which gives us a crawlable entry point. It throttles repeated requests by 302-redirecting them to a verification page, so the script below crawls through rotating proxies: it pages through the search results for a keyword, fetches each article page, parses out the title, body, date, and account info, and upserts the result into MongoDB.

from urllib.parse import urlencode

import pymongo
import requests
from lxml.etree import XMLSyntaxError
from requests.exceptions import ConnectionError
from pyquery import PyQuery as pq
from config import *

client = pymongo.MongoClient(MONGO_URI)
db = client[MONGO_DB]

base_url = 'http://weixin.sogou.com/weixin?'

# copy the Cookie and User-Agent from your own logged-in browser session;
# the values below are from the original post and will have expired
headers = {
    'Cookie': 'IPLOC=CN1100; SUID=194E796A2E08990A000000005B114E85; SUV=1527860869604056; ABTEST=1|1527860872|v1; SNUID=9FCBFCEF8680EB12510E6A9C86088B29; weixinIndexVisited=1; JSESSIONID=aaaqa95rD87Zu9-CJwlnw; sct=5; ppinf=5|1527862844|1529072444|dHJ1c3Q6MToxfGNsaWVudGlkOjQ6MjAxN3x1bmlxbmFtZToyNzolRTclOEUlOEIlRTclOTAlQjMlRTYlOUQlQjB8Y3J0OjEwOjE1Mjc4NjI4NDR8cmVmbmljazoyNzolRTclOEUlOEIlRTclOTAlQjMlRTYlOUQlQjB8dXNlcmlkOjQ0Om85dDJsdUh5bE5VSDJEVWNuSHBDWnVOVG9sN2tAd2VpeGluLnNvaHUuY29tfA; pprdig=EZE8CVVtoUTqmCoJj6bEWwKngY4di5UpGDFImTA9-1qrMK_tIJEtUyGR9_0Jcv5Xw1EuqLO9BNFvAKQv5DOQvmCWh-jxudk7SGv89NuhCLow7dxPysoOtLSI-keSaKVLKT82Vhg7rDBg0SlQ3y2uiG53lBUWL0wLVw4D_f_7MLg; sgid=17-35315605-AVsRVjwpV4ichpAzPibp6olGY; ppmdig=1527862844000000243bdb95cb03e086685bb1de06087c32',
    'Host': 'weixin.sogou.com',
    'Upgrade-Insecure-Requests': '1',
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.7 Safari/537.36'
}

proxy = None

def get_proxy():
    try:
        response = requests.get(PROXY_POOL_URL)
        if response.status_code == 200:
            return response.text
        return None
    except ConnectionError:
        return None
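For context, get_proxy() assumes the pool behind PROXY_POOL_URL answers a plain GET with a single proxy as a bare host:port string; the pool itself isn't part of this post, so the response format below is inferred from how get_html() consumes it:

# Assumed proxy-pool interaction (format inferred from get_html, which
# prepends 'http://' to whatever get_proxy returns):
#
#   GET http://127.0.0.1:5555/random  ->  '123.45.67.89:8888'  (hypothetical address)
proxy = get_proxy()                     # e.g. '123.45.67.89:8888'
proxies = {'http': 'http://' + proxy}   # exactly what get_html builds from it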

def get_html(url, count=1):
    print('Crawling', url)
    print('Trying Count', count)
    global proxy
    if count >= MAX_COUNT:
        print('Tried Too Many Counts')
        return None
    try:
        if proxy:
            proxies = {
                'http': 'http://' + proxy
            }
            response = requests.get(url, allow_redirects=False, headers=headers, proxies=proxies)
        else:
            response = requests.get(url, allow_redirects=False, headers=headers)
        if response.status_code == 200:
            return response.text
        if response.status_code == 302:
            # Sogou answers 302 (a redirect to its verification page) when it
            # flags the current IP, so switch to a fresh proxy and retry
            print('302')
            proxy = get_proxy()
            if proxy:
                print('Using Proxy', proxy)
                count += 1
                return get_html(url, count)
            else:
                print('Get Proxy Failed')
                return None
    except ConnectionError as e:
        print('Error Occurred', e.args)
        proxy = get_proxy()
        count += 1
        return get_html(url, count)
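So each URL gets at most MAX_COUNT attempts: a 302 means Sogou has flagged the current IP and we retry through a fresh proxy, and a connection error triggers the same proxy swap. A hypothetical worst-case trace, reconstructed from the print calls above with MAX_COUNT = 5 and a pool of dead proxies:

# Crawling http://weixin.sogou.com/weixin?query=python&type=2&page=1
# Trying Count 1
# 302
# Using Proxy 123.45.67.89:8888   (hypothetical)
# ...
# Trying Count 5
# Tried Too Many Counts           -> returns None; main() skips this page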

def get_index(keyword, page):
    data = {
        'query': keyword,
        'type': 2,
        'page': page
    }
    queries = urlencode(data)
    url = base_url + queries
    html = get_html(url)
    return html
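As a concrete example, get_index('python', 1) requests the URL below; type=2 asks Sogou's WeChat search for article results (as opposed to account results), so pagination is just a matter of incrementing page:

# urlencode({'query': 'python', 'type': 2, 'page': 1})
#   -> 'query=python&type=2&page=1'
html = get_index('python', 1)
# fetches http://weixin.sogou.com/weixin?query=python&type=2&page=1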

def parse_index(html):
    doc = pq(html)
    items = doc('.news-box .news-list li .txt-box h3 a').items()
    for item in items:
        yield item.attr('href')

def get_detail(url):
    try:
        response = requests.get(url)
        if response.status_code == 200:
            return response.text
        return None
    except ConnectionError:
        return None

def parse_detail(html):
    try:
        doc = pq(html)
        title = doc('.rich_media_title').text()
        content = doc('.rich_media_content').text()
        # note: #publish_time is filled in by JavaScript on the live article
        # page, so it may come back empty from a plain requests fetch
        date = doc('#publish_time').text()
        nickname = doc('#js_profile_qrcode > div > strong').text()
        wechat = doc('#js_profile_qrcode > div > p:nth-child(3) > span').text()
        return {
            'title': title,
            'content': content,
            'date': date,
            'nickname': nickname,
            'wechat': wechat
        }
    except XMLSyntaxError:
        return None

def save_to_mongo(data):
    # upsert keyed on title: new articles are inserted, re-crawled ones updated
    # (update_one(..., upsert=True) replaces the deprecated collection.update)
    if db['articles'].update_one({'title': data['title']}, {'$set': data}, upsert=True):
        print('Saved to Mongo', data['title'])
    else:
        print('Saved to Mongo Failed', data['title'])
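Since the upsert is keyed on title, re-running the crawler refreshes existing documents instead of duplicating them. A quick sanity check of what landed in MongoDB (a sketch, assuming pymongo 3.7+ for count_documents and the config values below):

import pymongo

client = pymongo.MongoClient('localhost')   # MONGO_URI from config.py
db = client['weixin']                       # MONGO_DB from config.py
print(db['articles'].count_documents({}))   # number of stored articles
for doc in db['articles'].find().limit(3):  # peek at a few records
    print(doc['title'], doc['date'])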

def main():
    for page in range(1, 101):
        html = get_index(KEYWORD, page)
        if html:
            article_urls = parse_index(html)
            for article_url in article_urls:
                article_html = get_detail(article_url)
                if article_html:
                    article_data = parse_detail(article_html)
                    print(article_data)
                    if article_data:
                        save_to_mongo(article_data)

if __name__ == '__main__':
    main()

config.py

PROXY_POOL_URL = 'http://127.0.0.1:5555/random'  # proxy pool endpoint, returns one proxy per request
KEYWORD = 'python'                               # search keyword
MONGO_URI = 'localhost'
MONGO_DB = 'weixin'
MAX_COUNT = 5                                    # max attempts per URL in get_html
