# 一个简单的知乎爬虫,还有一些问题,后续搞定后补上
# (A simple Zhihu spider; some issues remain -- fixes to follow.)
# python版本: 3.3  (Python version: 3.3)
# -*- coding: utf-8 -*-
'''
Required
- requests (必须)
- pillow (可选)
Info
- author : "xchaoinfo"
- email : "xchaoinfo@qq.com"
- date : "2016.2.4"
Update
- name : "wangmengcn"
- email : "eclipse_sv@163.com"
- date : "2016.4.21"
'''
import requests
# cookielib was renamed to http.cookiejar in Python 3; try the Python 2
# name first so the script runs on either interpreter.  Catch only
# ImportError -- a bare except would also hide Ctrl-C and real bugs.
try:
    import cookielib
except ImportError:
    import http.cookiejar as cookielib
import re
import time
import os.path
# Pillow is optional: it is only used to pop up the captcha image.
# When it is missing, Image stays undefined and get_captcha() falls
# back to a manual prompt.
try:
    from PIL import Image
except ImportError:
    pass
# Build the request headers Zhihu expects from a browser.
agent = 'Mozilla/5.0 (Windows NT 5.1; rv:33.0) Gecko/20100101 Firefox/33.0'
headers = {
    "Host": "www.zhihu.com",
    "Referer": "https://www.zhihu.com/",
    'User-Agent': agent
}
# Reuse saved login cookies across runs via an on-disk cookie jar.
session = requests.session()
session.cookies = cookielib.LWPCookieJar(filename='cookies')
try:
    session.cookies.load(ignore_discard=True)
except OSError:
    # First run: the 'cookies' file does not exist yet, or it is not a
    # valid LWP jar (http.cookiejar.LoadError subclasses OSError).
    # A bare except here would also swallow KeyboardInterrupt.
    print("Cookie 未能加载")
def get_xsrf():
    """Fetch the Zhihu home page and extract the dynamic ``_xsrf`` token.

    The token is a hidden form field required by the login endpoints.

    Returns:
        str: the value of the ``_xsrf`` hidden input.

    Raises:
        ValueError: when the page contains no ``_xsrf`` field (the
            original code crashed with a bare IndexError on ``[0]``).
    """
    index_url = 'http://www.zhihu.com'
    index_page = session.get(index_url, headers=headers)
    # re.search is enough -- we only ever used the first match.
    match = re.search(r'name="_xsrf" value="(.*?)"', index_page.text)
    if match is None:
        raise ValueError("_xsrf token not found on %s" % index_url)
    return match.group(1)
# Fetch the login captcha.
def get_captcha():
    """Download the login captcha, try to display it, and prompt the user.

    Saves the image as ``captcha.jpg`` in the working directory, shows it
    with Pillow when available, and returns whatever the user types in.

    Returns:
        str: the captcha text entered by the user.
    """
    # Cache-busting timestamp in milliseconds, as the site expects.
    t = str(int(time.time() * 1000))
    captcha_url = 'http://www.zhihu.com/captcha.gif?r=' + t + "&type=login"
    r = session.get(captcha_url, headers=headers)
    # 'with' already closes the file; the original's extra f.close()
    # inside the block was redundant.
    with open('captcha.jpg', 'wb') as f:
        f.write(r.content)
    # Display the captcha with Pillow's Image when it was imported;
    # otherwise tell the user where to find the file and type it in.
    try:
        im = Image.open('captcha.jpg')
        im.show()
        im.close()
    except Exception:
        # Pillow missing (NameError on Image) or unable to open/show the
        # image -- fall back to the manual prompt below either way.
        print(u'请到 %s 目录找到captcha.jpg 手动输入' % os.path.abspath('captcha.jpg'))
    captcha = input("please input the captcha\n>")
    return captcha
def isLogin():
    """Return True when the saved session cookie still authenticates.

    The personal settings page answers 200 for a logged-in session; for
    anonymous visitors it redirects, which we see as a non-200 status
    because redirects are disabled.
    """
    url = "https://www.zhihu.com/settings/profile"
    response = session.get(url, headers=headers, allow_redirects=False)
    # The comparison already yields the boolean -- no if/else needed.
    return response.status_code == 200
def login(secret, account):
    """Log in to Zhihu with a phone number or an e-mail address.

    Args:
        secret: the account password.
        account: an 11-digit Chinese mobile number or an e-mail address.

    Returns:
        0 when *account* is neither a phone number nor an e-mail;
        otherwise None.  On success the session cookies are saved to disk.
    """
    import json  # stdlib; local import keeps this function self-contained

    # An 11-digit number starting with 1 is treated as a phone login.
    if re.match(r"^1\d{10}$", account):
        print("手机号登录 \n")
        post_url = 'http://www.zhihu.com/login/phone_num'
        postdata = {
            '_xsrf': get_xsrf(),
            'password': secret,
            'remember_me': 'true',
            'phone_num': account,
        }
    else:
        # Guard clause: anything without '@' is not a usable account.
        if "@" not in account:
            print("你的账号输入有问题,请重新登录")
            return 0
        print("邮箱登录 \n")
        post_url = 'http://www.zhihu.com/login/email'
        postdata = {
            '_xsrf': get_xsrf(),
            'password': secret,
            'remember_me': 'true',
            'email': account,
        }
    try:
        # First attempt: log in without a captcha.
        login_page = session.post(post_url, data=postdata, headers=headers)
        print(login_page.status_code)
        print(login_page.text)
    except Exception:
        # Retry with a captcha when the plain attempt raised.
        postdata["captcha"] = get_captcha()
        login_page = session.post(post_url, data=postdata, headers=headers)
        # json.loads replaces the original eval(): never eval() a network
        # response -- that executes arbitrary code from the server.
        login_code = json.loads(login_page.text)
        print(login_code['msg'])
    session.cookies.save()
# try:
# input = raw_input
# except:
# pass
from lib3 import spider
import json
class ZhiHuSpider:
    """Crawl a Zhihu question page and print the answer authors found."""

    def __init__(self, userAgent, account, secret):
        # lib3.spider.Spider wraps the HTTP session used for crawling.
        self.sp = spider.Spider(userAgent)
        self.loginStatus = self.preLogin(account, secret)

    def preLogin(self, account, secret):
        """Return True when a saved session is still valid, else log in.

        NOTE(review): even after a *successful* fresh login() this still
        returns False, so go() refuses to run until the next start.
        Kept as-is to preserve the original behaviour.
        """
        if isLogin():
            return True
        login(secret, account)
        return False

    def getUrlToken(self, url):
        """Return the last path segment of *url* (the question id)."""
        # Equivalent to the original strip-"http://"-then-split dance:
        # the last '/'-separated piece is the same either way.
        return url.split("/")[-1]

    def go(self, pageUrl):
        """Fetch *pageUrl* and print (author name, profile URL) pairs."""
        if not self.loginStatus:
            print("can not login")
            return
        content = self.sp.sessionGetHttp(pageUrl).text
        strPattern = '''<a class="author-link".*?data-hovercard=".*?".*?target="_blank" href="(.*?)".*?>(.*?)</a>'''
        regContentList = self.sp.parseReg(content, strPattern, 2)
        urlToken = int(self.getUrlToken(pageUrl))
        # Paged answers endpoint, e.g.
        # {"url_token":20618133,"pagesize":10,"offset":20}
        c = self.sp.sessionPostHttp(
            "https://www.zhihu.com/node/QuestionAnswerListV2",
            param=json.dumps({"method": "next",
                              "params": {"url_token": urlToken,
                                         "pagesize": 10,
                                         "offset": 10}}))
        print(c.text)
        # NOTE: this POST endpoint does not work yet (original: 此处没走通).
        # Iterate the matches directly instead of range(len(...)).
        for item in regContentList:
            print(item[1], "https://www.zhihu.com" + item[0])
if __name__ == "__main__":
    # Run the demo only when executed as a script, not on import.
    # Replace "账号"/"密码" with a real account and password.
    demo = ZhiHuSpider(
        "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko",
        "账号", "密码")
    demo.go("https://www.zhihu.com/question/20618133")