问题:用 Python 爬虫抓取拉勾网职位数据后,为什么爬虫拿到的结果与在浏览器中直接查看到的信息不一致?
代码如下:
import requests
from bs4 import BeautifulSoup
import json
def main():
    """Fetch the first page of Python job listings from Lagou's
    position-search Ajax endpoint and print the raw JSON response.

    Root cause of the "browser shows different data" problem: this endpoint
    expects a form-encoded POST (application/x-www-form-urlencoded), which is
    what the browser's XHR sends.  The original code posted a JSON body
    (json.dumps(...) plus a 'Content-type: application/json' header and a
    stale hard-coded 'Content-Length: 25'), so the server ignored the search
    parameters and returned its default / anti-crawler payload.  Posting the
    dict directly lets `requests` encode the form and compute Content-Length
    correctly.

    NOTE(review): the original paste had also lost all indentation and had
    the Cookie header string broken across two physical lines (both are
    SyntaxErrors); repaired here.  The cookies are a captured browser session
    and will expire — recapture them if the server starts rejecting requests.
    """
    # Hard-coded public HTTP proxy from the original code; it may well be
    # dead by now — remove this (and the proxies= argument) to connect
    # directly.
    proxies = {
        "http": "http://60.191.134.165:9999",
    }
    headers = {
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        # NOTE(review): 'br' advertises Brotli support; if the server
        # answers with Brotli and the brotli package is not installed,
        # decoding fails — drop 'br' in that case.
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'en-US,en;q=0.9',
        'Cache-Control': 'no-cache',
        'Connection': 'keep-alive',
        # Captured browser session — required by Lagou's anti-crawler checks.
        'Cookie': '_ga=GA1.2.1320949454.1514984795; _gid=GA1.2.1879436474.1514984795; user_trace_token=20180103210635-ed72273d-f086-11e7-9fc4-5254005c3644; LGUID=20180103210635-ed722d89-f086-11e7-9fc4-5254005c3644; hasDeliver=0; showExpriedIndex=1; showExpriedCompanyHome=1; showExpriedMyPublish=1; index_location_city=%E4%B8%8A%E6%B5%B7; JSESSIONID=ABAAABAAADEAAFIAE15C222689F961134964EAE30686CD3; _gat=1; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1514984795,1515032379,1515055492; LGSID=20180104164453-887600c8-f12b-11e7-be47-525400f775ce; PRE_UTM=m_cf_cpt_baidu_pc; PRE_HOST=bzclk.baidu.com; PRE_SITE=http%3A%2F%2Fbzclk.baidu.com%2Fadrc.php%3Ft%3D06KL00c00f7Ghk60yUKm0FNkUsaKlRdp00000PW4pNb00000V2KXI1.THL0oUhY1x60UWY4rj0knW03r7tdgvwM0ZnqmW03mW9huAfsnj0srjI9rfKd5HbkrDPjfW7KPbR1PYwAwHczwHKKwbmsPjNKnH01PHPj0ADqI1YhUyPGujY1njn1nW0dn10YFMKzUvwGujYkP6K-5y9YIZK1rBtEILILQhk9uvqdQhPEUitOIgwVgLPEIgFWuHdVgvPhgvPsI7qBmy-bINqsmsKWThnqn16Ln1m%26tpl%3Dtpl_10085_15730_11224%26l%3D1500117464%26attach%3Dlocation%253D%2526linkName%253D%2525E6%2525A0%252587%2525E9%2525A2%252598%2526linkText%253D%2525E3%252580%252590%2525E6%25258B%252589%2525E5%25258B%2525BE%2525E7%2525BD%252591%2525E3%252580%252591%2525E5%2525AE%252598%2525E7%2525BD%252591-%2525E4%2525B8%252593%2525E6%2525B3%2525A8%2525E4%2525BA%252592%2525E8%252581%252594%2525E7%2525BD%252591%2525E8%252581%25258C%2525E4%2525B8%25259A%2525E6%25259C%2525BA%2526xp%253Did%28%252522m6c247d9c%252522%29%25252FDIV%25255B1%25255D%25252FDIV%25255B1%25255D%25252FDIV%25255B1%25255D%25252FDIV%25255B1%25255D%25252FH2%25255B1%25255D%25252FA%25255B1%25255D%2526linkType%253D%2526checksum%253D220%26ie%3Dutf-8%26f%3D8%26ch%3D11%26tn%3D98012088_5_dg%26wd%3D%25E6%258B%2589%25E5%258B%25BE%25E7%25BD%2591%26oq%3D%25E6%258B%2589%25E5%258B%25BE%25E7%25BD%2591%26rqlang%3Dcn; PRE_LAND=https%3A%2F%2Fwww.lagou.com%2F%3Futm_source%3Dm_cf_cpt_baidu_pc; _putrc=7DC8891E31D8ED9B; login=true; unick=%E8%8B%97%E4%B9%A6%E5%AE%87; TG-TRACK-CODE=index_search; LGRID=20180104164506-90362101-f12b-11e7-be47-525400f775ce; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1515055505; SEARCH_ID=e78029bc203748cab2163c1545822c01',
        'Host': 'www.lagou.com',
        'Origin': 'https://www.lagou.com',
        'Pragma': 'no-cache',
        # The server validates Referer against the search page.
        'Referer': 'https://www.lagou.com/jobs/list_python?labelWords=&fromSearch=true&suginput=',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36',
        'X-Anit-Forge-Code': '0',
        'X-Anit-Forge-Token': 'None',
        'X-Requested-With': 'XMLHttpRequest',
        # Removed: hard-coded 'Content-Length' (requests computes it) and
        # 'Content-type: application/json' (the endpoint expects a form post).
    }
    # Search parameters: first page (pn=1) of the keyword "python".
    form_data = {
        "first": "true",
        "pn": "1",
        "kd": "python",
    }
    url = ('https://www.lagou.com/jobs/positionAjax.json'
           '?city=%E4%B8%8A%E6%B5%B7&needAddtionalResult=false&isSchoolJob=0')
    # Pass the dict via data= so requests sends an
    # application/x-www-form-urlencoded body, exactly like the browser XHR.
    result = requests.post(url, data=form_data, headers=headers,
                           proxies=proxies)
    # Surface HTTP-level failures (e.g. a dead proxy) instead of printing
    # an error page as if it were data.
    result.raise_for_status()
    print(result.text)


if __name__ == '__main__':
    main()