Scraping Lagou.com with a Python Web Crawler

# -*- coding: utf-8 -*-
# NOTE: this script targets Python 2 (urllib2, urllib.quote, raw_input)

import re
import json
import urllib2
import urllib
import pandas as pd
import math

# Convert a string to its URL-encoded form
def StringToUrl(string):
    urlcode = urllib.quote(string)
    return urlcode
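As a quick sanity check (illustrative, run in a Python 2 shell): urllib.quote percent-encodes the UTF-8 bytes of the string, which is the form Lagou's query parameters expect.

# -*- coding: utf-8 -*-
# Illustrative check: urllib.quote percent-encodes the UTF-8 bytes
import urllib
print(urllib.quote('北京'))  # -> %E5%8C%97%E4%BA%AC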

# Get the total number of positions and the page count for a keyword
def GetPagenum(url, keyword, headers):
    values = {'first': 'true', 'pn': '1', 'kd': keyword}
    data = urllib.urlencode(values)
    req = urllib2.Request(url, data, headers)
    jsondata = urllib2.urlopen(req).read()
    totalCount = int(json.loads(str(jsondata))["content"]["positionResult"]["totalCount"])
    print('*** This search found %d positions ***' % totalCount)
    # Divide by 15.0, not 15: Lagou returns 15 positions per page, and
    # integer division under Python 2 would truncate before math.ceil runs
    pagenum = int(math.ceil(totalCount / 15.0))
    return pagenum
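The float division matters. Under Python 2, totalCount / 15 truncates before math.ceil ever sees it, so the last partial page of results would silently be skipped. For example:

import math
print(int(math.ceil(46 / 15)))    # 3 -- truncated first, last page lost
print(int(math.ceil(46 / 15.0)))  # 4 -- correct page count for 46 positions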

def LagouSpider(keyword):
    keyword_url = StringToUrl(keyword)
    # Lagou expects city names in Chinese, so they are kept as-is
    city_list = ['北京', '上海', '深圳', '广州', '杭州', '成都', '南京', '武汉', '西安', '厦门', '长沙', '苏州', '天津']
    for n, city in enumerate(city_list):
        print('*** Saving positions for ' + city + ' ***')
        city_url = StringToUrl(city)
        url = 'https://www.lagou.com/jobs/positionAjax.json?city=' + city_url + '&needAddtionalResult=false&isSchoolJob=0'
        Referer = 'https://www.lagou.com/jobs/list_' + keyword_url + '?city=' + city_url + '&cl=false&fromSearch=true&labelWords=&suginput='

        headers = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            # Accept-Encoding is deliberately omitted: urllib2 does not
            # decompress gzip responses, so advertising gzip would break json.loads
            'Accept-Language': 'zh-CN,zh;q=0.8,en;q=0.6,zh-TW;q=0.4',
            'Connection': 'keep-alive',
            # Content-Length is also omitted; urllib2 computes it from the POST body
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            # This cookie is session-bound and will expire; replace it with one
            # captured from your own browser session on lagou.com
            'Cookie': 'user_trace_token=20170912104426-9ba6e9c6-3053-45fd-9025-681bef8b0c8f; LGUID=20170916191219-e783b163-9acf-11e7-952a-525400f775ce; index_location_city=%E6%B7%B1%E5%9C%B3; TG-TRACK-CODE=index_search; _gid=GA1.2.1386711592.1505703954; _ga=GA1.2.351899359.1505560343; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1505560343,1505703955; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1505703988; LGRID=20170918110627-5c595dd3-9c1e-11e7-9196-5254005c3644; JSESSIONID=ABAAABAAAIAACBIF3290756E031DCE7CCEA3986CB372F49; SEARCH_ID=d30eb13562344eb9b5f6b8f05eb2cefc',
            'Host': 'www.lagou.com',
            'Origin': 'https://www.lagou.com',
            'Referer': Referer,
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.113 Safari/537.36',
            'X-Anit-Forge-Code': '0',
            'X-Anit-Forge-Token': 'None',
            'X-Requested-With': 'XMLHttpRequest'
        }

        Pagenum = GetPagenum(url, keyword, headers)
        for i in range(0, Pagenum):
            # 'first' is true only for the first page of a search
            if i == 0:
                values = {'first': 'true', 'pn': '1', 'kd': keyword}
            else:
                values = {'first': 'false', 'pn': (i + 1), 'kd': keyword}
            data = urllib.urlencode(values)
            req = urllib2.Request(url, data, headers)
            data = urllib2.urlopen(req).read()
            jsondata = json.loads(str(data))['content']['positionResult']['result']

            # Fields that are not useful for analysis are dropped from each record;
            # pop() with a default avoids a KeyError when a field is absent
            drop_keys = ['companyLogo', 'businessZones', 'explain', 'plus',
                         'gradeDescription', 'promotionScoreExplain', 'positionLables',
                         'district', 'adWord', 'appShow', 'approve', 'companyId',
                         'companyLabelList', 'deliver', 'imState', 'industryLables',
                         'pcShow', 'positionId', 'score', 'publisherId']
            for t in range(len(jsondata)):
                for key in drop_keys:
                    jsondata[t].pop(key, None)
                if t == 0:
                    rdata = pd.DataFrame(pd.Series(data=jsondata[t])).T
                else:
                    rdata = pd.concat([rdata, pd.DataFrame(pd.Series(data=jsondata[t])).T])

            if i == 0:
                citydata = rdata
            else:
                citydata = pd.concat([citydata, rdata])
            print('*** Saving page %d ***' % (i + 1))
        if n == 0:
            totaldata = citydata
        else:
            totaldata = pd.concat([totaldata, citydata])
    totaldata.to_excel('LagouSpider.xls', sheet_name='sheet1')
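Incidentally, growing a DataFrame row by row with pd.concat is quadratic in the number of rows. Since jsondata is already a list of dicts, each page can be converted in a single call; a minimal sketch of that alternative, assuming jsondata and drop_keys as defined above:

# Alternative sketch: build the page's DataFrame in one call instead of
# concatenating row by row (assumes jsondata and drop_keys from above)
rdata = pd.DataFrame(jsondata).drop(drop_keys, axis=1, errors='ignore')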

if __name__ == "__main__":
    keyword = raw_input('Enter the keyword to scrape: ')
    LagouSpider(keyword)
    print('***LagouSpider@Awesome_Tang***')
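Since urllib2 and raw_input exist only under Python 2, here is a minimal sketch of the same POST request using Python 3's standard library. The endpoint and form fields are taken from the script above; fetch_page is a hypothetical helper name, and Lagou's anti-scraping checks have changed over the years, so the headers (especially the cookie) would still need to come from a live browser session, and the request is not guaranteed to succeed.

# Minimal Python 3 sketch of the positionAjax.json POST used above
# (endpoint and form fields taken from the Python 2 script; headers,
# including a fresh cookie, must be supplied by the caller)
import json
import urllib.parse
import urllib.request

def fetch_page(keyword, city, page, headers):
    url = ('https://www.lagou.com/jobs/positionAjax.json?city='
           + urllib.parse.quote(city) + '&needAddtionalResult=false&isSchoolJob=0')
    values = {'first': 'true' if page == 1 else 'false', 'pn': page, 'kd': keyword}
    data = urllib.parse.urlencode(values).encode('utf-8')  # POST body must be bytes
    req = urllib.request.Request(url, data=data, headers=headers)
    return json.loads(urllib.request.urlopen(req).read().decode('utf-8'))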
