# Python爬取招聘网站 — Python scraper for a job-listing site (Lagou)

import time
import pymongo
import requests
from bs4 import BeautifulSoup

# MongoDB connection: database `mydb`, collection `lagou` holds scraped postings.
client=pymongo.MongoClient('localhost',27017)
mydb=client['mydb']
lagou=mydb['lagou']

# Request headers mimicking a desktop Chrome browser; Lagou blocks requests
# without a plausible browser fingerprint and a valid session cookie.
headers={
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'zh-CN,zh;q=0.9',
'Cache-Control': 'max-age=0',
'Connection': 'keep-alive',
'Cookie': '',# fill in your own logged-in session cookie here
'Host': 'www.lagou.com',
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36'
}

#s_position_list > ul > li:nth-child(2) > div.list_item_top > div.company > div.company_name > a


def get_page(url):
    """Fetch one Lagou search-result page and store each job posting in MongoDB.

    Parameters
    ----------
    url : str
        URL of a Lagou listing page (e.g. a /zhaopin/Python/<n>/ page).

    Raises
    ------
    requests.HTTPError
        If the server responds with a non-2xx status.
    """
    web_data = requests.get(url, headers=headers)
    # Fail fast on HTTP errors instead of silently parsing an error page.
    web_data.raise_for_status()
    # `web_data.text` is already a decoded str, so `from_encoding` would be
    # ignored (BeautifulSoup emits a warning for it) — omit it.
    soup = BeautifulSoup(web_data.text, 'html.parser')

    companynames = soup.select('.list_item_top > div.company > div.company_name > a')
    industrys = soup.select('div.list_item_top > div.company > div.industry')
    positions = soup.select('div.list_item_top > div.position > div.p_top > a > h3')
    addresses = soup.select('.list_item_top > div.position > div.p_top > a > span > em')
    moneys = soup.select('div.list_item_top > div.position > div.p_bot > div > span')
    advantages = soup.select('div.list_item_bot > div.li_b_r')
    #requires=soup.select('div.list_item_bot > div.li_b_l')

    for companyname, industry, position, address, money, advantage in zip(
            companynames, industrys, positions, addresses, moneys, advantages):
        data = {
            'companyname': companyname.get_text().strip(),
            'industry': industry.get_text().strip(),
            'position': position.get_text().strip(),
            'address': address.get_text().strip(),
            # .strip() like every other field — the original .split() stored a
            # list of whitespace-separated tokens instead of a string.
            'money': money.get_text().strip(),
            'advantage': advantage.get_text().strip()
        }
        print(data)
        # insert_one() replaces Collection.insert(), deprecated in pymongo 3+.
        lagou_id = lagou.insert_one(data).inserted_id
        time.sleep(1)  # throttle so the site doesn't flag us as a bot
        print(lagou_id)
        #output_html(data)
        print('----------------------------')

if __name__ == '__main__':
    # Bug fix: the original URL had no '{}' placeholder, so .format(str(i))
    # was a no-op and every iteration re-fetched page 1. Embed the page
    # number so pages 1-3 are actually crawled.
    urls = ['https://www.lagou.com/zhaopin/Python/{}/?filterOption=3'.format(i)
            for i in range(1, 4)]

    for url in urls:
        get_page(url)
# NOTE: CSDN blog-page artifacts (like/favorite/comment counters and
# red-packet/payment widget text) were captured with the article and have
# been removed here — they were never part of the source code.