Web Scraping, Part 4: bs4

An introduction to using bs4.

Beautiful Soup 4 documentation:

https://www.crummy.com/software/BeautifulSoup/bs4/doc/index.zh.html
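Before the full examples, here is a minimal sketch of the core bs4 workflow: build a soup from an HTML string, then query it with find/find_all or with CSS selectors via select. The HTML snippet below is made up purely for illustration.

from bs4 import BeautifulSoup

# Made-up HTML, just to show the API
html = '<div class="rt"><span>1024 jobs found</span></div>'

soup = BeautifulSoup(html, 'lxml')          # parse with the lxml parser
print(soup.find('div', class_='rt').text)   # lookup by tag name and class
print(soup.select('.rt span')[0].string)    # lookup by CSS selector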

Example: scraping the job-posting count from 51job
from bs4 import BeautifulSoup
import requests

def download(url):
    headers = {"User-Agent": "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)"}
    response = requests.get(url, headers=headers)
    html = response.content.decode('gbk')  # the page is GBK-encoded

    soup = BeautifulSoup(html, 'lxml')
    # Several ways to locate the job-count element
    # Option 1: find_all with tag name and class
    jobnum = soup.find_all('div', class_='rt')
    print(jobnum[0].text)

    # Option 2: a CSS class selector via select
    jobnum = soup.select('.rt')[0].string
    print(jobnum.strip())  # strip leading/trailing whitespace

download(url = "https://search.51job.com/list/000000,000000,0000,00,9,99,python,2,1.html?lang=c&stype=&postchannel=0000&workyear=99&cotype=99&degreefrom=99&jobterm=99&companysize=99&providesalary=99&lonlat=0%2C0&radius=-1&ord_field=0&confirmdate=9&fromType=&dibiaoid=0&address=&line=&specialarea=00&from=&welfare=")
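The same page also lists the individual postings. Below is a hedged sketch of collecting them into dicts in the shape that the save_jobs function further down expects; the div.el and span selectors are assumptions about the page's markup, not verified, so adapt them to the real HTML.

# Hypothetical: assumes each posting is a <div class="el"> whose first four
# <span>s hold title, location, type, and headcount; adjust to the real page.
def parse_jobs(soup):
    job_list = []
    for el in soup.select('div.el'):
        spans = el.select('span')
        if len(spans) < 4:
            continue  # skip header or malformed rows
        job_list.append({
            "name": spans[0].get_text(strip=True),
            "address": spans[1].get_text(strip=True),
            "type": spans[2].get_text(strip=True),
            "num": spans[3].get_text(strip=True),
        })
    return job_list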

Example: scraping stock data
import urllib
from urllib import request
from bs4 import BeautifulSoup

stockList = []

def download(url):
    headers = {"User-Agent": "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)"}
    req = urllib.request.Request(url, headers=headers)  # build the request with spoofed headers
    data = urllib.request.urlopen(req).read()           # send it and read the raw response body

    soup = BeautifulSoup(data, "html5lib", from_encoding="gb2312")
    mytable = soup.select("#datalist")
    for line in mytable[0].find_all("tr"):
        print(line.get_text())  # the full text of each table row
        print(line.select("td:nth-of-type(3)")[0].text)  # a specific cell: the third <td>

if __name__ == '__main__':
    download("http://quote.stockstar.com/fund/stock_3_1_2.html")
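stockList is declared above but never filled. A small sketch of how the row loop could populate it instead of only printing; the cell order is simply whatever the #datalist table provides.

# Hypothetical extension: gather each row's cell texts into stockList
def collect_rows(soup):
    for line in soup.select("#datalist")[0].find_all("tr"):
        cells = [td.get_text(strip=True) for td in line.find_all("td")]
        if cells:  # header rows use <th>, so they produce no <td> cells
            stockList.append(cells)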

Saving the data to a database

import pymysql

# Save a list of job dicts to MySQL
def save_jobs(job_list):

    # Connect to the database
    db = pymysql.connect(host="127.0.0.1", port=3306, user='root', password="root",
                         database='job', charset='utf8')
    # Cursor
    cursor = db.cursor()

    # Insert each job; parameterized queries avoid quoting and injection problems
    sql = 'insert into job(name, address, type, num) values (%s, %s, %s, %s)'
    for job in job_list:
        cursor.execute(sql, (job["name"], job["address"], job["type"], job["num"]))
    db.commit()  # commit once after the loop

    cursor.close()
    db.close()
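A quick usage sketch; the sample rows below are made up, and it assumes a local MySQL server with the job database and job table already created.

if __name__ == '__main__':
    # Hypothetical sample data, just to exercise save_jobs
    sample = [
        {"name": "Python Developer", "address": "Shenzhen", "type": "Full-time", "num": "3"},
        {"name": "Data Engineer", "address": "Beijing", "type": "Full-time", "num": "1"},
    ]
    save_jobs(sample)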