How a Python crawler extracts data: several data parsing and extraction approaches

import requests
from bs4 import BeautifulSoup
import json
import re
from lxml import etree
from urllib.parse import quote

header = {
    "Cookie": "BIDUPSID=1D3E686AE65F1365106D5F2B6DEDA5C9; PSTM=1584013332; BAIDUID=1D3E686AE65F13650E993E0898A5D1CA:SL=0:NR=10:FG=1; BDORZ=B490B5EBF6F3CD402E515D22BCDA1598; BDUSS=2IxN0pKRmVtR1lLV3JqY2ZUSXpFbmF5Q3htbU5wNTh3TWlTQU83a0J2Zy0xVkZmRVFBQUFBJCQAAAAAAAAAAAEAAACm4ladsfnWrsHox-UAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD5IKl8-SCpfQ; BDUSS_BFESS=2IxN0pKRmVtR1lLV3JqY2ZUSXpFbmF5Q3htbU5wNTh3TWlTQU83a0J2Zy0xVkZmRVFBQUFBJCQAAAAAAAAAAAEAAACm4ladsfnWrsHox-UAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD5IKl8-SCpfQ; Hm_lvt_55b574651fcae74b0a9f1cf9c8d7c93a=1596201563,1596236841,1596616615,1596697867; H_PS_PSSID=32292_1428_32301_32361_32327_31254_32349_32045_32394_32405_32429_32117_26350_32482; Hm_lpvt_55b574651fcae74b0a9f1cf9c8d7c93a=1596698546",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.105 Safari/537.36"
}

url = "https://baike.baidu.com/item/%E5%B0%8F%E6%98%8E/33553"
print(url)
r = requests.get(url, headers=header)
r.encoding = r.apparent_encoding
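Before parsing, it can help to confirm that the request actually succeeded; an optional sanity check using the standard requests API:

# optional: stop early if Baidu returns an error status instead of the entry page
r.raise_for_status()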

# Method 1: extract the information with the bs4 library
def get_bs4(r):
    soup = BeautifulSoup(r.content, "html.parser")
    # grab the first summary paragraph of the page
    str_one = soup.find("div", {"class": "para"})
    text = str_one.text
    print(str_one.text)
    # write with an explicit encoding so Chinese text is not mangled on Windows
    with open("D://download//demo_bs4.text", "w", encoding="utf-8") as file:
        file.write(text)
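The find() call above only returns the first paragraph. To collect every paragraph of the summary, bs4 also accepts CSS selectors; a minimal sketch, where the selector mirrors the page structure assumed by the XPath version further down:

def get_bs4_all(r):
    soup = BeautifulSoup(r.content, "html.parser")
    # select every "para" div inside the lemma summary and join their visible text
    paras = soup.select('div.lemma-summary div.para')
    print("".join(p.get_text() for p in paras))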

# Method 2: extract with the re library
def get_re(r):
    # the original pattern was lost when the post was published; this pattern,
    # which captures the body of the first "para" div, is a reconstruction
    str_two = re.findall(r'<div class="para"[^>]*>(.*?)</div>', r.text, re.S)
    print(type(str_two))
    # print(str_two)
    # print(r.text)
    with open("D://download//demo_re.text", "w", encoding="utf-8") as file:
        file.write(str(str_two[0]))

# get_re(r)
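Note that the regex capture still contains nested HTML tags (links, sup references and so on). A small helper, again built on re, can strip them before the text is saved; strip_tags is an illustrative name, not part of the original script:

def strip_tags(html_fragment):
    # remove anything that looks like an HTML tag, leaving only the visible text
    return re.sub(r'<[^>]+>', '', html_fragment)

# usage sketch: file.write(strip_tags(str_two[0])) instead of writing the raw match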

# Method 3: extract with XPath via lxml
def get_xpath(r):
    str_one = ""
    tree = etree.HTML(r.text)
    # collect the text nodes of every "para" element inside the lemma summary
    str_three = tree.xpath('//div[@class="lemma-summary"]//*[@class="para"]/text()')
    # print(str_three)
    for i in str_three:
        str_one = str_one + str(i)
    print(str_one)

#get_xpath(r)
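The json module is imported at the top but never used above. For completeness, when the target returns JSON (for example an API endpoint) rather than HTML, the parsing step is much simpler. A minimal sketch, assuming a hypothetical JSON endpoint; Baidu Baike entry pages themselves return HTML:

def get_json(api_url):
    # api_url is a placeholder for an endpoint that actually returns JSON
    resp = requests.get(api_url, headers=header)
    data = json.loads(resp.text)  # equivalently: resp.json()
    print(type(data), data)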
