安装
#win平台以管理员权限运行cmd
pip install beautifulsoup4
#验证是否安装成功
# Verify the installation: fetch a demo page and parse it with bs4.
import requests

r = requests.get("http://python123.io/ws/demo.html")
demo = r.text  # page HTML as a string

from bs4 import BeautifulSoup

# "html.parser" is Python's built-in HTML parser (no extra install needed).
soup = BeautifulSoup(demo, "html.parser")
# soup = BeautifulSoup('<p>data</p>','html.parser')
# Fixed typo: prettyfy -> prettify (prints the parse tree nicely indented).
print(soup.prettify())
基于bs4库的HTML内容遍历方法
1.下行遍历
# Downward traversal: from a tag to its children and descendants.
soup = BeautifulSoup(demo, "html.parser")
soup.head            # first <head> tag
soup.head.contents   # list of the tag's direct children
soup.body.contents[1]
# .children iterates direct children only; .descendants iterates all
# nested descendants (children, grandchildren, ...).
for child in soup.body.children:
    print(child)
# Fixed typo: descenants -> descendants.
for child in soup.body.descendants:
    print(child)
2.上行遍历
# Upward traversal: from a tag to its parents/ancestors.
soup = BeautifulSoup(demo, "html.parser")
soup.title.parent
soup.html.parent  # <html>'s parent is the BeautifulSoup object itself
# .parents iterates all ancestors, ending with the soup object whose
# own parent is None — so guard for None before reading .name.
for parent in soup.a.parents:
    if parent is None:
        print(parent)
    else:
        print(parent.name)
3.平行遍历
平行遍历发生在同一个父节点下的各节点间
# Sibling traversal: nodes sharing the same parent.
soup = BeautifulSoup(demo, "html.parser")
soup.a.next_sibling      # the single next sibling (may be a text node)
soup.a.previous_sibling  # the single previous sibling
# The iterable forms are the PLURAL attributes .next_siblings /
# .previous_siblings — iterating the singular .next_sibling would
# iterate that one node's children instead (original had a typo too).
for sibling in soup.a.next_siblings:
    print(sibling)
for sibling in soup.a.previous_siblings:
    print(sibling)
基于bs4库的HTML格式输出
# Fixed typo: rettify -> prettify; it returns the document re-indented,
# one tag per line, as a string.
print(soup.prettify())
bs4库将任何HTML输入都转换为 "utf-8" 编码输出
信息组织与提取方法
信息标记的三种形式
XML,JSON,YAML
提取HTML中所有URL链接
思路:
1)搜索到所有<a>标签
2)解析<a>标签格式,提取href后的链接内容
from bs4 import BeautifulSoup
import requests

# Extract every URL from the page: find all <a> tags, then read each
# tag's href attribute.
r = requests.get("http://python123.io/ws/demo.html")
demo = r.text
soup = BeautifulSoup(demo, "html.parser")
for link in soup.find_all('a'):
    print(link.get('href'))  # tag.get(attr) returns None if attr is absent
# Expected output:
#   http://www.icourse163.org/course/BIT-268001
#   http://www.icourse163.org/course/BIT-1001870001
基于bs4库的HTML内容查找方法
from bs4 import BeautifulSoup
import requests
import re

# find_all(name, attrs, recursive, string, **kwargs) search examples.
r = requests.get("http://python123.io/ws/demo.html")
demo = r.text
soup = BeautifulSoup(demo, "html.parser")

soup.find_all('a')            # all <a> tags
soup.find_all(['a', 'b'])     # a list matches any of the names

# find_all(True) matches every tag (fixed: the loop was missing its colon).
for tag in soup.find_all(True):
    print(tag.name)  # print every tag name in the document

# A compiled regex matches tag names containing 'b' (b, body, ...).
for tag in soup.find_all(re.compile('b')):
    print(tag.name)

soup.find_all('p', 'course')             # <p> tags with class "course"
soup.find_all(id='link1')                # match by attribute value
soup.find_all(id=re.compile('link'))     # attribute matched by regex
soup.find_all('a', recursive=False)      # direct children only, no descendants
soup.find_all(string="Basic Python")     # exact text-node match
soup.find_all(string=re.compile("python"))  # text nodes matching a regex