import requests as r
from bs4 import BeautifulSoup
import pymysql as pq
# Fetch the 2020 Best Chinese Universities Ranking page and collect the
# table rows (<tr>) that the insert loop below parses.
url = 'https://www.shanghairanking.cn/rankings/bcur/202011'
# timeout: never hang forever on an unresponsive server.
req = r.get(url, timeout=10)
# Fail fast on 4xx/5xx instead of silently parsing an error page.
req.raise_for_status()
# The page may not declare its charset; trust the content-sniffed guess.
req.encoding = req.apparent_encoding
bs = BeautifulSoup(req.text, 'html.parser')
# One element per ranked university in the main ranking table.
datalist = bs.select('.rk-table tbody tr')
# Insert every scraped ranking row into MySQL.  The table relies on
# INSERT IGNORE + a UNIQUE index so re-running the script skips duplicates.
conn = None
cursor = None
try:
    print('连接到mysql服务器...')
    conn = pq.connect(
        host='127.0.0.1',
        port=3306,
        user='root',
        passwd='123456',
        db='中国大学排名',
        charset='utf8'
    )
    print('已连接上服务器!')
    cursor = conn.cursor()
    # Parameterized query: the driver handles quoting/escaping, so scraped
    # text cannot inject SQL, and numeric columns stay numeric (the old
    # string built with quoted '%d'/'%f' stored numbers as strings).
    # Hoisted out of the loop: the statement text never changes.
    sqlstring = (
        "INSERT IGNORE INTO `rankings`"
        "(`rank`, `cname`, `situ`, `types`, `score`) "
        "VALUES (%s, %s, %s, %s, %s)"
    )
    for data in datalist:
        cells = data.select('td')  # query the row's cells once, not five times
        order = int(cells[0].text.strip())
        university_name = cells[1].select('a')[0].text
        position = cells[2].text.strip()
        types = cells[3].text.strip()
        scores = float(cells[4].text.strip())
        cursor.execute(sqlstring, (order, university_name, position, types, scores))
        print(order, university_name, position, types, scores, '插入成功!')
    # Commit once after the whole batch: faster, and it makes the rollback
    # below meaningful (per-row commits made rollback a no-op).
    conn.commit()
except Exception as e:
    print(e)
    # conn may still be None if connect() itself failed.
    if conn is not None:
        conn.rollback()
finally:
    # Guard both handles: referencing them unconditionally raised NameError
    # whenever the connection attempt failed before they were assigned.
    if cursor is not None:
        cursor.close()
    if conn is not None:
        conn.close()
# NOTE: INSERT IGNORE works together with a UNIQUE index — a duplicate row is
# simply skipped instead of being inserted again.