# -*- coding: utf-8 -*-
import requests
from bs4 import BeautifulSoup
import pymysql
# Paging offsets for Douban Top250: 0, 25, ..., 225 (10 pages, 25 movies each).
# The original hand-rolled while/else counter is exactly range(0, 250, 25).
urls = list(range(0, 250, 25))
print("循环结束")
# Scrape every Top250 page listed in `urls` and collect one dict per movie:
# name, rank, rating, credits/year line, and the one-line quote ("inq").
data_list = []
for offset in urls:
    url = "https://movie.douban.com/top250?start=" + str(offset) + "&filter="
    print("开始爬取下一页")
    header = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) Chrome/65.0.3325.181'}
    request = requests.get(url=url, headers=header)
    soup = BeautifulSoup(request.text, 'lxml')
    # BUG FIX: the original bound this to `all` (shadowing the builtin) and
    # reused `l` as both the page-offset and per-item loop variable.
    items = soup.find_all("div", class_="item")
    for item in items:
        movie_name = item.find('div', class_="hd").text.split("/")[0].replace("\n", '').replace("\xa0", '')
        movie_top = item.find("em").text.replace("\n", '')
        movie_gre = item.find("span", class_="rating_num").text.replace("\n", '')
        movie_at = item.find("p", class_="").text.replace("\n", '').replace("\xa0", '').replace(" ", '').replace(" ", '')
        try:
            movie_con = item.find("span", class_="inq").text.replace("\n", '')
        except AttributeError:
            # BUG FIX: was a bare `except:` — the only expected failure is
            # find() returning None (movie has no quote); skip it, as before.
            continue
        data_list.append({'movie_name': movie_name, 'movie_top': movie_top, 'movie_gre': movie_gre,
                          'movie_at': movie_at, 'movie_con': movie_con})
# Persist the scraped movies: recreate the `movies` table, then insert every
# collected row via a parameterized statement.
db = pymysql.connect(host='127.0.0.1', user='root', password='123456', database='mysql')
cur = db.cursor()
try:
    cur.execute("DROP TABLE IF EXISTS movies")
    sql = """CREATE TABLE movies(
movie_name VARCHAR(200) NOT NULL,
movie_top VARCHAR(200) NOT NULL,
movie_gre VARCHAR(200) NOT NULL,
movie_at VARCHAR(200) NOT NULL,
movie_con VARCHAR(200) NOT NULL,
PRIMARY KEY (movie_top)
) ENGINE=MYISAM AUTO_INCREMENT=1 DEFAULT CHARSET=utf8;
"""
    cur.execute(sql)
    for data in data_list:
        print(data)
        cur.execute(
            "insert into movies(movie_name,movie_top,movie_gre,movie_at,movie_con)values(%s,%s,%s,%s,%s)",
            (data['movie_name'], data['movie_top'], data['movie_gre'], data['movie_at'], data['movie_con']),
        )
    # BUG FIX: pymysql's autocommit is off by default — without commit() the
    # inserts are rolled back when the connection drops (MyISAM happens to
    # ignore transactions, but the commit makes the script engine-agnostic).
    db.commit()
finally:
    # BUG FIX: the original never closed the cursor or connection.
    cur.close()
    db.close()
# 废话不多说,直接上代码 — "Without further ado, here is the code."
# 效果如下 — "The results are as follows."
# (Blog commentary from the original post, converted to comments so the
# file remains valid Python.)