# This script uses Python 3 and the pymysql module to scrape some
# live-stream information from Huya (huya.com) and store it in a database.
# Step 1: scrape the live-stream info from the site and process it
# Step 2: save it to a local *.txt file
# Step 3: insert it into the database
import re
from urllib import request
import pymysql
class Spider():
    """Scrape anchor names and viewer counts from Huya's "Honor of Kings"
    category page, rank them by viewer count, then persist the result to a
    local text file and a MySQL table (``imooc.rank__list``).
    """

    # Category page to scrape, plus the regexes that pull out each anchor
    # card, the anchor's nickname, and the raw viewer-count text.
    url = 'https://www.huya.com/g/wzry'
    root_pattern = r'<span class="txt">([\s\S]*?)</li>'
    name_pattern = r'<i class="nick" title="([\s\S]*?)">'
    number_pattern = r'<i class="js-num">([\s\S]*?)</i>'

    def _fetch_content(self):
        """Download the category page and return it as a UTF-8 string."""
        # 'with' guarantees the HTTP response is closed even on error.
        with request.urlopen(Spider.url) as resp:
            return resp.read().decode('utf-8')

    def refine(self, anchors):
        """Flatten the single-element regex-match lists into plain strings.

        Returns a lazy map of {'name': str, 'number': str} dicts.
        """
        pick = lambda anchor: {'name': anchor['name'][0].strip(),
                               'number': anchor['number'][0]}
        return map(pick, anchors)

    def sort__seed(self, anchor):
        """Sort key: the viewer count as a float, expanding the Chinese
        "万" (x10,000) suffix.

        Bug fix: the old pattern ``'\\d*'`` dropped the fractional part, so
        '1.5万' sorted as 10000 instead of 15000; it also crashed with
        ``float('')`` when the text started with a non-digit.
        """
        digits = re.findall(r'\d+(?:\.\d+)?', anchor['number'])
        number = float(digits[0]) if digits else 0.0
        if '万' in anchor['number']:
            number *= 10000
        return number

    def sort__rank(self, anchors):
        """Return the anchors sorted by viewer count, highest first."""
        return sorted(anchors, key=self.sort__seed, reverse=True)

    def __analysis(self, htmls):
        """Extract raw (name, number) regex matches from the page HTML.

        Each entry's values are the *lists* returned by ``re.findall``;
        ``refine`` flattens them afterwards.
        """
        anchors = []
        for html in re.findall(Spider.root_pattern, htmls):
            anchors.append({
                'name': re.findall(Spider.name_pattern, html),
                'number': re.findall(Spider.number_pattern, html),
            })
        return anchors

    def __show(self, anchors):
        """Print each anchor to stdout (debugging helper, unused in go())."""
        for anchor in anchors:
            print('name : ' + anchor['name'] + ' number : ' + anchor['number'])

    def __write(self, anchors):
        """Dump the ranked anchors to a local caret-separated text file."""
        # Raw string: the old "d:\spider..." relied on '\s' not being a
        # valid escape; r"..." is byte-identical and future-proof.
        with open(r"d:\spider__write.txt", "w") as fr:
            fr.write('id+^+name+^+viewer__number\n')
            for anchor in anchors:
                fr.write(anchor['name'] + '^' + anchor['number'] + '\n')

    def read__db(self, anchors):
        """Load the rows written by __write into the rank__list table.

        NOTE(review): despite the name this method INSERTS into the DB;
        the name is kept for backward compatibility. The ``anchors``
        argument is unused -- data is re-read from the text file.
        """
        db = pymysql.connect(host="localhost", user="root",
                             password="123456", database="imooc")
        cursor = db.cursor()
        try:
            with open(r"d:\spider__write.txt", "r") as fr:
                for count, line in enumerate(fr, start=1):
                    if count == 1:
                        continue  # skip the header row
                    fields = line.strip().split('^')
                    # Parameterized query: the old %-formatted SQL was
                    # injectable via scraped anchor names.
                    cursor.execute(
                        "insert into rank__list(name,viewer__number) values(%s,%s)",
                        (fields[0], fields[1]))
            # Commit once after all rows; the old per-row commit made the
            # rollback below a no-op for already-committed rows.
            db.commit()
        except Exception as e:
            print(e)
            db.rollback()
        finally:
            # Always release DB resources, even when an error occurred.
            cursor.close()
            db.close()

    def go(self):
        """Run the full pipeline: fetch -> parse -> clean -> sort -> persist."""
        htmls = self._fetch_content()
        anchors = self.__analysis(htmls)
        anchors = list(self.refine(anchors))
        anchors = self.sort__rank(anchors)
        # self.__show(anchors)
        self.__write(anchors)
        self.read__db(anchors)
if __name__ == '__main__':
    # Guard the entry point so importing this module does not trigger a
    # network scrape plus a database write as a side effect.
    spider = Spider()
    spider.go()