import requests
import time
from bs4 import BeautifulSoup
import os
from multiprocessing.dummy import Pool as ThreadPool
from multiprocessing import Pool
from threading import Thread
import pandas as pd
from pandas import DataFrame,Series
import numpy as np
class MyThread(Thread):
    """Worker thread that scrapes novel-listing links from one index page.

    Downloads ``url``, decodes the GBK-encoded HTML, and appends every
    novel href found in the page's section containers to the shared
    ``urls3`` list (the caller supplies and later reads that list).
    """

    def __init__(self, url, urls3):
        # BUG FIX: the original called ``super(MyThread).__init__()``, which
        # builds an *unbound* super object and never runs Thread.__init__ —
        # the thread could not be started.  ``super().__init__()`` initializes
        # the Thread base class correctly.
        super().__init__()
        self.url = url      # page to scrape
        self.urls3 = urls3  # shared output list of collected hrefs

    def run(self):
        """Fetch the page and collect novel links into ``self.urls3``."""
        res = requests.get(self.url).content.decode('gbk')
        soup = BeautifulSoup(res, "html.parser")
        # Section containers on the index page (class/id names come from the
        # target site's markup).
        contents = soup.find_all("div", attrs={"class": "l"})            # popular novels
        contents2 = soup.find_all("div", attrs={"class": "r"})           # fantasy / xianxia / urban-romance novels
        contents3 = soup.find_all("div", attrs={"class": "novelslist"})  # recently updated novels
        contents4 = soup.find_all("div", attrs={"id": "newscontent"})

        for i, content in enumerate(contents):
            for dt in content.find_all("dt"):
                try:
                    self.urls3.append(dt.a.get("href"))
                except AttributeError:
                    # A <dt> without an <a> child (dt.a is None).  Narrowed
                    # from the original bare ``except Exception`` so real
                    # errors are no longer silently swallowed; keep the
                    # original best-effort behavior of logging the index.
                    print(i)
        for c in contents2:
            for li in c.find_all("li"):
                self.urls3.append(li.a.get("href"))
        for c in contents3:
            # Preserve original order: all <dt> links first, then all <li>.
            for dt in c.find_all("dt"):
                self.urls3.append(dt.a.get("href"))
            for li in c.find_all("li"):
                self.urls3.append(li.a.get("href"))
        for c in contents4:
            for li in c.find_all("li"):
                self.urls3.append(li.a.get("href"))

    def result(self):
        """Return the shared list of hrefs collected so far."""
        return self.urls3
classMyThread1(Thread):def__init__(self, zzz, i):super(MyThread).__init__()
self.zzz = zzz
self.i = i
self.contents =""
self.urls