import pymysql
import requests
import re
import time
import random
# Scrape proxy IP:port pairs from xicidaili.com listing pages, probe each
# proxy against a test URL, and insert the working ones into the `ipp`
# table of the `daili` MySQL database.

# Open the database connection: host, port 3306, user, password, schema.
# NOTE(review): credentials are placeholders — fill in before running.
db = pymysql.connect(host="localhost", port=3306, user="***",
                     password="****", db="daili", charset="utf8")
# Cursor used for all inserts below.
cursor = db.cursor()

header = {
    "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36"
}

num = 1  # number of listing pages to scrape
try:
    for g in range(1, num + 1):
        urll = "http://www.xicidaili.com/nt/" + str(g)
        print(urll)
        # timeout keeps an unreachable listing page from hanging the run
        page = requests.get(urll, headers=header, timeout=10).text

        # Each proxy row on the page contributes 5 <td> cells; cells 0 and 1
        # of every group of five are the IP and the port.
        pat = r'<td>(.*?)</td>'
        res = re.findall(pat, page)
        print(res)

        for i in range(len(res) // 5):
            ip = str(res[i * 5])
            port = str(res[i * 5 + 1])
            print("测试", ip, port, "中")
            proxy = {"http": ip + ":" + port}

            # Probe the proxy before storing it; a timeout prevents one dead
            # proxy from blocking forever.  Only network-level failures are
            # swallowed — anything else (e.g. KeyboardInterrupt) propagates.
            try:
                requests.get('http://www.dodblog.cn/', headers=header,
                             proxies=proxy, timeout=10)
            except requests.RequestException:
                continue  # dead proxy — skip it

            print("成功")
            # Random pause between successful probes to avoid hammering
            # the probe site (original behavior: 20-100 s).
            tim = random.randint(20, 100)
            time.sleep(tim)
            print(ip)
            j = i + 1
            print(j, ip, port)

            # Parameterized query — never interpolate scraped text into SQL.
            cursor.execute("insert into ipp (IP,PORT) values (%s,%s)",
                           (ip, port))
            db.commit()
            print(ip, port)
            print("success")
finally:
    # Always release the connection, even if scraping/inserting fails.
    db.close()