Scraping proxy IPs and verifying their availability

#proxy.py
#coding=utf-8
from bs4 import BeautifulSoup
import http.client
import requests
import threading


# Shared state for the verification threads: the proxy list to read, the output
# file for working proxies, and a lock to serialize access to both.
open('proxy.txt', 'a').close()      # make sure proxy.txt exists before opening it for reading
inFile = open('proxy.txt', encoding='utf-8')
outFile = open('verified.txt', 'w', encoding='utf-8')
lock = threading.Lock()


def getProxyList(targeturl="http://www.xicidaili.com/nn/"):
    '''
    Scrape proxy entries from the target list pages and append them to proxy.txt.
    Returns the number of proxies collected for this target.
    '''
    countNum = 0
    # Append: this function is called once per proxy category, so mode 'w' would clobber earlier results.
    proxyFile = open('proxy.txt', 'a', encoding='utf-8')
    requestHeader = {'User-Agent': "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.80 Safari/537.36"}
    for page in range(1, 10):
        url = targeturl + str(page)
        html_doc = requests.get(url, headers=requestHeader).text
        soup = BeautifulSoup(html_doc, "html.parser")
        trs = soup.find('table', id='ip_list').find_all('tr')
        for tr in trs[1:]:
            tds = tr.find_all('td')
            # Country flag: a missing image means the location is unknown
            if tds[0].find('img') is None:
                nation = 'Unknown'
                locate = 'Unknown'
            else:
                nation = tds[0].find('img')['alt'].strip()
                locate = tds[3].text.strip()
            ip = tds[1].text.strip()
            port = tds[2].text.strip()
            anony = tds[4].text.strip()       # anonymity level
            protocol = tds[5].text.strip()    # HTTP / HTTPS
            speed = tds[6].find('div')['title'].strip()
            time = tds[8].text.strip()        # last verified time
            line = '|'.join([nation, ip, port, locate, anony, protocol, speed, time])
            print(line)
            proxyFile.write(line + '\n')
            countNum += 1
    
    proxyFile.close()
    return countNum


def verifyProxyList():
    '''
    Verify the scraped proxies by fetching a test page through each one.
    Working proxies are written to verified.txt.
    '''
    requestHeader = {'User-Agent': "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.80 Safari/537.36"}
    myurl = 'http://www.baidu.com/'

    while True:
        # Pull one proxy entry at a time; the lock keeps threads from interleaving reads.
        with lock:
            ll = inFile.readline().strip()
        if len(ll) == 0:
            break
        line = ll.split('|')
        protocol = line[5]    # kept for reference; only plain HTTP is tested here
        ip = line[1]
        port = line[2]

        try:
            # Ask the proxy to fetch the test page; anything but a 200 counts as a failure.
            conn = http.client.HTTPConnection(ip, int(port), timeout=5.0)
            conn.request(method='GET', url=myurl, headers=requestHeader)
            res = conn.getresponse()
            if res.status != 200:
                raise ValueError('unexpected status: %d' % res.status)
            conn.close()
            with lock:
                print("+++Success:" + ip + ":" + port)
                outFile.write(ll + "\n")
        except Exception:
            print("---Failure:" + ip + ":" + port)
    
if __name__ == '__main__':
    # Start from an empty proxy.txt so stale entries from earlier runs are discarded.
    tmp = open('proxy.txt', 'w')
    tmp.write("")
    tmp.close()
    proxynum = getProxyList("http://www.xicidaili.com/nn/")
    print("Domestic high-anonymity proxies: " + str(proxynum))
    proxynum = getProxyList("http://www.xicidaili.com/nt/")
    print("Domestic transparent proxies: " + str(proxynum))
    proxynum = getProxyList("http://www.xicidaili.com/wn/")
    print("Foreign high-anonymity proxies: " + str(proxynum))
    proxynum = getProxyList("http://www.xicidaili.com/wt/")
    print("Foreign transparent proxies: " + str(proxynum))

    print(u"\n验证代理的有效性:")
    
    # Launch 30 worker threads; each keeps pulling proxies from proxy.txt until it runs out.
    all_thread = []
    for i in range(30):
        t = threading.Thread(target=verifyProxyList)
        all_thread.append(t)
        t.start()

    for t in all_thread:
        t.join()
    
    inFile.close()
    outFile.close()
    print("All Done.")

 
