Automatically parsing JSON to get proxies


import requests
from bs4 import BeautifulSoup
import time
from threading import Thread
import json


# Fetch the proxy list from the geonode API and decode the JSON response.
url = 'https://proxylist.geonode.com/api/proxy-list?limit=50&page=1&sort_by=lastChecked&sort_type=desc&speed=medium&protocols=http'
r = requests.get(url)
response = r.content.decode('utf-8')
body = json.loads(response)
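
# The keys accessed below suggest the response looks roughly like this
# (a sketch inferred from the fields this script uses, not the full schema):
# {
#     "total": 12345,
#     "data": [
#         {"ip": "1.2.3.4", "port": "8080", "protocols": ["http"], ...},
#         ...
#     ]
# }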



class Douban:

    def __init__(self):
        # Browser-like headers reused for every request in this script.
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36',
            'Referer': 'https://proxylist.geonode.com/api/proxy-list?limit=50&page=1&sort_by=lastChecked&sort_type=desc&speed=medium&protocols=http'
        }

 
    def get_page_html(self, url):
        # Download a page and return it parsed with BeautifulSoup.
        try:
            result = requests.get(url=url, headers=self.headers)
            result.raise_for_status()
            html = BeautifulSoup(result.text, 'lxml')
            return html
        except requests.RequestException as e:
            print('request failed: %s' % e)

    
    def get_proxy(self, html):
        # Build a list of (protocol, "ip:port") pairs from the JSON body.
        # The html argument is unused: the proxy data comes from the API JSON
        # fetched at module level, not from the parsed page.
        proxies = []
        for item in body['data']:
            tcp = item['protocols'][0]
            ip = item['ip']
            port = item['port']
            proxies.append((tcp, ip + ':' + str(port)))
        return proxies
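    # Example of the resulting list (illustrative values only):
    #   [('http', '1.2.3.4:8080'), ('http', '5.6.7.8:3128'), ...]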

   
    def test_proxies(self, proxies):
        # Check every proxy in its own thread so the tests run concurrently.
        for proxy in proxies:
            test = Thread(target=self.thread_test_proxy, args=(proxy[0], proxy[1]))
            test.start()

   
    def thread_test_proxy(self, tcp, address):
        # Request a known site through the proxy; keep it only if that succeeds.
        try:
            print('checking proxy %s' % address)
            result = requests.get('https://www.baidu.com/', headers=self.headers,
                                  proxies={tcp: address}, timeout=3)
            if result.status_code == 200:
                self.save_proxys((tcp, address))
        except Exception:
            pass

   
    def save_proxys(self, proxy):
        # Append each working proxy to the result file as "protocol ip:port".
        with open("./res/IP2.txt", 'a+') as f:
            f.write(proxy[0] + ' ' + proxy[1] + '\n')
            print(proxy[0] + ' ' + proxy[1])
            
    
    def func(self, base_url, page):
        # Fetch the listing page, extract the proxies, then test them all.
        try:
            time.sleep(1)
            url = base_url
            proxies = self.get_proxy(self.get_page_html(url))
            self.test_proxies(proxies)
        except Exception as e:
            print('error: %s' % e)
        


if __name__ == '__main__':
    obj = Douban()
    obj.func('https://proxylist.geonode.com/api/proxy-list?limit=50&page=1&sort_by=lastChecked&sort_type=desc&speed=medium&protocols=http', 2) 

    print('proxy checks started')
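
The saved file can then be fed back into requests. Below is a minimal sketch, assuming ./res/IP2.txt holds lines in the "protocol ip:port" format that save_proxys writes (the load_proxies helper and the test URL are illustrative, not part of the original script):

import requests

def load_proxies(path='./res/IP2.txt'):
    # Each line looks like "http 1.2.3.4:8080"; turn it into a requests-style proxies dict.
    proxies = []
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) == 2:
                scheme, address = parts
                proxies.append({scheme: scheme + '://' + address})
    return proxies

if __name__ == '__main__':
    for proxy in load_proxies():
        try:
            r = requests.get('https://www.baidu.com/', proxies=proxy, timeout=3)
            print(proxy, r.status_code)
        except requests.RequestException as e:
            print(proxy, 'failed:', e)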
