爬虫爬取快代理网站动态IP (Scraper for dynamic proxy IPs from the kuaidaili free-proxy site)
import requests, time
from lxml import etree
import time
import random

# NOTE(review): this appears to be a jd.com browser-session cookie (JSESSIONID, __jd* keys),
# yet it is sent to kuaidaili.com and baidu.com below — presumably copy-pasted and
# unnecessary for those hosts; confirm before removing.
cookie = """shshshfpa=baf64610-d2a6-0761-dd41-dd2abc541c0a-1602122238; __jdu=160212223903369816534; shshshfpb=yVpBghADH9esETOim4DLz2A%3D%3D; areaId=13; ipLoc-djd=13-1000-1002-0; TrackID=1mCGXQ_KDXhEAIfY7UTde_zOtAen6GoxzGZcMyBLRssMzPp_vDvgWkC7Kte0ayvtvvYNSwS7VUZ6jwYeVDwKbGOn33gEFv16Tt8xPIc-ivwo; pinId=N3uZ6aFqmKttEU4tp0fVXbV9-x-f3wj7; pin=jd_5fa1c56f34819; unick=%E9%83%AD%E5%BA%86%E6%B1%9D; ceshi3.com=000; _tp=fySfDglDqxh9lnk3%2FjqMKipqRmWr%2BuRWRKYMyntrQy4%3D; _pst=jd_5fa1c56f34819; jwotest_product=99; unpl=V2_ZzNtbUMERBQgAU9TeBxZAmJUEltLBxZGdVtGUS5NCANhBBINclRCFnUUR1NnGVUUZwUZXENcQxFFCEdkeBBVAWMDE1VGZxBFLV0CFSNGF1wjU00zQwBBQHcJFF0uSgwDYgcaDhFTQEJ2XBVQL0oMDDdRFAhyZ0AVRQhHZHseXQZnABFeQF9CF3cPTl17HlgHYjMiWnJncxN2C0BQeSldNWYzUAkeVEYVdwpOGXseXQZnABFeQF9CF3cPTl17HlgHYjMTbUE%3d; __jdv=76161171|baidu-pinzhuan|t_288551095_baidupinzhuan|cpc|0f3d30c8dba7459bb52f2eb5eba8ac7d_0_0b60d88624464f178adb1b14dee7761a|1624006835098; user-key=7174d1cb-8908-4901-836d-63d1d1f7c712; __jdc=122270672; __jda=122270672.160212223903369816534.1602122239.1624021969.1624069754.39; JSESSIONID=CFA1610910BB0B51F37AF7CEAA734B41.s1; 3AB9D23F7A4B3C9B=LKOARNNSX36NKIRVUCN4FCFKQIJXETIXQCC5MXMYCSJRVSTV6DQBISY6BVWS7V6G3PRKAQ7SPN64BML4WMZUYFJAA4; shshshfp=a25e89bc2bc70d081ba26f4a3a6fd2e5; shshshsID=b98e34beb41756511992266c5feae031_5_1624069855785; __jdb=122270672.5.160212223903369816534|39.1624069754"""

class Get_ip(object):
    """Scrape free proxy IPs from kuaidaili.com and validate them.

    Construction performs network I/O: it immediately scrapes the
    requested number of listing pages and stores the result in
    ``self.ip_list``.
    """

    # Shared request headers; hoisted out of the scrape loop (loop-invariant).
    HEADERS = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.101 Safari/537.36",
        "Cookie": cookie,
    }

    def __init__(self, page_num):
        """
        :param page_num: number of listing pages to scrape
        """
        self.url = "https://www.kuaidaili.com/free/inha/{}/"
        self.page_num = page_num
        self.ip_list = self.get_ip(self.page_num)

    def get_ip(self, page_num):
        """
        Scrape the proxy listing pages and collect proxy entries.

        :param page_num: number of pages to scrape
        :return: list of single-entry dicts like ``{"http": "1.2.3.4:8080"}``,
                 directly usable as the ``proxies`` argument of ``requests``
        """
        proxies = list()  # collected proxy dicts
        # BUG FIX: range(1, page_num) scraped one page fewer than requested.
        for page in range(1, page_num + 1):
            time.sleep(0.5)  # throttle to avoid hammering the site
            url = self.url.format(page)
            response = requests.get(url, headers=self.HEADERS)
            response.encoding = "utf-8"

            selector = etree.HTML(response.text)
            rows = selector.xpath('//table[@class="table table-bordered table-striped"]/tbody/tr')

            for tr in rows:
                ip = tr.xpath('./td[@data-title="IP"]/text()')[0]
                port = tr.xpath('./td[@data-title="PORT"]/text()')[0]
                # Column header is "类型" (type), e.g. "HTTP"/"HTTPS".
                proxy_type = tr.xpath('./td[@data-title="类型"]/text()')[0]

                # BUG FIX: requests expects lowercase scheme keys ("http"/"https").
                # With the site's uppercase value the proxy mapping never matched,
                # so requests connected directly and every proxy looked "good".
                dict_proxies = {proxy_type.lower(): ip + ":" + port}
                print(dict_proxies)
                proxies.append(dict_proxies)

        return proxies

    def check_ip(self):
        """
        Validate the scraped proxies by fetching baidu.com through each one.

        :return: list of proxy dicts that answered with HTTP 200
        """
        good_ip = list()
        for proxy in self.ip_list:
            try:
                # BUG FIX: timeout=0.1 rejected virtually every real proxy;
                # 3 seconds is a realistic upper bound for a free proxy.
                response = requests.get(
                    'https://www.baidu.com/',
                    headers=self.HEADERS,
                    proxies=proxy,
                    timeout=3,
                )
                if response.status_code == 200:
                    good_ip.append(proxy)
            except Exception as e:
                # Best-effort: a failing proxy is expected, just report and skip.
                print(e)
        return good_ip


import json


def main():
    """Scrape 50 pages of proxies, validate them, and append the result to ip.txt."""
    ip = Get_ip(50)
    check_ip = ip.check_ip()
    print(check_ip)

    # NOTE(review): appending a JSON array on every run produces a file that is
    # not itself valid JSON after the second run — confirm whether downstream
    # readers expect concatenated arrays or whether 'w' mode was intended.
    ip_str = json.dumps(check_ip)
    with open('ip.txt', 'a', encoding='utf-8') as f:
        f.write(ip_str)


# Guard so importing this module does not trigger a 50-page scrape.
if __name__ == "__main__":
    main()





  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值