Repeatedly visiting your own blog (proxy IPs + Selenium)

The purpose of the code is right there in the title (grin). Below is a bare-bones demo of the core trick, followed by the full script.
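The core trick in miniature: point headless Chrome at a proxy and load the page once. This is just a sketch; the proxy address is a made-up example, and the options= keyword assumes Selenium 3.8 or newer (the full script below uses the older chrome_options= spelling).

from selenium import webdriver

opts = webdriver.ChromeOptions()
opts.add_argument('--headless')                                # no browser window
opts.add_argument('--proxy-server=http://202.20.16.82:10152')  # hypothetical proxy
driver = webdriver.Chrome(options=opts)
driver.get('https://blog.csdn.net/qq_41253208/article/details/86683757')
driver.quit()

The full script: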

import random, requests, re
from time import sleep
from selenium import webdriver

def make_chrome_options(proxy):
    # Build a fresh ChromeOptions per visit; reusing one global object would
    # keep appending --proxy-server arguments on every iteration.
    opts = webdriver.ChromeOptions()   # drive Chrome
    opts.add_argument('--headless')    # no visible browser window
    opts.add_argument('--disable-gpu')
    # Note: no spaces around '=', i.e. NOT --proxy-server = http://202.20.16.82:10152
    opts.add_argument('--proxy-server=http://' + proxy)
    return opts

user_agents = [    # pool of desktop Chrome User-Agent strings
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
    "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
    "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24"
    ]
url = 'http://basic.10jqka.com.cn/300001/equity.html'   # HTTP test target used by validateIp()

def get_random_header():    # build request headers with a random User-Agent
    headers = {'User-Agent': random.choice(user_agents),
               'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
               'Accept-Encoding': 'gzip'}
    return headers

def scraw_proxies(page_num, scraw_url="http://www.xicidaili.com/nt/"):
    # Scrape one page of the xicidaili free-proxy list; returns (host, port) tuples.
    print("Scraping proxy IPs from page %d" % page_num)
    url = scraw_url + str(page_num)
    r = requests.get(url, headers=get_random_header())
    r.encoding = 'utf-8'
    # Pick the IP and port cells out of each mainland-China ("Cn") table row.
    pattern = re.compile('<td class="country">.*?alt="Cn" />.*?</td>.*?<td>(.*?)</td>.*?<td>(.*?)</td>', re.S)
    scraw_ip = re.findall(pattern, r.text)
    print(scraw_ip)
    print('Scraping finished')
    return scraw_ip
    

def check(scraw_ip):
    # First pass: keep only the proxies that answer a plain requests call.
    ip_pass = []
    for ip in scraw_ip:
        ip = ip[0] + ":" + ip[1]
        result = validateIp(ip)
        print(ip, result)
        if result:
            ip_pass.append(ip)
    return ip_pass


def validateIp(proxy):    # does the proxy answer an HTTP request within 3 seconds?
    proxy_temp = {"http": "http://" + proxy}
    try:
        requests.get(url, proxies=proxy_temp, timeout=3)
        return True
    except Exception:
        return False



def check_again(url, ip_uncheck):
    # Second pass: drive headless Chrome through each proxy once and keep the
    # ones that can actually load the page (earning a page view along the way).
    i = 0
    ip_work = []
    for ip in ip_uncheck:
        print(ip)
        driver = webdriver.Chrome(chrome_options=make_chrome_options(ip))
        driver.set_page_load_timeout(30)  # cap the page-load wait
        driver.set_script_timeout(30)     # both timeouts must be set to take effect
        try:
            driver.get(url)
            sleep(5)
            i += 1
            ip_work.append(ip)    # the proxy works, keep it
            print("Visited " + str(i) + " time(s)")
            driver.quit()
        except Exception:
            driver.quit()
            print("useless")
    print("--THE IPs HAVE BEEN WASHED--")
    return ip_work

def visit(url, ip_work):
    # Visit the blog 100 times, each time through a random working proxy.
    i = 0
    while i < 100:
        ip = random.choice(ip_work)
        print(ip)
        driver = webdriver.Chrome(chrome_options=make_chrome_options(ip))
        driver.set_page_load_timeout(30)
        driver.set_script_timeout(30)  # both timeouts must be set to take effect
        try:
            driver.get(url)
            sleep(5)
            i += 1
            print("Visited " + str(i) + " time(s)")
            driver.quit()
        except Exception:
            driver.quit()
            print("-----------------")

if __name__ == '__main__':
    while True:
        available_ip = scraw_proxies(2)
        ip_uncheck = check(available_ip)
        ip_work = check_again('https://blog.csdn.net/qq_41253208/article/details/86683757',ip_uncheck)
        visit('https://blog.csdn.net/qq_41253208/article/details/86683757',ip_work)
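
One thing the script never verifies is that traffic actually leaves through the proxy rather than falling back to a direct connection. A quick check, sketched here as my own addition (the helper name and the httpbin.org endpoint are not part of the original script), is to ask an IP-echo service which address the request arrived from:

import requests

def proxy_exit_ip(proxy, timeout=5):
    # If the proxy is honored, the echoed origin is the proxy's address,
    # not your own.
    r = requests.get('http://httpbin.org/ip',
                     proxies={'http': 'http://' + proxy},
                     timeout=timeout)
    return r.json()['origin']

# e.g. print(proxy_exit_ip(ip_work[0]))

Two caveats: validateIp only exercises plain HTTP, while the blog URL is HTTPS, which Chrome tunnels through the proxy via CONNECT, so a proxy that passes the first check can still fail the Selenium pass. And in Selenium 4 the chrome_options= keyword was removed, so pass options= there instead.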

The IP-scraping part is adapted from https://blog.csdn.net/XRRRICK/article/details/78650764.
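The regex in scraw_proxies is tied to xicidaili's table markup as it looked at the time (the site has since gone offline), so if you swap in another free-proxy list, a looser parse is safer. A sketch, assuming only that the target page puts the IP and port in adjacent <td> cells:

import re

def parse_ip_port(html):
    # Match bare "IP</td><td>port" pairs, tolerant of the surrounding markup.
    return re.findall(r'(\d{1,3}(?:\.\d{1,3}){3})</td>\s*<td>(\d{1,5})', html)

It returns the same (host, port) tuples that check() expects.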
