import multiprocessing
import queue
import time

import requests
from lxml import etree
def get_proxy(queue):
    """Scrape proxy ip:port pairs from 66ip.cn and push them onto *queue*.

    Each item put on the queue is a string of the form 'http://<ip>:<port>'.
    Only the first result page of area 35 is fetched.

    :param queue: a multiprocessing.Queue the proxies are written to.
    """
    url = 'http://www.66ip.cn/areaindex_35/1.html'
    # Timeout keeps this producer process from hanging forever if the
    # site is unreachable (the original call had no timeout at all).
    response = requests.get(url, timeout=10)
    html_ele = etree.HTML(response.text)
    # The first <td> of each column is the table header row, hence [1:].
    ip_ele = html_ele.xpath('//*[@id="footer"]/div/table/tr/td[1]/text()')[1:]
    port_ele = html_ele.xpath('//*[@id="footer"]/div/table/tr/td[2]/text()')[1:]
    # Pair the ip and port columns directly instead of indexing by range().
    for ip, port in zip(ip_ele, port_ele):
        queue.put('http://' + ip + ':' + port)
def check_all_proxy(proxy):
    """Probe one proxy by fetching a Baidu search page through it.

    :param proxy: proxy URL string, e.g. 'http://1.2.3.4:8080'.
    :return: the proxy string if the request completed with HTTP 200,
             otherwise None.
    """
    url = 'http://www.baidu.com/s?wd=ip'
    proxy_dict = {
        'http': proxy
    }
    try:
        response = requests.get(url, proxies=proxy_dict, timeout=5)
    except Exception:
        # Connection error / timeout: the proxy is unusable.
        return None
    if response.status_code == 200:
        print('可用' + proxy)
        return proxy
    print('不可用')
    # BUG FIX: the original returned `proxy` here as well, so every
    # non-200 proxy was still collected as "valid" by the caller.
    return None
if __name__ == "__main__":
    # Queue carrying proxy strings from the producer process to this one.
    q = multiprocessing.Queue()
    # Producer process: scrape all proxies into the queue.
    p = multiprocessing.Process(target=get_proxy, args=(q,))
    p.start()
    # Worker pool: check each proxy's usability concurrently.
    pool = multiprocessing.Pool(5)
    result_list = []
    while True:
        try:
            proxy_str = q.get(timeout=5)
        except queue.Empty:
            # 5 s without a new item: assume the producer has finished.
            # (Catching queue.Empty instead of a bare `except` so that
            # KeyboardInterrupt etc. are no longer silently swallowed.)
            break
        result_list.append(pool.apply_async(check_all_proxy, (proxy_str,)))
    # Gather only the proxies that passed the check.
    valid_proxy_list = []
    for proxy_res in result_list:
        result = proxy_res.get()
        if result is not None:
            valid_proxy_list.append(result)
    print(valid_proxy_list)
    pool.close()
    pool.join()
    p.join()
# 多进程66代理测试 (multi-process 66ip proxy test)
# 最新推荐文章于 2024-03-18 13:59:25 发布 — stray blog footer text pasted
# along with the code; kept here as a comment so the file stays runnable.