import gzip
import random
import re
import urllib.error
import urllib.parse
import urllib.request
import zlib
def open_url(url):
    """Fetch *url* with browser-like headers and return the body as bytes.

    Parameters
    ----------
    url : str
        Absolute URL to request.

    Returns
    -------
    bytes
        The decompressed response body.

    Raises
    ------
    urllib.error.URLError
        On connection failure or a 5-second timeout.
    """
    req_header = {'Connection': 'Keep-Alive',
                  'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:52.0) Gecko/20100101 Firefox/52.0',
                  'Accept': '*/*',
                  'Accept-Encoding': 'gzip, deflate',
                  'Accept-Language': 'zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3'}
    req = urllib.request.Request(url, None, req_header)
    with urllib.request.urlopen(req, None, timeout=5) as respond:
        html = respond.read()
        # Bug fix: we advertise gzip/deflate support above, but the original
        # code returned the raw (possibly still compressed) bytes. Decompress
        # according to the server's Content-Encoding so callers always get
        # plain payload bytes.
        encoding = respond.headers.get('Content-Encoding', '')
        if encoding == 'gzip':
            html = gzip.decompress(html)
        elif encoding == 'deflate':
            # Negative wbits: raw deflate stream without zlib header.
            html = zlib.decompress(html, -zlib.MAX_WBITS)
    return html
def get_proxy_urls(pages=10):
    """Build the kuaidaili.com proxy-list page URLs.

    Parameters
    ----------
    pages : int, optional
        Number of listing pages to generate URLs for (default 10,
        matching the original hard-coded behavior).

    Returns
    -------
    list[str]
        URLs for pages 1 through *pages*, in order.
    """
    # Comprehension replaces the manual append loop; page numbers are 1-based.
    return ['http://www.kuaidaili.com/proxylist/%d/' % page
            for page in range(1, pages + 1)]
de