# -*- coding:utf-8 -*-
from bs4 import BeautifulSoup
import urllib, urllib2, sys, json, re, os, time, cgi
import string,time,datetime
from multiprocessing import Pool
import pymysql.cursors
from Queue import Queue
from random import choice
from random import Random
import datetime
import random
import requests
reload(sys)
sys.setdefaultencoding('utf-8')
# 要访问的目标页面
targetUrl = "http://httpbin.org/ip"
# 代理服务器
proxyHost = "18.224.225.175"
proxyPort = "8080"
proxyMeta = "http://%(host)s:%(port)s" % {
"host" : proxyHost,
"port" : proxyPort,
}
# 设置 http和https访问都是用HTTP代理
proxies = {
"http" : proxyMeta,
"https" : proxyMeta,
}
# 设置IP切换头
tunnel = random.randint(1,10000)
headers = {"Proxy-Tunnel": str(tunnel)}
resp = requests.get(targetUrl, proxies=proxies,headers=headers)
print resp.status_code
print resp.text
proxy_info = { 'host': proxyHost,'port': proxyPort}
proxy_support = urllib2.ProxyHandler({"http" :proxyMeta})
opener =urllib2.build_opener(proxy_support)
urllib2.install_opener(opener)
r = urllib2.Request(targetUrl, headers=headers)
response = urllib2.urlopen(r, timeout=30)
page = response.read()
print response
print page
# Execution result: (see printed status code, IP payload, and page body above)
# Example: using a proxy from Python