python beautifulsoup多线程分析抓取网页

python数据抓取

# -*- coding: utf-8 -*-
# BUG FIX: the original line read `encoding=utf-8` without the leading `#`,
# which is an invalid statement (NameError on `utf`); it must be a comment.
# @description: spider that fetches pages with worker threads (Python 2).
import Queue
import threading
import urllib, urllib2
import time
from BeautifulSoup import BeautifulSoup

# Pages to fetch.
hosts = ["http://www.baidu.com", "http://www.163.com"]

# `queue` carries URLs to the fetcher threads; `out_queue` carries raw
# page bodies to the parser thread.
queue = Queue.Queue()
out_queue = Queue.Queue()
class ThreadUrl(threading.Thread):
    """Fetcher thread: takes a URL from `queue`, downloads the page body,
    and puts the raw bytes onto `out_queue` for the parser thread."""

    def __init__(self, queue, out_queue):
        threading.Thread.__init__(self)
        self.queue = queue
        self.out_queue = out_queue

    def run(self):
        # Install the proxy-aware opener ONCE, outside the loop:
        # install_opener() mutates process-global state, so repeating it on
        # every iteration (as the original did) was redundant work.
        proxy_support = urllib2.ProxyHandler({'http': 'http://xxx.xxx.xxx.xxxx'})  # proxy IP placeholder
        opener = urllib2.build_opener(proxy_support, urllib2.HTTPHandler)
        urllib2.install_opener(opener)
        while True:
            # Blocks until a host is available.
            host = self.queue.get()
            try:
                # BUG FIX: the original called urllib.urlopen(), which
                # IGNORES the opener installed via urllib2.install_opener()
                # — the configured proxy was never used. urllib2.urlopen()
                # goes through the installed opener.
                url = urllib2.urlopen(host)
                try:
                    chunk = url.read()
                finally:
                    # Close the response to avoid leaking the socket.
                    url.close()
                # Hand the raw page body to the parser thread.
                self.out_queue.put(chunk)
            finally:
                # Always signal completion, even if the fetch raised —
                # otherwise queue.join() in main() would hang forever.
                self.queue.task_done()
class DatamineThread(threading.Thread):
    """Threaded Url Grab"""
    def __init__(self, out_queue):
        threading.Thread.__init__(self)
        self.out_queue = out_queue
    def run(self):
        while True:
            #grabs host from queue
            chunk = self.out_queue.get()
            #parse the chunk
            soup = BeautifulSoup(chunk)
            print soup.findAll(['title'])
            #signals to queue job is done
            self.out_queue.task_done()
start = time.time()
def main():
    #spawn a pool of threads, and pass them queue instance
    t = ThreadUrl(queue, out_queue)
    t.setDaemon(True)
    t.start()
    #populate queue with data
    for host in hosts:
        queue.put(host)
    dt = DatamineThread(out_queue)
    dt.setDaemon(True)
    dt.start()
    #wait on the queue until everything has been processed
    queue.join()
    out_queue.join()
main()
print "Elapsed Time: %s" % (time.time() - start)

 

 

 

转载于:https://my.oschina.net/zhangdapeng89/blog/53636

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值