Multithreaded scraping of Lianjia new-home listings

import requests
from lxml import etree
import threading
from queue import Queue, Empty
import time
import re
import pymysql


def is_none(message_list):
    # Return the first XPath match, or '无' (none) when nothing was matched.
    if message_list:
        return message_list[0]
    return '无'
class lianjia(threading.Thread):
    def __init__(self):
        super().__init__()  # initialise the parent Thread class

    def run(self):
        while True:
            try:
                # Non-blocking get avoids the race between q.empty() and
                # q.get() when several threads drain the queue at once.
                city = q.get(block=False)
            except Empty:
                break
            try:
                self.shuju(city)
            except Exception as exc:
                print('failed to scrape city:', exc)
    def shuju(self, city):
        # 'city' is an <a> element from the city list: its text is the city
        # name, its href the city sub-domain.
        city_name = city.xpath('./text()')[0]
        city_url = 'https:' + city.xpath('./@href')[0]
        print(city_name, city_url)
        url = city_url + '/loupan'
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36'}
        response1 = requests.get(url=url, headers=headers).text
        tree = etree.HTML(response1)
        # data-total-count is the total number of listings; each page shows
        # 10, so round up to get the number of pages.
        page = int(tree.xpath('//div[@class="page-box"]/@data-total-count')[0])
        if page % 10 == 0:
            page_num = page // 10
        else:
            page_num = page // 10 + 1

        for i in range(1, page_num + 1):
            url = city_url + '/loupan/pg' + str(i) + '/'
            print(url)
            response = requests.get(url=url, headers=headers).text
            tree = etree.HTML(response)
            # Each <li> under resblock-list-wrapper is one new-home listing.
            listpage_message = tree.xpath('//ul[@class="resblock-list-wrapper"]/li')
            for message in listpage_message:
                all_message_list = []
                image = message.xpath('./a/img/@data-original')
                image = is_none(image)
                all_message_list.append(image)
                building_name = message.xpath(
                    './div[@class="resblock-desc-wrapper"]/div[@class="resblock-name"]/a/text()')
                building_name = is_none(building_name)
                all_message_list.append(building_name)
                average_price1 = message.xpath(
                    './div[@class="resblock-desc-wrapper"]/div[@class="resblock-price"]/div[@class="main-price"]/span[@class="number"]/text()')
                average_price1 = is_none(average_price1)
                average_price2 = message.xpath(
                    './div[@class="resblock-desc-wrapper"]/div[@class="resblock-price"]/div[@class="main-price"]/span[@class="desc"]/text()')
                average_price2 = is_none(average_price2)
                average_price = average_price1 + average_price2.strip()
                all_message_list.append(average_price)
                building_area = message.xpath(
                    './div[@class="resblock-desc-wrapper"]/div[@class="resblock-area"]/span/text()')
                building_area = is_none(building_area)
                all_message_list.append(building_area)
                district = message.xpath(
                    './div[@class="resblock-desc-wrapper"]/div[@class="resblock-location"]/span[1]/text()')
                district = is_none(district)
                all_message_list.append(district)
                business_district = message.xpath(
                    './div[@class="resblock-desc-wrapper"]/div[@class="resblock-location"]/span[2]/text()')
                business_district = is_none(business_district)
                all_message_list.append(business_district)
                building_url = city_url + is_none(message.xpath(
                    './div[@class="resblock-desc-wrapper"]/div[@class="resblock-name"]/a/@href'))
                all_message_list.append(building_url)
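
The post breaks off before the main block that fills the queue and starts the worker threads. The sketch below shows one way it could be wired up; the city-list URL, the XPath for the city links, and the thread count of 4 are assumptions for illustration, not part of the original code.

if __name__ == '__main__':
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36'}
    # Assumed source of the city links: Lianjia's city index page.
    start_url = 'https://www.lianjia.com/city/'
    html = requests.get(url=start_url, headers=headers).text
    city_tree = etree.HTML(html)
    # Hypothetical XPath -- adjust it to the real structure of the city page.
    city_list = city_tree.xpath('//div[@class="city_list"]//li/a')

    q = Queue()
    for city in city_list:
        q.put(city)

    threads = [lianjia() for _ in range(4)]  # assumed number of workers
    for t in threads:
        t.start()
    for t in threads:
        t.join()

Note that shuju() builds each URL as 'https:' + href, so the hrefs fed into the queue are expected to be protocol-relative; if the city page returns absolute links, that line needs adjusting as well.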
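pymysql is imported at the top, but the part of the post that writes the scraped rows to MySQL is also missing. A minimal sketch of such an insert is shown below; the connection parameters, the database name house, the table loupan, and its column names are all made up for illustration and would have to match your own schema.

def save_row(all_message_list):
    # Assumed connection settings and table layout -- replace with your own.
    conn = pymysql.connect(host='localhost', user='root', password='root',
                           database='house', charset='utf8mb4')
    try:
        with conn.cursor() as cursor:
            cursor.execute(
                'INSERT INTO loupan (image, name, price, area, district, '
                'business_district, url) VALUES (%s, %s, %s, %s, %s, %s, %s)',
                all_message_list)
        conn.commit()
    finally:
        conn.close()

Opening a connection per row keeps the example short; in the threaded crawler each worker would normally hold its own connection, since a single pymysql connection should not be shared across threads.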