Scrapy 框架中 pipelines 的持久化操作:将爬取结果保存到 MongoDB 或本地文档中

item文件定义爬取的内容

import scrapy

class DailiItem(scrapy.Item):
    """One proxy-server entry scraped from xicidaili.com.

    Each field is filled by ``DailispiderSpider.parse`` from one ``<tr>``
    of the site's ``#ip_list`` table.
    """

    # URL of the country-flag image (taken from td[1]/img/@src).
    country = scrapy.Field()
    agent_ip = scrapy.Field()
    agent_port = scrapy.Field()
    agent_addr = scrapy.Field()
    anonymity = scrapy.Field()
    agent_type = scrapy.Field()
    # How long the proxy has been alive (survival time).
    survival_time = scrapy.Field()
    # When the proxy was last verified (verification time).
    verify_time = scrapy.Field()

dailiSpider蜘蛛文件

import scrapy
from urllib import parse

from daili.items import DailiItem


class DailispiderSpider(scrapy.Spider):
    """Crawl the xicidaili.com proxy listing and yield one DailiItem per row.

    Rows of the ``#ip_list`` table come in three flavours, distinguished by
    their ``class`` attribute:

    * ``''`` or ``'odd'`` — a data row holding one proxy entry;
    * no class attribute  — a header row whose ``<th><a>`` links to another
      listing page, which is followed recursively;
    * anything else (e.g. the column-title row) — ignored.
    """

    name = 'dailiSpider'
    allowed_domains = ['xicidaili.com']
    start_urls = ['http://www.xicidaili.com/']

    def parse(self, response):
        base_url = 'http://www.xicidaili.com/'
        tr_list = response.xpath('//*[@id="ip_list"]//tr')
        for tr in tr_list:
            tr_class = tr.xpath('@class').extract_first()
            if tr_class == '' or tr_class == 'odd':
                # Build a FRESH item per row.  (Reusing one shared instance
                # across yields lets later rows overwrite earlier ones before
                # the pipeline has consumed them.)
                item = DailiItem()
                item['country'] = tr.xpath('td[1]/img/@src').extract_first()
                item['agent_ip'] = tr.xpath('td[2]/text()').extract_first()
                item['agent_port'] = tr.xpath('td[3]/text()').extract_first()
                item['agent_addr'] = tr.xpath('td[4]/text()').extract_first()
                item['anonymity'] = tr.xpath('td[5]/text()').extract_first()
                item['agent_type'] = tr.xpath('td[6]/text()').extract_first()
                item['survival_time'] = tr.xpath('td[7]/text()').extract_first()
                item['verify_time'] = tr.xpath('td[8]/text()').extract_first()
                yield item
            elif tr_class is None:
                # Header row: follow its link to the next listing page.
                a_href = tr.xpath('th/a/@href').extract_first()
                url = parse.urljoin(base_url, a_href)
                yield scrapy.Request(url=url, callback=self.parse)

1.保存在MongoDB中

import pymongo

class DailiPipeline(object):
    """Persist every scraped proxy item into MongoDB (db ``daili``,
    collection ``agent``)."""

    def __init__(self):
        # NOTE(review): host/port are hard-coded; consider reading them from
        # Scrapy settings (crawler.settings) in a from_crawler classmethod.
        self.mongo_client = pymongo.MongoClient(host='47.98.173.29', port=27017)
        self.db = self.mongo_client.daili
        self.connection = self.db.agent

    def process_item(self, item, spider):
        # Collection.insert() was deprecated and removed in pymongo 4;
        # insert_one() is the supported single-document API.
        self.connection.insert_one(dict(item))
        return item

    def close_spider(self, spider):
        # Release the connection pool when the crawl finishes.
        self.mongo_client.close()

2.保存在本地.txt文档中

import os
import time
from urllib import request

class DailiPipeline(object):
    """Append each proxy item as one space-separated line of a dated
    ``Daili<YYYYMMDD>.txt`` file, downloading the country-flag image
    into the working directory the first time it is seen."""

    def process_item(self, item, spider):
        now = time.strftime('%Y%m%d', time.localtime())
        filename = 'Daili' + now + '.txt'
        with open(filename, 'a', encoding='utf-8') as fp:
            imgname = os.path.basename(item['country'])
            fp.write(imgname + ' ')
            if not os.path.exists(imgname):
                # Use a SEPARATE handle for the image: the original code
                # rebound `fp` here, which closed the text file and made
                # every following fp.write() raise on a closed file.
                with open(imgname, 'wb') as img_fp:
                    response = request.urlopen(item['country'])
                    img_fp.write(response.read())
            fp.write(item['agent_ip'] + ' ')
            fp.write(item['agent_port'] + ' ')
            fp.write(item['agent_addr'] + ' ')
            fp.write(item['anonymity'] + ' ')
            fp.write(item['agent_type'] + ' ')
            fp.write(item['survival_time'] + ' ')
            fp.write(item['verify_time'] + '\n\n')
            # (The original slept 1 s per item here; that only stalled the
            # pipeline and has been removed.)
        return item

3.保存在本地.json文档中

import codecs
import json
import time

from urllib import request

class DailiPipeline(object):
    """Append each proxy item as one JSON line (JSON Lines format) to a
    dated ``Daili<YYYYMMDD>.json`` file."""

    def process_item(self, item, spider):
        now = time.strftime('%Y%m%d', time.localtime())
        filename = 'Daili' + now + '.json'
        # Built-in open() with an encoding supersedes the legacy
        # codecs.open(); ensure_ascii=False keeps Chinese text readable.
        with open(filename, 'a', encoding='utf-8') as fp:
            line = json.dumps(dict(item), ensure_ascii=False) + '\n'
            fp.write(line)
        return item
  • 0
    点赞
  • 2
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值