scrapy图片

1: 在 settings.py 中设置图片文件存放的路径
IMAGES_STORE = "/home/xx/xx/xx/images/"

2:在pipelines.py 文件代码
import scrapy                                                                     
                                                                                  
from scrapy.utils.project import get_project_settings                             
from scrapy.pipelines.images import ImagesPipeline                                
import os                                                                         
                                                                                  
class LolspiderPipeline(ImagesPipeline):
    """Image pipeline: downloads item["image"] and renames the stored file
    to <IMAGES_STORE>/<item name>.png, recording the final path on the item.
    """

    # Storage root configured via IMAGES_STORE in settings.py.
    IMAGES_STORE = get_project_settings().get("IMAGES_STORE")

    def get_media_requests(self, item, info):
        """Yield one download request for the item's image URL."""
        image_url = item["image"]
        yield scrapy.Request(image_url)

    def item_completed(self, result, item, info):
        """Rename the downloaded file and store its path on the item.

        `result` is a list of (success, info_dict) tuples from ImagesPipeline;
        only successful downloads carry a "path" entry.
        """
        image_paths = [x["path"] for ok, x in result if ok]
        if not image_paths:
            # Download failed: keep the item, leave imagePath unset,
            # instead of crashing with IndexError as the original did.
            return item

        # Use os.path.join rather than string concatenation so the result
        # is correct whether or not IMAGES_STORE ends with a separator.
        new_path = os.path.join(self.IMAGES_STORE, item["name"] + ".png")
        os.rename(os.path.join(self.IMAGES_STORE, image_paths[0]), new_path)

        # BUG fix: the original stored the path WITHOUT the ".png" suffix,
        # so it never matched the renamed file on disk; it also printed
        # item["imagePath"][0] — only the first character of the string.
        item["imagePath"] = new_path
        print(item["imagePath"])

        return item


3: items.py 中的代码
import scrapy                                   
                                                
                                                
class xxxspiderItem(scrapy.Item):
    """Item holding one scraped image record."""
    # define the fields for your item here like:
    name = scrapy.Field()       # display name; the pipeline uses it to build the saved filename
    image = scrapy.Field()      # source URL of the image to download
    imagePath = scrapy.Field()  # set by the pipeline after download/rename

4: 在自己的爬虫(spider)代码中:
 def parse(self,response):                                          
     hero_list = response.xpath('//div[@class="mod-pic-bd"]//ul/li')
     i = 0                                                          
     for each in hero_list:                                         
        item = xxxspiderItem()                                      
        name = each.xpath('./div/text()').extract()[0]              
        image = each.xpath('./a/img/@src').extract()[0]             
        item['name'] = name                                         
        item['image'] = image                                       
        print(name)                                                 
        print(image)                                                
                                                                    
        yield item             


5: 保存json 文件的写法 在pipelines.py文件中
    
import scrapy
import json

class LolspiderPipeline(object):
    """Append each item to lol.json as one JSON object per line."""

    def __init__(self):
        # BUG fix: the original opened the file in text mode ("w") but then
        # wrote `text.encode("utf-8")` — bytes into a text stream raises
        # TypeError on the first item. Open with an explicit UTF-8 encoding
        # and write the str directly instead.
        self.filename = open("lol.json", "w", encoding="utf-8")

    def process_item(self, item, spider):
        """Serialize the (dict-like) item and append it to the file.

        Returns the item unchanged so later pipelines can process it.
        """
        text = json.dumps(dict(item), ensure_ascii=False) + ",\n"
        self.filename.write(text)
        return item

    def close_spider(self, spider):
        """Close the output file when the spider finishes."""
        self.filename.close()
                                                                                
  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值