A simple Python script for grabbing a small number of images

This program automatically crawls images matching a given keyword from Baidu Image Search. It uses the re module to filter the page content and urllib's parse to assemble the request URL (added mainly in the spirit of practicing more tools; it isn't really necessary to go to that much trouble). I also set up a list of User-Agent strings and use random to pick one of them for each request, and the fetched images are saved locally. The number of images this program can grab is fairly small; if you need a large batch you will have to extend the approach, and in fact adding a little more to this program is enough (a rough sketch of that extension follows the main code below).
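The UA list mentioned above lives in a separate local module named ua, which the script below imports with "from ua import ua_list" but which isn't included in the post. A minimal sketch of what ua.py might contain, assuming the User-Agent strings themselves are only illustrative examples:

# ua.py -- a plain list of User-Agent strings for the spider to pick from at random.
# The entries below are only examples; any reasonably current browser UA will do.
ua_list = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
    "(KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 "
    "(KHTML, like Gecko) Version/16.1 Safari/605.1.15",
    "Mozilla/5.0 (X11; Linux x86_64; rv:107.0) Gecko/20100101 Firefox/107.0",
]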

The code is as follows:

from urllib import parse
from ua import ua_list  # local module holding a list of User-Agent strings (see the sketch above)
import requests
import random
import re
import os


class ImageSpider(object):
    def __init__(self,keyword):
        self.url = 'https://image.baidu.com/search/index?tn=baiduimage&cl=2&lm&{}'

        # request headers; the Cookie below was copied from a logged-in browser
        # session on image.baidu.com, so you may need to replace it with your own
        self.headers = {
            'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9" ,
            'Accept-Encoding': "gzip, deflate, br",
            'Accept-Language': "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
            'Cookie': r'winWH=%5E6_1442x754; BDIMGISLOGIN=0; BDqhfp=%E7%8C%AB%26%260-10-1undefined%26%260%26%261; PSTM=1667830036; BIDUPSID=1DD2D460CD28A78C61A455B013785648; ZFY=exSvOfzZgzDWqEe7nW:AQ2nlMzk50xWo8ZQrdXJfonjc:C; BAIDUID=87E4BAFD410FCEB2CC9B21C331E40286:FG=1; BAIDUID_BFESS=87E4BAFD410FCEB2CC9B21C331E40286:FG=1; __bid_n=18483a81ae6a021a1d4207; wapbaikedeclare=showed; BDORZ=B490B5EBF6F3CD402E515D22BCDA1598; H_PS_PSSID=36543_37686_37493_37623_34813_37303_37722_37801_37536_37673_37741_26350_37790; PSINO=6; delPer=0; BAIDU_WISE_UID=wapp_1668874380241_77; BCLID=9001483227611951886; BCLID_BFESS=9001483227611951886; BDSFRCVID=BMIOJeCT5G3oo_TjfDU7bjPcQ2KK0gOTTPjcTR5qJ04BtyCVNwZ1EG0PtOpvTLPbwBlLogKK3mOTH4-F_2uxOjjg8UtVJeC6EG0Ptf8g0M5; BDSFRCVID_BFESS=BMIOJeCT5G3oo_TjfDU7bjPcQ2KK0gOTTPjcTR5qJ04BtyCVNwZ1EG0PtOpvTLPbwBlLogKK3mOTH4-F_2uxOjjg8UtVJeC6EG0Ptf8g0M5; H_BDCLCKID_SF=tb4toCtbtD83fP36q45HMt00qxby26nmye79aJ5nQI5nhn7vBpO8bx0fXM5BQPJIte3ion3vQUbmjRO206oay6O3LlO83h5wbaLDKl0MLPbcq-Q2Xh3D5UDjXMnMBMPeamOnaI-E3fAKftnOM46JehL3346-35543bRTLnLy5KJYMDFRD5_aD5JXDNRf-b-XKD600PK8Kb7VbIjqeMnkbft7jtte2-RdQIQhKnvHKJcmHUD4yU42bU473b3B5h3NJ66ZoIbPbPTTSlr6W6QpQT8r5-nj-q3rLIrf5JR1ab3vOIOzXpO1jKuzBN5thURB2DkO-4bCWJ5TMl5jDh3Mb6ksD-FtqjtjJRCjoK_bf-3bfTrP-trf5DCShUFs0MLJB2Q-5M-a3KOoDU72M4jUDftVMR32Q6jiWbRM2MbmLncjSM_GKfC2jMD32tbpWJbztmTxoUJ2B-J-ShRM-RJIhMAebPRiJ-r9Qg-JslQ7tt5W8ncFbT7l5hKpbt-q0x-jLTnhVn0MBCK0hItGDj05D6JM5pJfetjK2CntsJOOaCkV8j6Oy4oT35L1DUnyyh3B5ePDWJo_LnOGhlvoD-Jc3h0rMxbnQjQDWJ4J5tbX0MQFfUFlQft20b0gKhjnWpOab2rQ2n7jWhk5Dq72y58VQlRX5q79atTMfNTJ-qcH0KQpsIJM5-DWbT8IjHCeJTLDtnAHoKv5b-0_HRjYbb__-P4DeP5uBxRZ5m7n_l0bab7rSbcIQh_b-4Ir-nLHBMPj52On-UJ_KMbCMtj_D5OI5bFd34TfXjo43bRTLp7MbfbdVbo6L4vdhP-UyNbLWh37JPjlMKoaMp78jR093JO4y4Ldj4oxJpOJ5JbMopCafJOKHICmDj0beM5; H_BDCLCKID_SF_BFESS=tb4toCtbtD83fP36q45HMt00qxby26nmye79aJ5nQI5nhn7vBpO8bx0fXM5BQPJIte3ion3vQUbmjRO206oay6O3LlO83h5wbaLDKl0MLPbcq-Q2Xh3D5UDjXMnMBMPeamOnaI-E3fAKftnOM46JehL3346-35543bRTLnLy5KJYMDFRD5_aD5JXDNRf-b-XKD600PK8Kb7VbIjqeMnkbft7jtte2-RdQIQhKnvHKJcmHUD4yU42bU473b3B5h3NJ66ZoIbPbPTTSlr6W6QpQT8r5-nj-q3rLIrf5JR1ab3vOIOzXpO1jKuzBN5thURB2DkO-4bCWJ5TMl5jDh3Mb6ksD-FtqjtjJRCjoK_bf-3bfTrP-trf5DCShUFs0MLJB2Q-5M-a3KOoDU72M4jUDftVMR32Q6jiWbRM2MbmLncjSM_GKfC2jMD32tbpWJbztmTxoUJ2B-J-ShRM-RJIhMAebPRiJ-r9Qg-JslQ7tt5W8ncFbT7l5hKpbt-q0x-jLTnhVn0MBCK0hItGDj05D6JM5pJfetjK2CntsJOOaCkV8j6Oy4oT35L1DUnyyh3B5ePDWJo_LnOGhlvoD-Jc3h0rMxbnQjQDWJ4J5tbX0MQFfUFlQft20b0gKhjnWpOab2rQ2n7jWhk5Dq72y58VQlRX5q79atTMfNTJ-qcH0KQpsIJM5-DWbT8IjHCeJTLDtnAHoKv5b-0_HRjYbb__-P4DeP5uBxRZ5m7n_l0bab7rSbcIQh_b-4Ir-nLHBMPj52On-UJ_KMbCMtj_D5OI5bFd34TfXjo43bRTLp7MbfbdVbo6L4vdhP-UyNbLWh37JPjlMKoaMp78jR093JO4y4Ldj4oxJpOJ5JbMopCafJOKHICmDj0beM5; BDRCVFR[X_XKQks0S63]=mk3SLVN4HKm; RT="z=1&dm=baidu.com&si=08fda187-ac48-489e-a20c-8a5f8388d197&ss=lao4pplx&sl=2q&tt=2gbw&bcn=https%3A%2F%2Ffclog.baidu.com%2Flog%2Fweirwood%3Ftype%3Dperf&ld=2nc5p&ul=lwpdv&hd=lwphz"; firstShowTip=1; BDRCVFR[Q5XHKaSBNfR]=mk3SLVN4HKm; BDRCVFR[tox4WRQ4-Km]=mk3SLVN4HKm; BDRCVFR[-pGxjrCMryR]=mk3SLVN4HKm; indexPageSugList=%5B%22%E7%8C%AB%22%5D; cleanHistoryStatus=0; BDRCVFR[dG2JNJb_ajR]=mk3SLVN4HKm; userFrom=null; ab_sr=1.0.1_OWU5MTEzNjJjNWZmNDZlZGQ5OGIwNjRmNmRmMGJiN2MzZGE3Y2ZhNzM4ODUwODg2ZGU1MWQ4OTU0YWQwMWNmMjM4NzgwZmMwMDBmMTlhMWYwOWI0OGI0YjRlMjU2OTI5NTdlOWRiNjA1NjU1YzAzNWZiZjRjN2M1MjU1OTg0ZGY0ZjgxNjM4NTYyYmNmZTc0ZWYzY2NiNzIxZjBkM2RlOA==',
            'User-Agent' : random.choice(ua_list)
            }

        self.params = {
            'word' : keyword,
            'oq' : keyword
            }


    def request_html(self):
        full_url = self.url.format(parse.urlencode(self.params))  # urlencode the keyword and splice it onto the base URL

        response = requests.get(url=full_url,headers=self.headers)
        response.encoding = 'utf-8'
        return response.text

    def parse_data(self):
        content = self.request_html()

        # the result page embeds a JSON blob; pull every hoverURL image link out of it
        pattern = re.compile('"hoverURL":"(.*?)"', re.S)
        link_list = pattern.findall(content)
        return link_list

    def run(self):
        url_list = self.parse_data()
        save_dir = f"./save-webpage/{self.params['word']}"
        if not os.path.exists(save_dir):
            os.makedirs(save_dir)
        # download at most 20 images; some hoverURL matches are empty strings, so skip those
        for x, link in enumerate(url_list[:20], start=1):
            if not link:
                continue
            response = requests.get(url=link, headers=self.headers)
            path = f"{save_dir}/{self.params['word']}-{x}.jpg"
            with open(path, "wb") as f:
                f.write(response.content)
            print(f"{self.params['word']}-{x}.jpg -- saved")

if __name__ == '__main__':
    keyword = input("Enter a keyword: ")
    spider = ImageSpider(keyword=keyword)
    spider.run()
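
As mentioned in the introduction, the index page this spider requests only yields a few dozen links, so the loop above tops out at 20 images. One way to fetch a larger batch is to query Baidu's paged result interface and step through the pn offset. The function below is only a rough sketch of that idea, assuming the same imports as the script above; the acjson endpoint, its parameters, and the thumbURL field name are assumptions about how the paged interface behaves and may need adjusting against the live site.

# Rough sketch of a paginated fetch (endpoint and parameters are assumptions).
# Baidu also serves image results page by page from an "acjson" URL, where
# pn is the offset of the first result and rn is the page size.
def fetch_more_links(keyword, headers, pages=5, page_size=30):
    base = "https://image.baidu.com/search/acjson"
    links = []
    for page in range(pages):
        params = {
            "tn": "resultjson_com",
            "ie": "utf-8",
            "word": keyword,
            "queryWord": keyword,
            "pn": page * page_size,  # offset of the first result on this page
            "rn": page_size,         # number of results per page
        }
        resp = requests.get(url=base, params=params, headers=headers)
        # the body is JSON-like text, so the same regex trick as parse_data works
        links += re.findall('"thumbURL":"(.*?)"', resp.text)
    # drop empty matches before handing the list to the download loop
    return [link for link in links if link]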