Crawling app APK files from the Xiaomi app store with Python

This article is adapted and improved from others' work.

It is intended for personal technical study only and must not be used to harm the rights or interests of any person or group!

Please use it legally and in compliance with regulations!

The script needs to run under Python 3.6.
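
The script depends on the third-party packages requests and beautifulsoup4; a typical installation, assuming pip is available:

pip install requests beautifulsoup4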

# coding=utf-8
# author: chengqiang

import urllib.parse
import urllib.request
import urllib.error
import requests
import re
from bs4 import BeautifulSoup
import random
import time
import ssl

def parser_apks(count=300, category=1):
    _root_url = "http://app.mi.com"  # app market home page
    res_parser = {}
    # Page to start crawling from; after a page is done, move on to the next
    # (set to 1 to start from the first page; it is left at 8 here)
    page_num = 8
    while count:
        # Fetch an app list page
        print("Entering loop, count=" + str(count))
        wbdata = requests.get("http://app.mi.com/catTopList/" + str(category) + "?page=" + str(page_num)).text
        print("Crawling page " + str(page_num) + " of category " + str(category))
        # Parse the app list page
        soup = BeautifulSoup(wbdata, "html.parser")
        # Escape the "?" so the regex matches detail links such as /details?id=...
        links = soup.find_all("a", href=re.compile(r"/details\?"), class_="", alt="")

        if len(links) == 0:
            print("============== no more links =================")
            break

        for link in links:
            # Build the absolute URL of the app detail page
            detail_link = urllib.parse.urljoin(_root_url, str(link["href"]))
            package_name = detail_link.split("=")[1]
            download_page = requests.get(detail_link).text
            # Parse the app detail page
            soup1 = BeautifulSoup(download_page, "html.parser")
            download_link = soup1.find(class_="download")["href"]
            # Build the direct download link
            download_url = urllib.parse.urljoin(_root_url, str(download_link))
            # Parsing produces duplicates; deduplicate via the result dict
            if package_name not in res_parser.keys():
                if count > 0:
                    res_parser[package_name] = download_url
                    count = count - 1
                else:
                    break
            if count == 0:
                break
        if count > 0:
            page_num = page_num + 1

    print("Number of APKs crawled: " + str(len(res_parser)))
    return res_parser
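
As a quick standalone sanity check of the parser (a sketch, not part of the script; the small count keeps the run short):

apks = parser_apks(count=5, category=1)
for name, url in apks.items():
    print(name + " -> " + url)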

 

user_agent_list = [
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 "
    "(KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",
    "Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 "
    "(KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 "
    "(KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 "
    "(KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 "
    "(KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 "
    "(KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",
    "Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 "
    "(KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 "
    "(KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 "
    "(KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 "
    "(KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 "
    "(KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 "
    "(KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 "
    "(KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 "
    "(KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 "
    "(KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",
    "Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 "
    "(KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",
    "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 "
    "(KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",
    "Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 "
    "(KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24"
]
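
The list above is only consumed by auto_down below, but the same rotation also works for the requests calls. A sketch (not in the original) that attaches a random User-Agent to a list-page request:

headers = {"User-Agent": random.choice(user_agent_list)}
wbdata = requests.get("http://app.mi.com/catTopList/1?page=1", headers=headers).text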

 

def auto_down(url, filename):
    try:
        opener = urllib.request.build_opener()
        # Pick a random User-Agent for each download
        opener.addheaders = [('User-Agent', random.choice(user_agent_list))]
        urllib.request.install_opener(opener)
        urllib.request.urlretrieve(url, filename)
    except urllib.error.ContentTooShortError:
        print('Network conditions are not good. Retrying.')
        auto_down(url, filename)
    except urllib.error.HTTPError as e:
        print(e)
    except urllib.error.URLError as e:
        # auto_down(url, filename)
        print(e)

 

 

def craw_apks(count=300, category=1, save_path="/Users/chengqiang/Desktop/python_spider/"):
    print("craw_apks count=" + str(count))
    res_dic = parser_apks(count, category)

    for apk in res_dic.keys():
        print("Downloading app: " + apk)
        time.sleep(2)
        print("Download link: " + str(res_dic[apk]))
        # Follow the redirect to get the final download URL
        fileUrl = get_redirect_url(res_dic[apk])
        save_to_file('apkList.txt', fileUrl)
        print("Saving to: " + save_path + apk + ".apk")
        # print(res_dic[apk] + '-----------')
        urllib.request.urlretrieve(fileUrl, save_path + apk + ".apk")
        # auto_down(fileUrl, save_path + apk + ".apk")
        # File = requests.get(fileUrl, stream=True)
        print("Download finished: " + fileUrl)
    print(str(category) + "=category====finish=====")

 

 

def save_to_file(file_name, contents):
    # Append one download URL per line
    with open(file_name, 'a') as fh:
        fh.write(contents + '\n')

 

 

def get_redirect_url(downurl):
    # Request headers; a browser User-Agent is set here
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36'}
    # Request the page; requests follows redirects automatically
    response = requests.get(downurl, headers=headers)
    print(response.status_code)  # print the response status code
    print(response.url)  # print the URL after redirection
    # Return the URL after redirection
    return response.url
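
requests follows redirects by default for GET requests, and each intermediate 30x response is kept in response.history. A quick sketch for inspecting the redirect chain (reusing the headers above; not part of the script):

response = requests.get(downurl, headers=headers)
for hop in response.history:
    print(hop.status_code, hop.url)  # each intermediate redirect response
print("final:", response.url)  # the URL the chain ends at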

 

 

if __name__ == "__main__":
    category = 27
    # Python 3 verifies HTTPS certificates strictly; disable verification here
    ssl._create_default_https_context = ssl._create_unverified_context
    # while category <= 10:
    craw_apks(30, category, "/Users/chengqiang/Desktop/python_spider/")
    # craw_apks(50, category, "H:\\miApk\\")
    # category = category + 1
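
Note that patching ssl._create_default_https_context turns off certificate verification for the whole process, which is what lets urllib.request.urlretrieve download over HTTPS here. If only the requests calls need relaxing, verification can instead be disabled per request (a sketch, not in the original):

response = requests.get(downurl, headers=headers, verify=False)  # skip certificate verification for this call only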
