# -*- coding:utf-8 -*-
# Scrapes the Meituan restaurant sales leaderboard (cityId=96) for category
# ids 1..8 and appends one CSV row per shop to meituan.csv.
# NOTE(review): the class is still named LianjiaSpider (copied from a Lianjia
# template) — kept for backward compatibility with any caller importing it.
import csv
import json
import random
import time

import requests


class LianjiaSpider(object):
    """Scraper for the Meituan sales-board API; writes results to meituan.csv."""

    # Ordered CSV columns; each name is a key of one leaderboard entry.
    FIELDS = ("id", "name", "weekSaleCount", "score", "avgPrice", "cateName",
              "areaName", "distance", "rank", "frontImg", "oneSentence")

    def __init__(self):
        # {} is filled with the category id (cateId) by main().
        self.url = ("https://mobilenext-web.meituan.com/api/newSalesBoard/"
                    "getSaleBoardDetail?cityId=96&boardType=8&districtId=0"
                    "&cateId={}&offset=0&limit=50")
        self.headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.163 Safari/535.1"}

    def get_page(self, url):
        """Fetch one leaderboard page and hand the body to parse_page().

        A timeout is set so a stalled connection cannot hang the crawl.
        """
        res = requests.get(url=url, headers=self.headers, timeout=10)
        res.encoding = "utf-8"
        self.parse_page(res.text)

    def parse_page(self, html):
        """Parse the JSON response and append one CSV row per shop.

        The payload looks like {"totalSize":N,"saleBoardDealList":[...]}
        (shape taken from the original's commented-out .replace() code) —
        TODO confirm against a live response. Parsing it with json.loads
        replaces the original brittle html[35:-20] slice, which broke as
        soon as totalSize had a different number of digits.
        """
        try:
            payload = json.loads(html)
        except (json.JSONDecodeError, ValueError):
            return  # malformed / anti-bot response: skip this page
        shops = payload.get("saleBoardDealList") or []
        # Open the file once per page instead of once per shop (original bug),
        # and let the with-block close it — the original also called f.close()
        # redundantly inside the with. The original's extra writerow("")
        # blank separator rows are dropped.
        with open('meituan.csv', 'a', newline='', encoding='utf-8') as f:
            writer = csv.writer(f)
            # Loop variable renamed: the original shadowed builtins list/id.
            for shop in shops:
                # .get() instead of [] so one missing key does not abort the crawl.
                writer.writerow([shop.get(field) for field in self.FIELDS])

    def main(self):
        """Crawl category ids 1..8 with a polite 3-5 s random delay."""
        for cate_id in range(1, 9):
            time.sleep(random.randint(3, 5))
            self.get_page(self.url.format(cate_id))


if __name__ == '__main__':
    start = time.time()
    spider = LianjiaSpider()
    spider.main()
    end = time.time()
    print("执行时间:%.2f" % (end - start))
美团店铺榜,但还需增加列指标:餐饮分类
最新推荐文章于 2024-06-25 15:19:52 发布
该博客介绍了一个Python爬虫程序，用于抓取美团网站上的餐饮热门榜单数据。程序通过遍历餐饮分类ID（cateId 1~8）来获取各分类的销售排行榜，并解析JSON响应以提取店铺ID、名称、周销量、评分、平均价格等关键信息，存储到CSV文件中。博客内容涉及网络请求、JSON解析和CSV文件操作，适合对网络爬虫和数据分析感兴趣的读者。
摘要由CSDN通过智能技术生成