Scraping Weather Data with Python

We want to scrape historical weather data from pages such as http://lishi.tianqi.com/chongqing/index.html.
I. Collecting the data with requests + bs4
1. Build the URLs. Each URL has two parts, the city name and the year-month, so we can iterate over the years and months, request each URL, and parse the response.

def get_url(cityname, start_year, end_year):
    """Build one month-page URL per month for every year in [start_year, end_year)."""
    suburl = 'http://www.tianqihoubao.com/lishi/'
    urllist = []
    for year in range(start_year, end_year):
        for month in range(1, 13):
            # Zero-pad the month so that, e.g., January 2018 becomes '201801.html'
            url = '{}{}/month/{}{:02d}.html'.format(suburl, cityname, year, month)
            urllist.append(url)
    return urllist
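
A quick check of what get_url produces ('chongqing' here is just an example slug):

urls = get_url('chongqing', 2018, 2019)
print(len(urls))  # 12 -- one page per month of 2018 (end_year is exclusive)
print(urls[0])    # http://www.tianqihoubao.com/lishi/chongqing/month/201801.html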

2. The cityname used above is the name of the city we want to scrape. We query the database for the city names, iterate over them, and pass each city name together with a start year and an end year to the function above. The helper below goes the other way: given a page URL, it extracts the city slug and looks up the city's numeric id in the database.

def get_cityid(db_conn, db_cur, url):
    # URL shape: http://www.tianqihoubao.com/lishi/<cityname>/month/<yyyymm>.html,
    # so the city slug is the fifth '/'-separated segment
    suburl = url.split('/')
    sql = 'select cityid from city where cityname = %s'
    db_cur.execute(sql, (suburl[4],))  # DB-API parameters belong in a tuple
    cityid = db_cur.fetchone()
    return cityid[0]
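
db_conn and db_cur are assumed to be a standard DB-API connection and cursor; a minimal sketch with pymysql (the host, credentials, and the city table are assumptions, not from the original post):

import pymysql

# Hypothetical connection details -- adjust to your own database
db_conn = pymysql.connect(host='localhost', user='root', password='secret',
                          db='weather', charset='utf8mb4')
db_cur = db_conn.cursor()
cityid = get_cityid(db_conn, db_cur,
                    'http://www.tianqihoubao.com/lishi/chongqing/month/201801.html')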

3. Parse the page with BeautifulSoup

import requests
from bs4 import BeautifulSoup

def parse_html_bs(db_conn, db_cur, url):
    # get_proxy() is an external helper (not shown here) returning 'host:port'
    proxy = get_proxy()
    proxies = {
        'http': 'http://' + proxy,
        'https': 'https://' + proxy,
    }
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36',
        'Connection': 'close',
    }

    # Fetch the HTML source of the weather page
    weather_data = requests.get(url=url, headers=headers, proxies=proxies).text
    weather_data_new = weather_data.replace('\n', '').replace('\r', '').replace(' ', '')
    soup = BeautifulSoup(weather_data_new, 'lxml')
    table = soup.find_all('td')
    # Look up the city id
    cityid = get_cityid(db_conn, db_cur, url)
    listall = [t.string for t in table]
    # Each table row has 4 cells: date, weather, high/low temperature, wind
    n = 4
    sublist = [listall[i:i + n] for i in range(0, len(listall), n)]
    del sublist[0]  # drop the header row
    flist = []
    # Split the combined high/low temperature cell into two fields for easier
    # analysis later, and prepend the city id
    for sub in sublist:
        sub2 = sub[2].split('/')
        sub.remove(sub[2])
        sub.insert(2, sub2[0])
        sub.insert(3, sub2[1])
        sub.insert(0, cityid)
        flist.append(sub)
    return flist
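
With these pieces in place, a small driver can walk every month page for one city and store the parsed rows. This is only a sketch: crawl_city and the weather table with columns (cityid, wdate, weather, maxtemp, mintemp, wind) are illustrative assumptions matching the 6-field rows parse_html_bs returns.

def crawl_city(db_conn, db_cur, cityname, start_year, end_year):
    # Hypothetical target table; adjust the schema to your own database
    insert_sql = ('insert into weather '
                  '(cityid, wdate, weather, maxtemp, mintemp, wind) '
                  'values (%s, %s, %s, %s, %s, %s)')
    for url in get_url(cityname, start_year, end_year):
        rows = parse_html_bs(db_conn, db_cur, url)
        db_cur.executemany(insert_sql, rows)
        db_conn.commit()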

II. Collecting the weather data with the Scrapy framework
1. Write the spider in the spiders directory

# -*- coding: utf-8 -*-
import scrapy
from bs4 import BeautifulSoup
from scrapy import Request
from lxml import etree
from ..items import TianqiItem
class WeatherSpider(scrapy.Spider):
    name = 'weather'
    # allowed_domains = ['tianqihoubao']
    start_urls = ['https://lishi.tianqi.com/']
    def parse(self, response):
        soup = BeautifulSoup(response.text, 'lxml')
        citylists = soup.find_all(name='div', class_='box-base')
        for citys in citylists:
            for city in citys.find_all(name='li'):
                url = 'https://lishi.tianqi.com' + city.a['href']
                yield Request(url=url,
                              callback=self.parse_citylist)
    def parse_citylist(self, response):
        soup = BeautifulSoup(response.text, 'lxml')
        monthlist = soup.find_all(name='div', class_='tqtongji1')
        for months in monthlist:
            for month in months.find_all(name='li'):
                if month.a['href'].endswith('01.html'):
                    url = month.a['href']
                    url = 'https://lishi.tianqi.com' + url
                    yield Request(url=url, callback=self.parse_weather)
    def parse_weather(self, response):  # parse the page and hand rows to the pipeline
        url = response.url
        # URL shape: https://lishi.tianqi.com/<cityname>/<yyyymm>.html,
        # so the city slug is the fourth '/'-separated segment
        cityname = url.split('/')[3]

        weather_html = etree.HTML(response.text)
        table = weather_html.xpath('//table//tr//td//text()')
        listall = []
        for t in table:
            if t.strip() == '':
                continue
            t1 = t.replace(' ', '')
            t2 = t1.replace('\r\n', '')
            listall.append(t2.strip())
        n = 4
        sublist = [listall[i:i + n] for i in range(0, len(listall), n)]
        # drop the header row
        del sublist[0]
        for sub in sublist:
            # split the combined high/low temperature cell into two fields
            sub2 = sub[2].split('/')
            sub.remove(sub[2])
            sub.insert(2, sub2[0])
            sub.insert(3, sub2[1])
            sub.insert(0, cityname)
            Weather = TianqiItem()
            Weather['cityname'] = sub[0]
            Weather['data'] = sub[1]
            Weather['tq'] = sub[2]
            Weather['maxtemp'] = sub[3]
            Weather['mintemp'] = sub[4]
            Weather['fengli'] = sub[5]
            yield Weather
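
The spider imports TianqiItem from items.py, which the original post does not show; given the fields assigned above, it would look like the following reconstruction, and a minimal pipeline (a sketch, assuming CSV output is acceptable) can persist each item:

# items.py
import scrapy

class TianqiItem(scrapy.Item):
    cityname = scrapy.Field()  # city slug taken from the URL
    data = scrapy.Field()      # date (field name kept as used in the spider)
    tq = scrapy.Field()        # weather description
    maxtemp = scrapy.Field()   # daily high temperature
    mintemp = scrapy.Field()   # daily low temperature
    fengli = scrapy.Field()    # wind direction/force

# pipelines.py -- a minimal sketch that appends items to a CSV file
import csv

class TianqiPipeline:
    def open_spider(self, spider):
        self.file = open('weather.csv', 'a', newline='', encoding='utf-8')
        self.writer = csv.writer(self.file)

    def process_item(self, item, spider):
        self.writer.writerow([item['cityname'], item['data'], item['tq'],
                              item['maxtemp'], item['mintemp'], item['fengli']])
        return item

    def close_spider(self, spider):
        self.file.close()

Enable the pipeline in settings.py via ITEM_PIPELINES, then run the spider with scrapy crawl weather.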