一、Request爬取数据
1.获取AK
进入【百度地图开放平台】→注册认证成为开发者→【应用管理】→【我的应用】→【创建应用】→获得AK
2.爬虫代码
import requests
import json
import time
import datetime
import csv
import pandas as pd
ak = '你的ak' # Baidu Map open-platform API key (AK) — placeholder; fill in your own
k = 0 # presumably a request/row counter used by the scraping loop further down — TODO confirm (not visible here)
def get_one_page(location):
    """Fetch real-time traffic for *location* and real-time weather from the Baidu Map APIs.

    Args:
        location: bounds string for the traffic query (bd09ll coordinates,
            passed straight through as the ``bounds`` parameter).

    Returns:
        A ``(traffic_json, weather_json)`` tuple of parsed response dicts on
        success, or ``(None, None)`` when either API reports a non-zero status
        or the request fails.
    """
    url_1 = 'http://api.map.baidu.com/traffic/v1/bound?parameters'  # real-time traffic endpoint
    url_2 = 'http://api.map.baidu.com/weather/v1/?'  # real-time weather endpoint
    params_1 = {
        # real-time traffic request parameters
        'ak': ak,
        'bounds': location,
        'coord_type_input': 'bd09ll',
        'coord_type_output': 'bd09ll'
    }
    params_2 = {
        # real-time weather request parameters
        # SECURITY FIX: use the module-level `ak` instead of the hard-coded
        # (publicly leaked) API key that was embedded here and in a comment.
        'ak': ak,
        'district_id': 610113,  # administrative division code of Yanta District
        'data_type': 'all',
    }
    try:
        response_1 = requests.get(url_1, params=params_1, timeout=20)  # real-time traffic
        response_2 = requests.get(url_2, params=params_2, timeout=20)  # real-time weather
        # Parse each body once and reuse (the original called .json() twice per response).
        data_1 = response_1.json()
        data_2 = response_2.json()
        # status == 0 means success; on failure the payload may lack expected fields.
        if data_1['status'] == 0 and data_2['status'] == 0:
            return data_1, data_2
        return None, None
    except requests.RequestException as e:
        # BUG FIX: the original fell through after printing and implicitly
        # returned a bare None, breaking tuple-unpacking callers. Also catch
        # RequestException (covers read timeouts), not only ConnectionError.
        print('Error', e.args)
        return None, None
## Writing to the CSV file
# Write the header row — intentionally disabled (no-op string literal) so that
# re-running the script keeps appending data without duplicating the header.
'''with open('主干路路况爬取.csv', 'a', newline='') as csvfile:
fieldnames = ['road_name','road_desc','road_length','localtime', 'week','status', 'status_desc', 'text', 'temp', 'feels_like', 'rh', 'wind_class', 'wind_dir']
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()'''
# Write the data rows
def write_to_file(content):
with open('主干路路况爬取.csv','a',newline='') as csvfile:
fieldnames = ['road_name','road_desc','road_length','localtime','week','status','status_desc','text','temp','feels_like','rh','wind_class','wind_dir']
writer = csv.DictWriter