# 爬取西安市二手房房价 — scrape second-hand housing prices in Xi'an (anjuke.com)
import requests
import math
import time
import random
from bs4 import BeautifulSoup
#url = 'https://xa.anjuke.com/sale/#filtersort'
url = 'https://xa.anjuke.com/sale/#'
def get_data(url):
    """Fetch *url* and parse it.

    Returns:
        (BeautifulSoup, int): parsed page and the HTTP status code.
        On any request/parse failure returns the sentinel ``(0, 0)``
        (original contract preserved — callers must check for it).
    """
    # Static headers: a desktop UA plus a session cookie captured from a
    # real browser, so anjuke serves the normal listing page.
    page_headers = {"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10240",
    "Cookie":"isp=true; als=0; sessid=EE6C479F-383E-4532-5FBE-SX0608113249; lps=http%3A%2F%2Fuser.anjuke.com%2Fajax%2FcheckMenu%2F%3Fr%3D0.9773651958602518%26callback%3DjQuery1113019512877839682363_1559964793029%26_%3D1559964793030%7Chttps%3A%2F%2Ftj.fang.anjuke.com%2Floupan%2Fall%2F; lp_lt_ut=e9d09e6a59979c288d00ea3d7f7f7573; __xsptplusUT_8=1; _ga=GA1.2.205429121.1559658612; _gid=GA1.2.1582905499.1559979223; _gat=1; isp=true; wmda_uuid=b01457fc0c7d92a795d75c630ff6262c; wmda_new_uuid=1; wmda_visited_projects=%3B8788302075828; isp=true; aQQ_ajkguid=C8BE4FEC-9EE8-C5E6-D456-C6695B138768; twe=2; wmda_session_id_8788302075828=1559979235829-aa5a0183-f840-376c; __xsptplus8=8.7.1559979226.1559979258.3%234%7C%7C%7C%7C%7C%23%23zPjxAYBc5rYjoCxhfqsuNhjRq2XqZ05X%23; 58tj_uuid=73404522-7a8e-420a-93be-a96ac163fc6d; new_session=0; init_refer=https%253A%252F%252Ftianjin.anjuke.com%252F%253Fpi%253DPZ-baidu-pc-all-biaoti; new_uv=7; Hm_lvt_c5899c8768ebee272710c9c5f365a6d8=1559658645,1559804791,1559964795; Hm_lpvt_c5899c8768ebee272710c9c5f365a6d8=1559979258; ctid=26"
    }
    try:
        # timeout prevents the scraper from hanging forever on a stalled
        # connection (the original had no timeout at all)
        res = requests.get(url, headers=page_headers, timeout=10)
        status = res.status_code
        soup = BeautifulSoup(res.text, 'html.parser')
        return soup, status
    except Exception as e:
        # broad catch at this boundary is deliberate: the caller treats
        # (0, 0) as "skip this page"
        print(str(e))
        return 0, 0
# get_data(url)
def get_hourse(url):
    """Scrape one listing page and append rows (name, location, price)
    to ershou_house_anjuke.csv.

    Silently skips the page when the download failed, and skips any
    individual listing whose markup is missing the expected spans.
    """
    soup, status = get_data(url)
    if status == 0:
        # get_data signals failure with (0, 0); the original code crashed
        # here with AttributeError on soup.find_all — skip the page instead
        return
    fang_all = soup.find_all("li", attrs={'class': 'list-item'})
    with open('ershou_house_anjuke.csv', 'a+', encoding='utf-8') as fh:
        for fang in fang_all:
            try:
                # one lookup instead of two: the address span holds
                # "<estate name> <district/street>" separated by whitespace
                address = fang.find('span', attrs={'class': 'comm-address'}).text.split()
                fang_name = address[0]       # 楼盘 (estate / complex name)
                fang_location = address[1]   # 位置 (location)
                # 价格 (unit price)
                fang_price = fang.find('span', attrs={'class': 'unit-price'}).text
                fh.write(fang_name + "," + fang_location + "," + fang_price + "\n")
            except Exception as e:
                # a single malformed listing must not abort the whole page
                print(e)
# get_hourse(url)
if __name__ == '__main__':
    url_start = 'https://xa.anjuke.com/sale/'
    # Start a fresh CSV with a header row (columns: estate, location, price).
    with open('ershou_house_anjuke.csv', 'w', encoding='utf-8') as fh:
        fh.write("楼盘,位置,价格\n")
    # anjuke paginates from p1, so iterate 1..50; the original range(0, 51)
    # also requested a nonexistent p0 page and fetched url_start once
    # without using the result.
    for page in range(1, 51):
        url = url_start + 'p' + str(page) + '/' + '#filtersort'
        print("正在爬取第 %s 页..." % page)
        get_hourse(url)
        # random pause (0–6 s, one decimal) between pages to throttle requests
        span = round(random.random() * 6, 1)
        time.sleep(span)