import requests, json, time, pymongo, random
from bs4 import BeautifulSoup
# client = pymongo.MongoClient()
# ganji = client['ganji']
# url_list = ganji['url_list']
# item_info = ganji['item_info']
# Browser-like request headers so 58.com serves the normal desktop page
# instead of blocking the default python-requests User-Agent.
headers = {
'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/601.4.4 (KHTML, like Gecko) Version/9.0.3 Safari/601.4.4',
'Connection':'Keep-alive'
}
# Free proxy IPs sourced from ip.zdaye.com; these rotate/expire frequently.
#ip.zdaye.com
proxy_list = [
'http://120.41.0.29:8080',
'http://58.20.132.23:8088',
'http://121.140.126.250:3128'
]
# Pick one proxy at random per script run (not per request).
proxy_ip = random.choice(proxy_list)
# NOTE(review): `proxies` is built here but never passed to requests.get()
# below, so all requests actually go out directly — confirm whether proxy
# use was intended.
proxies = {'http': proxy_ip}
def get_item_info_from(url, data=None):
    """Fetch a 58.com listing detail page, parse its fields, and print them.

    Parameters:
        url: Listing detail-page URL (e.g. a .shtml item page on bj.58.com).
        data: Unused placeholder kept for interface compatibility; it is
            rebound internally to the parsed result.

    Returns:
        dict of parsed fields on success, or None when the page is a 404
        or its layout does not match the expected selectors.
    """
    wb_data = requests.get(url, headers=headers)
    # Deleted/expired listings return 404 — skip them quietly.
    if wb_data.status_code == 404:
        return None
    try:
        soup = BeautifulSoup(wb_data.text, "lxml")
        data = {
            # Category: second breadcrumb link in the page header.
            'leibie': soup.select('#header > div.breadCrumb.f12 > span.crb_i > a')[1].text,
            'title': soup.title.text.strip(),
            # Publication date shown next to the title.
            'pub_data': soup.select('#index_show > ul.mtit_con_left.fl > li.time')[0].text,
            'price': soup.select('#content > div.person_add_top.no_ident_top > div.per_ad_left > div.col_sub.sumary > ul > li > div.su_con > span')[0].text.strip(),
            # District links; a listing may carry several area tags.
            'area': [a.text for a in soup.select('#content > div.person_add_top.no_ident_top > div.per_ad_left > div.col_sub.sumary > ul > li > div.su_con > span > a')]
            #'chengse': soup.select(''),
            # 'url': url
        }
    except (AttributeError, IndexError):
        # Page layout differs from the expected template (missing title,
        # empty selector match, ...) — treat as unparseable and move on.
        return None
    print(data)
    return data
# Smoke test: scrape a single known tablet-PC listing and print the result.
get_item_info_from('http://bj.58.com/pingbandiannao/25842389823684x.shtml')
# 爬虫小实验 — "Crawler mini-experiment" (article title captured with the code)
# 最新推荐文章于 2023-10-29 01:50:08 发布 — CSDN footer: "latest recommended article published 2023-10-29 01:50:08"