Python爬虫实例:爬取新浪新闻(注:本文代码爬取的是新浪新闻,并非知网)

import json
import re
from datetime import datetime

import pandas
import requests
from bs4 import BeautifulSoup

news_total=[]

commentURL='http://comment5.news.sina.com.cn/page/info?version=1&format=js&channel=gn&newsid=comos-{}&group=&compress=0&ie=utf-8&oe=utf-8&page=1&page_size=20'url='http://api.roll.news.sina.com.cn/zt_list?channel=news&cat_1=gnxw&cat_2==gdxw1||=gatxw||=zs-pl||=mtjj&level==1||=2&show_ext=1&show_all=1&show_num=22&tag=1&format=json&page={}&callback=newsloadercallback&_=1509373193047'

defparseListLinks(url):

newsdetails=[]

res=requests.get(url)

jd= json.loads(res.text.strip().lstrip('newsloadercallback(').rstrip(');'))for ent in jd['result']['data']:

newsdetails.append(getNewsDetail(ent['url']))returnnewsdetailsdefgetNewsDetail(newsurl):

result={}

res=requests.get(newsurl)

res.encoding='utf-8'soup=BeautifulSoup(res.text,'html.parser')

result['title']=soup.select('#artibodyTitle')[0].text

result['newssource']=soup.select('.time-source span a')[0].text

timesource=soup.select('.time-source')[0].contents[0].strip()

dt1=datetime.strptime(timesource,'%Y年%m月%d日%H:%M')

result['dt'] =dt1.strftime('%Y-%m-%d-%H:%M')

result['article']=' '.join([p.text.strip() for p in soup.select('#artibody p')[:-1]])

result['editor']=soup.select('.article-editor')[0].text.strip('责任编辑:')

result['comments']=getCommentCounts(newsurl)print('获得一条新闻')returnresultdefgetCommentCounts(newsurl):

m=re.search('doc-i(.+).shtml',newsurl)

newsid=m.group(1)

comments=requests.get(commentURL.format(newsid))

jd=json.loads(comments.text.strip('var data='))return jd['result']['count']['total']for i in range(1,8):print('正在爬取第'+str(i)+'页......')

newsurl=url.format(i)

newsary=parseListLinks(newsurl)

news_total.extend(newsary)print('抓取结束')

df=pandas.DataFrame(news_total)

df.to_excel('news.xlsx')

  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值