Python news crawler source code download _ News Crawler - Python Code Library - 云代码
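This script first checks network connectivity, asks for a save directory through an easygui dialog, downloads the Sohu News front page (http://news.sohu.com/) with urllib, parses it with BeautifulSoup, and writes the bold featured headlines plus every link text longer than six characters to a news_data.csv file in the chosen directory.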

[python] code library

import csv
import os
import urllib.request as request

import bs4
import easygui
import requests


def is_connect():
    # Return True if the network is reachable (probes Baidu with a short timeout).
    try:
        requests.get("https://www.baidu.com", timeout=5)
        return True
    except requests.exceptions.RequestException:
        return False


if is_connect():
    here = os.getcwd()
    # Keep asking for a save directory until a writable one is given.
    while True:
        link = easygui.enterbox("Please enter a save path for the news file",
                                "News Crawler", here)
        try:
            csv_path = os.path.join(link, "news_data.csv")
            open(csv_path, "w").close()  # probe that the path is writable
            break
        except (OSError, TypeError):  # TypeError: dialog was cancelled (link is None)
            easygui.msgbox("Invalid path, or the file is already open")

    url = "http://news.sohu.com/"
    req = request.Request(url, headers={
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                      "AppleWebKit/537.36 (KHTML, like Gecko) "
                      "Chrome/86.0.4240.80 Safari/537.36 Edg/86.0.622.43"})
    response = request.urlopen(req).read().decode("utf-8")
    soup = bs4.BeautifulSoup(response, "html.parser")

    news = soup.find_all("a")   # ordinary headline links
    news2 = soup.find_all("b")  # bold (featured) headlines
    passed_news = []

    with open(csv_path, "w", newline="", encoding="utf-8") as f:
        writer = csv.writer(f)
        # Featured headlines first.
        for i in news2:
            print(i.string)
            writer.writerow([i.string])
        # Keep link texts longer than 6 characters once whitespace is stripped.
        for new in news:
            if new.string is not None:
                text = str(new.string).replace(" ", "").replace("\n", "")
                if len(text) > 6:
                    passed_news.append(text)
        # Drop the first entry and the last four (presumably non-news boilerplate links).
        for new in passed_news[:-4][1:]:
            print(new)
            writer.writerow([new])

    input("Done scraping (press Enter to exit)")
else:
    easygui.msgbox("Please connect to the network")
    input("Press Enter to exit")
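The length heuristic above still lets some navigation text through and writes duplicates when the same headline appears in several page modules. Below is a minimal sketch of a stricter, order-preserving filter under the same assumptions (plain <a>/<b> tag input); clean_text, unique_headlines, and min_len are names introduced here for illustration, not part of the original script:

def clean_text(tag):
    # Collapse all whitespace in the tag's string; None if the tag has no string.
    if tag.string is None:
        return None
    return "".join(str(tag.string).split())

def unique_headlines(tags, min_len=7):
    # Yield each sufficiently long headline once, preserving page order.
    seen = set()
    for tag in tags:
        text = clean_text(tag)
        if text and len(text) >= min_len and text not in seen:
            seen.add(text)
            yield text

# Usage with the soup object from the script above:
# for headline in unique_headlines(soup.find_all("a")):
#     writer.writerow([headline])

Separately, if the CSV is meant to be opened directly in Excel, writing it with encoding="utf-8-sig" instead of "utf-8" adds a byte-order mark so that Chinese headlines display correctly.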
