淘股吧帖子爬取

所有内容仅为本人在学习过程中的积累,欠缺不足望指教,谢谢

所有内容仅为本人在学习过程中的积累,欠缺不足望指教,谢谢

import requests
import re
import time
import random
from bs4 import BeautifulSoup
from pymongo import MongoClient


def get_url(url, timeout=10):
    '''
    Fetch *url* with browser-like headers and return the response body text.

    Args:
        url: absolute URL to request.
        timeout: seconds before the request aborts; without one,
            ``requests.get`` can block forever on a dead connection.

    Returns:
        The decoded response body as a string.
    '''
    headers = {
        # Static UA + cookie so the site serves the normal desktop page.
        "User-Agent": 'Mozilla/5.0 (Windows NT 6.1; WOW64)',
        "cookie": 'UM_distinctid=160341f651719b-0a2862',
    }
    response = requests.get(url, headers=headers, timeout=timeout)
    return response.text

def get_id(response):
    '''
    Extract article href paths from a board index page.

    First narrows the HTML to the post-list section (between the
    ``p_list`` div and the ``t_page02`` pager div), then pulls every
    ``href`` value from anchors that open in a new tab.

    Args:
        response: HTML of a board index page (string).

    Returns:
        List of relative article paths; empty if nothing matched.
    '''
    list_section = re.compile(r'<div class="p_list">(.*?)class="right t_page02"', re.S)
    section = list_section.findall(str(response))
    href_pattern = re.compile(r'<a href="(.*?)".*?target="_blank">', re.S)
    # ``ids`` instead of ``id`` so the builtin is not shadowed.
    ids = href_pattern.findall(str(section))
    return ids

def get_one_page(id):
    '''
    Fetch each article page, scrape its metadata and body, and persist it.

    For every relative path in *id*, builds the article URL, downloads the
    page, extracts title / author / view count / reply count with a regex
    and the post body with BeautifulSoup, then stores one record in MongoDB.

    Pages whose layout does not match (deleted post, captcha page, ...) are
    skipped instead of crashing — the original indexed into an empty result
    list and raised IndexError / AttributeError.

    Args:
        id: iterable of relative article paths returned by ``get_id``.
    '''
    # Compiled once, outside the loop: title, author, view count, reply count.
    detail_pattern = re.compile(r'div class="p_wenz".*?<b id="b_subject">(.*?)</b>.*?'
                                r'<div class=" p_tationl".*?<span style="color:.*?>'
                                r'(.*?)</span>.*?<span id="totalViewNum">(.*?)</span>'
                                r'.*?<span id="replyNum">(.*?)</span>', re.S)
    for path in id:
        url = 'https://www.taoguba.com.cn/' + str(path)
        response = get_url(url)
        # Random delay so we do not hammer the server.
        time.sleep(random.uniform(0, 2))
        match = detail_pattern.search(response)
        if match is None:
            print('skip (no metadata):', url)
            continue
        title, author, skim, talk = match.groups()
        content_div = BeautifulSoup(response, "lxml").find("div", class_="p_coten")
        if content_div is None:
            print('skip (no body):', url)
            continue
        content = content_div.get_text().replace('\xa0', '').strip()
        massage = [title, author, skim, talk, content]
        print(massage)
        write_to_DB(title, author, skim, talk, content)

def write_to_DB(title, author, skim, talk, content):
    '''
    Insert one scraped post into the local ``Taoguba`` MongoDB collection.

    Args:
        title, author, skim, talk, content: scraped field values (strings).
    '''
    client = MongoClient('localhost', 27017)  # connect to local MongoDB
    try:
        db = client['Taoguba']
        # insert_one replaces the deprecated Collection.insert
        # (removed entirely in PyMongo 4).
        db.Taoguba.insert_one({"Title": title, "Author": author,
                               "Skim": skim, "Talk": talk, "Content": content})
    finally:
        client.close()  # release the connection pool

def main():
    '''
    Crawl the first 100 index pages of the board and scrape every post.
    '''
    # Keep the URL template intact and format into a fresh variable each
    # iteration.  The original reassigned ``url = url.format(...)``, which
    # destroyed the ``{page}`` placeholder on the first pass, so every
    # subsequent iteration re-requested page 0.
    base_url = 'https://www.taoguba.com.cn/index?pageNo={page}&blockID=1&flag=0&pageNum=20713'
    for page_no in range(100):
        page_url = base_url.format(page=page_no)
        response = get_url(page_url)
        ids = get_id(response)
        get_one_page(ids)
        # Polite random pause between index pages.
        time.sleep(random.randint(1, 10))

# Run the crawler only when executed as a script, not when imported.
if __name__ == "__main__":
    main()

 

  • 4
    点赞
  • 17
    收藏
    觉得还不错? 一键收藏
  • 2
    评论
评论 2
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值