# Python crawler for website info (administrative region codes / 行政区域代码)


import time
import re
import pymysql
import requests
from bs4 import BeautifulSoup
import hashlib

#连接数据库
from requests import RequestException

# Database connection is opened at import time.
# NOTE(review): credentials are hard-coded in source — move host/user/password
# to a config file or environment variables before sharing this script.
con = pymysql.connect(host="117.38.23.23", user="root", password="123", database="test", charset="utf8")
cursor = con.cursor()

# Browser-like User-Agent so the site serves normal pages to the crawler.
headers = {'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36'}
# Root page listing all provinces; every deeper link is relative to this host.
url='https://xingzhengquhua.51240.com/'

def getShengData(url):
    """Fetch *url* and return its HTML text, or None on failure.

    Sleeps 15s before every request to throttle the crawl.  On a request
    exception it backs off 61s and retries exactly once; returns None if
    both attempts fail or the server answers with a non-200 status.

    Bug fix: the original except blocks printed ``response.text``, but
    ``response`` is unbound when ``requests.get`` itself raised, turning
    the real error into a NameError.  The exception object is printed
    instead.  A timeout is also added so a stalled connection cannot hang
    the crawl forever.
    """
    try:
        time.sleep(15)  # throttle so the target site does not block us
        response = requests.get(url, headers=headers, timeout=30)
        if response.status_code == 200:
            return response.text
        return None
    except RequestException as exc:
        print('请求异常:' + str(exc))
        try:
            time.sleep(61)  # longer back-off before the single retry
            response = requests.get(url, headers=headers, timeout=30)
            if response.status_code == 200:
                return response.text
        except RequestException as exc2:
            print('再次请求异常:' + str(exc2))
        return None

def edit_html(html):
    """Parse the root (province) listing page and recurse into each province.

    Every data row yields a "name,code," string that is stored at level '1'
    with parent code '0'; each link inside the code cell is then fetched and
    handed to analysis_html with the province code as parent.
    """
    page = BeautifulSoup(html, "html.parser")
    table = page.findAll('table')[1]  # data rows live in the second table
    for row in table.findAll('tr')[2:]:  # first two rows are headers
        cells = row.findAll('td')[:2]
        record = ''
        for idx, cell in enumerate(cells):
            record += cell.getText() + ","
            if idx == 1:
                save_mysql_js(record, '0', '1')
                for anchor in cell.find_all('a'):
                    href = anchor.get('href')
                    page_html = getShengData('https://xingzhengquhua.51240.com' + href)
                    analysis_html(page_html, cell.getText())

def analysis_html(html,code):
    """Recursively parse one region-listing page and persist its rows.

    html -- the HTML of a region's child-listing page (city/county/town);
    code -- the parent region's 12-digit code, stored as parent_code.
    The level is inferred from trailing zeros of each row's code:
    xxxx00000000 -> '2', xxxxxx000000 -> '3', xxxxxxxxx000 -> '4'
    (level '4' rows are saved and not recursed into).
    """
    soup = BeautifulSoup(html, "html.parser")
    tables = soup.findAll('table')
    tab = tables[1]  # data rows live in the second table on the page
    for tr in tab.findAll('tr')[3:]:  # skip header rows and the parent row
        tds = tr.findAll('td')[:2]  # first two cells: name and code
        strvalue=''
        type=''  # NOTE(review): shadows the builtin `type`
        for i, td in enumerate(tds):
                strvalue+=td.getText()+","
                if 1 == i:
                    # infer the region level from trailing zeros of the code
                    if td.getText()[4:12]=='00000000':
                        type='2'
                    elif  td.getText()[6:12]=='000000':
                        type='3'
                    elif  td.getText()[9:12]=='000':
                        type='4'
                        # deepest handled level: save and stop recursing
                        save_mysql_js(strvalue, code, type)
                        break
                    # NOTE(review): if none of the patterns matched, `type`
                    # is still '' and the row is saved with an empty level
                    print(type)
                    save_mysql_js(strvalue, code, type)
                    t1 = td.find_all('a')
                    # NOTE(review): reassigning `code` here also changes the
                    # parent used by later rows of this loop — confirm intended
                    code=td.getText()
                    for t2 in t1:
                        t3 = t2.get('href')
                        HTML = getShengData('https://xingzhengquhua.51240.com' + t3)
                        analysis_html(HTML,code)



def get_tdagg(html):
    """Return the aggregated text content of a parsed element as a plain str."""
    text = html.get_text()
    return str(text)


def save_mysql_js(nameandcode,parentCode,type):
    """Insert one region row into tb_area_strative_new.

    nameandcode is a "name,code," string (trailing comma from the caller's
    concatenation); the code doubles as the primary-key id.  Uses a
    parameterized query and commits immediately.
    """
    parts = nameandcode.split(',')
    region_name = parts[0]
    region_code = parts[1]
    sql = "insert into tb_area_strative_new(id,name,code,parent_code,level) values(%s, %s, %s, %s, %s);"
    print("id="+region_code+";name="+region_name+";code="+region_code+";parentCode="+parentCode+";type="+type)
    row = (region_code, region_name, region_code, parentCode, type)
    print(cursor.execute(sql, row))
    con.commit()

def create_id():
    """Return a 32-character hex MD5 digest of the current clock reading.

    Bug fix: the original hashed ``str(time.perf_)``, which raises
    AttributeError because ``time.perf_`` does not exist; it is replaced
    with the evidently intended ``time.perf_counter()`` call.
    """
    digest = hashlib.md5(str(time.perf_counter()).encode('utf-8'))
    return digest.hexdigest()

def time_Run():
    """Fetch the root province page and crawl/persist the whole hierarchy.

    Bug fix: the original used a bare ``except:`` that silently swallowed
    every error (including KeyboardInterrupt) — it now catches only
    Exception, reports it, and rolls back so a failed crawl does not leave
    a half-written transaction.
    """
    html = getShengData(url)
    if html is not None:
        try:
            # parse the province table and recurse through the whole site
            edit_html(html)
            # commit whatever the crawl inserted
            con.commit()
        except Exception as exc:
            # roll back the open transaction and surface the error instead
            # of hiding it
            con.rollback()
            print('time_Run failed:', exc)

# Run the crawl only when executed as a script, not on import.
if __name__ == '__main__':
    time_Run()
























  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值