from bs4 import BeautifulSoup
from requests.adapters import HTTPAdapter
import csv
import re
import requests
import time
class Pcctv(object):
    """Crawl the NBS (National Bureau of Statistics of China) 2023
    administrative-division code pages and dump every region to Pcctv.csv.

    Hierarchy crawled: province (level 1) -> city (2) -> county (3) ->
    town (4) -> village (5).  Each CSV row is [code, name, level, type];
    *type* is only filled at village level (urban/rural classification code).
    """

    # Seconds to wait per HTTP request; the original used timeout=None,
    # which can hang the whole crawl on one stuck connection.
    TIMEOUT = 30

    def __init__(self):
        # A Session reuses TCP connections across the thousands of page
        # fetches instead of opening a new one per request.
        self.session = requests.Session()
        self.initCsvWriter()

    def initCsvWriter(self):
        """Open Pcctv.csv for writing and emit the header row."""
        # newline='' is required by the csv module (avoids blank lines on
        # Windows); utf-8 keeps the Chinese region names intact.
        self.csvFile = open('Pcctv.csv', 'w', encoding="utf-8", newline='')
        self.csvWriter = csv.writer(self.csvFile)
        self.csvWriter.writerow(['行政代码', '名称', '层级', '类型'])

    def csvWriteRow(self, row):
        """Append one [code, name, level, type] row to the CSV."""
        self.csvWriter.writerow(row)

    def close(self):
        """Flush and close the CSV file once crawling is done."""
        self.csvFile.close()

    # Province (level 1)
    def getProvice(self):
        """Fetch the index page and recurse into every province link."""
        url = 'http://www.stats.gov.cn/sj/tjbz/tjyqhdmhcxhfdm/2023/index.html'
        resp = self.session.get(url, timeout=self.TIMEOUT)
        resp.encoding = 'utf-8'
        soup = BeautifulSoup(resp.text, 'lxml')
        a = soup.select('table.provincetable > tr.provincetr > td >a')
        for item in a:
            proviceUrl = item.get('href')
            # href looks like "11.html"; the digits are the 2-digit province
            # prefix of the 12-digit administrative code.  Raw string avoids
            # the invalid "\." escape warning.
            pid = re.findall(r"([0-9]+)\.html", proviceUrl)
            print('--', pid, proviceUrl)
            code = pid[0].ljust(12, '0')
            print('{}-{}-{}'.format(code, item.get_text(), 1))
            self.csvWriteRow([code, item.get_text(), 1, ''])
            cityUrl = '{}/{}'.format(url.rsplit('/', 1)[0], proviceUrl)
            self.getCity(cityUrl)

    # City (level 2)
    def getCity(self, url):
        """Fetch one province page and recurse into every city row."""
        print('getCity', url)
        resp = self.session.get(url, timeout=self.TIMEOUT)
        resp.encoding = 'utf-8'
        soup = BeautifulSoup(resp.text, 'lxml')
        trs = soup.select('table.citytable > tr.citytr')
        for tr in trs:
            a = tr.select('td > a')
            if len(a) > 0:
                cityUrl = a[0].get('href')
                # BUG FIX: the level was printed as 1 here although the row
                # is (correctly) written with level 2.
                print('{}-{}-{}'.format(a[0].get_text(), a[1].get_text(), 2))
                self.csvWriteRow([a[0].get_text(), a[1].get_text(), 2, ''])
                cityUrl = '{}/{}'.format(url.rsplit('/', 1)[0], cityUrl)
                self.getCounty(cityUrl)
            else:
                # Rows without links are leaf entries with no deeper page.
                td = tr.select('td')
                if len(td) > 0:
                    self.csvWriteRow([td[0].get_text(), td[1].get_text(), 2, ''])

    # County / district (level 3)
    def getCounty(self, url):
        """Fetch one city page and recurse into every county row."""
        print('getCounty', url)
        resp = self.session.get(url, timeout=self.TIMEOUT)
        resp.encoding = 'utf-8'
        soup = BeautifulSoup(resp.text, 'lxml')
        trs = soup.select('table.countytable > tr.countytr')
        for tr in trs:
            a = tr.select('td > a')
            if len(a) > 0:
                countryUrl = a[0].get('href')
                print('{}-{}-{}'.format(a[0].get_text(), a[1].get_text(), 3))
                self.csvWriteRow([a[0].get_text(), a[1].get_text(), 3, ''])
                townUrl = '{}/{}'.format(url.rsplit('/', 1)[0], countryUrl)
                self.getTown(townUrl)
            else:
                td = tr.select('td')
                if len(td) > 0:
                    self.csvWriteRow([td[0].get_text(), td[1].get_text(), 3, ''])

    # Town / street (level 4)
    def getTown(self, url):
        """Fetch one county page and recurse into every town row."""
        resp = self.session.get(url, timeout=self.TIMEOUT)
        resp.encoding = 'utf-8'
        soup = BeautifulSoup(resp.text, 'lxml')
        trs = soup.select('table.towntable > tr.towntr')
        for tr in trs:
            a = tr.select('td > a')
            if len(a) > 0:
                townUrl = a[0].get('href')
                print('{}-{}-{}'.format(a[0].get_text(), a[1].get_text(), 4))
                self.csvWriteRow([a[0].get_text(), a[1].get_text(), 4, ''])
                villageUrl = '{}/{}'.format(url.rsplit('/', 1)[0], townUrl)
                self.getVillage(villageUrl)
            else:
                td = tr.select('td')
                if len(td) > 0:
                    self.csvWriteRow([td[0].get_text(), td[1].get_text(), 4, ''])

    # Village / neighborhood committee (level 5, leaf)
    def getVillage(self, url):
        """Fetch one town page and write every village row (no recursion)."""
        resp = self.session.get(url, timeout=self.TIMEOUT)
        resp.encoding = 'utf-8'
        soup = BeautifulSoup(resp.text, 'lxml')
        trs = soup.select('table.villagetable > tr.villagetr')
        for tr in trs:
            a = tr.select('td')
            # Columns: [0]=code, [1]=urban/rural classification, [2]=name.
            # BUG FIX: original format string had 3 placeholders for 4 args,
            # silently dropping the classification code from the log line.
            print('{}-{}-{}-{}'.format(a[0].get_text(), a[2].get_text(), 5, a[1].get_text()))
            self.csvWriteRow([a[0].get_text(), a[2].get_text(), 5, a[1].get_text()])
if __name__ == "__main__":
    # Only crawl when run as a script, not when imported as a module.
    pet = Pcctv()
    try:
        pet.getProvice()
    finally:
        # Ensure buffered CSV rows reach disk even if the crawl aborts.
        pet.csvFile.close()
# Python scraper for the National Bureau of Statistics (China) province /
# city / district administrative-division data.
# (Adapted from a blog post; latest recommended article published 2024-08-08 14:28:23.)