# Collect data by trademark-announcement issue number.
import requests
import json
from crawler2.tool.database.MongoDb import MyMongoDB
import time
if __name__ == '__main__':
    # MongoDB sink for the scraped announcement rows.
    # NOTE(review): credentials/host are hard-coded — consider moving to config.
    mongo = MyMongoDB('root', "123456", '192.168.4.231', 27017, 'xcx_gov_cn', 'sbj_gg')
    get_data_url = "https://service.*****.com/getListData?sendParam={}"
    # Announcement number is hard-coded to 1179 for now.
    send_param_obj = {"rows":100,"annNum":"1179","annType":"","tmType":"","coowner":"","recUserName":"","allowUserName":"","byAllowUserName":"","appId":"","appIdZhiquan":"","bfchangedAgengedName":"","changeLastName":"","transferUserName":"","acceptUserName":"","regName":"","tmName":"","intCls":"","fileType":"","totalYOrN":"true","appDateBegin":"","appDateEnd":"","agentName":"","page":1}
    # Only page 1 is fetched here; widen the range to crawl more pages.
    for page in range(1, 2):
        send_param_obj["page"] = page
        # Pass the JSON payload via `params` so requests URL-encodes it properly
        # (the old str.format approach embedded raw braces/spaces in the URL),
        # and bound the request with a timeout so a stalled server cannot hang
        # the crawler indefinitely.
        resp = requests.get(
            "https://service.*****.com/getListData",
            params={"sendParam": json.dumps(send_param_obj)},
            timeout=30,
        ).json()
        print("page:{}".format(page))
        rows = resp["data"]["resp_data"]["rows"]
        for row in rows:
            print(row)
        # mongo.insert_many(resp["data"]["resp_data"]["rows"])
        # Throttle between pages to stay polite to the upstream service.
        time.sleep(5)
# Call flow: build sendParam JSON -> GET list endpoint -> iterate rows (Mongo insert currently disabled).