Web scraping: saving crawled data into MySQL with pymysql

import json
import requests
from day3.mysql_test import mysql_conn
# The site can't be accessed with a bare request, so we try adding request headers (Cookie and User-Agent)
headers = {
    #'Accept': '*/*',
    #'Accept-Encoding': 'gzip, deflate, br',
    #'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
    #'Connection': 'keep-alive',
    'Cookie': 'aliyungf_tc=AQAAALoQF3p02gsAUhVFebQ3uBBNZn+H; xq_a_token=584d0cf8d5a5a9809761f2244d8d272bac729ed4; xq_a_token.sig=x0gT9jm6qnwd-ddLu66T3A8KiVA; xq_r_token=98f278457fc4e1e5eb0846e36a7296e642b8138a; xq_r_token.sig=2Uxv_DgYTcCjz7qx4j570JpNHIs; _ga=GA1.2.516718356.1534295265; _gid=GA1.2.1050085592.1534295265; u=301534295266356; device_id=f5c21e143ce8060c74a2de7cbcddf0b8; Hm_lvt_1db88642e346389874251b5a1eded6e3=1534295265,1534295722; Hm_lpvt_1db88642e346389874251b5a1eded6e3=1534295722',
    #'Host': 'xueqiu.com',
    #'Referer': 'https://xueqiu.com/',
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
    #'X-Requested-With': 'XMLHttpRequest',
    #'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'
}
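
# Quick sanity check (a sketch, not from the original post): confirm that the
# headers actually get us past the block before parsing anything; a 4xx status
# here usually means the Cookie above has expired
check = requests.get('https://xueqiu.com/', headers=headers)
print(check.status_code)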

# The equivalent urllib-based approach is shown (commented out) below
# url = 'https://xueqiu.com/v4/statuses/public_timeline_by_category.json?since_id=-1&max_id=-1&count=10&category=111'
# url = 'https://xueqiu.com/v4/statuses/public_timeline_by_category.json?since_id=-1&max_id=184184&count=15&category=111'
url = 'https://xueqiu.com/v4/statuses/public_timeline_by_category.json?since_id=-1&max_id=183919&count=15&category=111'
#
# # request.Request
# req = request.Request(url, headers=headers)
#
# # Request the Xueqiu endpoint through urllib
# response = request.urlopen(req)
#
# res = response.read()
# ## res is a string here and still needs to be converted to a dict/list

response = requests.get(url, headers=headers)
# res = response.content
#print(res)
## Conversion step: res_dict = json.loads(res) turns the JSON text into Python objects
res_dict = json.loads(response.text)

# print res_dict to inspect the parsed response
#print(res_dict)

list_list = res_dict['list']

# print(list_list)
# Iterate over list_list; create the database connection once, outside the loop
mc = mysql_conn()
for list_item_dict in list_list:
    # Each item in the list is a dict; its 'data' value is itself a JSON string
    data_str = list_item_dict['data']
    xx = json.loads(data_str)
    o = xx['id']
    p = xx['title']
    s = xx['description']
    k = xx['target']

    # print(type(s))

    # print('-'*50)
    sql = "insert into xueqiu(xid,title,description,target) values(%d,'%s','%s','%s')"%(o,p,s,k)
    print(sql)
    mc = mysql_conn()
    mc.execute_modify_mysql(sql)
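
The %-formatted INSERT above works for this data, but it breaks as soon as a title or description contains a single quote, and it is open to SQL injection. A minimal parameterized sketch, reusing the same mc wrapper and the same xueqiu table (only the three lines inside the loop change, and pymysql substitutes the values itself):

# Sketch: parameterized version of the insert above; pymysql escapes the
# values, so quotes in title/description no longer break the statement
insert_sql = "insert into xueqiu(xid, title, description, target) values (%s, %s, %s, %s)"
mc.cursor.execute(insert_sql, (o, p, s, k))
mc.db.commit()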

 

----------------------The code wrapped by "from day3.mysql_test import mysql_conn" is as follows:----------------------------------

import pymysql

# The main purpose of mysql_conn is to open the database connection only once
# and reuse it for every statement
class mysql_conn(object):
    # Magic method: the constructor/initializer
    def __init__(self):
        self.db = pymysql.connect(host='127.0.0.1', user='root', password='123456', port=3306, database='py11')
        self.cursor = self.db.cursor()
    # Execute modify-type statements (INSERT/UPDATE/DELETE) and commit
    def execute_modify_mysql(self, sql):
        self.cursor.execute(sql)
        self.db.commit()
    # Magic method: the destructor; closes the cursor and the connection
    def __del__(self):
        self.cursor.close()
        self.db.close()



if __name__=='__main__':
    sql = 'insert into xueqiu_test values (3)'
    mc = mysql_conn()
    mc.execute_modify_mysql(sql)
    sql = 'insert into xueqiu_test values (4)'
    mc.execute_modify_mysql(sql)
    sql = 'insert into xueqiu_test values (5)'
    mc.execute_modify_mysql(sql)
    sql = 'insert into xueqiu_test values (6)'
    mc.execute_modify_mysql(sql)
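
If the wrapper itself should accept parameterized statements and close the connection explicitly instead of relying on __del__, a possible variant is sketched below; the params argument and the close() method are illustrative additions, not part of the original class:

import pymysql

class mysql_conn(object):
    # Same connection settings as above; the connection is opened once and reused
    def __init__(self):
        self.db = pymysql.connect(host='127.0.0.1', user='root', password='123456',
                                  port=3306, database='py11')
        self.cursor = self.db.cursor()

    # params is an optional tuple of values; pymysql substitutes them safely
    def execute_modify_mysql(self, sql, params=None):
        self.cursor.execute(sql, params)
        self.db.commit()

    # Explicit cleanup instead of relying on the destructor
    def close(self):
        self.cursor.close()
        self.db.close()


if __name__ == '__main__':
    mc = mysql_conn()
    mc.execute_modify_mysql('insert into xueqiu_test values (%s)', (7,))
    mc.close()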

 

 
