Scrapy MysqlPipeline: synchronous and asynchronous writes to MySQL

The synchronous version opens a single blocking MySQLdb connection and commits inside `process_item`:

```
import MySQLdb


class MysqlPipeline(object):
    # Write to MySQL synchronously: every insert blocks until it commits

    def __init__(self):
        self.conn = MySQLdb.connect(
            '192.168.0.106', 'root', 'root', 'article_spider',
            charset="utf8", use_unicode=True
        )
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        insert_sql = """
            insert into jobbole_article(title, url, create_date, fav_nums)
            VALUES (%s, %s, %s, %s)
        """
        self.cursor.execute(
            insert_sql,
            (item["title"], item["url"], item["create_date"], item["fav_nums"])
        )
        self.conn.commit()
        return item
```
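For reference, a minimal sketch of the Item this pipeline assumes; the field names come from the insert statement above, while the class name is only illustrative:

```
import scrapy


class JobBoleArticleItem(scrapy.Item):
    # Fields read by MysqlPipeline.process_item
    title = scrapy.Field()
    url = scrapy.Field()
    create_date = scrapy.Field()
    fav_nums = scrapy.Field()
```

The asynchronous variant below hands the same inserts to Twisted's adbapi connection pool, so database writes no longer block the crawl: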

```
import MySQLdb.cursors
from twisted.enterprise import adbapi


class MysqlTwistedPipline(object):
    # Use Twisted's adbapi connection pool so MySQL inserts run asynchronously

    def __init__(self, dbpool):
        self.dbpool = dbpool

    @classmethod
    def from_settings(cls, settings):
        dbparms = dict(
            host=settings["MYSQL_HOST"],
            db=settings["MYSQL_DBNAME"],
            user=settings["MYSQL_USER"],
            passwd=settings["MYSQL_PASSWORD"],
            charset='utf8',
            cursorclass=MySQLdb.cursors.DictCursor,
            use_unicode=True,
        )
        dbpool = adbapi.ConnectionPool("MySQLdb", **dbparms)
        return cls(dbpool)

    def process_item(self, item, spider):
        # Use Twisted to run the MySQL insert asynchronously
        query = self.dbpool.runInteraction(self.do_insert, item)
        query.addErrback(self.handle_error, item, spider)  # handle insert errors
        return item

    def handle_error(self, failure, item, spider):
        # Handle exceptions raised by the asynchronous insert
        print(failure)

    def do_insert(self, cursor, item):
        # Perform the actual insert:
        # build a different SQL statement for each item type and write it to MySQL
        insert_sql, params = item.get_insert_sql()
        print(insert_sql, params)
        cursor.execute(insert_sql, params)
```
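`do_insert` expects each Item class to provide a `get_insert_sql()` method returning the SQL statement and its parameters, so different item types can map to different tables. A sketch of such a method on the hypothetical item above (the body mirrors the synchronous insert and is an assumption, not part of the original code):

```
import scrapy


class JobBoleArticleItem(scrapy.Item):
    title = scrapy.Field()
    url = scrapy.Field()
    create_date = scrapy.Field()
    fav_nums = scrapy.Field()

    def get_insert_sql(self):
        # Return (sql, params) consumed by MysqlTwistedPipline.do_insert
        insert_sql = """
            insert into jobbole_article(title, url, create_date, fav_nums)
            VALUES (%s, %s, %s, %s)
        """
        params = (self["title"], self["url"], self["create_date"], self["fav_nums"])
        return insert_sql, params
```

`from_settings` reads the connection parameters from the project settings. A sketch of the matching settings.py entries (values copied from the synchronous example; the pipeline's module path depends on your project layout):

```
# settings.py -- keys read by MysqlTwistedPipline.from_settings
MYSQL_HOST = "192.168.0.106"
MYSQL_DBNAME = "article_spider"
MYSQL_USER = "root"
MYSQL_PASSWORD = "root"

# Enable the asynchronous pipeline
ITEM_PIPELINES = {
    "article_spider.pipelines.MysqlTwistedPipline": 300,
}
```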

Besides Twisted's adbapi, Scrapy's item pipelines can also run as coroutines, which makes it convenient to store data in MySQL with the asyncio driver aiomysql. The implementation steps are as follows:

1. Install the asynchronous MySQL library aiomysql: `pip install aiomysql`

2. Configure the MySQL connection in settings.py:

```
MYSQL_HOST = 'localhost'
MYSQL_PORT = 3306
MYSQL_USER = 'root'
MYSQL_PASSWORD = 'password'
MYSQL_DBNAME = 'database_name'
```

3. Create a pipeline backed by an asynchronous MySQL connection pool. Note that Scrapy calls `from_crawler` synchronously, so it should only read the settings; the pool is created in the `open_spider` coroutine instead:

```
import aiomysql


class MySQLPipeline(object):

    def __init__(self, mysql_host, mysql_port, mysql_user, mysql_password, mysql_dbname):
        self.mysql_host = mysql_host
        self.mysql_port = mysql_port
        self.mysql_user = mysql_user
        self.mysql_password = mysql_password
        self.mysql_dbname = mysql_dbname
        self.pool = None

    @classmethod
    def from_crawler(cls, crawler):
        # Read connection settings; from_crawler itself must stay synchronous
        return cls(
            mysql_host=crawler.settings.get('MYSQL_HOST', 'localhost'),
            mysql_port=crawler.settings.get('MYSQL_PORT', 3306),
            mysql_user=crawler.settings.get('MYSQL_USER', 'root'),
            mysql_password=crawler.settings.get('MYSQL_PASSWORD', 'password'),
            mysql_dbname=crawler.settings.get('MYSQL_DBNAME', 'database_name'),
        )

    async def open_spider(self, spider):
        # Create the connection pool when the spider starts
        self.pool = await aiomysql.create_pool(
            host=self.mysql_host,
            port=self.mysql_port,
            user=self.mysql_user,
            password=self.mysql_password,
            db=self.mysql_dbname,
            charset='utf8mb4',
            autocommit=True,
            maxsize=10,
            minsize=1
        )

    async def process_item(self, item, spider):
        async with self.pool.acquire() as conn:
            async with conn.cursor() as cur:
                sql = "INSERT INTO table_name (field1, field2) VALUES (%s, %s)"
                await cur.execute(sql, (item['field1'], item['field2']))
        return item

    async def close_spider(self, spider):
        self.pool.close()
        await self.pool.wait_closed()
```

4. Enable MySQLPipeline in settings.py:

```
ITEM_PIPELINES = {
    'myproject.pipelines.MySQLPipeline': 300,
}
```
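One additional setting is usually required for the aiomysql variant: Scrapy must run Twisted on top of the asyncio event loop before coroutines that await asyncio libraries can work. A minimal sketch of that setting, added alongside the configuration above:

```
# settings.py -- use the asyncio reactor so aiomysql coroutines can be awaited
TWISTED_REACTOR = "twisted.internet.asyncioreactor.AsyncioSelectorReactor"
```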