首先创建一个数据库
use learn 然后插入点数据,要不然看不到表
// Insert a seed document so the database actually materializes
// (MongoDB only creates a database once it contains at least one document).
// insertOne() replaces the legacy insert() helper, which is deprecated
// in the mongo shell and removed from mongosh.
db.learn.insertOne({"test":'1'})
然后创建集合类似于表
// Explicitly create the collection (roughly analogous to a table).
// The original passed a bare, undefined placeholder `name`; the collection
// name must be a string — use 'python', the MONGODB_COLLNAME the pipeline
// settings below refer to.
db.createCollection("python")
然后在项目的 settings.py 中加入以下配置
# Scrapy settings for the MongoDB pipeline.
# Enable the pipeline and point it at the local MongoDB instance.
ITEM_PIPELINES = {
    'quote.pipelines.QuotePipeline': 300,
}
MONGODB_HOST = '127.0.0.1'   # MongoDB server address
MONGODB_PORT = 27017         # default MongoDB port
MONGODB_DBNAME = 'learn'     # target database
MONGODB_COLLNAME = 'python'  # target collection
然后在 pipelines.py 中编写如下代码
import pymongo
# NOTE(review): the original note insisted on `from scrapy.conf import settings`
# to avoid a module error; scrapy.conf only exists on old Scrapy releases and
# was removed in Scrapy 1.1+. On modern Scrapy, read settings via
# `crawler.settings` in a `from_crawler` classmethod instead.
from scrapy.conf import settings


class QuotePipeline(object):
    """Scrapy item pipeline that persists every scraped item into MongoDB.

    Connection parameters come from the project settings:
    MONGODB_HOST / MONGODB_PORT / MONGODB_DBNAME / MONGODB_COLLNAME.
    """

    def __init__(self):
        host = settings['MONGODB_HOST']
        port = settings['MONGODB_PORT']
        db_name = settings['MONGODB_DBNAME']
        # One client per pipeline instance, reused for every item; kept on
        # self so close_spider() can release it.
        self.client = pymongo.MongoClient(host=host, port=port)
        db = self.client[db_name]
        self.post = db[settings['MONGODB_COLLNAME']]

    def process_item(self, item, spider):
        """Insert the item as a plain dict and pass it on unchanged."""
        # insert_one() replaces Collection.insert(), which was deprecated in
        # pymongo 3 and removed in pymongo 4.
        self.post.insert_one(dict(item))
        return item

    def close_spider(self, spider):
        # Release the MongoDB connection when the spider finishes
        # (the original leaked the client).
        self.client.close()