# -*- coding: utf-8 -*-
from twisted.internet import reactor, defer
from scrapy.crawler import CrawlerRunner
from scrapy.utils.log import configure_logging
import time
import logging
from scrapy.utils.project import get_project_settings
# Send Scrapy's log output to the console
configure_logging()
# CrawlerRunner picks up the project settings from settings.py
runner = CrawlerRunner(get_project_settings())


@defer.inlineCallbacks
def crawl():
    while True:
        # logging.info("new cycle starting")
        yield runner.crawl("bk")
        # Wait 1 second between runs (note: time.sleep blocks the reactor while it waits)
        time.sleep(1)
    reactor.stop()  # never reached: the loop above runs forever


crawl()
reactor.run()  # blocks here until the reactor is stopped
As shown above, this code restarts the Scrapy project in a loop and prints its normal log output to the console. If you do not want the logs printed to the console, comment out the configure_logging() call in the code above.
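Because time.sleep() blocks the Twisted reactor for the duration of the pause, the delay between runs can also be expressed as a Deferred. The following is a minimal sketch rather than the original code: it assumes the same spider name "bk" and project settings, uses twisted.internet.task.deferLater for a non-blocking one-second pause, and runs a fixed number of cycles (3 is an arbitrary choice for illustration) so that reactor.stop() is actually reached.

# -*- coding: utf-8 -*-
from twisted.internet import reactor, defer, task
from scrapy.crawler import CrawlerRunner
from scrapy.utils.log import configure_logging
from scrapy.utils.project import get_project_settings

configure_logging()
runner = CrawlerRunner(get_project_settings())


@defer.inlineCallbacks
def crawl(max_cycles=3):  # max_cycles is a hypothetical parameter added for this sketch
    for _ in range(max_cycles):
        yield runner.crawl("bk")  # same spider name as in the code above
        # Non-blocking 1-second pause: the reactor keeps running while we wait
        yield task.deferLater(reactor, 1, lambda: None)
    reactor.stop()  # reached once the fixed number of cycles is done


crawl()
reactor.run()

Expressing the pause as a Deferred matters once other tasks share the same reactor, for example when several spiders are scheduled side by side.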
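If the goal is only to cut down console noise rather than to silence logging completely, an alternative to commenting out configure_logging() is to raise the log level. This is a small sketch under the same setup as above; LOG_LEVEL and the "WARNING" value are standard Scrapy/Python logging names, not something taken from the original code.

from scrapy.crawler import CrawlerRunner
from scrapy.utils.log import configure_logging
from scrapy.utils.project import get_project_settings

settings = get_project_settings()
settings.set("LOG_LEVEL", "WARNING")  # only warnings and errors reach the console
configure_logging(settings)           # configure_logging reads LOG_LEVEL from the settings
runner = CrawlerRunner(settings)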