from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings


def main() -> None:
    """Run multiple Scrapy spiders concurrently in a single process.

    Both spiders are queued on one CrawlerProcess and share the same
    Twisted reactor, so they crawl at the same time.
    """
    settings = get_project_settings()
    process = CrawlerProcess(settings)
    # Queue every spider BEFORE starting the reactor.
    process.crawl('文件名1')
    process.crawl('文件名2')
    # start() blocks until all queued spiders finish. It must be called
    # exactly once: the Twisted reactor cannot be restarted, so a second
    # start() call raises ReactorNotRestartable. (The original script
    # called start() twice, which crashes after the crawl completes.)
    process.start()


if __name__ == "__main__":
    main()
# Python 同时运行多个爬虫 (Python: run multiple spiders at the same time)
# 于 2023-03-03 17:43:03 首次发布 (first published 2023-03-03 17:43:03)