# Run two spiders that both set TWISTED_REACTOR to the asyncio selector reactor.
import scrapy
from scrapy.crawler import CrawlerProcess
class AsyncioReactorSpider1(scrapy.Spider):
    """First spider; requests the asyncio-based Twisted reactor via its settings."""

    name = "asyncio_reactor1"
    custom_settings = {
        "TWISTED_REACTOR": (
            "twisted.internet.asyncioreactor.AsyncioSelectorReactor"
        ),
    }
class AsyncioReactorSpider2(scrapy.Spider):
    """Second spider; requests the asyncio-based Twisted reactor via its settings."""

    name = "asyncio_reactor2"
    custom_settings = {
        "TWISTED_REACTOR": (
            "twisted.internet.asyncioreactor.AsyncioSelectorReactor"
        ),
    }
# Schedule both spiders on one CrawlerProcess, then block until they finish.
process = CrawlerProcess()
for spider_cls in (AsyncioReactorSpider1, AsyncioReactorSpider2):
    process.crawl(spider_cls)
process.start()