from scrapy import Spider
from scrapy.crawler import CrawlerRunner
from scrapy.utils.log import configure_logging
class NoRequestsSpider(Spider):
    """A spider that completes immediately without scheduling any request."""

    name = "no_request"
    custom_settings = {
        # Run this crawl under Twisted's asyncio-backed reactor.
        "TWISTED_REACTOR": "twisted.internet.asyncioreactor.AsyncioSelectorReactor",
    }

    async def start(self):
        # Empty async generator: the `yield` after `return` is never
        # reached, so no requests are ever produced.
        return
        yield
# Emit log records as "LEVEL: message" and show debug-level output.
configure_logging({"LOG_FORMAT": "%(levelname)s: %(message)s", "LOG_LEVEL": "DEBUG"})

# NOTE: the asyncio reactor must be installed *before* twisted.internet.reactor
# is imported, which is why these imports sit mid-module (hence the noqa).
from scrapy.utils.reactor import install_reactor  # noqa: E402

install_reactor("twisted.internet.asyncioreactor.AsyncioSelectorReactor")

runner = CrawlerRunner()
crawl_deferred = runner.crawl(NoRequestsSpider)

from twisted.internet import reactor  # noqa: E402

# Stop the reactor once the crawl finishes, whether it succeeded or failed.
crawl_deferred.addBoth(lambda _result: reactor.stop())
reactor.run()