# Example: a spider that completes without sending any requests.
from typing import Any
import scrapy
from scrapy.crawler import Crawler, CrawlerProcess
class NoRequestsSpider(scrapy.Spider):
    """Spider that finishes immediately without issuing any requests.

    Demonstrates two patterns:

    * overriding ``from_crawler`` to copy a spider argument (``foo``)
      into the crawler settings before they are frozen, and
    * an ``async start()`` that logs and yields nothing, implemented as
      an empty asynchronous generator.
    """

    name = "no_request"

    @classmethod
    def from_crawler(cls, crawler: Crawler, *args: Any, **kwargs: Any):
        instance = super().from_crawler(crawler, *args, **kwargs)
        # Mirror the ``foo`` spider argument into the settings so other
        # components can read it via the settings API.
        instance.settings.set("FOO", kwargs.get("foo"))
        return instance

    async def start(self):
        self.logger.info(f"The value of FOO is {self.settings.getint('FOO')}")
        # Returning before the ``yield`` makes this an async generator
        # that produces no requests; the ``yield`` is never reached.
        return
        yield
def main() -> None:
    """Run :class:`NoRequestsSpider` once in a local crawler process.

    ``process.start()`` blocks until the crawl (which sends no
    requests) finishes.
    """
    process = CrawlerProcess(settings={})
    # ``foo=42`` is forwarded to the spider's kwargs and surfaced as the
    # ``FOO`` setting by ``from_crawler``.
    process.crawl(NoRequestsSpider, foo=42)
    process.start()


# Guard the entry point so importing this module does not start a crawl.
if __name__ == "__main__":
    main()