import argparse
import json
from scrapy.commands import ScrapyCommand
from scrapy.settings import BaseSettings
class Command(ScrapyCommand):
    """Print project setting values, optionally coerced to a concrete type.

    Exactly one ``--get*`` option is honored per invocation; ``--get`` takes
    precedence, followed by the first coercion flag that was supplied.
    """

    # Works outside a project: settings can still be inspected from defaults.
    requires_project = False
    default_settings = {"LOG_ENABLED": False, "SPIDER_LOADER_WARN_ONLY": True}

    def syntax(self) -> str:
        """Return the usage hint shown after the command name."""
        return "[options]"

    def short_desc(self) -> str:
        """Return the one-line description shown in the command listing."""
        return "Get settings values"

    def add_options(self, parser: argparse.ArgumentParser) -> None:
        """Register the ``--get*`` flags on top of the base command options."""
        super().add_options(parser)
        # (flag, help) pairs; dest is the flag name with the dashes stripped,
        # which matches the attribute names read in run().
        specs = (
            ("--get", "print raw setting value"),
            ("--getbool", "print setting value, interpreted as a boolean"),
            ("--getint", "print setting value, interpreted as an integer"),
            ("--getfloat", "print setting value, interpreted as a float"),
            ("--getlist", "print setting value, interpreted as a list"),
        )
        for flag, help_text in specs:
            parser.add_argument(
                flag, dest=flag.lstrip("-"), metavar="SETTING", help=help_text
            )

    def run(self, args: list[str], opts: argparse.Namespace) -> None:
        """Look up the requested setting and print it to stdout.

        ``--get`` prints the raw value (nested settings serialized as JSON);
        the coercion flags delegate to the matching ``Settings.get*`` method.
        """
        assert self.crawler_process
        settings = self.crawler_process.settings
        if opts.get:
            value = settings.get(opts.get)
            # Nested BaseSettings are emitted as JSON, not their repr().
            if isinstance(value, BaseSettings):
                print(json.dumps(value.copy_to_dict()))
            else:
                print(value)
            return
        # First truthy coercion option wins, mirroring an if/elif chain.
        for accessor in ("getbool", "getint", "getfloat", "getlist"):
            setting_name = getattr(opts, accessor)
            if setting_name:
                print(getattr(settings, accessor)(setting_name))
                return