File: logstats.py

package info: python-scrapy 2.13.3-1
file content (99 lines):
from __future__ import annotations

import logging
from typing import TYPE_CHECKING

from twisted.internet import task

from scrapy import Spider, signals
from scrapy.exceptions import NotConfigured

if TYPE_CHECKING:
    # typing.Self requires Python 3.11
    from typing_extensions import Self

    from scrapy.crawler import Crawler
    from scrapy.statscollectors import StatsCollector


logger = logging.getLogger(__name__)


class LogStats:
    """Log basic scraping stats periodically like:
    * RPM - Requests per Minute
    * IPM - Items per Minute
    """

    def __init__(self, stats: StatsCollector, interval: float = 60.0):
        self.stats: StatsCollector = stats
        self.interval: float = interval
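        # Scale per-interval deltas up to per-minute rates.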
        self.multiplier: float = 60.0 / self.interval
        self.task: task.LoopingCall | None = None

    @classmethod
    def from_crawler(cls, crawler: Crawler) -> Self:
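        # LOGSTATS_INTERVAL defaults to 60.0; a value of 0 disables the
        # extension by raising NotConfigured below.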
        interval: float = crawler.settings.getfloat("LOGSTATS_INTERVAL")
        if not interval:
            raise NotConfigured
        assert crawler.stats  # narrow the Optional stats attribute for type checkers
        o = cls(crawler.stats, interval)
        crawler.signals.connect(o.spider_opened, signal=signals.spider_opened)
        crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
        return o

    def spider_opened(self, spider: Spider) -> None:
        self.pagesprev: int = 0
        self.itemsprev: int = 0

        # Fire log() every self.interval seconds for the spider's lifetime.
        self.task = task.LoopingCall(self.log, spider)
        self.task.start(self.interval)

    def log(self, spider: Spider) -> None:
        self.calculate_stats()

        msg = (
            "Crawled %(pages)d pages (at %(pagerate)d pages/min), "
            "scraped %(items)d items (at %(itemrate)d items/min)"
        )
        log_args = {
            "pages": self.pages,
            "pagerate": self.prate,
            "items": self.items,
            "itemrate": self.irate,
        }
        # Passing log_args as the single argument enables lazy %(name)s
        # substitution; extra={"spider": spider} lets Scrapy's log formatter
        # tag the record with the spider.
        logger.info(msg, log_args, extra={"spider": spider})

    def calculate_stats(self) -> None:
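        # Rates are the deltas since the previous tick, scaled to per-minute.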
        self.items: int = self.stats.get_value("item_scraped_count", 0)
        self.pages: int = self.stats.get_value("response_received_count", 0)
        self.irate: float = (self.items - self.itemsprev) * self.multiplier
        self.prate: float = (self.pages - self.pagesprev) * self.multiplier
        self.pagesprev, self.itemsprev = self.pages, self.items

    def spider_closed(self, spider: Spider, reason: str) -> None:
        if self.task and self.task.running:
            self.task.stop()

        # Record whole-crawl averages so they appear in the final stats.
        rpm_final, ipm_final = self.calculate_final_stats(spider)
        self.stats.set_value("responses_per_minute", rpm_final)
        self.stats.set_value("items_per_minute", ipm_final)

    def calculate_final_stats(
        self, spider: Spider
    ) -> tuple[None, None] | tuple[float, float]:
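        # Unlike log(), this averages over the entire crawl duration.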
        start_time = self.stats.get_value("start_time")
        finish_time = self.stats.get_value("finish_time")

        if not start_time or not finish_time:
            return None, None

        # Use total_seconds() so crawls running longer than a day are not
        # undercounted (timedelta.seconds alone drops the days component).
        mins_elapsed = (finish_time - start_time).total_seconds() / 60

        if mins_elapsed == 0:
            return None, None

        items = self.stats.get_value("item_scraped_count", 0)
        pages = self.stats.get_value("response_received_count", 0)

        return (pages / mins_elapsed), (items / mins_elapsed)
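

# Usage sketch (hypothetical project settings): LogStats is enabled by default
# via Scrapy's base extension settings, so a project normally only tunes
# LOGSTATS_INTERVAL.
#
#     # settings.py
#     LOGSTATS_INTERVAL = 30.0  # log every 30 seconds instead of the default 60
#     # LOGSTATS_INTERVAL = 0   # falsy value -> NotConfigured -> no periodic logs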