from twisted.internet import defer
from twisted.trial.unittest import TestCase

from scrapy.signals import request_left_downloader
from scrapy.spiders import Spider
from scrapy.utils.test import get_crawler
from tests.mockserver import MockServer
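

# Spider that counts deliveries of the request_left_downloader signal,
# which Scrapy sends each time a request leaves the downloader, even on
# failure.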
class SignalCatcherSpider(Spider):
    name = "signal_catcher"

    def __init__(self, crawler, url, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Count every request_left_downloader signal this crawl emits.
        crawler.signals.connect(self.on_request_left, signal=request_left_downloader)
        self.caught_times = 0
        self.start_urls = [url]

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        # Pass the crawler into __init__ so the signal can be connected there.
        return cls(crawler, *args, **kwargs)

    def on_request_left(self, request, spider):
        self.caught_times += 1
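

# End-to-end checks against a local mock server: the signal should fire
# exactly once per request, whether the download succeeds, times out, the
# connection is dropped, or the host cannot be resolved.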
class TestCatching(TestCase):
    @classmethod
    def setUpClass(cls):
        # Start one shared mock HTTP server for all tests in this class.
        cls.mockserver = MockServer()
        cls.mockserver.__enter__()

    @classmethod
    def tearDownClass(cls):
        cls.mockserver.__exit__(None, None, None)

    @defer.inlineCallbacks
    def test_success(self):
        # A plain 200 response: the signal fires exactly once.
        crawler = get_crawler(SignalCatcherSpider)
        yield crawler.crawl(self.mockserver.url("/status?n=200"))
        assert crawler.spider.caught_times == 1

    @defer.inlineCallbacks
    def test_timeout(self):
        # The server delays longer than DOWNLOAD_TIMEOUT allows, so the
        # download fails, but the signal still fires exactly once.
        crawler = get_crawler(SignalCatcherSpider, {"DOWNLOAD_TIMEOUT": 0.1})
        yield crawler.crawl(self.mockserver.url("/delay?n=0.2"))
        assert crawler.spider.caught_times == 1

    @defer.inlineCallbacks
    def test_disconnect(self):
        # The server drops the connection mid-response; the signal still fires.
        crawler = get_crawler(SignalCatcherSpider)
        yield crawler.crawl(self.mockserver.url("/drop"))
        assert crawler.spider.caught_times == 1

    @defer.inlineCallbacks
    def test_noconnect(self):
        # DNS resolution fails outright; the signal still fires.
        crawler = get_crawler(SignalCatcherSpider)
        yield crawler.crawl("http://thereisdefinetelynosuchdomain.com")
        assert crawler.spider.caught_times == 1
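
# Note: MockServer and the /status, /delay, and /drop endpoints come from
# Scrapy's own test suite (tests/mockserver), so these tests assume they are
# running inside a Scrapy source checkout.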