File: test_signals.py

package info (click to toggle)
python-scrapy 2.4.1-2+deb11u1
  • links: PTS, VCS
  • area: main
  • in suites: bullseye
  • size: 4,748 kB
  • sloc: python: 32,888; xml: 199; makefile: 90; sh: 7
file content (44 lines) | stat: -rw-r--r-- 1,336 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
from pytest import mark
from twisted.internet import defer
from twisted.trial import unittest

from scrapy import signals, Request, Spider
from scrapy.utils.test import get_crawler, get_from_asyncio_queue

from tests.mockserver import MockServer


class ItemSpider(Spider):
    """Spider that emits one simple dict item per mock-server response."""

    name = 'itemspider'

    def start_requests(self):
        # Fire ten requests at the mock server; each request carries its
        # index in meta so the parsed item can be traced back to it.
        for idx in range(10):
            url = self.mockserver.url(f'/status?n=200&id={idx}')
            yield Request(url, meta={'index': idx})

    def parse(self, response):
        # Echo the originating request's index back as the scraped item.
        return {'index': response.meta['index']}


class AsyncSignalTestCase(unittest.TestCase):
    """Exercise a coroutine signal handler connected to ``item_scraped``."""

    def setUp(self):
        # Start the mock HTTP server manually (context-manager protocol
        # invoked by hand so it spans the whole test) and reset the sink.
        self.mockserver = MockServer()
        self.mockserver.__enter__()
        self.items = []

    def tearDown(self):
        self.mockserver.__exit__(None, None, None)

    async def _on_item_scraped(self, item):
        # Bounce the item through an asyncio queue so the handler must
        # genuinely run as a coroutine on the asyncio loop.
        item = await get_from_asyncio_queue(item)
        self.items.append(item)

    @mark.only_asyncio()
    @defer.inlineCallbacks
    def test_simple_pipeline(self):
        runner = get_crawler(ItemSpider)
        runner.signals.connect(self._on_item_scraped, signals.item_scraped)
        yield runner.crawl(mockserver=self.mockserver)
        # All ten items must have been delivered to the async handler.
        self.assertEqual(len(self.items), 10)
        for expected in ({'index': i} for i in range(10)):
            self.assertIn(expected, self.items)