File: test_spidermiddleware_start.py

Package: python-scrapy 2.13.3-1
from twisted.trial.unittest import TestCase

from scrapy.http import Request
from scrapy.spidermiddlewares.start import StartSpiderMiddleware
from scrapy.spiders import Spider
from scrapy.utils.defer import deferred_f_from_coro_f
from scrapy.utils.misc import build_from_crawler
from scrapy.utils.test import get_crawler


class TestMiddleware(TestCase):
    @deferred_f_from_coro_f
    async def test_async(self):
        crawler = get_crawler(Spider)
        mw = build_from_crawler(StartSpiderMiddleware, crawler)

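        # Four start requests: one without the is_start_request meta key
        # (the middleware is expected to default it to True) and three with
        # explicit values that must pass through unchanged.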
        async def start():
            yield Request("data:,1")
            yield Request("data:,2", meta={"is_start_request": True})
            yield Request("data:,2", meta={"is_start_request": False})
            yield Request("data:,2", meta={"is_start_request": "foo"})

        result = [
            request.meta["is_start_request"]
            async for request in mw.process_start(start())
        ]
        assert result == [True, True, False, "foo"]

    @deferred_f_from_coro_f
    async def test_sync(self):
        crawler = get_crawler(Spider)
        mw = build_from_crawler(StartSpiderMiddleware, crawler)

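        # Same four cases, exercised through the older synchronous
        # process_start_requests() interface.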
        def start():
            yield Request("data:,1")
            yield Request("data:,2", meta={"is_start_request": True})
            yield Request("data:,2", meta={"is_start_request": False})
            yield Request("data:,2", meta={"is_start_request": "foo"})

        result = [
            request.meta["is_start_request"]
            for request in mw.process_start_requests(start(), Spider("test"))
        ]
        assert result == [True, True, False, "foo"]
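
For context, the assertions above pin down the middleware's contract: a request yielded from start() without an is_start_request meta key comes out with it set to True, while any explicit value (True, False, or even a non-boolean like "foo") passes through untouched. The sketch below illustrates that contract; the class name and body are inferred from the tests, not copied from the Scrapy source.

from scrapy import Request


class StartSpiderMiddlewareSketch:
    """Hypothetical stand-in showing the behavior the tests assert."""

    async def process_start(self, start):
        async for obj in start:
            if isinstance(obj, Request):
                # setdefault() writes the key only when it is absent, which
                # is why the explicit True, False, and "foo" values survive.
                obj.meta.setdefault("is_start_request", True)
            yield obj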