File: test_schedulermiddleware_duplicatesfilter.py

Package: python-scrapy 0.8-3 (Debian squeeze, area: main)
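
This unit test exercises the scheduler middleware that filters out duplicate
requests: the first request seen for a given URL is accepted, and any later
request for the same URL is rejected with IgnoreRequest.
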
import unittest

from scrapy.http import Request
from scrapy.spider import BaseSpider
from scrapy.core.exceptions import IgnoreRequest
from scrapy.contrib.schedulermiddleware.duplicatesfilter import DuplicatesFilterMiddleware


class DuplicatesFilterMiddlewareTest(unittest.TestCase):

    def setUp(self):
        self.mw = DuplicatesFilterMiddleware()
        self.spider = BaseSpider()
        self.mw.open_spider(self.spider)

    def tearDown(self):
        self.mw.close_spider(self.spider)

    def test_process_spider_output(self):
        r1 = Request('http://scrapytest.org/1')
        r2 = Request('http://scrapytest.org/2')
        r3 = Request('http://scrapytest.org/2')  # same URL as r2
        r4 = Request('http://scrapytest.org/1')  # same URL as r1

        # first requests for each URL are accepted:
        # enqueue_request() returns a falsy value
        assert not self.mw.enqueue_request(self.spider, r1)
        assert not self.mw.enqueue_request(self.spider, r2)
        # repeated URLs are rejected with IgnoreRequest
        self.assertRaises(IgnoreRequest, self.mw.enqueue_request, self.spider, r3)
        self.assertRaises(IgnoreRequest, self.mw.enqueue_request, self.spider, r4)
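
For context, a middleware passing this test only has to remember which
requests it has already seen. The sketch below is illustrative, not the
implementation shipped in scrapy.contrib: it assumes the
scrapy.utils.request.request_fingerprint helper, and the class and
attribute names are made up for the example.

from scrapy.core.exceptions import IgnoreRequest
from scrapy.utils.request import request_fingerprint


class SimpleDuplicatesFilter(object):
    """Keep a per-spider set of request fingerprints; reject repeats."""

    def __init__(self):
        self.fingerprints = {}  # spider -> set of seen fingerprints

    def open_spider(self, spider):
        self.fingerprints[spider] = set()

    def close_spider(self, spider):
        del self.fingerprints[spider]

    def enqueue_request(self, spider, request):
        fp = request_fingerprint(request)
        if fp in self.fingerprints[spider]:
            raise IgnoreRequest('Duplicate request: %s' % request.url)
        self.fingerprints[spider].add(fp)
        # returning None (falsy) signals a new request that may be queued

Fingerprints rather than raw URLs are compared so that requests differing
only trivially (for example in the URL fragment) hash to the same value and
are treated as duplicates.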