File: test_robotparser.py

# Copyright (C) 2004-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Test robots.txt parsing.
"""

import unittest
from tests import need_network
from linkcheck import configuration, robotparser2

import requests
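
# The tests below exercise linkcheck.robotparser2.RobotFileParser. Judging
# from the calls made in this module, the usage pattern is: construct the
# parser with a requests session, point it at a robots.txt URL with
# set_url(), download and parse it with read(), then query
# can_fetch(useragent, url) for an access decision.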


class TestRobotParser(unittest.TestCase):
    """
    Test robots.txt parser (needs internet access).
    """

    def setUp(self):
        """Initialize self.rp as a robots.txt parser."""
        self.rp = robotparser2.RobotFileParser(session=requests.Session())

    def check(self, a, b):
        """Assert that actual result a matches expected result b."""
        ac = "access allowed" if b else "access denied"
        if a != b:
            self.fail(f"{a} != {b} ({ac})")

    @need_network
    def test_nonexisting_robots(self):
        # A missing robots.txt implies that all access is allowed.
        self.rp.set_url("http://www.lycos.com/robots.txt")
        self.rp.read()
        self.check(
            self.rp.can_fetch(configuration.UserAgent, "http://www.lycos.com/search"),
            True,
        )

    @need_network
    def test_disallowed_robots(self):
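        # Assumes Google's live robots.txt still disallows /search for
        # generic user agents, which is what this assertion relies on.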
        self.rp.set_url("http://google.com/robots.txt")
        self.rp.read()
        self.check(
            self.rp.can_fetch(configuration.UserAgent, "http://google.com/search"),
            False,
        )