File: test_http_robots.py

Package: linkchecker 10.6.0-2
# Copyright (C) 2004-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Test http checking.
"""
from .httpserver import HttpServerTest


class TestHttpRobots(HttpServerTest):
    """Test robots.txt link checking behaviour."""

    def test_html(self):
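        """Exercise both robots.txt scenarios against the local test server."""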
        self.robots_txt_test()
        self.robots_txt2_test()

    def robots_txt_test(self):
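        """Check that fetching robots.txt itself succeeds: its text/plain
        body cannot be parsed for further links, so a warning is emitted,
        but the URL is still reported as valid."""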
        url = "http://localhost:%d/robots.txt" % self.port
        resultlines = [
            "url %s" % url,
            "cache key %s" % url,
            "real url %s" % url,
            "warning The URL with content type 'text/plain' is not parseable.",
            "valid",
        ]
        self.direct(url, resultlines, recursionlevel=5)

    def robots_txt2_test(self):
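        """Check a URL that robots.txt disallows: LinkChecker skips the
        fetch, verifies only the URL syntax, and reports it as valid
        with an informational message."""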
        url = "http://localhost:%d/secret" % self.port
        resultlines = [
            "url %s" % url,
            "cache key %s" % url,
            "real url %s" % url,
            "info Access denied by robots.txt, checked only syntax.",
            "valid",
        ]
        self.direct(url, resultlines, recursionlevel=5)
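

# A minimal sketch of the robots.txt semantics that robots_txt2_test
# relies on. The Disallow rule below is an assumption about the test
# server's fixture, which is not shown in this file; it is included only
# to illustrate why /secret is reported as "Access denied by robots.txt"
# while /robots.txt itself stays fetchable.
if __name__ == "__main__":
    from urllib import robotparser

    rp = robotparser.RobotFileParser()
    rp.parse(["User-agent: *", "Disallow: /secret"])
    # robots.txt itself is always retrievable; only /secret is blocked.
    assert rp.can_fetch("LinkChecker", "http://localhost:8000/robots.txt")
    assert not rp.can_fetch("LinkChecker", "http://localhost:8000/secret")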