# -*- coding: iso-8859-1 -*-
# Copyright (C) 2004-2014 Bastian Kleineidam
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Test http checking.
"""
from .httpserver import HttpServerTest
class TestHttpRobots(HttpServerTest):
    """Test robots.txt link checking behaviour."""

    def test_html(self):
        """Entry point discovered by the test runner; drives both
        robots.txt scenarios against the same local HTTP server."""
        self.robots_txt_test()
        self.robots_txt2_test()

    def robots_txt_test(self):
        """Checking robots.txt directly: valid, but flagged as not
        parseable since its content type is text/plain."""
        url = "http://localhost:%d/robots.txt" % self.port
        # The first three expected lines all repeat the URL.
        expected = ["%s %s" % (tag, url) for tag in ("url", "cache key", "real url")]
        expected.append(
            "warning The URL with content type 'text/plain' is not parseable.")
        expected.append("valid")
        self.direct(url, expected, recursionlevel=5)

    def robots_txt2_test(self):
        """Checking a URL disallowed by robots.txt: still valid, but only
        syntax-checked because access is denied."""
        url = "http://localhost:%d/secret" % self.port
        expected = ["%s %s" % (tag, url) for tag in ("url", "cache key", "real url")]
        expected.append("info Access denied by robots.txt, checked only syntax.")
        expected.append("valid")
        self.direct(url, expected, recursionlevel=5)