File: test-robots.c

/*
 * Copyright(c) 2015-2018 Free Software Foundation, Inc.
 *
 * This file is part of libwget.
 *
 * Libwget is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * Libwget is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with libwget.  If not, see <https://www.gnu.org/licenses/>.
 *
 *
 * Testing basic robots.txt functionality:
 * a recursive download must skip paths disallowed for 'User-agent: *',
 * while still fetching pages that were requested explicitly.
 *
 */

#include <config.h>

#include <stdlib.h> // exit()
#include "libtest.h"

int main(void)
{
	wget_test_url_t urls[] = {
		{	.name = "/robots.txt",
			.code = "200 Dontcare",
			.body =
				"User-agent: Badboy\n"\
				"Disallow: /\n"\
				"\n"
				"# a simple comment\n"\
				"User-agent: *\n"\
				"Disallow: /subdir2/\n"\
			,
			.headers = {
				"Content-Type: text/plain",
			}
		},
		{	.name = "/index.html",
			.code = "200 Dontcare",
			.body =
				"<html><head><title>Main Page</title><body><p>A link to a" \
				" <A href=\"http://localhost:{{port}}/secondpage.html\">second page</a>." \
				" <a href=\"/subdir1/subpage1.html?query&param#frag\">page in subdir1</a>." \
				" <a href=\"./subdir1/subpage2.html\">page in subdir1</a>." \
				"</p></body></html>",
			.headers = {
				"Content-Type: text/html",
			}
		},
		{	.name = "/secondpage.html",
			.code = "200 Dontcare",
			.body =
				"<html><head><title>Main Page</title><base href=\"/subdir2/\"></head><body><p>A link to a" \
				" <A href=\"../secondpage.html\">second page</a>." \
				" <a href=\"subpage1.html?query&param#frag\">page in subdir2</a>." \
				" <a href=\"./subpage2.html\">page in subdir2</a>." \
				"</p></body></html>",
			.headers = {
				"Content-Type: text/html",
			}
		},
		{	.name = "/subdir1/subpage1.html?query&param",
			.code = "200 Dontcare",
			.body = "sub1_1"
		},
		{	.name = "/subdir1/subpage2.html",
			.code = "200 Dontcare",
			.body = "sub1_2"
		},
		{	.name = "/subdir2/subpage1.html?query&param",
			.code = "200 Dontcare",
			.body = "sub2_1"
		},
		{	.name = "/subdir2/subpage2.html",
			.code = "200 Dontcare",
			.body = "sub2_2"
		},
	};
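
	// With -nH (no host directories), wget2 saves each file under a path
	// equal to urls[].name without the leading '/', hence the "name + 1"
	// expressions in the expected-file lists below.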

	// these test helpers do not return if an error occurs
	wget_test_start_server(
		WGET_TEST_RESPONSE_URLS, &urls, countof(urls),
		WGET_TEST_FEATURE_MHD,
		0);
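
	// Assumption: the {{port}} placeholder in urls[1].body is substituted
	// with the actual server port by the test framework once the server
	// is running.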

	// Recursive download: robots.txt forbids /subdir2/ for '*', so the
	// pages under /subdir2/ (urls[5], urls[6]) must not be fetched
	wget_test(
		WGET_TEST_OPTIONS, "-r -nH",
		WGET_TEST_REQUEST_URL, "index.html",
		WGET_TEST_EXPECTED_ERROR_CODE, 0,
		WGET_TEST_EXPECTED_FILES, &(wget_test_file_t []) {
			{ urls[0].name + 1, urls[0].body },
			{ urls[1].name + 1, urls[1].body },
			{ urls[2].name + 1, urls[2].body },
			{ urls[3].name + 1, urls[3].body },
			{ urls[4].name + 1, urls[4].body },
			{	NULL } },
		0);

	// robots.txt still forbids /subdir2/ for '*', but the explicitly
	// requested page subdir2/subpage2.html is downloaded anyway
	wget_test(
		WGET_TEST_OPTIONS, "-r -nH",
		WGET_TEST_REQUEST_URLS, "index.html", "subdir2/subpage2.html", NULL,
		WGET_TEST_EXPECTED_ERROR_CODE, 0,
		WGET_TEST_EXPECTED_FILES, &(wget_test_file_t []) {
			{ urls[0].name + 1, urls[0].body },
			{ urls[1].name + 1, urls[1].body },
			{ urls[2].name + 1, urls[2].body },
			{ urls[3].name + 1, urls[3].body },
			{ urls[4].name + 1, urls[4].body },
			{ urls[6].name + 1, urls[6].body },
			{	NULL } },
		0);
	exit(0);
}