File: archiveofourown.org

Package: python-protego 0.5.0+dfsg-1 (area: main; suites: forky, sid; size: 30,052 kB)
SLOC: Python 1,579; Perl 190; C++ 33; shell 4; Makefile 3
File: 854 bytes, 35 lines
# See http://www.robotstxt.org/wc/norobots.html for documentation on how to use the robots.txt file
#

User-agent:	*
Disallow:	/works? # cruel but efficient
Disallow: /autocomplete/
Disallow: /downloads/
Disallow: /external_works/
# disallow indexing of search results
Disallow: /bookmarks/search?
Disallow: /people/search?
Disallow: /tags/search?
Disallow: /works/search?

User-agent:	Googlebot
Disallow: /autocomplete/
Disallow: /downloads/
Disallow: /external_works/
# Googlebot is smart and knows pattern matching
Disallow: /works/*?
Disallow: /*search?
Disallow:	/*?*query=
Disallow:	/*?*sort_
Disallow:	/*?*selected_tags
Disallow:	/*?*selected_pseuds
Disallow: /*?*use_openid
Disallow: /*?*view_adult
Disallow: /*?*tag_id
Disallow: /*?*pseud_id
Disallow: /*?*user_id
Disallow: /*?*pseud=
Disallow: /people?*show=

User-agent: Slurp
Crawl-delay: 30
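
This file ships as a test fixture for python-protego, a pure-Python robots.txt parser. Below is a minimal sketch of how Protego would evaluate rules like the ones above, using its documented Protego.parse / can_fetch / crawl_delay API; the snippet abridges the file, and the example URLs and "expected" results are illustrative assumptions rather than part of the fixture.

# Minimal sketch: parsing an abridged copy of the rules above with Protego.
# The example URLs and the "expected" results are illustrative assumptions.
from protego import Protego

robotstxt = """\
User-agent: *
Disallow: /works?
Disallow: /works/search?

User-agent: Googlebot
Disallow: /works/*?
Disallow: /*search?
Disallow: /*?*view_adult

User-agent: Slurp
Crawl-delay: 30
"""

rp = Protego.parse(robotstxt)

# Generic bots: "/works?" is a plain prefix rule, so only the query-style URL is blocked.
print(rp.can_fetch("https://archiveofourown.org/works?page=2", "mybot"))    # expected: False
print(rp.can_fetch("https://archiveofourown.org/works/123", "mybot"))       # expected: True

# The Googlebot group relies on wildcard patterns ("Googlebot is smart and knows pattern matching").
print(rp.can_fetch("https://archiveofourown.org/works/123?view_adult=true", "Googlebot"))  # expected: False
print(rp.can_fetch("https://archiveofourown.org/tags/search?q=fluff", "Googlebot"))        # expected: False

# Crawl-delay is declared only for the Slurp group; other agents get no delay.
print(rp.crawl_delay("Slurp"))   # expected: 30.0
print(rp.crawl_delay("mybot"))   # expected: None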