File: robots.txt

package info (click to toggle)
moin 1.9.8-1
  • links: PTS, VCS
  • area: main
  • in suites: jessie-kfreebsd
  • size: 74,956 kB
  • sloc: python: 118,347; java: 10,704; php: 2,374; perl: 1,572; xml: 371; makefile: 213; sh: 79; sed: 5
file content (13 lines) | stat: -rw-r--r-- 424 bytes parent folder | download | duplicates (8)
1
2
3
4
5
6
7
8
9
10
11
12
13
# if you want to add your own robot rules, do it BEFORE the final rule matching *

User-agent: *
Crawl-delay: 20
# This has to match script url + cfg.url_prefix_action - it
# saves lots of search engine load and traffic by disallowing crawlers
# to request action related URLs.
#
# NOTE - in order to make this have any effect, you have to set
#        url_prefix_action to "action", cf. HelpOnConfiguration

Disallow: /action/