File: www.napaonline.com

package info
python-protego 0.5.0+dfsg-1
  • area: main
  • in suites: forky, sid
  • size: 30,052 kB
  • sloc: python: 1,579; perl: 190; cpp: 33; sh: 4; makefile: 3
file content (62 lines) | stat: -rw-r--r-- 1,530 bytes
# For all robots
User-agent: *

# Block access to specific groups of pages
Disallow: /en/cart*
Disallow: /es/cart*
Disallow: */checkout*
Disallow: */my-account*
Disallow: */job*

Disallow: *pageSize=24*
Disallow: *pageSize=30*

Disallow: /en/search*jobId*
Disallow: /es/search*jobId*
Disallow: /search*jobId*

Disallow: /seo/*

Disallow: /store-nearme
Disallow: /store-finder

Disallow: *referer=sidebar_form-universal
Disallow: *referer=sidebar_form-exactfit
Disallow: *referer=nol-veh-conds

Disallow: /en/my-vehicle/vehicleType/*
Disallow: /es/my-vehicle/vehicleType/*
Disallow: /fr/my-vehicle/vehicleType/*

Disallow: */years/*

# Noindex to help expedite deindexing of these URLs
Noindex: /en/search*jobId*
Noindex: /es/search*jobId*
Noindex: /search*jobId*
Noindex: /seo/*

# Block crawlers from accessing any distil pages
Disallow: /distil*

# Block CazoodleBot as it does not send correct Accept content headers
User-agent: CazoodleBot
Disallow: /

# Block MJ12bot as it is just noise
User-agent: MJ12bot
Disallow: /

# Block dotbot as it cannot parse base URLs properly
User-agent: dotbot/1.0
Disallow: /

# Block Gigabot
User-agent: Gigabot
Disallow: /

# Allow search crawlers to discover the sitemap
Sitemap: https://www.napaonline.com/nol_sitemap_https.xml
Sitemap: https://www.napaonline.com/nol_store_sitemap_https.xml
Sitemap: https://www.napaonline.com/napa_category_sitemap_https.xml
Sitemap: https://www.napaonline.com/vlp_sitemap_https.xml
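
Since this file ships with the Protego robots.txt parser as sample input, the following is a minimal sketch of how it could be loaded and queried. The local filename, the example URLs, and the bot name "somebot" are assumptions chosen for illustration; Protego.parse, can_fetch, and sitemaps are the parser's documented entry points.

# Minimal sketch: parse this robots.txt with Protego and query a few of the
# rules above. Filename, URLs, and the "somebot" agent are illustrative only.
from protego import Protego

with open("www.napaonline.com", encoding="utf-8") as f:
    rp = Protego.parse(f.read())

# Generic crawlers fall under "User-agent: *", so cart and checkout URLs
# should be blocked, while an unmatched path should remain fetchable.
print(rp.can_fetch("https://www.napaonline.com/en/cart", "somebot"))            # expected: False
print(rp.can_fetch("https://www.napaonline.com/en/checkout/step1", "somebot"))  # expected: False
print(rp.can_fetch("https://www.napaonline.com/en/brakes", "somebot"))          # expected: True

# MJ12bot has its own group that disallows the whole site.
print(rp.can_fetch("https://www.napaonline.com/", "MJ12bot"))                   # expected: False

# Sitemap directives are exposed as an iterable of URLs.
print(list(rp.sitemaps))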