description = [[
This script crawls through the website and returns any error pages.

The script returns all pages (sorted by error code) that respond with an
HTTP status code equal to or above 400. To restrict matching to specific
codes, use the <code>errcodes</code> option.

By default, the script spiders and searches within 40 pages. For large web
applications, make sure to increase httpspider's <code>maxpagecount</code>
value. Note that this also makes the script more intrusive.
]]
---
-- @usage nmap -p80 --script http-errors.nse <target>
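-- @usage nmap -p80 --script http-errors.nse --script-args http-errors.errcodes='{404,500}' <target>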
--
-- @args http-errors.errcodes The error codes we are interested in.
-- Default: nil (all codes >= 400)
--
-- @output
-- PORT   STATE SERVICE REASON
-- 80/tcp open  http    syn-ack
-- | http-errors:
-- |   Spidering limited to: maxpagecount=40; withinhost=some-random-page.com
-- |   Found the following error pages:
-- |
-- |   Error Code: 404
-- |       http://some-random-page.com/admin/
-- |
-- |   Error Code: 404
-- |       http://some-random-page.com/foo.html
-- |
-- |   Error Code: 500
-- |_      http://some-random-page.com/p.php
---
categories = {"discovery", "intrusive"}
author = "George Chatzisofroniou"
license = "Same as Nmap--See https://nmap.org/book/man-legal.html"
local shortport = require "shortport"
local stdnse = require "stdnse"
local table = require "table"
local httpspider = require "httpspider"
portrule = shortport.port_or_service( {80, 443}, {"http", "https"}, "tcp", "open")
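
-- Orders two { status-code, url } result pairs by status code. The codes
-- are stored as strings, but HTTP status codes are all three digits, so
-- lexicographic order matches numeric order.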
local function compare(a, b)
  return a[1] < b[1]
end
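
-- Returns true if tbl contains item, comparing both as strings.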
local function inTable(tbl, item)
  item = tostring(item)
  for _, value in pairs(tbl) do
    if value == item then
      return true
    end
  end
  return false
end
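
-- Crawls the target and collects every page whose response status is an
-- error (>= 400 by default, or one of the user-supplied errcodes).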
action = function(host, port)
  local errcodes = stdnse.get_script_args("http-errors.errcodes")

  -- get_script_args returns a string for a single value and a table for a
  -- braced list; normalize to a table so a single code also works.
  if type(errcodes) == "string" then
    errcodes = { errcodes }
  end

  local crawler = httpspider.Crawler:new(host, port, '/', {
    scriptname = SCRIPT_NAME,
    maxpagecount = 40,
    maxdepth = -1,
    withinhost = 1,
  })
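
  -- Limit the crawl to URLs within the target host, skipping .js and .css
  -- resources.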
  crawler.options.doscraping = function(url)
    if crawler:iswithinhost(url)
       and not crawler:isresource(url, "js")
       and not crawler:isresource(url, "css") then
      return true
    end
  end
  crawler:set_timeout(10000)

  local errors = {}
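
  -- Crawl until the page budget (maxpagecount) is exhausted or the crawler
  -- reports a fatal error.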
  while true do
    local status, r = crawler:crawl()
    -- The crawler can fail for a number of different reasons, most of them
    -- "legitimate", so a failure alone is not a reason to abort.
    if not status then
      if r.err then
        return stdnse.format_output(false, r.reason)
      else
        break
      end
    end

    local response = r.response
    local path = tostring(r.url)

    if (not errcodes and response.status and response.status >= 400) or
       (errcodes and inTable(errcodes, response.status)) then
      table.insert(errors, { tostring(response.status), path })
    end
  end

  -- If the table is empty.
  if next(errors) == nil then
    return "Couldn't find any error pages."
  end
  table.sort(errors, compare)

  -- Create a nice output.
  local results = {}
  for _, err in ipairs(errors) do
    table.insert(results, "\nError Code: " .. err[1])
    table.insert(results, "\t" .. err[2])
  end

  table.insert(results, 1, "Found the following error pages: ")

  results.name = crawler:getLimitations()

  return stdnse.format_output(true, results)
end