Mirror of https://github.com/nmap/nmap.git
Remove trailing whitespace in lua files
Whitespace is not significant, so this should not be a problem. See https://secwiki.org/w/Nmap/Code_Standards
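As a rough illustration of the kind of cleanup this commit performs (not part of the commit itself), a trailing-whitespace pass over a single script could look like the Lua sketch below; the file path is only an example.

-- Minimal sketch: strip trailing spaces/tabs from one file, then rewrite it.
local path = "scripts/http-errors.nse"  -- example path, not taken from the commit
local lines = {}
for line in io.lines(path) do
  -- gsub returns (string, count); the parentheses keep only the string
  lines[#lines + 1] = (line:gsub("[ \t]+$", ""))
end
local f = assert(io.open(path, "w"))
f:write(table.concat(lines, "\n"), "\n")
f:close()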
@@ -10,10 +10,10 @@ applications make sure to increase httpspider's <code>maxpagecount</code> value.
Please, note that the script will become more intrusive though.
]]

---
-- @usage nmap -p80 --script http-errors.nse <target>
--
-- @args http-errors.errcodes The error codes we are interested in.
--                            Default: nil (all codes >= 400)
--
-- @output
@@ -21,14 +21,14 @@ Please, note that the script will become more intrusive though.
-- 80/tcp open http syn-ack
-- | http-errors:
-- |   Spidering limited to: maxpagecount=40; withinhost=some-random-page.com
-- |   Found the following error pages:
-- |
-- |   Error Code: 404
-- |     http://some-random-page.com/admin/
-- |
-- |   Error Code: 404
-- |     http://some-random-page.com/foo.html
-- |
-- |   Error Code: 500
-- |_    http://some-random-page.com/p.php
---
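The errcodes argument documented above is checked as a table by the script, so as a hedged example (not part of the script's own @usage), limiting the report to specific codes could use NSE's brace syntax for script arguments:

nmap -p80 --script http-errors.nse --script-args 'http-errors.errcodes={404,500}' <target>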
@@ -54,27 +54,27 @@ local function inTable(tbl, item)

  item = tostring(item)
  for key, value in pairs(tbl) do
    if value == tostring(item) then
      return true
    end
  end
  return nil

end

action = function(host, port)

  local errcodes = stdnse.get_script_args("http-errors.errcodes") or nil

  local crawler = httpspider.Crawler:new(host, port, '/', { scriptname = SCRIPT_NAME,
    maxpagecount = 40,
    maxdepth = -1,
    withinhost = 1
  })

  crawler.options.doscraping = function(url)
    if crawler:iswithinhost(url)
    and not crawler:isresource(url, "js")
    and not crawler:isresource(url, "css") then
      return true
    end
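For context only: the inTable helper at the start of this hunk normalises both sides with tostring, which matters because response.status is a number while values supplied via --script-args typically arrive as strings. A standalone copy, repeated here purely for illustration:

-- Same membership check as the script's inTable, repeated for illustration.
local function inTable(tbl, item)
  item = tostring(item)
  for _, value in pairs(tbl) do
    if value == tostring(item) then
      return true
    end
  end
  return nil
end

print(inTable({"404", "500"}, 404))  --> true: the number 404 matches the string "404"
print(inTable({"404", "500"}, 403))  --> nil: no match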
@@ -85,9 +85,9 @@ action = function(host, port)
  local errors = {}

  while (true) do

    local response, path

    local status, r = crawler:crawl()
    -- if the crawler fails it can be due to a number of different reasons
    -- most of them are "legitimate" and should not be reason to abort
@@ -101,8 +101,8 @@ action = function(host, port)

    response = r.response
    path = tostring(r.url)

    if (response.status >= 400 and not errcodes) or
      ( errcodes and type(errcodes) == "table" and inTable(errcodes, response.status) ) then
      table.insert(errors, { tostring(response.status), path })
    end
@@ -112,7 +112,7 @@ action = function(host, port)
  -- If the table is empty.
  if next(errors) == nil then
    return "Couldn't find any error pages."
  end

  table.sort(errors, compare)

@@ -126,7 +126,7 @@ action = function(host, port)
  table.insert(results, 1, "Found the following error pages: ")

  results.name = crawler:getLimitations()

  return stdnse.format_output(true, results)

end